mirror of https://github.com/OMGeeky/google_bigquery.git
the derive is working with the tests so far
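
For context, this is roughly how the two derives are meant to be used, based on the `Infos` test table in src/tests.rs (the `cfg_attr` switching in the real test file is dropped here for readability; `Default` is assumed because the generated `create_with_pk` uses `..Default::default()`):

```rust
// Sketch of a table definition using the derives from this commit.
// `db_name` maps a Rust field to its BigQuery column name, `required`
// marks non-nullable columns, and the `client` field holds the
// connection instead of being treated as a column.
#[derive(Debug, Default, HasBigQueryClient, BigDataTable)]
pub struct Infos<'a> {
    #[primary_key]
    #[required]
    #[db_name("Id")]
    row_id: i64,
    #[client]
    client: Option<&'a BigqueryClient>,
    info1: Option<String>,
    #[db_name("info")]
    info2: Option<String>,
}
```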
@@ -2,5 +2,7 @@
 rustflags = [
 #    "--cfg", "man_impl=\"true\"",
 #    "--cfg", "man_impl_has_client=\"false\""
+    "--cfg", "debug_assertions=\"true\"",
+    "--cfg", "man_impl_has_client=\"false\""
 
 ] # custom flags to pass to all compiler invocations
.gitignore
@@ -1,4 +1,5 @@
 /target
 /Cargo.lock
 /.idea
-/auth
+/auth
+/tmp.rs
@@ -5,7 +5,7 @@ use std::any::Any;
 
 use proc_macro2::{Ident, TokenStream};
 use quote::quote;
-use syn::{DeriveInput, parse_macro_input};
+use syn::{DeriveInput, parse_macro_input, Type};
 
 struct Field {
     field_ident: quote::__private::Ident,
@@ -15,13 +15,7 @@ struct Field {
     required: bool,
 }
 
-#[proc_macro_derive(BigDataTable,
-    attributes(primary_key, client, db_name, db_ignore, required))]
-pub fn big_data_table(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
-    let ast = syn::parse(input).unwrap();
-    let tokens = implement_derive(&ast);
-    tokens.into()
-}
+//region HasBigQueryClient derive
 
 #[proc_macro_derive(HasBigQueryClient, attributes(client))]
 pub fn has_big_query_client(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
@@ -31,6 +25,17 @@ pub fn has_big_query_client(input: proc_macro::TokenStream) -> proc_macro::Token
 }
 
 fn implement_derive_has_big_query_client(ast: &DeriveInput) -> TokenStream {
+    fn implement_has_bigquery_client_trait(table_ident: &Ident, client_ident: &Ident) -> TokenStream {
+        let implementation_has_bigquery_client = quote! {
+            impl<'a> HasBigQueryClient<'a> for #table_ident<'a> {
+                fn get_client(&self) -> &'a BigqueryClient {
+                    self.#client_ident.unwrap()
+                }
+            }
+        };
+        implementation_has_bigquery_client
+    }
+
     let table_ident = &ast.ident;
     let client = get_client_field(&ast);
     let implementation_has_bigquery_client = implement_has_bigquery_client_trait(table_ident, &client.field_ident);
@@ -39,6 +44,17 @@ fn implement_derive_has_big_query_client(ast: &DeriveInput) -> TokenStream {
     }
 }
 
+//endregion HasBigQueryClient derive
+
+//region BigDataTable derive
+#[proc_macro_derive(BigDataTable,
+    attributes(primary_key, client, db_name, db_ignore, required))]
+pub fn big_data_table(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
+    let ast = syn::parse(input).unwrap();
+    let tokens = implement_derive(&ast);
+    tokens.into()
+}
+
 fn implement_derive(ast: &DeriveInput) -> TokenStream {
     let table_ident = &ast.ident;
     let pk = get_pk_field(&ast);
@@ -59,17 +75,16 @@ fn implement_big_data_table_base_trait(table_ident: &Ident, primary_key: &Field,
 
     let get_pk_name = get_get_pk_name(primary_key);
     let get_pk_value = get_get_pk_value(primary_key);
 
+    db_fields.retain(|f| f.local_name != client_field.local_name);
+
     let get_field_name = get_get_field_name(ast, &db_fields);
 
-    db_fields.retain(|f|f.local_name != client_field.local_name);
-
+    let get_query_fields = get_get_query_fields(&db_fields);
     let write_from_table_row = get_write_from_table_row(&db_fields);
-    let get_query_fields = get_get_query_fields(ast);
     let get_table_name = get_get_table_name(&table_ident);
-    let create_with_pk = get_create_with_pk(ast);
-    let create_from_table_row = get_create_from_table_row(ast);
-    let get_query_fields_update_str = get_get_query_fields_update_str(ast);
-    let get_all_query_parameters = get_get_all_query_parameters(ast);
+    let create_with_pk = get_create_with_pk(&primary_key, &client_field);
+    let create_from_table_row = get_create_from_table_row(&pk_ty);
+    let get_all_query_parameters = get_get_all_query_parameters(&db_fields);
     quote! {
         impl<'a> BigDataTableBase<'a, #table_ident<'a>, #pk_ty> for #table_ident<'a> {
             #get_pk_name
@@ -80,7 +95,6 @@ fn implement_big_data_table_base_trait(table_ident: &Ident, primary_key: &Field,
             #create_from_table_row
             #write_from_table_row
             #get_pk_value
-            #get_query_fields_update_str
             #get_all_query_parameters
         }
     }
@@ -89,10 +103,11 @@ fn implement_big_data_table_base_trait(table_ident: &Ident, primary_key: &Field,
 //region BigDataTableBase functions
 
 fn get_get_pk_name(primary_key_field: &Field) -> TokenStream {
-    let pk_name = &primary_key_field.db_name;
+    let pk_name = &primary_key_field.local_name;
     quote! {
         fn get_pk_name() -> String {
-            Self::get_field_name(stringify!(#pk_name)).unwrap()
+            let name = #pk_name;
+            Self::get_field_name(name).unwrap()
         }
     }
 }
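
For the test table (primary key `row_id` mapped to the column `Id`), the new version resolves the local field name through `get_field_name` instead of stringifying the quoted identifier, so the generated method should expand roughly to:

```rust
// Approximate expansion for Infos (illustrative, not part of the diff):
fn get_pk_name() -> String {
    let name = "row_id";                  // local field name baked in by the macro
    Self::get_field_name(name).unwrap()   // -> "Id" via the db_name mapping
}
```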
@@ -106,18 +121,28 @@ fn get_get_pk_value(pk_field: &Field) -> TokenStream {
     }
 }
 
-fn get_get_query_fields_update_str(ast: &DeriveInput) -> TokenStream {
-    quote! {
-        fn get_query_fields_update_str(&self) -> String {
-            todo!();//TODO get_query_fields_update_str
+fn get_get_all_query_parameters(db_fields: &Vec<Field>) -> TokenStream {
+    fn get_all_query_parameters(field: &Field) -> TokenStream {
+        let field_ident = &field.field_ident;
+        match field.required {
+            true => quote! {
+                parameters.push(Self::get_query_param(&Self::get_field_name(stringify!(#field_ident)).unwrap(), &Some(self.#field_ident)));
+            },
+            false => quote! {
+                parameters.push(Self::get_query_param(&Self::get_field_name(stringify!(#field_ident)).unwrap(), &self.#field_ident));
+            }
         }
     }
-}
 
-fn get_get_all_query_parameters(ast: &DeriveInput) -> TokenStream {
+    let tokens: Vec<TokenStream> = db_fields.iter().map(|field| get_all_query_parameters(field)).collect();
     quote! {
         fn get_all_query_parameters(&self) -> Vec<QueryParameter> {
-            todo!();//TODO get_all_query_parameters
+            let mut parameters = Vec::new();
+            // parameters.push(Self::get_query_param(&Self::get_field_name(stringify!(info1)).unwrap(), &self.info1));
+
+            #(#tokens)*
+
+            parameters
         }
     }
 }
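
Illustrative expansion of the generated method for one required and one optional field of the test table; `required` fields are wrapped in `Some(..)` so `get_query_param` always receives an `&Option<T>`:

```rust
// Approximate expansion for Infos (illustrative, not part of the diff):
fn get_all_query_parameters(&self) -> Vec<QueryParameter> {
    let mut parameters = Vec::new();
    // required field: row_id is a plain i64, so wrap it in Some(..)
    parameters.push(Self::get_query_param(&Self::get_field_name("row_id").unwrap(), &Some(self.row_id)));
    // optional field: info1 is already an Option<String>
    parameters.push(Self::get_query_param(&Self::get_field_name("info1").unwrap(), &self.info1));
    parameters
}
```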
@@ -138,9 +163,12 @@ fn get_write_from_table_row(db_fields: &Vec<Field>) -> TokenStream {
                 .unwrap();
             */
             quote! {
-                let index = *index_to_name_mapping.get(Self::get_field_name(stringify!(#field_name))?.as_str()).unwrap();
+                println!("get_write_from_table_row_single_field: field_name: (1) {}", #field_name);
+                let index = *index_to_name_mapping.get(#field_name)
+                    .expect(format!("could not find index for field in mapping!: (1) {}", #field_name).as_str());
                 self.#field_ident = row.f.as_ref()
-                    .unwrap()[index]
+                    .expect("row.f is None (1)")
+                    [index]
                     .v.as_ref()
                     .unwrap()
                     .parse()
@@ -155,9 +183,13 @@ fn get_write_from_table_row(db_fields: &Vec<Field>) -> TokenStream {
                 };
             */
             quote! {
-                let index = *index_to_name_mapping.get(Self::get_field_name(stringify!(#field_name))?.as_str()).unwrap();
-                self.#field_ident = match row.f.as_ref().unwrap()[index].v.as_ref() {
-                    Some(v) => Some(v.parse()?),
+                let index = *index_to_name_mapping.get(#field_name)
+                    .expect(format!("could not find index for field in mapping!: (2) {}", #field_name).as_str());
+                println!("get_write_from_table_row_single_field: field_name: (2) {} at index: {}", #field_name, index);
+                self.#field_ident = match row.f.as_ref()
+                    .expect("row.f is None (1)")
+                    [index].v.as_ref() {
+                    Some(v) => Some(v.parse().expect(format!("could not parse field: {} with value {}",stringify!(#field_name),v).as_str())),
                     None => None
                 };
             }
@@ -173,24 +205,42 @@ fn get_write_from_table_row(db_fields: &Vec<Field>) -> TokenStream {
     }
 }
 
-fn get_create_from_table_row(ast: &DeriveInput) -> TokenStream {
+fn get_create_from_table_row(pk_ty: &Type) -> TokenStream {
     quote! {
 
-        fn create_from_table_row(client: &'a BigqueryClient,
-                                 row: &google_bigquery2::api::TableRow,
-                                 index_to_name_mapping: &HashMap<String, usize>)
-                                 -> Result<Self, Box<dyn Error>>
-            where
-                Self: Sized{
-            todo!();//TODO create_from_table_row
+        fn create_from_table_row(client: &'a BigqueryClient,
+                                 row: &google_bigquery2::api::TableRow,
+                                 index_to_name_mapping: &HashMap<String, usize>)
+                                 -> Result<Self, Box<dyn Error>>
+            where
+                Self: Sized {
+            //TODO
+            // create_from_table_row maybe push this to the convenience part.
+            // NOTE: its a bit weird with the unwrap and the pk type if not implemented here, but I believe :)
+            let pk_index = *index_to_name_mapping.get(&Self::get_pk_name()).unwrap();
+            let pk = row
+                .f.as_ref()
+                .unwrap()[pk_index]
+                .v.as_ref()
+                .unwrap()
+                .parse::<#pk_ty>()
+                .unwrap();
+            let mut res = Self::create_with_pk(client, pk);
+            res.write_from_table_row(row, index_to_name_mapping)?;
+            Ok(res)
         }
     }
 }
 
-fn get_create_with_pk(ast: &DeriveInput) -> TokenStream {
+fn get_create_with_pk(pk_field: &Field, client_field: &Field) -> TokenStream {
+    let pk_ident = &pk_field.field_ident;
+    let client_ident = &client_field.field_ident;
     quote! {
         fn create_with_pk(client: &'a BigqueryClient, pk: i64) -> Self {
-            todo!();//TODO create_with_pk
+            Self {
+                #pk_ident: pk,
+                #client_ident: Some(client),
+                ..Default::default()
+            }
         }
     }
 }
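
For the test table this generated `create_with_pk` comes out identical to the hand-written implementation further down in src/tests.rs:

```rust
// Expansion for Infos (pk field `row_id`, client field `client`):
fn create_with_pk(client: &'a BigqueryClient, pk: i64) -> Self {
    Self {
        row_id: pk,           // primary key set from the argument
        client: Some(client), // client stored, everything else defaulted
        ..Default::default()
    }
}
```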
@@ -203,35 +253,56 @@ fn get_get_table_name(table_ident: &Ident) -> TokenStream {
     }
 }
 
-fn get_get_query_fields(ast: &DeriveInput) -> TokenStream {
+fn get_get_query_fields(db_fields: &Vec<Field>) -> TokenStream {
+    fn get_query_fields_single_field(field: &Field) -> TokenStream {
+        let field_ident = &field.field_ident;
+        let field_name = &field.db_name;
+        quote! {
+            fields.insert(stringify!(#field_ident).to_string(), Self::get_field_name(&stringify!(#field_ident).to_string()).unwrap());
+        }
+    }
+
+    let tokens: Vec<TokenStream> = db_fields.iter().map(|field| get_query_fields_single_field(field)).collect();
+
     quote! {
         fn get_query_fields() -> HashMap<String, String> {
-            todo!();//TODO get_query_fields
+            let mut fields = HashMap::new();
+            #(#tokens)*
+            println!("get_query_fields: fields: {:?}", fields);
+            fields
         }
     }
 }
 
 fn get_get_field_name(ast: &DeriveInput, db_fields: &Vec<Field>) -> TokenStream {
-    let mut mapping: Vec<(&Ident, String)> = Vec::new();
-    for db_field in db_fields {
-        let field_name_local = &db_field.field_ident;
-        let mut field_name_remote = &db_field.db_name;
-        mapping.push((field_name_local, field_name_remote.to_string()));
-    }
-
-    let mapping_tok: Vec<TokenStream> = mapping.iter().map(|(field_name_local, field_name_remote)| {
+    // let mut mapping: Vec<(&Ident, String)> = Vec::new();
+    // for db_field in db_fields {
+    //     let field_name_local = &db_field.field_ident;
+    //     let mut field_name_remote = &db_field.db_name;
+    //     mapping.push((field_name_local, field_name_remote.to_string()));
+    // }
+    //
+    // let mapping_tok: Vec<TokenStream> = mapping.iter().map(|(field_name_local, field_name_remote)| {
+    //     quote! {
+    //         stringify!(#field_name_local) => Ok(#field_name_remote.to_string()),
+    //     }
+    // }).collect();
+    fn get_field_name_single_field(field: &Field) -> TokenStream {
+        let field_name_local = &field.field_ident.to_string();
+        let mut field_name_remote = &field.db_name;
         quote! {
-            stringify!(#field_name_local) => Ok(#field_name_remote.to_string()),
+            #field_name_local => Ok(#field_name_remote.to_string()),
         }
-    }).collect();
-
-
+    }
+    let mapping_tok: Vec<TokenStream> = db_fields.iter().map(get_field_name_single_field).collect();
+    let possible_fields: String = db_fields.iter().map(|field| field.field_ident.to_string()).collect::<Vec<String>>().join(", ");
     quote! {
         fn get_field_name(field_name: &str) -> Result<String, Box<dyn Error>> {
+            println!("get_field_name: field_name: {:?}", field_name);
             match field_name {
                 //ex.: "row_id" => Ok("Id".to_string()),
                 #(#mapping_tok)*
-                _ => Err("Field not found".into()),
+                _ => Err(format!("Field not found {}\nPlease choose one of the following: {}", field_name, #possible_fields).into()),
             }
         }
     }
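
The generated lookup should expand to a plain string match for the test table (sketch; this assumes a field without a `db_name` attribute falls back to its own name as the column name):

```rust
// Approximate expansion for Infos (illustrative, not part of the diff):
fn get_field_name(field_name: &str) -> Result<String, Box<dyn Error>> {
    println!("get_field_name: field_name: {:?}", field_name);
    match field_name {
        "row_id" => Ok("Id".to_string()),
        "info1" => Ok("info1".to_string()),
        "info2" => Ok("info".to_string()),
        _ => Err(format!("Field not found {}\nPlease choose one of the following: {}",
                         field_name, "row_id, info1, info2").into()),
    }
}
```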
@@ -239,16 +310,7 @@ fn get_get_field_name(ast: &DeriveInput, db_fields: &Vec<Field>) -> TokenStream
 
 //endregion
 
-fn implement_has_bigquery_client_trait(table_ident: &Ident, client_ident: &Ident) -> TokenStream {
-    let implementation_has_bigquery_client = quote! {
-        impl<'a> HasBigQueryClient<'a> for #table_ident<'a> {
-            fn get_client(&self) -> &'a BigqueryClient {
-                self.#client_ident
-            }
-        }
-    };
-    implementation_has_bigquery_client
-}
+//endregion BigDataTable derive
 
 //region Helper functions
 
@@ -400,39 +462,3 @@ fn get_attributed_fields(data: &syn::Data, attribute_name: &str) -> Vec<Field> {
 }
 
 //endregion
-
-/*
-/// Example of [function-like procedural macro][1].
-///
-/// [1]: https://doc.rust-lang.org/reference/procedural-macros.html#function-like-procedural-macros
-#[proc_macro]
-pub fn my_macro(input: TokenStream) -> TokenStream {
-    let input = parse_macro_input!(input as DeriveInput);
-
-    let tokens = quote! {
-        #input
-
-        struct Hello;
-    };
-
-    tokens.into()
-}
-*/
-
-/*
-/// Example of user-defined [procedural macro attribute][1].
-///
-/// [1]: https://doc.rust-lang.org/reference/procedural-macros.html#attribute-macros
-#[proc_macro_attribute]
-pub fn my_attribute(_args: TokenStream, input: TokenStream) -> TokenStream {
-    let input = parse_macro_input!(input as DeriveInput);
-
-    let tokens = quote! {
-        #input
-
-        struct Hello;
-    };
-
-    tokens.into()
-}
-*/
@@ -1,7 +1,7 @@
 use std::error::Error;
 use std::fmt::{Debug, Display, Formatter};
 
-use google_bigquery2::Bigquery;
+use google_bigquery2::{Bigquery, hyper, hyper_rustls, oauth2};
 use google_bigquery2::hyper::client::HttpConnector;
 use google_bigquery2::hyper_rustls::HttpsConnector;
 
@@ -13,6 +13,28 @@ pub struct BigqueryClient {
     dataset_id: String,
 }
 
+impl BigqueryClient {
+    pub(crate) fn empty() -> &'static BigqueryClient {
+        todo!("Implement BigqueryClient::empty() or throw an error if it's not possible or something.");
+        // let hyper_client = hyper::Client::builder().build(
+        //     hyper_rustls::HttpsConnectorBuilder::new()
+        //         .with_native_roots()
+        //         .https_or_http()
+        //         .enable_http1()
+        //         .enable_http2()
+        //         .build(),
+        // );
+        //
+        // let auth = oauth2::ServiceAccountAuthenticator::with_client();
+        // let client = Bigquery::new(hyper_client,auth);
+        // Self {
+        //     dataset_id: Default::default(),
+        //     project_id: Default::default(),
+        //     client,
+        // }
+    }
+}
+
 impl BigqueryClient {
     pub async fn new<S: Into<String>>(
         project_id: S,
@@ -17,7 +17,7 @@ pub trait BigDataTableBase<'a, TABLE, TPK>: HasBigQueryClient<'a>
                             index_to_name_mapping: &HashMap<String, usize>)
                             -> Result<(), Box<dyn Error>>;
     fn get_pk_value(&self) -> TPK;
-    fn get_query_fields_update_str(&self) -> String;
+    // fn get_query_fields_update_str(&self) -> String;
     fn get_all_query_parameters(&self) -> Vec<google_bigquery2::api::QueryParameter>;
 
     fn create_from_table_row(client: &'a BigqueryClient,
@@ -3,7 +3,8 @@ use std::error::Error;
 use std::fmt::Debug;
 use std::str::FromStr;
 
-use google_bigquery2::api::{QueryParameter, QueryParameterType, QueryParameterValue};
+use google_bigquery2::api::{QueryParameter, QueryParameterType, QueryParameterValue, QueryRequest};
+use google_bigquery2::hyper::{Body, Response};
 
 use crate::client::BigqueryClient;
 use crate::data::BigDataTableBase;
@@ -15,9 +16,19 @@ pub trait BigDataTableBaseConvenience<'a, TABLE, TPK>
     fn get_pk_param(&self) -> google_bigquery2::api::QueryParameter;
     fn get_query_fields_str() -> String;
     fn get_query_fields_insert_str() -> String;
 
+    fn get_query_fields_update_str(&self) -> String;
     fn get_where_part(field_name: &str, is_comparing_to_null: bool) -> String;
+    //region run query
+    async fn run_query(&self, req: QueryRequest, project_id: &str)
+                       -> Result<(Response<Body>, google_bigquery2::api::QueryResponse), Box<dyn Error>>;
+
+    async fn run_query_on_client(client: &'a BigqueryClient,
+                                 req: QueryRequest,
+                                 project_id: &str)
+                                 -> Result<(Response<Body>, google_bigquery2::api::QueryResponse), Box<dyn Error>>;
+    //endregion run query
 
     //region run get query
     async fn run_get_query(&self, query: &str, project_id: &str)
                            -> Result<google_bigquery2::api::QueryResponse, Box<dyn Error>>;
@@ -32,6 +43,8 @@ pub trait BigDataTableBaseConvenience<'a, TABLE, TPK>
                             parameters: Vec<google_bigquery2::api::QueryParameter>,
                             project_id: &str)
                             -> Result<google_bigquery2::api::QueryResponse, Box<dyn Error>>;
     //endregion
 
+
+    // async fn get_identifier_and_base_where(&self) -> Result<(String, String), Box<dyn Error>>;
     async fn get_identifier(&self) -> Result<String, Box<dyn Error>>;
@@ -91,6 +104,17 @@ impl<'a, TABLE, TPK> BigDataTableBaseConvenience<'a, TABLE, TPK> for TABLE
             .join(", ")
     }
 
+    fn get_query_fields_update_str(&self) -> String {
+        let x = Self::get_query_fields();
+        let pk_name = Self::get_pk_name();
+        let mut vec = x.values()
+            .filter(|k| *k != &pk_name)
+            .map(|k| format!("{} = @__{}", k, k))
+            .collect::<Vec<String>>();
+        // vec.sort();
+        let update_str = vec.join(", ");
+        update_str
+    }
     fn get_where_part(field_name: &str, is_comparing_to_null: bool) -> String {
         if is_comparing_to_null {
             format!("{} IS NULL", field_name)
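
Worked example of what this produces for the test table, assuming `get_query_fields` maps local names to the columns `Id`, `info1`, `info`, `info3`, `info4i`, `yes`:

```rust
// The pk column ("Id") is filtered out; every other column becomes
// "col = @__col". HashMap::values() drives the iteration, so the order
// is unspecified (hence the commented-out vec.sort() above).
let set_clause = infos.get_query_fields_update_str();
// e.g. "info1 = @__info1, info = @__info, info3 = @__info3, info4i = @__info4i, yes = @__yes"
```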
@@ -99,6 +123,42 @@ impl<'a, TABLE, TPK> BigDataTableBaseConvenience<'a, TABLE, TPK> for TABLE
         }
     }
 
+    //region run query
+
+    async fn run_query(&self, req: QueryRequest, project_id: &str)
+                       -> Result<(Response<Body>, google_bigquery2::api::QueryResponse), Box<dyn Error>> {
+        Self::run_query_on_client(self.get_client(), req, project_id).await
+    }
+
+    async fn run_query_on_client(client: &'a BigqueryClient,
+                                 req: QueryRequest,
+                                 project_id: &str)
+                                 -> Result<(Response<Body>, google_bigquery2::api::QueryResponse), Box<dyn Error>> {
+        #[cfg(debug_assertions="true")]
+        {
+            println!("Query: {}", &req.query.as_ref().unwrap());//There has to be a query, this would not make any sense otherwise
+            if let Some(parameters) = &req.query_parameters {
+                println!("Parameters: {}", parameters.len());
+                for (i, param) in parameters.iter().enumerate() {
+                    println!("{:2}: {:?}", i, param);
+                }
+            } else {
+                println!("Parameters: None");
+            }
+            println!();
+        }
+
+
+        let (res, query_res) = client.get_client().jobs().query(req, project_id)
+            .doit().await?;
+
+        if res.status() != 200 {
+            return Err(format!("Wrong status code returned! ({})", res.status()).into());
+        }
+
+        Ok((res, query_res))
+    }
+    //endregion run query
 
     async fn run_get_query(&self, query: &str, project_id: &str)
                            -> Result<google_bigquery2::api::QueryResponse, Box<dyn Error>> {
@@ -121,24 +181,19 @@ impl<'a, TABLE, TPK> BigDataTableBaseConvenience<'a, TABLE, TPK> for TABLE
                             parameters: Vec<google_bigquery2::api::QueryParameter>,
                             project_id: &str)
                             -> Result<google_bigquery2::api::QueryResponse, Box<dyn Error>> {
-        println!("Query: {}", query);
-        println!("Parameters: {}", parameters.len());
-        for (i, param) in parameters.iter().enumerate() {
-            println!("{:2}: {:?}", i, param);
-        }
-        println!();
         let req = google_bigquery2::api::QueryRequest {
             query: Some(query.to_string()),
             query_parameters: Some(parameters),
             use_legacy_sql: Some(false),
             ..Default::default()
         };
-        let (res, query_res) = client.get_client().jobs().query(req, project_id)
-            .doit().await?;
-
-        if res.status() != 200 {
-            return Err(format!("Wrong status code returned! ({})", res.status()).into());
-        }
+        let (_, query_res) = Self::run_query_on_client(client, req, project_id).await?;
+        // let (res, query_res) = client.get_client().jobs().query(req, project_id)
+        //     .doit().await?;
+        //
+        // if res.status() != 200 {
+        //     return Err(format!("Wrong status code returned! ({})", res.status()).into());
+        // }
         Ok(query_res)
     }
     // async fn get_identifier_and_base_where(&self)
@@ -166,13 +221,13 @@ impl<'a, TABLE, TPK> BigDataTableBaseConvenience<'a, TABLE, TPK> for TABLE
     default fn get_query_param<TField: BigDataValueType>(field_name: &str, field_value: &Option<TField>) -> google_bigquery2::api::QueryParameter
     {
         let type_to_string: String = TField::to_bigquery_type();
-        let value: Option<google_bigquery2::api::QueryParameterValue> = match field_value {
-            Some(value) => Some(google_bigquery2::api::QueryParameterValue {
-                value: Some(value.to_bigquery_param_value()),//TODO: maybe add a way to use array types
-                ..Default::default()
-            }),
-            None => None,
-        };
+        let value: Option<google_bigquery2::api::QueryParameterValue> = Some(google_bigquery2::api::QueryParameterValue {
+            value: match field_value {
+                Some(value) =>Some(value.to_bigquery_param_value()),//TODO: maybe add a way to use array types
+                None => None,
+            },
+            ..Default::default()
+        });
 
         google_bigquery2::api::QueryParameter {
             name: Some(format!("__{}", field_name.clone())),
@@ -19,6 +19,7 @@ pub trait BigDataTable<'a, TABLE, TPK>
     : HasBigQueryClient<'a>
     + BigDataTableBaseConvenience<'a, TABLE, TPK>
     + BigDataTableBase<'a, TABLE, TPK>
+    + Default
     where TPK: BigDataValueType + FromStr + Debug {
     async fn from_pk(
         client: &'a BigqueryClient,
@@ -37,7 +38,7 @@ pub trait BigDataTable<'a, TABLE, TPK>
 
 impl<'a, TABLE, TPK> BigDataTable<'a, TABLE, TPK> for TABLE
     where
-        TABLE: HasBigQueryClient<'a> + BigDataTableBaseConvenience<'a, TABLE, TPK>,
+        TABLE: HasBigQueryClient<'a> + BigDataTableBaseConvenience<'a, TABLE, TPK> + Default,
         TPK: BigDataValueType + FromStr + Debug,
         <TPK as FromStr>::Err: Debug
 {
@@ -51,10 +52,10 @@ where
         let project_id = self.get_client().get_project_id();
 
         let table_identifier = self.get_identifier().await?;
-        let w = Self::get_base_where();
+        let where_clause = Self::get_base_where();
         // region check for existing data
         let exists_row: bool;
-        let existing_count = format!("select count(*) from {} where {} limit 1", table_identifier, w);
+        let existing_count = format!("select count(*) from {} where {} limit 1", table_identifier, where_clause);
 
         let req = google_bigquery2::api::QueryRequest {
             query: Some(existing_count),
@@ -63,13 +64,13 @@ where
             ..Default::default()
         };
 
-
-        let (res, query_res) = self.get_client().get_client().jobs().query(req, project_id)
-            .doit().await?;
-
-        if res.status() != 200 {
-            return Err(format!("Wrong status code returned! ({})", res.status()).into());
-        }
+        let (_, query_res) = self.run_query(req, project_id).await?;
+        // let (res, query_res) = self.get_client().get_client().jobs().query(req, project_id)
+        //     .doit().await?;
+        //
+        // if res.status() != 200 {
+        //     return Err(format!("Wrong status code returned! ({})", res.status()).into());
+        // }
 
         if let None = &query_res.rows {
             return Err("No rows returned!".into());
@@ -98,7 +99,7 @@ where
         // region update or insert
 
         let query = match exists_row {
-            true => format!("update {} set {} where {}", table_identifier, self.get_query_fields_update_str(), w),
+            true => format!("update {} set {} where {}", table_identifier, self.get_query_fields_update_str(), where_clause),
             false => format!("insert into {} ({}, {}) values(@__{}, {})", table_identifier,
                              Self::get_pk_name(),
                              Self::get_query_fields_str(),
@@ -115,12 +116,14 @@ where
             ..Default::default()
         };
 
-        let (res, _) = self.get_client().get_client().jobs().query(req, project_id)
-            .doit().await?;
-
-        if res.status() != 200 {
-            return Err(format!("Wrong status code returned! ({})", res.status()).into());
-        }
+        let (_, _) = self.run_query(req, project_id).await?;
+        // let (res, _) = self.get_client().get_client().jobs().query(req, project_id)
+        //     .doit().await?;
+        //
+        // if res.status() != 200 {
+        //     return Err(format!("Wrong status code returned! ({})", res.status()).into());
+        // }
+
 
         //endregion
 
@@ -145,6 +148,7 @@ where
             return Err(format!("Wrong amount of data returned! ({})", rows.len()).into());
         }
         let mut index_to_name_mapping: HashMap<String, usize> = get_name_index_mapping(query_res.schema);
+        println!("index_to_name_mapping: {:?}", index_to_name_mapping);
 
         let row = &rows[0];
         self.write_from_table_row(row, &index_to_name_mapping)
src/tests.rs
@@ -19,6 +19,37 @@ use super::*;
 //     assert_eq!(result, 4);
 // }
 
+#[tokio::test]
+async fn save() {
+    let client = get_test_client().await;
+
+    let mut q = Infos::load_by_field(&client, stringify!(info1), Some("a"), 10).await.unwrap();
+    assert_eq!(q.len(), 1);
+
+    let mut i1 = &mut q[0];
+    assert_eq!(i1.row_id, 3);
+    assert_eq!(i1.info3, Some("c".to_string()));
+    assert_eq!(i1.info2, None);
+
+    i1.info2 = Some("b".to_string());
+    i1.save_to_bigquery().await.unwrap();
+
+    assert_eq!(i1.info2, Some("b".to_string()));
+    i1.info2 = Some("c".to_string());
+    assert_eq!(i1.info2, Some("c".to_string()));
+
+    i1.load_from_bigquery().await.unwrap();
+    assert_eq!(i1.info2, Some("b".to_string()));
+
+    i1.info2 = None;
+    i1.save_to_bigquery().await.unwrap();
+    i1.load_from_bigquery().await.unwrap();
+
+    assert_eq!(i1.row_id, 3);
+    assert_eq!(i1.info3, Some("c".to_string()));
+    assert_eq!(i1.info2, None);
+}
+
 #[tokio::test]
 async fn load_by_field() {
     let client = get_test_client().await;
@@ -31,7 +62,7 @@ async fn load_by_field() {
     assert_eq!(i1.info3, Some("c".to_string()));
 
     let mut q = Infos::load_by_field(&client, stringify!(yes), Some(true), 10).await.unwrap();
-    // q.sort_by(|a, b| a.row_id.cmp(&b.row_id));
+    q.sort_by(|a, b| a.row_id.cmp(&b.row_id));
     assert_eq!(q.len(), 3);
 
     let i2 = &q[0];
@@ -76,33 +107,49 @@ async fn get_test_client() -> BigqueryClient {
 }
 
 #[derive(Debug)]
-#[cfg_attr(not(man_impl_has_client="false"), derive(HasBigQueryClient))]
-#[cfg_attr(not(man_impl="true"), derive(BigDataTable))]
+#[cfg_attr(man_impl_has_client = "false", derive(HasBigQueryClient))]
+#[cfg_attr(not(man_impl = "true"), derive(BigDataTable))]
 pub struct Infos<'a> {
-    #[cfg_attr(not(man_impl="true"), primary_key)]
-    #[cfg_attr(not(man_impl="true"), required)]
-    #[cfg_attr(not(man_impl="true"), db_name("Id"))]
+    #[cfg_attr(not(man_impl = "true"), primary_key)]
+    #[cfg_attr(not(man_impl = "true"), required)]
+    #[cfg_attr(not(man_impl = "true"), db_name("Id"))]
     row_id: i64,
-    #[cfg_attr(any(not(man_impl="true"), not(man_impl_has_client="false")), client)]
-    client: &'a BigqueryClient,
+    #[cfg_attr(any(not(man_impl = "true"), man_impl_has_client = "false"), client)]
+    /// This client should never be left as None, doing so will cause a panic when trying to use it
+    client: Option<&'a BigqueryClient>,
     info1: Option<String>,
-    // #[cfg_attr(not(man_impl="true"), db_name("info"))]
+    #[cfg_attr(not(man_impl="true"), db_name("info"))]
     info2: Option<String>,
     info3: Option<String>,
-    // #[cfg_attr(not(man_impl="true"), db_name("info4i"))]
+    #[cfg_attr(not(man_impl="true"), db_name("info4i"))]
     int_info4: Option<i64>,
     yes: Option<bool>,
 }
 
-// #[cfg(any(man_impl="true", not(man_impl_has_client="false")))]
-// impl<'a> HasBigQueryClient<'a> for Infos<'a> {
-//     fn get_client(&self) -> &'a BigqueryClient {
-//         self.client
-//     }
-// }
+#[cfg(not(man_impl_has_client="false"))]
+impl<'a> HasBigQueryClient<'a> for Infos<'a> {
+    fn get_client(&self) -> &'a BigqueryClient {
+        self.client.unwrap()
+    }
+}
+
+#[cfg(man_impl="true")]
+impl<'a> Default for Infos<'a> {
+    fn default() -> Self {
+        Self {
+            // client: &BigqueryClient::new("none", "none", None).await.unwrap(),
+            client: None,
+            row_id: -9999,
+            info1: Default::default(),
+            info2: Default::default(),
+            info3: Default::default(),
+            int_info4: Default::default(),
+            yes: Default::default(),
+        }
+    }
+}
 
 #[cfg(man_impl = "true")]
 impl<'a> BigDataTableBase<'a, Infos<'a>, i64> for Infos<'a> {
     fn get_pk_name() -> String {
         Self::get_field_name(stringify!(row_id)).unwrap()
@@ -131,6 +178,7 @@ impl<'a> BigDataTableBase<'a, Infos<'a>, i64> for Infos<'a> {
         //TODO: decide if the primary key should be included in the query fields
         fields.insert(stringify!(row_id).to_string(), Self::get_field_name(&stringify!(row_id).to_string()).unwrap());
 
+        println!("get_query_fields: fields: {:?}", fields);
         fields
     }
 
@@ -139,19 +187,13 @@ impl<'a> BigDataTableBase<'a, Infos<'a>, i64> for Infos<'a> {
     }
 
     fn create_with_pk(client: &'a BigqueryClient, pk: i64) -> Self {
-        let mut res = Self {
+        Self {
             row_id: pk,
-            client,
-            info1: None,
-            info2: None,
-            info3: None,
-            int_info4: None,
-            yes: None,
-        };
-        res
+            client: Some(client),
+            ..Default::default()
+        }
     }
 
 
     fn write_from_table_row(&mut self,
                             row: &google_bigquery2::api::TableRow,
                             index_to_name_mapping: &HashMap<String, usize>)
@@ -195,21 +237,34 @@ impl<'a> BigDataTableBase<'a, Infos<'a>, i64> for Infos<'a> {
         self.row_id
     }
 
+    /*
     fn get_query_fields_update_str(&self) -> String {
-        let mut fields = String::new();
-        let info1 = Self::get_field_name(stringify!(info1)).unwrap();
-        fields.push_str(&format!("{} = @__{}, ", info1, info1));
-        let info2 = Self::get_field_name(stringify!(info2)).unwrap();
-        fields.push_str(&format!("{} = @__{}, ", info2, info2));
-        let info3 = Self::get_field_name(stringify!(info3)).unwrap();
-        fields.push_str(&format!("{} = @__{}, ", info3, info3));
-        let int_info4 = Self::get_field_name(stringify!(int_info4)).unwrap();
-        fields.push_str(&format!("{} = @__{}, ", int_info4, int_info4));
-        let yes = Self::get_field_name(stringify!(yes)).unwrap();
-        fields.push_str(&format!("{} = @__{}", yes, yes));
-        fields
-    }
+        let x = Self::get_query_fields();
+        let pk_name = Self::get_pk_name();
+        let mut vec = x.values()
+            .filter(|k| *k != &pk_name)
+            .map(|k| format!("{} = @__{}", k, k))
+            .collect::<Vec<String>>();
+        vec.sort();
+        let x = vec
+            .join(", ");
+
+        // let mut fields = String::new();
+        // let info1 = Self::get_field_name(stringify!(info1)).unwrap();
+        // fields.push_str(&format!("{} = @__{}, ", info1, info1));
+        // let info2 = Self::get_field_name(stringify!(info2)).unwrap();
+        // fields.push_str(&format!("{} = @__{}, ", info2, info2));
+        // let info3 = Self::get_field_name(stringify!(info3)).unwrap();
+        // fields.push_str(&format!("{} = @__{}, ", info3, info3));
+        // let int_info4 = Self::get_field_name(stringify!(int_info4)).unwrap();
+        // fields.push_str(&format!("{} = @__{}, ", int_info4, int_info4));
+        // let yes = Self::get_field_name(stringify!(yes)).unwrap();
+        // fields.push_str(&format!("{} = @__{}", yes, yes));
+        // println!("fields: {}", fields);
+        println!("x : {}", x);
+        // fields
+        x
+    }*/
 
     fn get_all_query_parameters(&self) -> Vec<QueryParameter> {
         let mut parameters = Vec::new();
 