Working pretty well, I hope

This commit is contained in:
OMGeeky
2023-03-04 16:10:02 +01:00
parent daccd5289b
commit 8e662172a2
12 changed files with 554 additions and 167 deletions

View File

@@ -15,3 +15,4 @@ serde_json = "1.0"
google-bigquery2 = "4.0.1"
async-trait = "0.1.60"
google_bigquery_derive = { path = "./google_bigquery_derive" }
chrono = "0.4.23"

View File

@@ -1,6 +1,6 @@
[package]
name = "google_bigquery_derive"
version = "0.0.0"
version = "0.0.1"
authors = ["OMGeeky <aalaalgmx@gmail.com>"]
description = "A `cargo generate` template for quick-starting a procedural macro crate"
keywords = ["template", "proc_macro", "procmacro"]

View File

@@ -1,20 +1,54 @@
#![allow(unused)]
extern crate proc_macro;
use std::any::Any;
use proc_macro2::{Ident, TokenStream};
use quote::quote;
use syn::{DeriveInput, parse_macro_input, Type};
use proc_macro2::Ident;
use syn::{DeriveInput, Type};
struct Field {
field_ident: quote::__private::Ident,
// field_ident: quote::__private::Ident,
field_ident: proc_macro2::Ident,
db_name: std::string::String,
local_name: std::string::String,
ty: syn::Type,
required: bool,
}
/// A parsed struct-level attribute, stored as a simple name/value pair
/// (e.g. `#[db_name("Id")]` becomes `name = "db_name"`, `value = "Id"`).
struct Attribute {
    // Attribute name; get_struct_attributes only emits "db_name" today.
    name: String,
    // The string literal supplied inside the attribute parentheses.
    value: String,
}
// // pub trait MyTrait<T> where T: Clone {
// // fn my_method(&self) -> T;
// // }
//
// #[proc_macro_derive(MyDerive, attributes(pk))]
// pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
// let ast: DeriveInput = syn::parse(input).unwrap();
// let pk_field = get_pk_field_my_derive(&ast);
//
// let pk_ident: &Ident = &pk_field.field_ident;
// let pk_type: Type = pk_field.ty;
// let struct_ident: &Ident = &ast.ident;
// let tokens = quote::quote!{
// impl<#pk_type> MyTrait<#pk_type> for #struct_ident {
// fn my_method(&self) -> #pk_type {
// self.#pk_ident.clone()
// }
// }
// };
// tokens.into()
// }
/// Returns the single field marked with the `pk` attribute.
///
/// # Panics
/// Panics unless exactly one field carries the `pk` attribute.
fn get_pk_field_my_derive(ast: &syn::DeriveInput) -> Field {
    let mut fields = get_attributed_fields(&ast.data, "pk");
    match fields.len() {
        1 => fields.remove(0),
        _ => panic!("Exactly one pk field must be specified"),
    }
}
//region HasBigQueryClient derive
#[proc_macro_derive(HasBigQueryClient, attributes(client))]
@@ -24,9 +58,10 @@ pub fn has_big_query_client(input: proc_macro::TokenStream) -> proc_macro::Token
tokens.into()
}
fn implement_derive_has_big_query_client(ast: &DeriveInput) -> TokenStream {
fn implement_has_bigquery_client_trait(table_ident: &Ident, client_ident: &Ident) -> TokenStream {
let implementation_has_bigquery_client = quote! {
fn implement_derive_has_big_query_client(ast: &syn::DeriveInput) -> proc_macro2::TokenStream {
fn implement_has_bigquery_client_trait(table_ident: &proc_macro2::Ident, client_ident: &proc_macro2::Ident) -> proc_macro2::TokenStream {
let implementation_has_bigquery_client = quote::quote! {
impl<'a> HasBigQueryClient<'a> for #table_ident<'a> {
fn get_client(&self) -> &'a BigqueryClient {
self.#client_ident.unwrap()
@@ -39,7 +74,7 @@ fn implement_derive_has_big_query_client(ast: &DeriveInput) -> TokenStream {
let table_ident = &ast.ident;
let client = get_client_field(&ast);
let implementation_has_bigquery_client = implement_has_bigquery_client_trait(table_ident, &client.field_ident);
quote! {
quote::quote! {
#implementation_has_bigquery_client;
}
}
@@ -55,56 +90,71 @@ pub fn big_data_table(input: proc_macro::TokenStream) -> proc_macro::TokenStream
tokens.into()
}
fn implement_derive(ast: &DeriveInput) -> TokenStream {
fn implement_derive(ast: &syn::DeriveInput) -> proc_macro2::TokenStream {
let table_ident = &ast.ident;
let pk = get_pk_field(&ast);
let implementation_big_data_table_base = implement_big_data_table_base_trait(table_ident, &pk, ast);
let tokens = quote! {
let tokens = quote::quote! {
#implementation_big_data_table_base;
};
tokens
}
fn implement_big_data_table_base_trait(table_ident: &Ident, primary_key: &Field, ast: &DeriveInput) -> TokenStream {
fn implement_big_data_table_base_trait(table_ident: &proc_macro2::Ident,
primary_key: &Field,
ast: &syn::DeriveInput) -> proc_macro2::TokenStream {
let table_name = get_table_name(&ast);
let pk_ty = &primary_key.ty;
let client_field = get_client_field(&ast);
let mut db_fields = get_fields(&ast.data);
db_fields.retain(|f| f.local_name != client_field.local_name);
let get_pk_name = get_get_pk_name(primary_key);
let get_pk_value = get_get_pk_value(primary_key);
db_fields.retain(|f| f.local_name != client_field.local_name);
let get_field_name = get_get_field_name(ast, &db_fields);
let get_query_fields = get_get_query_fields(&db_fields);
let write_from_table_row = get_write_from_table_row(&db_fields);
let get_table_name = get_get_table_name(&table_ident);
let get_table_name = get_get_table_name(&table_name);
let create_with_pk = get_create_with_pk(&primary_key, &client_field);
let create_from_table_row = get_create_from_table_row(&pk_ty);
let get_all_query_parameters = get_get_all_query_parameters(&db_fields);
quote! {
impl<'a> BigDataTableBase<'a, #table_ident<'a>, #pk_ty> for #table_ident<'a> {
quote::quote! {
impl<'a> BigDataTableHasPk<#pk_ty> for #table_ident<'a> {
#get_pk_name
#get_pk_value
}
impl<'a> BigDataTableBase<'a, #table_ident<'a>, #pk_ty> for #table_ident<'a> {
// #get_pk_name
// #get_pk_value
#get_field_name
#get_query_fields
#get_table_name
#create_with_pk
#create_from_table_row
#write_from_table_row
#get_pk_value
#get_all_query_parameters
}
}
}
/// Resolves the BigQuery table name for the derived struct: the value of a
/// `#[db_name("…")]` attribute when present, otherwise the struct's own
/// identifier.
fn get_table_name(ast: &DeriveInput) -> String {
    get_struct_attributes(ast)
        .into_iter()
        .find(|attr| attr.name == "db_name")
        .map(|attr| attr.value)
        .unwrap_or_else(|| ast.ident.to_string())
}
//region BigDataTableBase functions
fn get_get_pk_name(primary_key_field: &Field) -> TokenStream {
fn get_get_pk_name(primary_key_field: &Field) -> proc_macro2::TokenStream {
let pk_name = &primary_key_field.local_name;
quote! {
quote::quote! {
fn get_pk_name() -> String {
let name = #pk_name;
Self::get_field_name(name).unwrap()
@@ -112,30 +162,31 @@ fn get_get_pk_name(primary_key_field: &Field) -> TokenStream {
}
}
fn get_get_pk_value(pk_field: &Field) -> TokenStream {
fn get_get_pk_value(pk_field: &Field) -> proc_macro2::TokenStream {
let pk_ident = &pk_field.field_ident;
quote! {
fn get_pk_value(&self) -> i64 {
self.#pk_ident
let pk_ty = &pk_field.ty;
quote::quote! {
fn get_pk_value(&self) -> #pk_ty {
self.#pk_ident.clone()
}
}
}
fn get_get_all_query_parameters(db_fields: &Vec<Field>) -> TokenStream {
fn get_all_query_parameters(field: &Field) -> TokenStream {
fn get_get_all_query_parameters(db_fields: &Vec<Field>) -> proc_macro2::TokenStream {
fn get_all_query_parameters(field: &Field) -> proc_macro2::TokenStream {
let field_ident = &field.field_ident;
match field.required {
true => quote! {
parameters.push(Self::get_query_param(&Self::get_field_name(stringify!(#field_ident)).unwrap(), &Some(self.#field_ident)));
true => quote::quote! {
parameters.push(Self::get_query_param(&Self::get_field_name(stringify!(#field_ident)).unwrap(), &Some(self.#field_ident.clone())));
},
false => quote! {
false => quote::quote! {
parameters.push(Self::get_query_param(&Self::get_field_name(stringify!(#field_ident)).unwrap(), &self.#field_ident));
}
}
}
let tokens: Vec<TokenStream> = db_fields.iter().map(|field| get_all_query_parameters(field)).collect();
quote! {
fn get_all_query_parameters(&self) -> Vec<QueryParameter> {
let tokens: Vec<proc_macro2::TokenStream> = db_fields.iter().map(|field| get_all_query_parameters(field)).collect();
quote::quote! {
fn get_all_query_parameters(&self) -> Vec<google_bigquery2::api::QueryParameter> {
let mut parameters = Vec::new();
// parameters.push(Self::get_query_param(&Self::get_field_name(stringify!(info1)).unwrap(), &self.info1));
@@ -147,70 +198,89 @@ fn get_get_all_query_parameters(db_fields: &Vec<Field>) -> TokenStream {
}
}
fn get_write_from_table_row(db_fields: &Vec<Field>) -> TokenStream {
fn get_write_from_table_row_single_field(field: &Field) -> TokenStream {
fn get_write_from_table_row(db_fields: &Vec<Field>) -> proc_macro2::TokenStream {
fn get_write_from_table_row_single_field(field: &Field) -> proc_macro2::TokenStream {
let field_ident = &field.field_ident;
let field_name = &field.db_name;
let field_ty = &field.ty;
// if(field.ty == chrono::D)
// let parse_fn_tok = quote::quote!{
//
// fn parse_value<T: BigDataValueType<#field_ty>>(v: &String) -> T {
// v.to_value()
// // v.parse()
// .expect(format!("could not parse field: {} with value {}", stringify!(#field_name), v)
// .as_str())
// };
//
// // parse()
// // .expect(format!("could not parse field: {} with value {}", stringify!(#field_name), v)
// // .as_str())
// };
if field.required {
/*
let pk_index = *index_to_name_mapping.get(&Self::get_pk_name()).unwrap();
let pk = row
.f.as_ref()
.unwrap()[pk_index]
.v.as_ref()
.unwrap()
.parse::<TPK>()
.unwrap();
*/
quote! {
println!("get_write_from_table_row_single_field: field_name: (1) {}", #field_name);
quote::quote! {
// println!("get_write_from_table_row_single_field: field_name: (1) {}", #field_name);
let index = *index_to_name_mapping.get(#field_name)
.expect(format!("could not find index for field in mapping!: (1) {}", #field_name).as_str());
self.#field_ident = row.f.as_ref()
.expect("row.f is None (1)")
[index]
.v.as_ref()
.unwrap()
.parse()
.unwrap();
.expect(format!("could not find index for field in mapping!: (1) {}", #field_name).as_str());
{
self.#field_ident = match row.f.as_ref()
.expect("row.f is None (1)")
[index]
.v.as_ref() {
// Some(v)=> parse_value(v),
// Some(v)=> v.to_value()
// Some(v)=>todo!(),
Some(v)=> #field_ty::from_bigquery_value(v)
.expect(format!("could not parse required field: {} with value {}", stringify!(#field_name), v)
.as_str()),
// Some(v)=> v.to_value(),
None => panic!("field is required but is None: {}", #field_name)
};
}
}
} else {
/*
let info1 = *index_to_name_mapping.get(Self::get_field_name(stringify!(info1))?.as_str()).unwrap();
self.info1 = match cell[info1].v.as_ref() {
Some(v) => Some(v.parse()?),
None => None
};
*/
quote! {
let field_option_ty = extract_type_from_option(&field_ty)
.expect(&format!("could not extract type from option: {}->{:?}", field_name, field_ty));
quote::quote! {
// println!("get_write_from_table_row_single_field: field_name: (2) {} at index: {}", #field_name, index);
let index = *index_to_name_mapping.get(#field_name)
.expect(format!("could not find index for field in mapping!: (2) {}", #field_name).as_str());
println!("get_write_from_table_row_single_field: field_name: (2) {} at index: {}", #field_name, index);
self.#field_ident = match row.f.as_ref()
.expect("row.f is None (1)")
[index].v.as_ref() {
Some(v) => Some(v.parse().expect(format!("could not parse field: {} with value {}",stringify!(#field_name),v).as_str())),
None => None
};
.expect(format!("could not find index for field in mapping!: (2) {}", #field_name).as_str());
{
self.#field_ident = match row.f.as_ref()
.expect("row.f is None (1)")
[index].v.as_ref() {
// Some(v) => Some(v.to_value()),
// Some(v) => v.to_opt_value()
// Some(v)=> todo!()
Some(v) => Option::<#field_option_ty>::from_bigquery_value(v)
.expect(format!("could not parse field: {} with value {}", stringify!(#field_name), v).as_str())
,
// Some(v) => Some(parse_value(v)),
None => None
};
}
}
}
}
let tokens: Vec<TokenStream> = db_fields.iter().map(|field| get_write_from_table_row_single_field(field)).collect();
quote! {
fn write_from_table_row(&mut self, row: &TableRow, index_to_name_mapping: &HashMap<String, usize>) -> Result<(), Box<dyn Error>> {
let tokens: Vec<proc_macro2::TokenStream> = db_fields.iter().map(|field| get_write_from_table_row_single_field(field)).collect();
quote::quote! {
fn write_from_table_row(&mut self, row: &google_bigquery2::api::TableRow, index_to_name_mapping: &std::collections::HashMap<String, usize>) -> Result<(), Box<dyn std::error::Error>> {
#(#tokens)*
Ok(())
}
}
}
fn get_create_from_table_row(pk_ty: &Type) -> TokenStream {
quote! {
fn get_create_from_table_row(pk_ty: &syn::Type) -> proc_macro2::TokenStream {
quote::quote! {
fn create_from_table_row(client: &'a BigqueryClient,
row: &google_bigquery2::api::TableRow,
index_to_name_mapping: &HashMap<String, usize>)
-> Result<Self, Box<dyn Error>>
index_to_name_mapping: &std::collections::HashMap<String, usize>)
-> Result<Self, Box<dyn std::error::Error>>
where
Self: Sized {
//TODO
@@ -231,11 +301,12 @@ fn get_create_from_table_row(pk_ty: &Type) -> TokenStream {
}
}
fn get_create_with_pk(pk_field: &Field, client_field: &Field) -> TokenStream {
fn get_create_with_pk(pk_field: &Field, client_field: &Field) -> proc_macro2::TokenStream {
let pk_ident = &pk_field.field_ident;
let pk_ty = &pk_field.ty;
let client_ident = &client_field.field_ident;
quote! {
fn create_with_pk(client: &'a BigqueryClient, pk: i64) -> Self {
quote::quote! {
fn create_with_pk(client: &'a BigqueryClient, pk: #pk_ty) -> Self {
Self {
#pk_ident: pk,
#client_ident: Some(client),
@@ -245,60 +316,60 @@ fn get_create_with_pk(pk_field: &Field, client_field: &Field) -> TokenStream {
}
}
fn get_get_table_name(table_ident: &Ident) -> TokenStream {
quote! {
fn get_get_table_name(table_name: &str) -> proc_macro2::TokenStream {
quote::quote! {
fn get_table_name() -> String {
stringify!(#table_ident).to_string()
#table_name.to_string()
}
}
}
fn get_get_query_fields(db_fields: &Vec<Field>) -> TokenStream {
fn get_query_fields_single_field(field: &Field) -> TokenStream {
fn get_get_query_fields(db_fields: &Vec<Field>) -> proc_macro2::TokenStream {
fn get_query_fields_single_field(field: &Field) -> proc_macro2::TokenStream {
let field_ident = &field.field_ident;
let field_name = &field.db_name;
quote! {
quote::quote! {
fields.insert(stringify!(#field_ident).to_string(), Self::get_field_name(&stringify!(#field_ident).to_string()).unwrap());
}
}
let tokens: Vec<TokenStream> = db_fields.iter().map(|field| get_query_fields_single_field(field)).collect();
let tokens: Vec<proc_macro2::TokenStream> = db_fields.iter().map(|field| get_query_fields_single_field(field)).collect();
quote! {
fn get_query_fields() -> HashMap<String, String> {
let mut fields = HashMap::new();
quote::quote! {
fn get_query_fields() -> std::collections::HashMap<String, String> {
let mut fields = std::collections::HashMap::new();
#(#tokens)*
println!("get_query_fields: fields: {:?}", fields);
// println!("get_query_fields: fields: {:?}", fields);
fields
}
}
}
fn get_get_field_name(ast: &DeriveInput, db_fields: &Vec<Field>) -> TokenStream {
// let mut mapping: Vec<(&Ident, String)> = Vec::new();
fn get_get_field_name(ast: &syn::DeriveInput, db_fields: &Vec<Field>) -> proc_macro2::TokenStream {
// let mut mapping: Vec<(&proc_macro2::Ident, String)> = Vec::new();
// for db_field in db_fields {
// let field_name_local = &db_field.field_ident;
// let mut field_name_remote = &db_field.db_name;
// mapping.push((field_name_local, field_name_remote.to_string()));
// }
//
// let mapping_tok: Vec<TokenStream> = mapping.iter().map(|(field_name_local, field_name_remote)| {
// quote! {
// let mapping_tok: Vec<proc_macro2::TokenStream> = mapping.iter().map(|(field_name_local, field_name_remote)| {
// quote::quote! {
// stringify!(#field_name_local) => Ok(#field_name_remote.to_string()),
// }
// }).collect();
fn get_field_name_single_field(field: &Field) -> TokenStream {
fn get_field_name_single_field(field: &Field) -> proc_macro2::TokenStream {
let field_name_local = &field.field_ident.to_string();
let mut field_name_remote = &field.db_name;
quote! {
quote::quote! {
#field_name_local => Ok(#field_name_remote.to_string()),
}
}
let mapping_tok: Vec<TokenStream> = db_fields.iter().map(get_field_name_single_field).collect();
let mapping_tok: Vec<proc_macro2::TokenStream> = db_fields.iter().map(get_field_name_single_field).collect();
let possible_fields: String = db_fields.iter().map(|field| field.field_ident.to_string()).collect::<Vec<String>>().join(", ");
quote! {
fn get_field_name(field_name: &str) -> Result<String, Box<dyn Error>> {
println!("get_field_name: field_name: {:?}", field_name);
quote::quote! {
fn get_field_name(field_name: &str) -> Result<String, Box<dyn std::error::Error>> {
// println!("get_field_name: field_name: {:?}", field_name);
match field_name {
//ex.: "row_id" => Ok("Id".to_string()),
#(#mapping_tok)*
@@ -320,7 +391,7 @@ fn get_helper_fields(ast: &syn::DeriveInput) -> (Field, Field) {
(pk, client)
}
fn get_pk_field(ast: &&DeriveInput) -> Field {
fn get_pk_field(ast: &syn::DeriveInput) -> Field {
let mut pk_fields = get_attributed_fields(&ast.data, "primary_key");
if pk_fields.len() != 1 {
panic!("Exactly one primary key field must be specified");
@@ -329,7 +400,7 @@ fn get_pk_field(ast: &&DeriveInput) -> Field {
pk
}
fn get_client_field(ast: &&DeriveInput) -> Field {
fn get_client_field(ast: &syn::DeriveInput) -> Field {
//region client
let mut client_fields = get_attributed_fields(&ast.data, "client");
if client_fields.len() != 1 {
@@ -340,6 +411,67 @@ fn get_client_field(ast: &&DeriveInput) -> Field {
client
}
/// Collects the struct-level attributes this derive understands.
///
/// Currently only `#[db_name("…")]` is recognized; its string-literal payload
/// becomes the attribute value.
///
/// # Panics
/// Panics if a `db_name` attribute's payload is not a string literal.
fn get_struct_attributes(ast: &syn::DeriveInput) -> Vec<Attribute> {
    ast.attrs
        .iter()
        .filter(|attr| attr.path.is_ident("db_name"))
        .map(|attr| {
            let lit: syn::LitStr = attr
                .parse_args()
                .expect("Failed to parse target name");
            Attribute {
                name: "db_name".to_string(),
                value: lit.value(),
            }
        })
        .collect()
}
/// If `ty` is an `Option<T>` (spelled `Option`, `std::option::Option`, or
/// `core::option::Option`), returns the inner type `T`; otherwise `None`.
///
/// Used to discover the value type of nullable table columns so generated
/// parsing code can target `Option<T>` instead of `T`.
fn extract_type_from_option(ty: &syn::Type) -> Option<&syn::Type> {
    use syn::{GenericArgument, Path, PathArguments, PathSegment};

    // Accepts only a plain, non-qualified type path (no `<T as Trait>::…`).
    fn extract_type_path(ty: &syn::Type) -> Option<&Path> {
        match *ty {
            syn::Type::Path(ref typepath) if typepath.qself.is_none() => Some(&typepath.path),
            _ => None,
        }
    }

    // Returns the trailing `Option` segment when the path spells out one of
    // the recognized `Option` forms.
    // TODO maybe optimization, reverse the order of segments
    fn extract_option_segment(path: &Path) -> Option<&PathSegment> {
        // Join segment identifiers with '|' so the whole path compares at
        // once, e.g. `std::option::Option` -> "std|option|Option|".
        let idents_of_path = path.segments.iter().fold(String::new(), |mut acc, seg| {
            acc.push_str(&seg.ident.to_string());
            acc.push('|');
            acc
        });
        // Recognized spellings; a const array avoids the per-call Vec
        // allocation the previous version paid (and resolves its TODO).
        const OPTION_PATHS: [&str; 3] = ["Option|", "std|option|Option|", "core|option|Option|"];
        if OPTION_PATHS.contains(&idents_of_path.as_str()) {
            path.segments.last()
        } else {
            None
        }
    }

    extract_type_path(ty)
        .and_then(|path| extract_option_segment(path))
        .and_then(|path_seg| {
            let type_params = &path_seg.arguments;
            // `Option` must carry exactly one angle-bracketed param ("<String>"):
            match *type_params {
                PathArguments::AngleBracketed(ref params) => params.args.first(),
                _ => None,
            }
        })
        .and_then(|generic_arg| match *generic_arg {
            GenericArgument::Type(ref ty) => Some(ty),
            _ => None,
        })
}
fn get_fields(data: &syn::Data) -> Vec<Field> {
let mut res = vec![];
@@ -457,7 +589,7 @@ fn get_attributed_fields(data: &syn::Data, attribute_name: &str) -> Vec<Field> {
}
// let res = res.iter();//.map(|(ident, ty)| (ident)).collect();
// .fold(quote!(), |es, (name, ty)| (name, ty));
// .fold(quote::quote!(), |es, (name, ty)| (name, ty));
return res;
}

View File

@@ -5,9 +5,17 @@ use std::str::FromStr;
use crate::client::{BigqueryClient, HasBigQueryClient};
use crate::utils::BigDataValueType;
pub trait BigDataTableBase<'a, TABLE, TPK>: HasBigQueryClient<'a>
where TPK: BigDataValueType + FromStr + std::fmt::Debug {
pub trait BigDataTableHasPk<TPK>
where TPK: BigDataValueType<TPK> + FromStr + std::fmt::Debug + Clone {
fn get_pk_name() -> String;
fn get_pk_value(&self) -> TPK;
}
pub trait BigDataTableBase<'a, TABLE, TPK>: HasBigQueryClient<'a> + BigDataTableHasPk<TPK>
where TPK: BigDataValueType<TPK> + FromStr + std::fmt::Debug + Clone
{
// fn get_pk_name() -> String;
// fn get_pk_value(&self) -> TPK;
fn get_field_name(field_name: &str) -> Result<String, Box<dyn Error>>;
fn get_query_fields() -> HashMap<String, String>;
fn get_table_name() -> String;
@@ -16,7 +24,6 @@ pub trait BigDataTableBase<'a, TABLE, TPK>: HasBigQueryClient<'a>
row: &google_bigquery2::api::TableRow,
index_to_name_mapping: &HashMap<String, usize>)
-> Result<(), Box<dyn Error>>;
fn get_pk_value(&self) -> TPK;
// fn get_query_fields_update_str(&self) -> String;
fn get_all_query_parameters(&self) -> Vec<google_bigquery2::api::QueryParameter>;
@@ -26,4 +33,6 @@ pub trait BigDataTableBase<'a, TABLE, TPK>: HasBigQueryClient<'a>
-> Result<Self, Box<dyn Error>>
where
Self: Sized;
// fn parse_bigquery_value<T: BigDataValueType<T>>(value: &String) -> Result<T, Box<dyn Error>>;
}

View File

@@ -12,7 +12,7 @@ use crate::utils::BigDataValueType;
pub trait BigDataTableBaseConvenience<'a, TABLE, TPK>
: BigDataTableBase<'a, TABLE, TPK>
where TPK: BigDataValueType + FromStr + Debug {
where TPK: BigDataValueType<TPK> + FromStr + Debug + Clone {
fn get_pk_param(&self) -> google_bigquery2::api::QueryParameter;
fn get_query_fields_str() -> String;
fn get_query_fields_insert_str() -> String;
@@ -53,11 +53,11 @@ pub trait BigDataTableBaseConvenience<'a, TABLE, TPK>
// async fn get_identifier_and_base_where_from_client(client: &'a BigqueryClient, pk_name: &str, table_name: &str) -> Result<(String, String), Box<dyn Error>>;
fn get_query_param<TField: BigDataValueType>(field_name: &str, field_value: &Option<TField>)
-> google_bigquery2::api::QueryParameter;
fn get_query_param<TField: BigDataValueType<TField>>(field_name: &str, field_value: &Option<TField>)
-> google_bigquery2::api::QueryParameter;
fn parse_value_to_parameter<TValue>(value: &TValue) -> String
where TValue: std::fmt::Display + BigDataValueType;
where TValue: std::fmt::Display + BigDataValueType<TValue>;
// fn create_from_table_row(client: &'a BigqueryClient,
// row: &google_bigquery2::api::TableRow,
@@ -70,7 +70,7 @@ pub trait BigDataTableBaseConvenience<'a, TABLE, TPK>
impl<'a, TABLE, TPK> BigDataTableBaseConvenience<'a, TABLE, TPK> for TABLE
where
TABLE: BigDataTableBase<'a, TABLE, TPK>,
TPK: BigDataValueType + FromStr + Debug,
TPK: BigDataValueType<TPK> + FromStr + Debug + Clone,
<TPK as FromStr>::Err: Debug,
{
fn get_pk_param(&self) -> QueryParameter {
@@ -89,32 +89,36 @@ impl<'a, TABLE, TPK> BigDataTableBaseConvenience<'a, TABLE, TPK> for TABLE
// }
}
fn get_query_fields_str() -> String {
Self::get_query_fields().values().into_iter()
let mut values = Self::get_query_fields().values()
.into_iter()
.map(|v| format!("{}", v))
.collect::<Vec<String>>()
.join(", ")
.collect::<Vec<String>>();
values.sort();
values.join(", ")
}
fn get_query_fields_insert_str() -> String {
Self::get_query_fields()
let mut values = Self::get_query_fields()
.values()
.into_iter()
.map(|v| format!("@__{}", v))
.collect::<Vec<String>>()
.join(", ")
.collect::<Vec<String>>();
values.sort();
values.join(", ")
}
fn get_query_fields_update_str(&self) -> String {
let x = Self::get_query_fields();
let pk_name = Self::get_pk_name();
let mut vec = x.values()
let mut values = x.values()
.filter(|k| *k != &pk_name)
.map(|k| format!("{} = @__{}", k, k))
.collect::<Vec<String>>();
// vec.sort();
let update_str = vec.join(", ");
values.sort();
let update_str = values.join(", ");
update_str
}
fn get_where_part(field_name: &str, is_comparing_to_null: bool) -> String {
if is_comparing_to_null {
format!("{} IS NULL", field_name)
@@ -148,7 +152,6 @@ impl<'a, TABLE, TPK> BigDataTableBaseConvenience<'a, TABLE, TPK> for TABLE
println!();
}
let (res, query_res) = client.get_client().jobs().query(req, project_id)
.doit().await?;
@@ -218,12 +221,12 @@ impl<'a, TABLE, TPK> BigDataTableBaseConvenience<'a, TABLE, TPK> for TABLE
Self::get_where_part(&pk_name, false)
}
default fn get_query_param<TField: BigDataValueType>(field_name: &str, field_value: &Option<TField>) -> google_bigquery2::api::QueryParameter
default fn get_query_param<TField: BigDataValueType<TField>>(field_name: &str, field_value: &Option<TField>) -> google_bigquery2::api::QueryParameter
{
let type_to_string: String = TField::to_bigquery_type();
let value: Option<google_bigquery2::api::QueryParameterValue> = Some(google_bigquery2::api::QueryParameterValue {
value: match field_value {
Some(value) =>Some(value.to_bigquery_param_value()),//TODO: maybe add a way to use array types
value: match field_value {
Some(value) => Some(value.to_bigquery_param_value()),//TODO: maybe add a way to use array types
None => None,
},
..Default::default()
@@ -240,7 +243,7 @@ impl<'a, TABLE, TPK> BigDataTableBaseConvenience<'a, TABLE, TPK> for TABLE
}
}
fn parse_value_to_parameter<TValue>(value: &TValue) -> String
where TValue: std::fmt::Display + BigDataValueType
where TValue: std::fmt::Display + BigDataValueType<TValue>
{
return value.to_bigquery_param_value();
}

View File

@@ -6,6 +6,7 @@ use std::str::FromStr;
use google_bigquery2::api::{QueryParameter, TableSchema};
pub use big_data_table_base::BigDataTableBase;
pub use big_data_table_base::BigDataTableHasPk;
pub use big_data_table_base_convenience::BigDataTableBaseConvenience;
use crate::client::{BigqueryClient, HasBigQueryClient};
@@ -14,22 +15,24 @@ use crate::utils::BigDataValueType;
mod big_data_table_base_convenience;
mod big_data_table_base;
// pub trait BigDataTable<'a, TABLE, TPK: BigDataValueType + FromStr + Debug>: HasBigQueryClient<'a> + BigDataTableBaseConvenience<'a, TABLE, TPK> + BigDataTableBase<'a, TABLE, TPK> {
// pub trait BigDataTable<'a, TABLE, TPK: BigDataValueType<TPK> + FromStr + Debug>: HasBigQueryClient<'a> + BigDataTableBaseConvenience<'a, TABLE, TPK> + BigDataTableBase<'a, TABLE, TPK> {
pub trait BigDataTable<'a, TABLE, TPK>
: HasBigQueryClient<'a>
+ BigDataTableHasPk<TPK>
+ BigDataTableBaseConvenience<'a, TABLE, TPK>
+ BigDataTableBase<'a, TABLE, TPK>
+ Default
where TPK: BigDataValueType + FromStr + Debug {
async fn from_pk(
where TPK: BigDataValueType<TPK> + FromStr + Debug + Clone {
async fn create_and_load_from_pk(
client: &'a BigqueryClient,
pk: TPK,
) -> Result<Self, Box<dyn Error>>
where
Self: Sized;
async fn load_from_pk(client: &'a BigqueryClient, pk: TPK) -> Result<Option<Self>, Box<dyn Error>> where Self: Sized;
async fn save_to_bigquery(&self) -> Result<(), Box<dyn Error>>;
async fn load_from_bigquery(&mut self) -> Result<(), Box<dyn Error>>;
async fn load_by_field<T: BigDataValueType>(client: &'a BigqueryClient, field_name: &str, field_value: Option<T>, max_amount: usize)
async fn load_by_field<T: BigDataValueType<T>>(client: &'a BigqueryClient, field_name: &str, field_value: Option<T>, max_amount: usize)
-> Result<Vec<TABLE>, Box<dyn Error>>;
async fn load_by_custom_query(client: &'a BigqueryClient, query: &str, parameters: Vec<QueryParameter>, max_amount: usize)
@@ -38,16 +41,22 @@ pub trait BigDataTable<'a, TABLE, TPK>
impl<'a, TABLE, TPK> BigDataTable<'a, TABLE, TPK> for TABLE
where
TABLE: HasBigQueryClient<'a> + BigDataTableBaseConvenience<'a, TABLE, TPK> + Default,
TPK: BigDataValueType + FromStr + Debug,
TABLE: HasBigQueryClient<'a> + BigDataTableBaseConvenience<'a, TABLE, TPK> + Default + BigDataTableHasPk<TPK>,
TPK: BigDataValueType<TPK> + FromStr + Debug + Clone,
<TPK as FromStr>::Err: Debug
{
async fn from_pk(client: &'a BigqueryClient, pk: TPK) -> Result<Self, Box<dyn Error>> where Self: Sized {
async fn create_and_load_from_pk(client: &'a BigqueryClient, pk: TPK) -> Result<Self, Box<dyn Error>> where Self: Sized {
let mut res = Self::create_with_pk(client, pk);
res.load_from_bigquery().await?;
Ok(res)
}
async fn load_from_pk(client: &'a BigqueryClient, pk: TPK) -> Result<Option<Self>, Box<dyn Error>> where Self: Sized {
let x = Self::load_by_field(client, &Self::get_pk_name(), Some(pk), 1).await
.map(|mut v| v.pop())?;
Ok(x)
}
async fn save_to_bigquery(&self) -> Result<(), Box<dyn Error>> {
let project_id = self.get_client().get_project_id();
@@ -100,10 +109,13 @@ where
let query = match exists_row {
true => format!("update {} set {} where {}", table_identifier, self.get_query_fields_update_str(), where_clause),
false => format!("insert into {} ({}, {}) values(@__{}, {})", table_identifier,
Self::get_pk_name(),
// false => format!("insert into {} ({}, {}) values(@__{}, {})", table_identifier,
// Self::get_pk_name(),
// Self::get_query_fields_str(),
// Self::get_pk_name(),
// Self::get_query_fields_insert_str()),
false => format!("insert into {} ({}) values({})", table_identifier,
Self::get_query_fields_str(),
Self::get_pk_name(),
Self::get_query_fields_insert_str()),
};
@@ -148,13 +160,13 @@ where
return Err(format!("Wrong amount of data returned! ({})", rows.len()).into());
}
let mut index_to_name_mapping: HashMap<String, usize> = get_name_index_mapping(query_res.schema);
println!("index_to_name_mapping: {:?}", index_to_name_mapping);
// println!("index_to_name_mapping: {:?}", index_to_name_mapping);
let row = &rows[0];
self.write_from_table_row(row, &index_to_name_mapping)
}
async fn load_by_field<T: BigDataValueType>(client: &'a BigqueryClient, field_name: &str, field_value: Option<T>, max_amount: usize)
async fn load_by_field<T: BigDataValueType<T>>(client: &'a BigqueryClient, field_name: &str, field_value: Option<T>, max_amount: usize)
-> Result<Vec<TABLE>, Box<dyn Error>> {
let field_name: String = field_name.into();
let field_name = Self::get_field_name(&field_name).expect(format!("Field '{}' not found!", field_name).as_str());

View File

@@ -2,16 +2,22 @@
#![feature(specialization)]
#![allow(unused)]
#![allow(incomplete_features)]
// #![feature(impl_trait_projections)]
pub use google_bigquery_derive::HasBigQueryClient as HasBigQueryClientDerive;
pub use google_bigquery_derive::BigDataTable as BigDataTableDerive;
// pub use google_bigquery_derive::MyDerive;
pub use client::{BigqueryClient, HasBigQueryClient};
pub use data::{BigDataTable, BigDataTableBase, BigDataTableBaseConvenience, BigDataTableHasPk};
pub mod client;
mod googlebigquery;
mod data;
mod utils;
pub mod utils;
pub use google_bigquery_derive;
pub fn add(left: usize, right: usize) -> usize {
left + right
}
// pub fn add(left: usize, right: usize) -> usize {
// left + right
// }
#[cfg(test)]
mod tests;

View File

@@ -1,15 +1,12 @@
use std::cmp::Ordering;
use std::collections::HashMap;
use std::error::Error;
use std::fmt::Display;
use google_bigquery2::api::{QueryParameter, QueryParameterType, QueryResponse, TableRow};
use google_bigquery_derive::BigDataTable;
use google_bigquery_derive::HasBigQueryClient;
use crate::client::{BigqueryClient, HasBigQueryClient};
use crate::data::{BigDataTable, BigDataTableBase, BigDataTableBaseConvenience};
use crate::utils::{BigDataValueType, ConvertTypeToBigQueryType, ConvertValueToBigqueryParamValue};
use crate::client::HasBigQueryClient;
use crate::utils::BigDataValueType;
use crate::utils::ConvertValueToBigqueryParamValue;
use super::*;
@@ -23,7 +20,7 @@ use super::*;
async fn save() {
let client = get_test_client().await;
let mut q = Infos::load_by_field(&client, stringify!(info1), Some("a"), 10).await.unwrap();
let mut q = Infos::load_by_field(&client, stringify!(info1), Some("a".to_string()), 10).await.unwrap();
assert_eq!(q.len(), 1);
let mut i1 = &mut q[0];
@@ -54,7 +51,7 @@ async fn save() {
async fn load_by_field() {
let client = get_test_client().await;
let q = Infos::load_by_field(&client, stringify!(info1), Some("a"), 10).await.unwrap();
let q = Infos::load_by_field(&client, stringify!(info1), Some("a".to_string()), 10).await.unwrap();
assert_eq!(q.len(), 1);
let i1 = &q[0];
@@ -78,7 +75,7 @@ async fn load_by_field() {
assert_eq!(i4.info3, Some("cc".to_string()));
let q = Infos::load_by_field(&client, stringify!(info1), Some("aosdinsofnpsngusn"), 10).await.unwrap();
let q = Infos::load_by_field(&client, stringify!(info1), Some("aosdinsofnpsngusn".to_string()), 10).await.unwrap();
assert_eq!(q.len(), 0);
}
@@ -92,7 +89,7 @@ async fn load_by_field_none_param() {
#[tokio::test]
async fn from_pk() {
let client = get_test_client().await;
let i1 = Infos::from_pk(&client, 3).await.unwrap();
let i1 = Infos::load_from_pk(&client, "3".to_string()).await.unwrap().unwrap();
assert_eq!(i1.row_id, 3);
assert_eq!(i1.info1, Some("a".to_string()));
assert_eq!(i1.info3, Some("c".to_string()));
@@ -110,10 +107,12 @@ async fn get_test_client() -> BigqueryClient {
#[cfg_attr(man_impl_has_client = "false", derive(HasBigQueryClient))]
#[cfg_attr(not(man_impl = "true"), derive(BigDataTable))]
pub struct Infos<'a> {
#[cfg_attr(not(man_impl = "true"), primary_key)]
#[cfg_attr(not(man_impl = "true"), required)]
#[cfg_attr(not(man_impl = "true"), db_name("Id"))]
row_id: i64,
#[cfg_attr(not(man_impl = "true"), required)]
#[cfg_attr(not(man_impl = "true"), primary_key)]
row_ids: String,
#[cfg_attr(any(not(man_impl = "true"), man_impl_has_client = "false"), client)]
/// This client should never be left as None, doing so will cause a panic when trying to use it
client: Option<&'a BigqueryClient>,
@@ -133,6 +132,16 @@ impl<'a> HasBigQueryClient<'a> for Infos<'a> {
self.client.unwrap()
}
}
//
// impl<'a> BigDataTableHasPk<String> for Infos<'a> {
// fn get_pk_name() -> String {
// "row_ids".to_string()
// }
//
// fn get_pk_value(&self) -> String {
// self.row_ids.clone()
// }
// }
impl<'a> Default for Infos<'a> {
fn default() -> Self {
@@ -140,6 +149,7 @@ impl<'a> Default for Infos<'a> {
// client: &BigqueryClient::new("none", "none", None).await.unwrap(),
client: None,
row_id: -9999,
row_ids: "-9999".to_string(),
info1: Default::default(),
info2: Default::default(),
info3: Default::default(),

View File

@@ -0,0 +1,128 @@
use std::error::Error;
use std::fmt::Debug;
use std::str::FromStr;
use chrono::{DateTime, Utc};
use crate::utils::BigDataValueType;
//
// pub fn to_value<T>(x: &String) -> Result<T, Box<dyn Error>> {
// let x = x.parse()?;
// Ok(x)
// }
// //TODO: figure out how I can make this work so i can have specific
// // implementations for DateTime<Utc> and some other types but also have a
// // generic implementation for all other types
//
// impl ConvertBigQueryValueToValue2<DateTime<Utc>> {
// pub fn to_value(x: &String) -> Result<DateTime<Utc>, Box<dyn Error>> {
// let x = DateTime::parse_from_rfc3339(x)?;
// let time = x.with_timezone(&Utc);
// Ok(time)
// }
// }
//
// impl<T> ConvertBigQueryValueToOptionValue2<T> {
// pub fn to_value(x: &String) -> Option<T> {
// ConvertBigQueryValueToValue2::<T>::to_value(x).ok()
// }
// }
//
// impl ConvertBigQueryValueToOptionValue2<DateTime<Utc>> {
// pub fn to_value(x: &String) -> Option<DateTime<Utc>> {
// ConvertBigQueryValueToValue2::<DateTime<Utc>>::to_value(x).ok()
// }
// }
//
// pub trait ConvertBigQueryValueToValue<T> {
// fn to_value(&self) -> Result<T, Box<dyn Error>>;
// }
//
// pub trait ConvertBigQueryValueToOptionValue<T> {
// fn to_opt_value(&self) -> Result<Option<T>, Box<dyn Error>>;
// }
//
// impl ConvertBigQueryValueToValue<chrono::DateTime<Utc>> for &String {
// fn to_value(&self) -> Result<chrono::DateTime<Utc>, Box<dyn Error>> {
// println!("ConvertBigQueryValueToValue DateTime<Utc> -> in: {:?}", self);
// let x = chrono::DateTime::parse_from_rfc3339(self)?;
// let time = x.with_timezone(&Utc);
// println!("ConvertBigQueryValueToValue DateTime<Utc> -> out: {:?}", time);
// Ok(time)
// }
// }
//
// impl<R: FromStr> ConvertBigQueryValueToValue<R> for &String
// where R::Err: Error + 'static
// {
// default fn to_value(&self) -> Result<R, Box<dyn Error>> {
// let x = self.parse()?;
// Ok(x)
// }
// }
//
// impl ConvertBigQueryValueToValue<String> for &String {
// default fn to_value(&self) -> Result<String, Box<dyn Error>> {
// let x = self.to_string();
// Ok(x)
// }
// }
//
// // impl<S: ConvertBigQueryValueToValue<R>, R: FromStr> ConvertBigQueryValueToOptionValue<R> for S
// // where R::Err: Error + 'static,
// // S: ConvertBigQueryValueToValue<R> {
// // default fn to_opt_value(&self) -> Result<Option<R>, Box<dyn Error>> {
// // Ok(match (self as &dyn ConvertBigQueryValueToValue<R>).to_value() {
// // Ok(x) => Some(x),
// // Err(_) => None,
// // })
// // }
// // }
//
// impl ConvertBigQueryValueToOptionValue<DateTime<Utc>> for &String
// {
// default fn to_opt_value(&self) -> Result<Option<DateTime<Utc>>, Box<dyn Error>> {
// Ok(match (self as &dyn ConvertBigQueryValueToValue<DateTime<Utc>>).to_value() {
// Ok(x) => Some(x),
// Err(_) => None,
// })
// }
// }
//
// #[cfg(test)]
// fn test123() {
// let x = &"2021-01-01T00:00:00Z".to_string();
// let y: chrono::DateTime<Utc> = x.to_value().unwrap();
// let z: Option<chrono::DateTime<Utc>> = x.to_opt_value().unwrap();
// println!("{:?}", y);
// let x = "2021-01-01T00:00:00Z".to_string();
// let y: i64 = x.to_value().unwrap();
// let z: Option<i64> = x.to_opt_value().unwrap();
// println!("{:?}", y);
// }
//
//
//
// impl<R: FromStr + Debug> ConvertBigQueryValueToValue<R> for String
// where R::Err: Error + 'static
// {
// default fn to_value(&self) -> Result<R, Box<dyn Error>> {
// println!("ConvertBigQueryValueToValue<{}> -> in: {:?}", stringify!(R), self);
// let x = self.parse()?;
// println!("ConvertBigQueryValueToValue<{}> -> out: {:?}", stringify!(R), x);
// Ok(x)
// }
// }
//
// impl<R: ConvertBigQueryValueToValue<R> + FromStr> ConvertBigQueryValueToOptionValue<R> for String
// where R::Err: Error + 'static {
// default fn to_opt_value(&self) -> Result<Option<R>, Box<dyn Error>> {
// Ok(match self.to_value() {
// Ok(x) => Some(x),
// Err(_) => None,
// })
// }
// }

View File

@@ -1,7 +1,7 @@
use std::fmt::Display;
pub trait ConvertTypeToBigQueryType {
fn to_bigquery_type() -> String;
fn to_bigquery_type() -> String where Self: Sized;
}
impl ConvertTypeToBigQueryType for bool {
@@ -33,3 +33,10 @@ impl ConvertTypeToBigQueryType for &str {
"STRING".to_string()
}
}
// Maps any chrono::DateTime (whatever its time zone type) to BigQuery's
// DATETIME column type.
// NOTE(review): BigQuery also has a zone-aware TIMESTAMP type; this crate's
// param-value conversion strips the 'T'/'Z' markers, which matches DATETIME —
// confirm the target table schema actually uses DATETIME columns.
impl<T> ConvertTypeToBigQueryType for chrono::DateTime<T>
where T: chrono::TimeZone + Display + Send + Sync + 'static {
fn to_bigquery_type() -> String {
"DATETIME".to_string()
}
}

View File

@@ -1,7 +1,29 @@
use std::error::Error;
use std::fmt::Display;
use std::str::FromStr;
use chrono::{NaiveDateTime, Utc};
/// Two-way conversion between a Rust value and the string form used for
/// BigQuery query-parameter values / returned cell text.
pub trait ConvertValueToBigqueryParamValue {
/// Renders `self` as the string to send to BigQuery as a parameter value.
fn to_bigquery_param_value(&self) -> String;
/// Parses a value back out of the string BigQuery returned for a cell.
/// `Self: Sized` keeps the trait object-safe for the other method.
fn from_bigquery_value(value :&String) -> Result<Self, Box<dyn Error>> where Self: Sized;
}
/// i64 <-> BigQuery INT64 parameter-string conversion.
impl ConvertValueToBigqueryParamValue for i64 {
    fn to_bigquery_param_value(&self) -> String {
        // Idiomatic stringification; identical output to `format!("{}", self)`.
        self.to_string()
    }

    fn from_bigquery_value(value: &String) -> Result<Self, Box<dyn Error>>
    where
        Self: Sized,
    {
        // `str::parse::<i64>` — the ParseIntError is boxed by `?` via `From`.
        Ok(value.parse()?)
    }
}
/// Strings pass through unchanged in both directions — no quoting or
/// escaping is applied here.
impl ConvertValueToBigqueryParamValue for String {
    fn to_bigquery_param_value(&self) -> String {
        // Fresh owned copy; same result as `self.to_string()`.
        self.clone()
    }

    fn from_bigquery_value(value: &String) -> Result<Self, Box<dyn Error>>
    where
        Self: Sized,
    {
        // The cell text already is the value.
        Ok(value.clone())
    }
}
impl ConvertValueToBigqueryParamValue for bool {
@@ -12,10 +34,61 @@ impl ConvertValueToBigqueryParamValue for bool {
_ => panic!("Invalid value for bool"),
}
}
fn from_bigquery_value(value :&String) -> Result<Self, Box<dyn Error>> where Self: Sized{
match value.as_str() {
"TRUE" => Ok(true),
"FALSE" => Ok(false),
_ => Err("Invalid value for bool".into()),
}
}
}
impl<R: Display> ConvertValueToBigqueryParamValue for R {
default fn to_bigquery_param_value(&self) -> String {
format!("{}", self)
impl ConvertValueToBigqueryParamValue for chrono::DateTime<Utc> {
fn to_bigquery_param_value(&self) -> String {
println!("ConvertValueToBigqueryParamValue::to_bigquery_param_value DateTime<Utc> -> in: {:?}", self);
let value = self.to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
let value = value.replace("Z", "").replace("T", " ");
// let value = format!("\"{}\"", value);
println!("ConvertValueToBigqueryParamValue::to_bigquery_param_value DateTime<Utc> -> out: {}", value);
value
}
}
fn from_bigquery_value(value :&String) -> Result<Self, Box<dyn Error>> where Self: Sized{
// println!("ConvertValueToBigqueryParamValue::from_bigquery_value DateTime<Utc> -> in: {:?}", value);
let value = value.replace("T", " ").replace("Z", "");
// let x = NaiveDateTime::from_str(&value)
let x = NaiveDateTime::parse_from_str(&value,"%Y-%m-%d %H:%M:%S")
.expect(&format!("Could not parse &String to NaiveDateTime: {}", value));
let time = chrono::DateTime::<Utc>::from_utc(x, Utc);
// let x = chrono::DateTime::parse_from_rfc3339(value)?;
// let time = x.with_timezone(&Utc);
// println!("ConvertValueToBigqueryParamValue::from_bigquery_value DateTime<Utc> -> out: {:?}", time);
Ok(time)
}
}
/// Nullable wrapper: `None` maps to the literal string "NULL" and back.
impl<R: ConvertValueToBigqueryParamValue> ConvertValueToBigqueryParamValue for Option<R> {
    fn to_bigquery_param_value(&self) -> String {
        if let Some(inner) = self {
            inner.to_bigquery_param_value()
        } else {
            "NULL".to_string()
        }
    }

    fn from_bigquery_value(value: &String) -> Result<Option<R>, Box<dyn Error>>
    where
        Self: Sized,
    {
        if value == "NULL" {
            return Ok(None);
        }
        // NOTE(review): a parse failure of the inner value is mapped to
        // Ok(None) rather than propagated — presumably intentional for
        // nullable columns; confirm errors should not surface here.
        Ok(R::from_bigquery_value(value).ok())
    }
}
// impl<R: Display + FromStr> ConvertValueToBigqueryParamValue for R where <R as FromStr>::Err: std::error::Error{
// default fn to_bigquery_param_value(&self) -> String {
// format!("{}", self)
// }
// default fn from_bigquery_value(value :&String) -> Result<Self, Box<dyn Error>> where Self: Sized{
// Ok(value.parse()?)
// }
// }

View File

@@ -1,8 +1,14 @@
// pub use convert_bigquery_value_to_value::ConvertBigQueryValueToValue2 as ConvertBigQueryValueToValue;
// pub use convert_bigquery_value_to_value::ConvertBigQueryValueToOptionValue2 as ConvertBigQueryValueToOptionValue;
pub use convert_type_to_big_query_type::ConvertTypeToBigQueryType;
pub use convert_value_to_bigquery_param_value::ConvertValueToBigqueryParamValue;
mod convert_type_to_big_query_type;
mod convert_value_to_bigquery_param_value;
mod convert_bigquery_value_to_value;
pub trait BigDataValueType<T>: ConvertTypeToBigQueryType + ConvertValueToBigqueryParamValue {}
impl<T: ConvertTypeToBigQueryType + ConvertValueToBigqueryParamValue> BigDataValueType<T> for T
{}
/// Marker trait: a type usable as a BigQuery column value — it can name its
/// BigQuery column type and convert itself to/from parameter-value strings.
pub trait BigDataValueType: ConvertTypeToBigQueryType + ConvertValueToBigqueryParamValue {}
// Blanket impl: implementing the two conversion traits is sufficient; no
// type ever needs to implement BigDataValueType by hand.
impl<T: ConvertTypeToBigQueryType + ConvertValueToBigqueryParamValue> BigDataValueType for T {}