Pretty good working

OMGeeky
2023-04-12 13:15:41 +02:00
commit 62269f42a5
17 changed files with 2725 additions and 0 deletions

3
.gitignore vendored Normal file

@@ -0,0 +1,3 @@
/target
/.idea
/auth

1552
Cargo.lock generated Normal file

File diff suppressed because it is too large.

16
Cargo.toml Normal file

@@ -0,0 +1,16 @@
[package]
name = "google_bigquery_v2"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
google-bigquery2 = "5.0.2"
serde_json = "1.0.95"
tokio = { version = "1.0.2", features = ["macros", "rt-multi-thread"] }
google_bigquery_v2_derive = { version = "0.0.0", path = "./google_bigquery_v2_derive" }
chrono = "0.4.24"
log = "0.4.17"
nameof = "1.2.2"
env_logger = "0.10.0"

3
google_bigquery_v2_derive/.gitignore vendored Normal file

@@ -0,0 +1,3 @@
/target
**/*.rs.bk
Cargo.lock

16
google_bigquery_v2_derive/Cargo.toml Normal file

@@ -0,0 +1,16 @@
[package]
name = "google_bigquery_v2_derive"
version = "0.0.0"
authors = ["OMGeeky <aalaalgmx@gmail.com>"]
description = "Derive macro (BigDataTableDerive) for the google_bigquery_v2 crate"
keywords = ["bigquery", "proc_macro", "derive"]
edition = "2021"
[lib]
proc-macro = true
[dependencies]
quote = "1"
proc-macro2 = "1.0"
syn = "2.0"

376
google_bigquery_v2_derive/src/lib.rs Normal file

@@ -0,0 +1,376 @@
#[allow(unused)]
extern crate proc_macro;
use proc_macro2::TokenStream;
use syn::DeriveInput;
struct Field {
field_ident: proc_macro2::Ident,
db_name: std::string::String,
local_name: std::string::String,
ty: syn::Type,
required: bool,
}
struct Attribute {
name: std::string::String,
value: std::string::String,
}
//region Derive macro 'BigDataTableDerive'
#[proc_macro_derive(BigDataTableDerive, attributes(db_name, db_ignore, required, client, primary_key))]
pub fn big_query_table_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let ast = syn::parse(input).unwrap();
let tokens = impl_big_query_table_derive(&ast);
tokens.into()
}
fn impl_big_query_table_derive(ast: &syn::DeriveInput) -> proc_macro2::TokenStream {
let pk_field = get_pk_field(&ast);
let client_field = get_client_field(&ast);
implement_big_query_table_base(&ast, &pk_field, &client_field)
}
fn implement_big_query_table_base(
ast: &DeriveInput,
pk_field: &Field,
client_field: &Field,
) -> proc_macro2::TokenStream {
let table_ident = &ast.ident;
let table_name = get_table_name(&ast);
let impl_get_client = implement_get_client(&client_field);
let impl_set_client = implement_set_client(&client_field);
let impl_get_pk_field_name = implement_get_pk_field_name(&pk_field);
let impl_get_pk_db_name = implement_get_pk_db_name(&pk_field);
let impl_get_pk_value = implement_get_pk_value(&pk_field);
let impl_get_query_fields = implement_get_query_fields(&ast);
let impl_get_table_name = implement_get_table_name(&table_name);
let impl_set_field_value = implement_set_field_value(&ast);
let impl_from_query_result_row = implement_from_query_result_row(&ast);
quote::quote! {
impl<'a> BigQueryTableBase<'a> for #table_ident<'a> {
#impl_get_client
#impl_set_client
#impl_get_pk_field_name
#impl_get_pk_db_name
#impl_get_pk_value
#impl_get_query_fields
#impl_get_table_name
#impl_set_field_value
#impl_from_query_result_row
}
}
}
//region method implementations
fn implement_get_client(client_field: &Field) -> TokenStream {
let client_ident = client_field.field_ident.clone();
quote::quote! {
fn get_client(&self) -> &'a BigqueryClient {
log::trace!("get_client() self={:?}", self);
&self.#client_ident
}
}
}
fn implement_set_client(client_field: &Field) -> TokenStream {
let client_ident = client_field.field_ident.clone();
quote::quote! {
fn set_client(&mut self, client: &'a BigqueryClient) {
log::trace!("set_client() self={:?}", self);
self.#client_ident = client;
}
}
}
fn implement_get_pk_field_name(pk_field: &Field) -> TokenStream {
let pk_local_name = pk_field.local_name.clone();
quote::quote! {
fn get_pk_field_name() -> String {
log::trace!("get_pk_field_name()");
String::from(#pk_local_name)
}
}
}
fn implement_get_pk_db_name(pk_field: &Field) -> TokenStream {
let pk_db_name = pk_field.db_name.clone();
quote::quote! {
fn get_pk_db_name() -> String {
log::trace!("get_pk_db_name()");
String::from(#pk_db_name)
}
}
}
fn implement_get_pk_value(pk_field: &Field) -> TokenStream {
let pk_ident = &pk_field.field_ident;
quote::quote! {
fn get_pk_value(&self) -> &dyn google_bigquery_v2::data::param_conversion::BigDataValueType {
log::trace!("get_pk_value() self={:?}", self);
&self.#pk_ident
}
}
}
fn implement_get_query_fields(ast: &DeriveInput) -> TokenStream {
fn implement_map_insert(f: Field) -> TokenStream {
let local_name = f.local_name;
let db_name = f.db_name;
quote::quote! {
map.insert(String::from(#local_name),String::from(#db_name));
}
}
let fields = get_fields(&ast.data);
let pk_field = get_pk_field(&ast);
let client_ident = get_client_field(&ast).field_ident;
let fields: Vec<TokenStream> = fields
.into_iter()
.filter(|f| {
f.field_ident != client_ident && f.field_ident != pk_field.field_ident
})
.map(implement_map_insert)
.collect();
let pk_insert = implement_map_insert(pk_field);
quote::quote! {
fn get_query_fields(include_pk: bool) -> std::collections::HashMap<String, String> {
log::trace!("get_query_fields() include_pk={}", include_pk);
let mut map = std::collections::HashMap::new();
if include_pk {
#pk_insert
}
#(#fields)*
map
}
}
}
fn implement_get_table_name(table_name: &str) -> TokenStream {
quote::quote! {
fn get_table_name() -> String {
log::trace!("get_table_name()");
String::from(#table_name)
}
}
}
fn implement_set_field_value(ast: &DeriveInput) -> TokenStream {
fn write_set_field_value(f: Field) -> TokenStream {
let field_ident = f.field_ident;
let local_name = f.local_name;
let field_type = f.ty;
quote::quote! {
#local_name => self.#field_ident = #field_type::from_param(value)?,
}
}
let client_ident = get_client_field(&ast).field_ident;
let fields = get_fields(&ast.data);
let fields: Vec<TokenStream> = fields
.into_iter()
.filter(|f| {
f.field_ident != client_ident
})
.map(write_set_field_value)
.collect();
quote::quote! {
fn set_field_value(&mut self, field_name: &str, value: &serde_json::Value) -> Result<()> {
log::trace!("set_field_value() self={:?} field_name={} value={:?}", self, field_name, value);
use google_bigquery_v2::data::param_conversion::ConvertBigQueryParams;
match field_name {
#(#fields)*
_ => return Err(google_bigquery_v2::data::param_conversion::ConversionError::new(format!("Field '{}' not found", field_name)).into())
}
Ok(())
}
}
}
fn implement_from_query_result_row(ast: &DeriveInput) -> TokenStream {
fn set_field_value(f: Field) -> TokenStream {
let field_ident = f.field_ident;
let local_name = f.local_name;
let field_type = f.ty;
let db_name = f.db_name;
quote::quote! {
#field_ident: #field_type::from_param(&row[#db_name])?,
}
}
let client_ident = get_client_field(&ast).field_ident;
let fields = get_fields(&ast.data);
let fields: Vec<TokenStream> = fields
.into_iter()
.filter(|f| {
f.field_ident != client_ident
})
.map(set_field_value)
.collect();
quote::quote! {
fn new_from_query_result_row(
client: &'a BigqueryClient,
row: &std::collections::HashMap<String, serde_json::Value>,
) -> Result<Self>
where Self: Sized {
log::trace!("new_from_query_result_row() client={:?} row={:?}", client, row);
use google_bigquery_v2::data::param_conversion::ConvertBigQueryParams;
let result = Self{
#client_ident: client,
#(#fields)*
};
Ok(result)
}
}
}
//endregion
//endregion
//region Helper functions
fn get_table_name(ast: &DeriveInput) -> String {
for attr in get_struct_attributes(ast) {
if attr.name.eq("db_name") {
let tokens = &attr.value;
return tokens.to_string();
}
}
ast.ident.to_string()
}
fn get_pk_field(ast: &syn::DeriveInput) -> Field {
let mut pk_fields = get_fields_with_attribute(&ast.data, "primary_key");
if pk_fields.len() != 1 {
panic!("Exactly one primary key field must be specified");
}
let pk = pk_fields.remove(0);
pk
}
fn get_client_field(ast: &syn::DeriveInput) -> Field {
//region client
let mut client_fields = get_fields_with_attribute(&ast.data, "client");
if client_fields.len() != 1 {
panic!("Exactly one client field must be specified");
}
let client = client_fields.remove(0);
//endregion
client
}
fn get_struct_attributes(ast: &syn::DeriveInput) -> Vec<Attribute> {
let attrs = &ast.attrs;
let mut res = vec![];
for attr in attrs {
if attr.path().is_ident("db_name") {
let args: syn::LitStr = attr.parse_args().expect("Failed to parse target name");
let args = args.value();
res.push(Attribute {
name: "db_name".to_string(),
value: args,
});
}
}
res
}
fn get_fields(data: &syn::Data) -> Vec<Field> {
let mut res = vec![];
match data {
syn::Data::Struct(ref data_struct) => match data_struct.fields {
syn::Fields::Named(ref fields_named) => {
for field in fields_named.named.iter() {
if let Some(parsed_field) = parse_local_field(&field, false) {
res.push(parsed_field);
}
}
}
_ => (),
},
_ => panic!("Must be a struct!"),
};
return res;
}
fn parse_local_field(field: &syn::Field, include_ignored: bool) -> Option<Field> {
match &field.ident {
Some(ident) => {
let mut name = None;
let mut required = false;
let attrs = &field.attrs;
for attribute in attrs {
if attribute.path().is_ident("db_ignore") && !include_ignored {
return None; //skip this field completely
}
if attribute.path().is_ident("db_name") {
let args: syn::LitStr =
attribute.parse_args().expect("Failed to parse target name");
let args = args.value();
name = Some(args);
}
if attribute.path().is_ident("required") {
required = true;
}
}
let local_name = ident.to_string();
let name = match name {
None => local_name.clone(),
Some(n) => n,
};
let parsed_field = Field {
field_ident: ident.clone(),
local_name,
db_name: name,
ty: field.ty.clone(),
required,
};
return Some(parsed_field);
}
_ => None,
}
}
fn get_fields_with_attribute(data: &syn::Data, attribute_name: &str) -> Vec<Field> {
let mut res = vec![];
match data {
// Only process structs
syn::Data::Struct(ref data_struct) => {
// Check the kind of fields the struct contains
match data_struct.fields {
// Structs with named fields
syn::Fields::Named(ref fields_named) => {
// Iterate over the fields
for field in fields_named.named.iter() {
if let Some(_) = &field.ident {
// Get attributes #[..] on each field
for attr in field.attrs.iter() {
// Parse the attribute
if attr.path().is_ident(attribute_name) {
let parsed_field = parse_local_field(&field, true).unwrap();
res.push(parsed_field);
}
}
}
}
}
// Struct with unnamed fields
_ => (),
}
}
// Panic when we don't have a struct
_ => panic!("Must be a struct"),
}
return res;
}
//endregion
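To make the expansion concrete: a minimal sketch of a struct using the derive and an abridged view of the generated impl (DbUser is a hypothetical example; a real usage lives in tests/tests.rs):

#[derive(BigDataTableDerive, Debug)]
#[db_name("Users")] // table name override; get_table_name() falls back to the struct name
pub struct DbUser<'a> {
    #[client] // exactly one field, located by get_client_field()
    client: &'a BigqueryClient,
    #[primary_key] // exactly one field, located by get_pk_field()
    #[db_name("Id")] // column name override; defaults to the field name
    user_id: i64,
    name: Option<String>,
}
// Abridged expansion:
// impl<'a> BigQueryTableBase<'a> for DbUser<'a> {
//     fn get_table_name() -> String { String::from("Users") }
//     fn get_pk_field_name() -> String { String::from("user_id") }
//     fn get_pk_db_name() -> String { String::from("Id") }
//     /* plus get_client, set_client, get_query_fields, set_field_value,
//        new_from_query_result_row as implemented above */
// }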

87
src/client.rs Normal file

@@ -0,0 +1,87 @@
use std::error::Error;
use std::fmt::Debug;
use google_bigquery2::hyper::client::HttpConnector;
use google_bigquery2::hyper_rustls::HttpsConnector;
use google_bigquery2::Bigquery;
use google_bigquery2::{hyper, hyper_rustls, oauth2};
pub struct BigqueryClient {
client: Bigquery<HttpsConnector<HttpConnector>>,
project_id: String,
dataset_id: String,
}
impl BigqueryClient {
pub fn empty() -> BigqueryClient {
todo!()
}
}
impl BigqueryClient {
pub async fn new<S: Into<String>>(
project_id: S,
dataset_id: S,
service_account_path: Option<S>,
) -> Result<BigqueryClient, Box<dyn Error>> {
let client = get_internal_client(service_account_path).await?;
Ok(BigqueryClient {
client,
project_id: project_id.into(),
dataset_id: dataset_id.into(),
})
}
pub fn get_client(&self) -> &Bigquery<HttpsConnector<HttpConnector>> {
&self.client
}
pub fn get_project_id(&self) -> &str {
&self.project_id
}
pub fn get_dataset_id(&self) -> &str {
&self.dataset_id
}
}
impl Debug for BigqueryClient {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("BigqueryClient")
.field("project_id", &self.project_id)
.field("dataset_id", &self.dataset_id)
.finish()
}
}
async fn get_internal_client<S: Into<String>>(
service_account_path: Option<S>,
) -> Result<Bigquery<HttpsConnector<HttpConnector>>, Box<dyn Error>> {
let hyper_client = hyper::Client::builder().build(
hyper_rustls::HttpsConnectorBuilder::new()
.with_native_roots()
.https_or_http()
.enable_http1()
.enable_http2()
.build(),
);
let service_account_path = match service_account_path {
None => "auth/service_account2.json".to_string(),
Some(s) => s.into(),
};
let secret = oauth2::read_service_account_key(&service_account_path)
.await
.unwrap_or_else(|e| {
panic!(
"Failed to read service account key from file {}: {}",
service_account_path, e
)
});
let auth = oauth2::ServiceAccountAuthenticator::builder(secret)
.build()
.await
.expect("Failed to authenticate with service account key.");
let client: Bigquery<HttpsConnector<HttpConnector>> = Bigquery::new(hyper_client, auth);
Ok(client)
}
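A minimal construction sketch (project, dataset, and key path are placeholder values; passing None falls back to the hard-coded auth/service_account2.json above):

// Sketch only: assumes a tokio context and a valid service account key on disk.
let client = BigqueryClient::new(
    "my-project",
    "my_dataset",
    Some("auth/service_account.json"),
)
.await?;
println!("{:?}", client); // Debug output shows only project_id and dataset_id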

293
src/data/bigquery_builder.rs Normal file

@@ -0,0 +1,293 @@
use std::collections::HashMap;
use std::fmt::{Debug, Display, Formatter};
use std::marker::PhantomData;
use google_bigquery2::api::{
QueryParameter, QueryParameterType, QueryParameterValue, QueryRequest,
};
use log::{debug, trace};
use serde_json::Value;
use crate::client::BigqueryClient;
use crate::data::BigQueryTable;
use crate::data::param_conversion::BigDataValueType;
use crate::prelude::*;
//region OrderDirection
#[derive(Debug, Clone)]
pub enum OrderDirection {
Ascending,
Descending,
}
impl OrderDirection {
pub(crate) fn to_query_str(&self) -> String {
match self {
OrderDirection::Ascending => String::from("ASC"),
OrderDirection::Descending => String::from("DESC"),
}
}
}
//endregion
//region BigQueryBuilder
#[derive(Debug, Clone)]
pub struct BigQueryBuilder<'a, Table> {
client: Option<&'a BigqueryClient>,
required_params: Vec<QueryParameter>,
selected_fields: Option<Vec<String>>,
wheres: Vec<String>,
limit: Option<usize>,
order_bys: Vec<(String, OrderDirection)>,
_table_type_marker: PhantomData<Vec<Table>>,
}
impl<'a, Table> BigQueryBuilder<'a, Table>
where
Table: BigQueryTable<'a>,
{
//region build methods
pub async fn run(self) -> Result<Vec<Table>> {
trace!("BigQueryBuilder::run()");
//TODO: maybe return an iterator instead of a vector.
// this would allow for lazy loading of the data.
// it would also make it possible that additional
// data is loaded (if the set limit is higher than
// the number of rows returned)
let client = self.client.unwrap();
let fields = self.get_sorted_selected_fields();
let req = self.build_query_request();
debug!("req: {:?}", req);
let (res, query_res) = client
.get_client()
.jobs()
.query(req, client.get_project_id())
.doit()
.await?;
if res.status() != 200 {
return Err(format!("Wrong status code returned! ({})", res.status()).into());
}
let query_res = query_res.rows.unwrap();
println!("query_res: {:?}", query_res);
let mut result: Vec<Table> = Vec::new();
for row in query_res {
let row = row.f.unwrap();
let mut row_data: HashMap<String, Value> = HashMap::new();
for (i, field) in row.into_iter().enumerate() {
let field = field.v.unwrap_or(Value::Null);
println!("{}: {}", fields[i], field);
row_data.insert(fields[i].clone(), field);
}
let data = Table::new_from_query_result_row(client, &row_data)?;
result.push(data);
}
return Ok(result);
}
pub fn build_query_request(self) -> QueryRequest {
QueryRequest {
query: Some(self.build_query_string()),
query_parameters: Some(self.required_params),
use_legacy_sql: Some(false),
/*TODO: is this line needed?: use_legacy_sql: Some(false),*/
..Default::default()
}
}
pub fn build_query_string(&self) -> String {
let where_clause = if self.wheres.is_empty() {
String::new()
} else {
format!("WHERE {}", self.wheres.join(" AND "))
};
let order_by_clause = if self.order_bys.is_empty() {
String::new()
} else {
format!("ORDER BY {}", self.order_bys
.iter()
.map(|(key, dir)| format!("{} {}", key, dir.to_query_str()))
.collect::<Vec<String>>()
.join(", "))
};
format!(
"SELECT {} FROM {} {} {} LIMIT {}",
self.get_sorted_selected_fields()
.join(", "),
Table::get_table_identifier_from_client(self.client.unwrap()),
where_clause,
order_by_clause,
self.limit.unwrap_or(1000)
)
}
//endregion
//region add content
fn set_select_fields(self, fields: Vec<String>) -> Result<Self> {
//TODO: this method probably does not work since the logic does
// not work if (at least the required) fields are not selected
// since the parser will not be able to create the struct instance.
let selected_fields = self.selected_fields;
let mut selected_fields = match selected_fields {
Some(selected_fields) => selected_fields,
None => Vec::new(),
};
for field in fields {
let field_name = Table::get_field_db_name(&field)
.map_err(|e| format!("Error while selecting field '{}': {}", field, e))?;
selected_fields.push(field_name);
}
Ok(Self {
selected_fields: Some(selected_fields),
..self
})
}
pub fn add_where_eq<T>(self, column: &str, value: Option<&T>) -> Result<Self>
where
T: BigDataValueType + Debug,
{
let column = Table::get_field_db_name(column)?;
let mut wheres = self.wheres;
if let Some(value) = value {
let param_name = format!("__PARAM_{}", self.required_params.len());
let param = get_parameter(value, &param_name);
let mut required_params = self.required_params;
required_params.push(param);
wheres.push(format!("{} = @{}", column, param_name));
return Ok(Self {
wheres,
required_params,
..self
});
}
wheres.push(format!("{} is NULL", column));
Ok(Self { wheres, ..self })
}
pub fn add_order_by(self, column: &str, direction: OrderDirection) -> Self {
let column = Table::get_field_db_name(column).unwrap();
let mut order_bys = self.order_bys;
order_bys.push((column.to_string(), direction));
Self { order_bys, ..self }
}
//endregion
fn get_sorted_selected_fields(&self) -> Vec<String> {
trace!("get_sorted_selected_fields()");
let mut fields: Vec<String> = match &self.selected_fields {
Some(fields) => fields.clone(),
None => {
Table::get_query_fields(true)
.into_iter()
.map(|f| f.1)//get the db name
.collect()
}
};
println!("fields: {:?}", fields);
fields.sort();
fields
}
}
//region implement some convenience traits for BigQueryBuilder
impl<'a, Table> Default for BigQueryBuilder<'a, Table> {
fn default() -> Self {
Self {
client: None,
required_params: vec![],
selected_fields: None,
wheres: vec![],
limit: None,
order_bys: vec![],
_table_type_marker: PhantomData,
}
}
}
impl<'a, Table: BigQueryTable<'a>> Display for BigQueryBuilder<'a, Table> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.write_fmt(format_args!(
"BigQueryBuilder: {}\t\
wheres: 'where {}'\t\
order by's: 'order by {}'\t\
limit: {:?}\t\
params: {:?}",
Table::get_table_name(),
self.wheres.join(" AND "),
self.order_bys
.iter()
.map(|(key, dir)| format!("{} {}", key, dir.to_query_str()))
.collect::<Vec<String>>()
.join(", "),
self.limit,
self.required_params
))
}
}
//endregion
//endregion
fn get_parameter<T>(value: &T, param_name: &str) -> QueryParameter
where
T: BigDataValueType + Debug,
{
// BigQuery expects parameter values as strings; stringify non-string JSON values
// instead of unwrapping a from_value::<String>() that would panic on numbers.
let param_value = match value.to_param() {
Value::String(s) => s,
other => other.to_string(),
};
let param_value = QueryParameterValue {
value: Some(param_value),
..Default::default()
};
let param_type = T::convert_type_to_bigquery_type();
let param_type = QueryParameterType {
type_: Some(param_type),
..Default::default()
};
let param = QueryParameter {
parameter_type: Some(param_type),
parameter_value: Some(param_value),
name: Some(param_name.to_string()),
};
param
}
//region BigQueryBuilderAvailable
pub trait BigQueryBuilderAvailable<'a, Table> {
fn query(client: &'a BigqueryClient) -> BigQueryBuilder<'a, Table>;
}
impl<'a, Table> BigQueryBuilderAvailable<'a, Table> for Table
where
Table: BigQueryTable<'a>,
{
fn query(client: &'a BigqueryClient) -> BigQueryBuilder<'a, Table> {
BigQueryBuilder {
client: Some(client),
..Default::default()
}
}
}
//endregion
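Tying the builder together, a short sketch of the fluent API (DbInfos is the derive test struct from tests/tests.rs; add_where_eq and add_order_by take the Rust-side field names, which get_field_db_name maps to column names):

// Sketch only: mirrors test_query_builder_1 in tests/tests.rs.
let rows: Vec<DbInfos> = DbInfos::query(&client)
    .add_where_eq::<String>("info1", None)?           // emits "info1 is NULL"
    .add_where_eq("info3", Some(&"cc".to_string()))?  // emits "info3 = @__PARAM_0"
    .add_order_by("info2", OrderDirection::Ascending) // emits "ORDER BY info ASC"
    .run()
    .await?;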

57
src/data/bigquery_table.rs Normal file

@@ -0,0 +1,57 @@
use std::collections::HashMap;
use log::trace;
use serde_json::Value;
use crate::client::BigqueryClient;
use crate::data::param_conversion::BigDataValueType;
use crate::prelude::*;
pub trait BigQueryTableBase<'a> {
fn get_table_name() -> String;
fn get_client(&self) -> &'a BigqueryClient;
fn set_client(&mut self, client: &'a BigqueryClient);
fn get_pk_field_name() -> String;
fn get_pk_db_name() -> String;
fn get_pk_value(&self) -> &dyn BigDataValueType;
fn get_query_fields(include_pk: bool) -> HashMap<String, String>;
fn set_field_value(&mut self, field_name: &str, value: &Value) -> Result<()>;
fn new_from_query_result_row(
client: &'a BigqueryClient,
row: &HashMap<String, Value>,
) -> Result<Self>
where
Self: Sized;
}
pub trait BigQueryTable<'a>: BigQueryTableBase<'a> {
fn get_field_db_name(field_name: &str) -> Result<String> {
trace!("get_field_db_name({})", field_name);
let query_fields = Self::get_query_fields(true);
let db_name = query_fields.get(field_name);
match db_name {
None => Err(format!("Field {} not found.", field_name).into()),
Some(s) => Ok(s.to_string()),
}
}
fn get_table_identifier(&self) -> String {
trace!("get_table_identifier()");
Self::get_table_identifier_from_client(self.get_client())
}
fn get_table_identifier_from_client(client: &'a BigqueryClient) -> String {
trace!("get_table_identifier_from_client({:?})", client);
format!(
"`{}.{}.{}`",
client.get_project_id(),
client.get_dataset_id(),
Self::get_table_name()
)
}
}
impl<'a, T> BigQueryTable<'a> for T where T: BigQueryTableBase<'a> {}
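For instance, with the test client from tests/tests.rs (project testrustproject-372221, dataset test1) and the DbInfos struct, the identifier comes back fully qualified and backtick-quoted:

// Sketch only: DbInfos and the client are defined in tests/tests.rs.
let ident = DbInfos::get_table_identifier_from_client(&client);
assert_eq!(ident, "`testrustproject-372221.test1.Infos`");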

6
src/data/mod.rs Normal file

@@ -0,0 +1,6 @@
pub use bigquery_table::*;
// pub use bigquery_builder::*;
mod bigquery_table;
pub mod bigquery_builder;
pub mod param_conversion;

107
src/data/param_conversion/convert_bigquery_params.rs Normal file

@@ -0,0 +1,107 @@
use std::fmt::Debug;
use chrono::{NaiveDateTime, Utc};
use log::trace;
use serde_json::Value;
use crate::prelude::*;
pub trait ConvertBigQueryParams {
fn from_param(value: &Value) -> Result<Self> where Self: Sized;
fn to_param(&self) -> Value;
}
impl ConvertBigQueryParams for i64 {
fn from_param(value: &Value) -> Result<Self> {
let string: String = serde_json::from_value(value.clone())?;
Ok(string.parse()?)
}
fn to_param(&self) -> Value {
serde_json::to_value(self).unwrap()
}
}
impl ConvertBigQueryParams for i32 {
fn from_param(value: &Value) -> Result<Self> {
let string: String = serde_json::from_value(value.clone())?;
Ok(string.parse()?)
}
fn to_param(&self) -> Value {
serde_json::to_value(self).unwrap()
}
}
impl ConvertBigQueryParams for bool {
fn from_param(value: &Value) -> Result<Self> {
let value: String = serde_json::from_value(value.clone())?;
match value.as_str() {
"TRUE" => Ok(true),
"true" => Ok(true),
"FALSE" => Ok(false),
"false" => Ok(false),
invalid => Err(format!("Invalid value for bool: '{}'", invalid).into()),
}
}
fn to_param(&self) -> Value {
match self {
true => serde_json::to_value("TRUE").unwrap(),
false => serde_json::to_value("FALSE").unwrap(),
}
}
}
impl ConvertBigQueryParams for String {
fn from_param(value: &Value) -> Result<Self> {
let string: String = serde_json::from_value(value.clone())?;
Ok(string)
}
fn to_param(&self) -> Value {
serde_json::to_value(self).unwrap()
}
}
impl ConvertBigQueryParams for f64 {
fn from_param(value: &Value) -> Result<Self> {
Ok(serde_json::from_value(value.clone())?)
}
fn to_param(&self) -> Value {
serde_json::to_value(self).unwrap()
}
}
impl ConvertBigQueryParams for chrono::DateTime<Utc> {
fn from_param(value: &Value) -> Result<Self> {
trace!("ConvertValueToBigqueryParamValue::from_param DateTime<Utc> -> in: {:?}", value);
let value: String = serde_json::from_value(value.clone())?;
let value = value.replace("T", " ").replace("Z", "");
let value = NaiveDateTime::parse_from_str(&value, "%Y-%m-%d %H:%M:%S")?;
let time = chrono::DateTime::<Utc>::from_utc(value, Utc);
trace!("ConvertValueToBigqueryParamValue::from_param DateTime<Utc> -> out: {:?}", time);
Ok(time)
}
fn to_param(&self) -> Value {
trace!("ConvertValueToBigqueryParamValue::to_param DateTime<Utc> -> in: {:?}", self);
let value: String = self.to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
let value: String = value.replace("Z", "").replace("T", " ");
trace!("ConvertValueToBigqueryParamValue::to_param DateTime<Utc> -> out: {:?}", value);
serde_json::to_value(value).unwrap()
}
}
impl<T: ConvertBigQueryParams + Debug> ConvertBigQueryParams for Option<T> {
fn from_param(value: &Value) -> Result<Self> where Self: Sized {
trace!("ConvertValueToBigqueryParamValue::from_param Option<T>: {:?}", value);
match value {
Value::Null => Ok(None),
_ => Ok(Some(T::from_param(value)?)),
}
}
fn to_param(&self) -> Value {
trace!("ConvertValueToBigqueryParamValue::to_param Option<T>: {:?}", self);
match self {
Some(value) => value.to_param(),
None => Value::Null,
}
}
}
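A round-trip sketch for the DateTime<Utc> impl above: to_param strips the RFC 3339 'T'/'Z' markers, and from_param accepts either shape (with_ymd_and_hms is chrono 0.4's non-deprecated constructor; the value here is arbitrary):

use chrono::{TimeZone, Utc};
let t = Utc.with_ymd_and_hms(2023, 4, 12, 13, 15, 41).unwrap();
let v = t.to_param(); // -> Value::String("2023-04-12 13:15:41")
assert_eq!(chrono::DateTime::<Utc>::from_param(&v).unwrap(), t);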

42
src/data/param_conversion/convert_type_to_big_query_type.rs Normal file

@@ -0,0 +1,42 @@
use std::fmt::Display;
pub trait ConvertTypeToBigQueryType {
fn convert_type_to_bigquery_type() -> String where Self: Sized;
}
impl ConvertTypeToBigQueryType for bool {
fn convert_type_to_bigquery_type() -> String {
"BOOL".to_string()
}
}
impl ConvertTypeToBigQueryType for i32 {
fn convert_type_to_bigquery_type() -> String {
"INT64".to_string()
}
}
impl ConvertTypeToBigQueryType for i64 {
fn convert_type_to_bigquery_type() -> String {
"INT64".to_string()
}
}
impl ConvertTypeToBigQueryType for String {
fn convert_type_to_bigquery_type() -> String {
"STRING".to_string()
}
}
impl ConvertTypeToBigQueryType for &str {
fn convert_type_to_bigquery_type() -> String {
"STRING".to_string()
}
}
impl<T> ConvertTypeToBigQueryType for chrono::DateTime<T>
where T: chrono::TimeZone + Display + Send + Sync + 'static {
fn convert_type_to_bigquery_type() -> String {
"DATETIME".to_string()
}
}
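These strings end up in QueryParameterType.type_ inside get_parameter() in the query builder. Note that i32 and i64 both map to INT64: BigQuery standard SQL has a single integer type, so narrower Rust integers widen on the wire.

// Sketch only:
assert_eq!(i32::convert_type_to_bigquery_type(), "INT64");
assert_eq!(String::convert_type_to_bigquery_type(), "STRING");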

42
src/data/param_conversion/mod.rs Normal file

@@ -0,0 +1,42 @@
use std::error::Error;
use std::fmt::{Debug, Display, Formatter};
pub use convert_bigquery_params::ConvertBigQueryParams;
pub use convert_type_to_big_query_type::ConvertTypeToBigQueryType;
mod convert_bigquery_params;
mod convert_type_to_big_query_type;
pub trait BigDataValueType: ConvertTypeToBigQueryType + ConvertBigQueryParams + Debug {}
impl<T: ConvertTypeToBigQueryType + ConvertBigQueryParams + Debug> BigDataValueType for T {}
//region ConversionError
#[derive(Debug)]
pub struct ConversionError {
pub message: String,
}
impl Display for ConversionError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.write_fmt(format_args!("{}", self.message))
}
}
impl Error for ConversionError {}
impl From<&str> for ConversionError {
fn from(message: &str) -> Self {
ConversionError::new(message)
}
}
impl ConversionError {
pub fn new(message: impl Into<String>) -> Self {
ConversionError {
message: message.into(),
}
}
}
//endregion

4
src/lib.rs Normal file

@@ -0,0 +1,4 @@
pub mod client;
pub mod data;
pub mod utils;
pub mod prelude;

12
src/prelude.rs Normal file

@@ -0,0 +1,12 @@
pub use google_bigquery_v2_derive::BigDataTableDerive;
pub use crate::client::BigqueryClient;
pub use crate::data::{
bigquery_builder::{BigQueryBuilder, BigQueryBuilderAvailable, OrderDirection},
BigQueryTable, BigQueryTableBase,
};
pub type Result<T> = std::result::Result<T, Box<dyn std::error::Error + Send + Sync>>;

1
src/utils/mod.rs Normal file

@@ -0,0 +1 @@

108
tests/tests.rs Normal file

@@ -0,0 +1,108 @@
use log::{debug, info, LevelFilter};
use nameof::name_of;
use google_bigquery_v2::prelude::*;
#[derive(BigDataTableDerive, Debug, Clone)]
#[db_name("Infos")]
pub struct DbInfos<'a> {
#[client]
client: &'a BigqueryClient,
#[primary_key]
#[db_name("Id")]
row_id: i64,
info1: Option<String>,
#[db_name("info")]
info2: Option<String>,
info3: Option<String>,
info4i: Option<i32>,
#[db_name("yes")]
info4b: Option<bool>,
}
#[tokio::test]
async fn test_get_table_name() {
init_logger(LevelFilter::Debug);
let table_name = DbInfos::get_table_name();
println!("table name: {}", table_name);
assert_eq!("Infos", table_name, "table name is not correct")
}
#[tokio::test]
async fn test_get_query_fields() {
init_logger(LevelFilter::Debug);
let fields = DbInfos::get_query_fields(true);
println!("fields: {:?}", fields);
assert_eq!(6, fields.len(), "fields length is not correct");
assert_eq!("Id", fields.get("row_id").unwrap(), );
assert_eq!("info1", fields.get("info1").unwrap(), );
assert_eq!("info", fields.get("info2").unwrap());
assert_eq!("info3", fields.get("info3").unwrap());
assert_eq!("info4i", fields.get("info4i").unwrap());
assert_eq!("yes", fields.get("info4b").unwrap());
}
#[tokio::test]
async fn test_query_builder_1() {
init_logger(LevelFilter::Debug);
let client = get_test_client().await;
let query_builder: BigQueryBuilder<DbInfos> = DbInfos::query(&client);
let query_builder: BigQueryBuilder<DbInfos> = query_builder
.add_where_eq::<String>(name_of!(info1 in DbInfos), None)
.unwrap()
.add_where_eq(name_of!(info3 in DbInfos), Some(&"cc".to_string()))
.unwrap()
.add_order_by(name_of!(info2 in DbInfos), OrderDirection::Ascending);
let query_string = query_builder.clone().build_query_string();
let expected_query_string = String::from(
"SELECT Id, info, info1, info3, info4i, yes \
FROM `testrustproject-372221.test1.Infos` \
WHERE info1 is NULL AND info3 = @__PARAM_0 \
ORDER BY info ASC LIMIT 1000",
);
println!("query : {}", query_string);
println!("expected: {}", expected_query_string);
println!("request: {:?}", query_builder.clone().build_query_request());
assert_eq!(query_string, expected_query_string);
assert_eq!(
query_builder
.clone()
.build_query_request()
.query_parameters
.unwrap()
.len(),
1
);
let res = query_builder.clone().run().await.unwrap();
println!("res: {:?}", res);
}
async fn get_test_client() -> BigqueryClient {
BigqueryClient::new("testrustproject-372221", "test1", None)
.await
.unwrap()
}
#[tokio::test]
async fn simple_query() {
init_logger(LevelFilter::Info);
let client = get_test_client().await;
let q = DbInfos::query(&client)
.add_order_by(name_of!(row_id in DbInfos), OrderDirection::Descending)
.run().await.unwrap();
let mut last_num = i64::MAX;
for line in q {
info!("line: {:?}", line);
debug!("row_id > last: {} <= {}",line.row_id, last_num);
assert!(line.row_id <= last_num);
last_num = line.row_id;
}
}
fn init_logger(level: LevelFilter) {
let _ = env_logger::builder()
.is_test(true)
.filter_level(level)
.try_init();
}