Mirror of https://github.com/OMGeeky/downloader.git (synced 2026-01-24 12:16:24 +01:00)
misc
Cargo.toml
@@ -25,6 +25,9 @@ log = "0.4"
+tracing = "0.1"
data-test = "0.1.1"

+tracing-appender = "0.2"
+tracing-subscriber = "0.3"

[patch.crates-io]
# patch the yup-oauth2 version with a custom fork to support forcing the user to choose an account.
# this can be removed as soon as https://github.com/dermesser/yup-oauth2/ has its next release and
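The comment above is cut off in this view; it describes overriding the crates.io yup-oauth2 dependency with a custom fork. The actual fork URL and branch are not visible in this diff, so the following [patch.crates-io] entry is purely a hypothetical illustration of the shape such an override takes:

[patch.crates-io]
# Hypothetical illustration only — the real fork URL/branch used by this commit is not shown in the diff.
yup-oauth2 = { git = "https://github.com/example/yup-oauth2", branch = "force-account-selection" }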
logger.yaml (12 changed lines)
@@ -11,7 +11,7 @@ appenders:

  trace_file:
    kind: rolling_file
-    path: "/var/tmp/twba/logs/downloader.trace.log"
+    path: "/downloader/logs/downloader.trace.log"
    filters:
      - kind: threshold
        level: trace
@@ -24,12 +24,12 @@ appenders:
        limit: 1 gb
      roller:
        kind: fixed_window
-        pattern: "/var/tmp/twba/logs/archive/downloader.trace.{}.log"
+        pattern: "/downloader/logs/archive/downloader.trace.{}.log"
        count: 5

  info_file:
    kind: rolling_file
-    path: "/var/tmp/twba/logs/downloader.info.log"
+    path: "/downloader/logs/downloader.info.log"
    filters:
      - kind: threshold
        level: info
@@ -42,12 +42,12 @@ appenders:
        limit: 100mb
      roller:
        kind: fixed_window
-        pattern: "/var/tmp/twba/logs/archive/downloader.info.{}.log"
+        pattern: "/downloader/logs/archive/downloader.info.{}.log"
        count: 5

  debug_file:
    kind: rolling_file
-    path: "/var/tmp/twba/logs/downloader.debug.log"
+    path: "/downloader/logs/downloader.debug.log"
    filters:
      - kind: threshold
        level: debug
@@ -60,7 +60,7 @@ appenders:
        limit: 1gb
      roller:
        kind: fixed_window
-        pattern: "/var/tmp/twba/logs/archive/downloader.debug.{}.log"
+        pattern: "/downloader/logs/archive/downloader.debug.{}.log"
        count: 5

root:
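For reference, a log4rs YAML config like the one above is loaded once at startup; initialize_logger2 in src/main.rs below does this with log4rs::init_file. A minimal sketch, assuming the log4rs and log crates from Cargo.toml and an illustrative config path:

// Minimal sketch: initialize log4rs from the rolling-file appender config shown above.
fn init_logging() {
    log4rs::init_file("logger.yaml", Default::default())
        .expect("Failed to initialize the logger from the file");
    log::info!("logging initialized from logger.yaml");
}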
src/lib.rs (19 changed lines)
@@ -148,7 +148,7 @@ async fn get_youtube_clients(
    let mut result = HashMap::new();
    let config = load_config();
    let streamers = get_watched_streamers(db_client).await?;
-
+    info!("Getting youtube clients for {:?}", streamers);
    for streamer in streamers {
        trace!("Creating youtube client");
@@ -169,8 +169,10 @@ async fn get_youtube_clients(
        )
        .await
        .map_err(|e| anyhow!("error creating the youtube client: {}", e))?;
+        info!("Got client for user: {}", user);
        result.insert(user, youtube_client);
    }
+    info!("Got youtube clients");
    Ok(result)
}
@@ -438,7 +440,7 @@ pub async fn split_video_into_parts(
    let filepath = path.canonicalize()?;
    let parent_dir = path.parent().unwrap().canonicalize();
    if parent_dir.is_err() {
-        warn!("Could not canonicalize parent dir");
+        warn!("Could not canonicalize parent dir: {:?}", path);
    }
    let parent_dir = parent_dir.expect("Could not canonicalize parent dir");
@@ -533,9 +535,10 @@ pub async fn split_video_into_parts(
        .expect("to_str on path did not work!");
    let last_path = clean(&last_path);
    let last_path = last_path.to_str().expect("to_str on path did not work!");
    //create a file to tell ffmpeg what files to join/concat
    tokio::fs::write(
        join_txt_path.clone(),
-        format!("file '{}'\nfile '{}'", last_path, second_last_path_str,),
+        format!("file '{}'\nfile '{}'", second_last_path_str, last_path),
    )
    .await?;
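The reordered format! above writes the two joined parts into the join file in playback order (the second-to-last part first, then the last part). That file is the plain-text list format consumed by ffmpeg's concat demuxer; the actual ffmpeg invocation is not part of this diff, so the following is only a hedged sketch of how such a list is typically consumed:

// Hedged sketch only: the join file written above contains lines like
//   file '1740252892.mp4_000.mp4'
//   file '1740252892.mp4_001.mp4'
// and is usually fed to ffmpeg's concat demuxer roughly as follows.
use std::process::Command;

fn concat_parts(join_txt: &str, output: &str) -> std::io::Result<std::process::ExitStatus> {
    Command::new("ffmpeg")
        .args(["-f", "concat", "-safe", "0", "-i", join_txt, "-c", "copy", output])
        .status()
}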
@@ -885,16 +888,16 @@ mod tests {

        let (total_time, parts) = extract_track_info_from_playlist(sample_playlist_content)
            .expect("failed to extract track info from playlist");
-        assert_eq!(total_time, 18002.0 as f64);
+        assert_eq!(total_time, 18002.0f64);
        assert_eq!(parts.len(), 2);

        assert_eq!(
            parts[0],
-            ("1740252892.mp4_000.mp4".to_string(), 18001.720898 as f64)
+            ("1740252892.mp4_000.mp4".to_string(), 18001.720898f64)
        );
        assert_eq!(
            parts[1],
-            ("1740252892.mp4_001.mp4".to_string(), 14633.040755 as f64)
+            ("1740252892.mp4_001.mp4".to_string(), 14633.040755f64)
        );
    }
    #[tokio::test]
@@ -920,8 +923,8 @@ mod tests {
        );
        assert_eq!(parts[0], Path::join(parent_dir, "1740252892.mp4_000.mp4"));
        assert_eq!(parts[1], Path::join(parent_dir, "1740252892.mp4_001.mp4"));
-        assert_eq!(second_last_time, 18001.720898 as f64);
-        assert_eq!(last_time, 14633.040755 as f64);
+        assert_eq!(second_last_time, 18001.720898f64);
+        assert_eq!(last_time, 14633.040755f64);
    }
}
src/main.rs (76 changed lines)
@@ -5,10 +5,10 @@ use std::fmt::Debug;
use std::path::Path;

use anyhow::{anyhow, Result};
+use downloader::prelude::*;
use google_bigquery_v2::prelude::*;
use google_youtube::scopes;
use google_youtube::YoutubeClient;
-use downloader::prelude::*;
use log4rs::append::console::ConsoleAppender;
use log4rs::append::rolling_file::policy::compound::roll;
use log4rs::append::rolling_file::policy::compound::roll::fixed_window::FixedWindowRoller;
@@ -18,8 +18,11 @@ use log4rs::append::rolling_file::{RollingFileAppender, RollingFileAppenderBuild
use log4rs::config::{Appender, Root};
use log4rs::encode::pattern::PatternEncoder;
use nameof::name_of;
-use simplelog::*;
use tokio::fs::File;
+use tracing_appender::non_blocking::WorkerGuard;
+use tracing_appender::rolling;
+use tracing_appender::rolling::Rotation;
+use tracing_subscriber::prelude::*;
use twitch_data::{
    convert_twitch_video_to_twitch_data_video, get_client, TwitchClient, Video, VideoQuality,
};
@@ -37,12 +40,46 @@ const DATASET_ID: &str = "backup_data";

#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
-    initialize_logger2().await;
+    // initialize_logger2().await;
+    let _guards = initialize_logger3().await;
    info!("Hello, world!");
    start_backup().await?;
    // sample().await?;
    Ok(())
}
+async fn initialize_logger3() -> Result<(WorkerGuard, WorkerGuard, WorkerGuard), Box<dyn Error>> {
+    let (info_daily, guard_info_daily) =
+        tracing_appender::non_blocking(rolling::daily("/downloader/logs", "info.log"));
+    let (debug_daily, guard_debug_daily) =
+        tracing_appender::non_blocking(rolling::daily("/downloader/logs", "debug.log"));
+    let (trace_daily, guard_trace_daily) =
+        tracing_appender::non_blocking(rolling::daily("/downloader/logs", "trace.log"));
+
+    let info_layer = tracing_subscriber::fmt::layer()
+        .with_writer(info_daily)
+        .with_filter(tracing_subscriber::filter::LevelFilter::INFO);
+
+    let debug_layer = tracing_subscriber::fmt::layer()
+        .with_writer(debug_daily)
+        .with_filter(tracing_subscriber::filter::LevelFilter::DEBUG);
+
+    let trace_layer = tracing_subscriber::fmt::layer()
+        .with_writer(trace_daily)
+        .with_filter(tracing_subscriber::filter::LevelFilter::TRACE);
+
+    let stdout_layer =
+        tracing_subscriber::fmt::layer().with_filter(tracing_subscriber::filter::LevelFilter::INFO);
+
+    let subscriber = tracing_subscriber::registry()
+        .with(info_layer)
+        .with(debug_layer)
+        .with(trace_layer)
+        .with(stdout_layer);
+
+    tracing::subscriber::set_global_default(subscriber).expect("Failed to set global subscriber");
+    print_log_start_msg();
+    Ok((guard_info_daily, guard_debug_daily, guard_trace_daily))
+}

async fn initialize_logger2() -> Result<(), Box<dyn Error>> {
    // // example:
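A note on the three WorkerGuard values returned by the new initialize_logger3: tracing_appender::non_blocking hands back a guard alongside each writer, and dropping the guard flushes and shuts down the background logging thread. Keeping the guards bound in main (the `let _guards = ...` above) is what keeps the non-blocking file writers alive for the whole run; if they were dropped early, buffered log lines could be lost.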
@@ -84,14 +121,19 @@ async fn initialize_logger2() -> Result<(), Box<dyn Error>> {
    );
    log4rs::init_file(path, Default::default())
        .expect("Failed to initialize the logger from the file");

    info!("==================================================================================");

    Ok(())
}

fn print_log_start_msg() {
    info!("==================================================================================");
    info!(
        "Start of new log on {}",
        chrono::Utc::now().format("%Y-%m-%d %H:%M:%S")
    );
    info!("==================================================================================");

    Ok(())
}

fn gb_to_bytes(gb: f32) -> u64 {
@@ -103,25 +145,25 @@ async fn initialize_logger() -> Result<(), Box<dyn Error>> {
    tokio::fs::create_dir_all(log_folder).await?;
    let timestamp = chrono::Utc::now().format("%Y-%m-%d_%H-%M-%S").to_string();

-    CombinedLogger::init(vec![
+    simplelog::CombinedLogger::init(vec![
        // SimpleLogger::new(LevelFilter::Info, Config::default()),
-        TermLogger::new(
+        simplelog::TermLogger::new(
            LevelFilter::Info,
-            Config::default(),
-            TerminalMode::Mixed,
-            ColorChoice::Auto,
+            simplelog::Config::default(),
+            simplelog::TerminalMode::Mixed,
+            simplelog::ColorChoice::Auto,
        ),
-        WriteLogger::new(
+        simplelog::WriteLogger::new(
            LevelFilter::Info,
-            Config::default(),
+            simplelog::Config::default(),
            File::create(format!("{}downloader_{}.log", log_folder, timestamp))
                .await?
                .into_std()
                .await,
        ),
-        WriteLogger::new(
+        simplelog::WriteLogger::new(
            LevelFilter::Trace,
-            Config::default(),
+            simplelog::Config::default(),
            File::create(format!("{}trace_{}.log", log_folder, timestamp))
                .await?
                .into_std()
@@ -194,7 +236,8 @@ async fn sample_bigquery<'a>(client: &'a BigqueryClient) -> Result<(), Box<dyn E

    let video_metadata = VideoMetadata::select()
        .with_client(client.clone())
-        .add_where_eq(name_of!(backed_up in VideoMetadata), Some(&true)).map_err(|e|anyhow!("{}",e))?
+        .add_where_eq(name_of!(backed_up in VideoMetadata), Some(&true))
+        .map_err(|e| anyhow!("{}", e))?
        .set_limit(10)
        .build_query()
        .map_err(|e| anyhow!("{}", e))?
@@ -207,7 +250,8 @@ async fn sample_bigquery<'a>(client: &'a BigqueryClient) -> Result<(), Box<dyn E

    let watched_streamers = Streamers::select()
        .with_client(client.clone())
-        .add_where_eq(name_of!(watched in Streamers), Some(&true)).map_err(|e|anyhow!("{}",e))?
+        .add_where_eq(name_of!(watched in Streamers), Some(&true))
+        .map_err(|e| anyhow!("{}", e))?
        .set_limit(100)
        .build_query()
        .map_err(|e| anyhow!("{}", e))?
@@ -1,8 +1,3 @@
-#[cfg(not(feature = "tracing"))]
-use log::{debug, error, info, trace, warn};
-#[cfg(feature = "tracing")]
pub use tracing::{debug, error, info, trace, warn};

pub use log::LevelFilter;
@@ -1,6 +1,7 @@
use std::env::temp_dir;
use std::path::{Path, PathBuf};

-use chrono::{DateTime, NaiveDateTime, Utc};
+use chrono::{DateTime, Duration, NaiveDateTime, Utc};
// use bigquery_googleapi::BigqueryClient;
use downloader::prelude::*;
use google_bigquery_v2::prelude::*;
@@ -256,3 +257,32 @@ fn prepare_existing_video_test_data(temp_subname: i32) -> (PathBuf, PathBuf) {
    std::fs::copy(video_source, &video_path).unwrap();
    (tmp_folder_path.to_path_buf(), video_path)
}
+
+#[tokio::test]
+async fn download_video_with_multi_parts() {
+    init_console_logging(LevelFilter::Debug);
+    //sample video: https://www.twitch.tv/videos/1592654401
+    //total length 01:10:13 (HH:MM:SS)
+    let x = twitch_data::get_client()
+        .await
+        .expect("could not get client");
+    let path = temp_dir();
+    info!("downloading test video to folder: {}", path.display());
+    let video_path = x
+        .download_video("1592654401", "160p30", &path)
+        .await
+        .expect("could not download video");
+    info!("downloaded video: {:?}", video_path);
+    info!("splitting test video");
+    let paths = downloader::split_video_into_parts(
+        video_path,
+        Duration::minutes(20),
+        Duration::minutes(35),
+    )
+    .await
+    .expect("could not split video");
+
+    info!("checking results");
+    assert_eq!(paths.len(), 3);
+    info!(?paths);
+}
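If the two Duration arguments are, as they appear to be, a target part length of 20 minutes and an upper bound of 35 minutes for the merged final part, then a 01:10:13 video would split into roughly 20 + 20 + 30 minutes, which is consistent with the asserted paths.len() of 3. This reading is an assumption: the signature of split_video_into_parts is not shown in this diff.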