YouTube upload should work with bigger files (no longer loads the whole file into RAM)

Author: OMGeeky
Date: 2023-04-07 13:46:26 +02:00
parent 0132bd636d
commit 95a64ae692
3 changed files with 26 additions and 22 deletions
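
The change the commit title describes is not visible in this diff itself; it appears to live behind the google_youtube dependency bump to 0.1.1 in the first changed file below (the crate's Cargo.toml). As a rough illustration of what "not whole file in RAM" usually means in practice, here is a sketch that streams a video file from disk into an HTTP request body chunk by chunk instead of reading it fully into memory first. Everything in it is assumed for illustration only: the function name upload_streaming, the upload_url parameter, and the use of the tokio, tokio-util, and reqwest crates; it is not the actual code of the google_youtube crate.

use std::error::Error;
use std::path::Path;

use tokio_util::io::ReaderStream;

// Hypothetical sketch only: stream a large file as an HTTP request body so it
// is never held in memory as a whole. Not the code changed by this commit.
async fn upload_streaming(upload_url: &str, path: &Path) -> Result<(), Box<dyn Error>> {
    // Open the file lazily; only small chunks are buffered while sending.
    let file = tokio::fs::File::open(path).await?;
    let size = file.metadata().await?.len();

    // Turn the file into a stream of byte chunks and hand it to the HTTP client,
    // so data is read from disk as it goes out over the network.
    let body = reqwest::Body::wrap_stream(ReaderStream::new(file));

    reqwest::Client::new()
        .put(upload_url) // e.g. a resumable-upload session URL (assumed)
        .header(reqwest::header::CONTENT_LENGTH, size)
        .header(reqwest::header::CONTENT_TYPE, "video/mp4")
        .body(body)
        .send()
        .await?
        .error_for_status()?;
    Ok(())
}

The non-streaming variant (for example tokio::fs::read(path) into a Vec<u8> used as the request body) needs roughly as much RAM as the file is large, which is what breaks for big uploads; the streaming variant keeps memory use bounded by the chunk size.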


@@ -1,14 +1,14 @@
 [package]
 name = "downloader"
-version = "0.1.2"
+version = "0.1.3"
 edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 [dependencies]
 google_bigquery = { git = "https://github.com/OMGeeky/google_bigquery" }
-google_youtube = { git = "https://github.com/OMGeeky/google_youtube" }
-twitch_data = { version="0.1.3", git = "https://github.com/OMGeeky/twitch_data" }
+google_youtube = { version="0.1.1",git = "https://github.com/OMGeeky/google_youtube" }
+twitch_data = { version="0.1.4", git = "https://github.com/OMGeeky/twitch_data" }
 downloader_config = { version="0.2.0", git = "https://github.com/OMGeeky/downloader_config" }
 tokio = "1.23"
 google-bigquery2 = "4.0.1"
@@ -18,4 +18,4 @@ log = "0.4.17"
 simplelog = "0.12.1"
 path-clean = "1.0.1"
-log-panics = { version = "2", features = ["with-backtrace"]}
+log-panics = { version = "2", features = ["with-backtrace"]}


@@ -191,7 +191,7 @@ async fn backup_not_downloaded_videos<'a>(
             continue;
         }
         let video_file_path = video_file_path.unwrap();
+        info!("Splitting video into parts");
         let mut video_parts = split_video_into_parts(
             video_file_path.to_path_buf(),
             Duration::minutes(config.youtube_video_length_minutes_soft_cap),
@@ -393,13 +393,17 @@ pub async fn split_video_into_parts(
     //endregion
     //region maybe join last two parts
-    trace!("Deciding if last two parts should be joined");
+    debug!("Deciding if last two parts should be joined");
     if let Some(last_path) = last_path {
         if let Some(current_path) = current_path {
             let joined_time = last_time + time;
             if joined_time < duration_soft_cap.num_seconds() as f64 {
                 //region join last two parts
-                info!("Joining last two parts");
+                info!(
+                    "Joining last two parts. second last part duration: {} seconds, \
+                    last part duration: {} seconds, joined duration: {} seconds",
+                    last_time, time, joined_time
+                );
                 //remove the part from the result that is going to be joined
                 res.pop();
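
A note on the hunk above, since the units are easy to misread: the soft cap is configured in minutes (youtube_video_length_minutes_soft_cap) but compared in seconds, and the last two parts are merged only while their combined length stays under that cap. Below is a minimal self-contained sketch of just that decision, with made-up names (should_join, second_last_secs, last_secs, soft_cap_minutes) and assuming the Duration used in the hunk is chrono's.

use chrono::Duration;

// Mirrors the `joined_time < duration_soft_cap.num_seconds() as f64` check above.
fn should_join(second_last_secs: f64, last_secs: f64, soft_cap_minutes: i64) -> bool {
    let soft_cap = Duration::minutes(soft_cap_minutes);
    second_last_secs + last_secs < soft_cap.num_seconds() as f64
}

fn main() {
    // Worked example with a 60-minute (3600 s) soft cap:
    // 2400 s + 700 s = 3100 s fits under the cap, so the parts get joined;
    // 2400 s + 1500 s = 3900 s exceeds it, so they stay separate.
    assert!(should_join(2400.0, 700.0, 60));
    assert!(!should_join(2400.0, 1500.0, 60));
}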


@@ -66,7 +66,7 @@ async fn main() -> Result<(), Box<dyn Error>> {
 }
 pub async fn sample() -> Result<(), Box<dyn Error>> {
-    println!("Hello from the downloader lib!");
+    info!("Hello from the downloader lib!");
     let client = BigqueryClient::new(PROJECT_ID, DATASET_ID, Some(SERVICE_ACCOUNT_PATH)).await?;
     sample_bigquery(&client).await?;
@@ -86,10 +86,10 @@ pub async fn sample() -> Result<(), Box<dyn Error>> {
 }
 async fn sample_twitch<'a>(client: &TwitchClient<'a>) -> Result<(), Box<dyn Error>> {
-    println!("\n\nGetting videos...");
+    info!("\n\nGetting videos...");
     let res = client.get_channel_info_from_login("burn").await?;
-    println!("got channel info: {:?}", res);
+    info!("got channel info: {:?}", res);
     let channel_id = res.unwrap().broadcaster_id;
     let videos: Vec<Video> = client
@@ -99,30 +99,30 @@ async fn sample_twitch<'a>(client: &TwitchClient<'a>) -> Result<(), Box<dyn Erro
         .map(convert_twitch_video_to_twitch_data_video)
         .collect();
-    println!("got video ids: {:?}", videos.len());
+    info!("got video ids: {:?}", videos.len());
     for (i, video) in videos.iter().enumerate() {
-        println!("+======={:2}: {:?}", i, video);
+        info!("+======={:2}: {:?}", i, video);
     }
-    println!("\n\nGetting video for short download...");
+    info!("\n\nGetting video for short download...");
     let short_video_id = twitch_data::VideoId::new("1710229470".to_string());
     let video_info = client.get_video_info(&short_video_id).await?;
-    println!("got video info: {:?}", video_info);
+    info!("got video info: {:?}", video_info);
     let output_folder = Path::new("C:\\tmp\\videos\\");
     let res = client
         .download_video_by_id(&video_info.id, &VideoQuality::Source, output_folder)
         .await?;
-    println!("downloaded video: {:?}", res);
+    info!("downloaded video: {:?}", res);
-    println!("\n\nDone!");
+    info!("\n\nDone!");
     Ok(())
 }
 async fn sample_bigquery<'a>(client: &'a BigqueryClient) -> Result<(), Box<dyn Error>> {
     // let x = VideoMetadata::from_pk(&client, 1638184921).await?;
     let video_metadata = VideoMetadata::create_and_load_from_pk(&client, 1638184921).await?;
-    println!("got video_metadata by id: {:?}", video_metadata);
+    info!("got video_metadata by id: {:?}", video_metadata);
     let video_metadata = VideoMetadata::load_by_field(
         &client,
@@ -138,16 +138,16 @@ async fn sample_bigquery<'a>(client: &'a BigqueryClient) -> Result<(), Box<dyn E
     print_vec_sample("got watched_streamers:", watched_streamers);
     fn print_vec_sample<T: Debug>(message: &str, watched_streamers: Vec<T>) {
-        println!("{} {:?}", message, watched_streamers.len());
+        info!("{} {:?}", message, watched_streamers.len());
         for (i, streamer) in watched_streamers.iter().enumerate() {
-            println!("+======={}: {:?}", i, streamer);
+            info!("+======={}: {:?}", i, streamer);
         }
     }
     Ok(())
 }
 async fn sample_youtube(client: &YoutubeClient) -> Result<(), Box<dyn Error>> {
-    println!("Opening video file...");
+    info!("Opening video file...");
     let file = Path::new("C:\\Users\\frede\\Videos\\test.mp4");
     // let file = File::open(file).await?;
@@ -156,12 +156,12 @@ async fn sample_youtube(client: &YoutubeClient) -> Result<(), Box<dyn Error>> {
     let tags = vec!["test".to_string(), "test2".to_string()];
     let privacy_status = google_youtube::PrivacyStatus::Private;
-    println!("Uploading video...");
+    info!("Uploading video...");
     let video = &client
         .upload_video(file, title, description, tags, privacy_status)
         .await?;
-    println!("video: \n\n{:?}\n\n", video);
+    info!("video: \n\n{:?}\n\n", video);
     Ok(())
 }
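
One last note on the println! to info!/debug! conversions above: those macros only produce output once a logger is installed. The Cargo.toml diff above already pulls in log, simplelog and log-panics, so initialization presumably looks something like the sketch below; this is an assumed example, not the project's actual setup code.

use simplelog::{ColorChoice, Config, LevelFilter, TermLogger, TerminalMode};

// Assumed logger setup so the info!/debug!/trace! calls above are actually emitted.
fn init_logging() {
    TermLogger::init(
        LevelFilter::Debug, // lets the new debug!() line in split_video_into_parts through
        Config::default(),
        TerminalMode::Mixed,
        ColorChoice::Auto,
    )
    .expect("failed to install the logger");

    // Route panics through the logger (with a backtrace, per the enabled feature)
    // instead of plain stderr.
    log_panics::init();
}

With that in place the converted info! lines behave like the old println! calls, but also respect level filtering and whatever log targets the logger is configured with.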