From 643173645e0ead8af98a4286f69403d22056fe77 Mon Sep 17 00:00:00 2001 From: Sophie <47993817+sdankel@users.noreply.github.com> Date: Thu, 15 May 2025 21:54:42 -0400 Subject: [PATCH 1/2] chore: additional logging in upload API --- src/file_uploader/mod.rs | 2 ++ src/file_uploader/s3.rs | 2 +- src/handlers/upload.rs | 14 +++++++++++--- src/main.rs | 21 ++++++++++++++++----- src/util.rs | 9 +++++++-- 5 files changed, 37 insertions(+), 11 deletions(-) diff --git a/src/file_uploader/mod.rs b/src/file_uploader/mod.rs index cfe99e0..b0b1a79 100644 --- a/src/file_uploader/mod.rs +++ b/src/file_uploader/mod.rs @@ -20,6 +20,7 @@ impl<'a, T: PinataClient, E: S3Client> FileUploader<'a, T, E> { } pub async fn upload_file(&self, path: &Path) -> Result { + tracing::info!("Uploading file to IPFS: {:?}", path); let ipfs_hash = self.pinata_client.upload_file_to_ipfs(path).await?; // Read file contents @@ -29,6 +30,7 @@ impl<'a, T: PinataClient, E: S3Client> FileUploader<'a, T, E> { .map_err(|_| UploadError::ReadFile)?; // Upload to S3 + tracing::info!("Uploading file to S3: {:?}", path); self.s3_client .upload_file_to_s3(path, ipfs_hash.clone()) .await?; diff --git a/src/file_uploader/s3.rs b/src/file_uploader/s3.rs index 4fce0e3..6611d6e 100644 --- a/src/file_uploader/s3.rs +++ b/src/file_uploader/s3.rs @@ -67,7 +67,7 @@ impl S3Client for S3ClientImpl { .body(ByteStream::from(buffer)) .send() .await - .map_err(|e| UploadError::S3UploadFailed(format!("{:?}", e)))?; + .map_err(|e| UploadError::S3UploadFailed(e.to_string()))?; } Ok(()) } diff --git a/src/handlers/upload.rs b/src/handlers/upload.rs index 8feb75d..020fd2f 100644 --- a/src/handlers/upload.rs +++ b/src/handlers/upload.rs @@ -95,6 +95,7 @@ pub async fn handle_project_upload<'a>( let project_dir = upload_dir.join(PROJECT_DIR); // Unpack the tarball. 
+ tracing::info!("Unpacking tarball: {:?}", orig_tarball_path); let tarball = File::open(orig_tarball_path).map_err(|_| UploadError::OpenFile)?; let decompressed = GzDecoder::new(tarball); let mut archive = Archive::new(decompressed); @@ -105,6 +106,7 @@ pub async fn handle_project_upload<'a>( // Remove `out` directory if it exists. let _ = fs::remove_dir_all(unpacked_dir.join("out")); + tracing::info!("Executing forc build: {:?}", forc_path); let output = Command::new(format!("{}/bin/forc", forc_path.to_string_lossy())) .arg("build") .arg("--release") @@ -120,6 +122,7 @@ pub async fn handle_project_upload<'a>( } // Copy files that are part of the Sway project to a new directory. + tracing::info!("Copying files to project directory: {:?}", project_dir); let output = Command::new("rsync") .args([ "-av", @@ -145,6 +148,7 @@ pub async fn handle_project_upload<'a>( } // Pack the new tarball. + tracing::info!("Packing tarball: {:?}", upload_dir); let final_tarball_path = upload_dir.join(TARBALL_NAME); let tar_gz = File::create(&final_tarball_path).map_err(|_| UploadError::OpenFile)?; let enc = GzEncoder::new(tar_gz, Compression::default()); @@ -161,7 +165,8 @@ pub async fn handle_project_upload<'a>( let enc = tar.into_inner().map_err(|_| UploadError::CopyFiles)?; enc.finish().map_err(|_| UploadError::CopyFiles)?; - // Store the tarball in IPFS. + // Store the tarball. + tracing::info!("Uploading tarball: {:?}", final_tarball_path); let tarball_ipfs_hash = file_uploader.upload_file(&final_tarball_path).await?; fn find_file_in_dir_by_suffix(dir: &Path, suffix: &str) -> Option { @@ -184,9 +189,12 @@ pub async fn handle_project_upload<'a>( .next() } - // Store the ABI in IPFS. + // Store the ABI. let abi_ipfs_hash = match find_file_in_dir_by_suffix(&release_dir, "-abi.json") { - Some(abi_path) => Some(file_uploader.upload_file(&abi_path).await?), + Some(abi_path) => { + tracing::info!("Uploading ABI: {:?}", release_dir); + Some(file_uploader.upload_file(&abi_path).await?) 
+ } None => None, }; diff --git a/src/main.rs b/src/main.rs index 0dc59aa..9d3c310 100644 --- a/src/main.rs +++ b/src/main.rs @@ -27,10 +27,13 @@ use forc_pub::middleware::cors::Cors; use forc_pub::middleware::session_auth::{SessionAuth, SESSION_COOKIE_NAME}; use forc_pub::middleware::token_auth::TokenAuth; use forc_pub::util::validate_or_format_semver; +use rocket::http::Status; use rocket::{ data::Capped, fs::TempFile, http::{Cookie, CookieJar}, + request::Request, + response::{self}, serde::json::Json, State, }; @@ -241,10 +244,18 @@ fn all_options() { // Intentionally left empty } -/// Catch 404 not founds. -#[catch(404)] -fn not_found() -> String { - "Not found".to_string() +/// Catch all errors and log them before returning a custom error message. +#[catch(default)] +fn default_catcher(status: Status, _req: &Request<'_>) -> response::status::Custom<String> { tracing::error!( "Error occurred: {} - {:?}", status.code, status.reason_lossy() ); response::status::Custom( status, format!("Error: {} - {}", status.code, status.reason_lossy()), ) } // Indicates the service is running @@ -289,5 +300,5 @@ async fn rocket() -> _ { health ], ) - .register("/", catchers![not_found]) + .register("/", catchers![default_catcher]) } diff --git a/src/util.rs b/src/util.rs index 3fa6a4d..f2d0ed5 100644 --- a/src/util.rs +++ b/src/util.rs @@ -1,5 +1,5 @@ use semver::Version; -use std::path::Path; +use std::{env, path::Path}; pub fn validate_or_format_semver(version: &str) -> Option<String> { // Remove the leading 'v' if it exists @@ -23,7 +23,12 @@ pub fn load_env() { // Then load `.env.local`, potentially overwriting values from `.env` if let Err(e) = dotenvy::from_path_override(Path::new(".env.local")) { - tracing::error!("Could not load .env.local: {}", e); + if env::var("RUN_ENV").unwrap_or_default() == "local" { + // Only log this as an error when RUN_ENV is "local" + tracing::error!("Could not load .env.local: {}", e); + } + + tracing::info!("Could not load .env.local: {}", e); } }
From ec6feb8008722c5ec221fded330c4b568e7d1fab Mon Sep 17 00:00:00 2001 From: Sophie <47993817+sdankel@users.noreply.github.com> Date: Thu, 15 May 2025 22:01:58 -0400 Subject: [PATCH 2/2] to_string_lossy --- src/handlers/upload.rs | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/src/handlers/upload.rs b/src/handlers/upload.rs index 020fd2f..3fd713c 100644 --- a/src/handlers/upload.rs +++ b/src/handlers/upload.rs @@ -95,7 +95,7 @@ pub async fn handle_project_upload<'a>( let project_dir = upload_dir.join(PROJECT_DIR); // Unpack the tarball. - tracing::info!("Unpacking tarball: {:?}", orig_tarball_path); + tracing::info!("Unpacking tarball: {}", orig_tarball_path.to_string_lossy()); let tarball = File::open(orig_tarball_path).map_err(|_| UploadError::OpenFile)?; let decompressed = GzDecoder::new(tarball); let mut archive = Archive::new(decompressed); @@ -106,7 +106,7 @@ pub async fn handle_project_upload<'a>( // Remove `out` directory if it exists. let _ = fs::remove_dir_all(unpacked_dir.join("out")); - tracing::info!("Executing forc build: {:?}", forc_path); + tracing::info!("Executing forc build: {}", forc_path.to_string_lossy()); let output = Command::new(format!("{}/bin/forc", forc_path.to_string_lossy())) .arg("build") .arg("--release") @@ -122,7 +122,10 @@ pub async fn handle_project_upload<'a>( } // Copy files that are part of the Sway project to a new directory. - tracing::info!("Copying files to project directory: {:?}", project_dir); + tracing::info!( + "Copying files to project directory: {}", + project_dir.to_string_lossy() + ); let output = Command::new("rsync") .args([ "-av", @@ -148,7 +151,7 @@ pub async fn handle_project_upload<'a>( } // Pack the new tarball. 
- tracing::info!("Packing tarball: {:?}", upload_dir); + tracing::info!("Packing tarball: {}", upload_dir.to_string_lossy()); let final_tarball_path = upload_dir.join(TARBALL_NAME); let tar_gz = File::create(&final_tarball_path).map_err(|_| UploadError::OpenFile)?; let enc = GzEncoder::new(tar_gz, Compression::default()); @@ -166,7 +169,10 @@ pub async fn handle_project_upload<'a>( enc.finish().map_err(|_| UploadError::CopyFiles)?; // Store the tarball. - tracing::info!("Uploading tarball: {:?}", final_tarball_path); + tracing::info!( + "Uploading tarball: {}", + final_tarball_path.to_string_lossy() + ); let tarball_ipfs_hash = file_uploader.upload_file(&final_tarball_path).await?; fn find_file_in_dir_by_suffix(dir: &Path, suffix: &str) -> Option { @@ -192,7 +198,7 @@ pub async fn handle_project_upload<'a>( // Store the ABI. let abi_ipfs_hash = match find_file_in_dir_by_suffix(&release_dir, "-abi.json") { Some(abi_path) => { - tracing::info!("Uploading ABI: {:?}", release_dir); + tracing::info!("Uploading ABI: {}", release_dir.to_string_lossy()); Some(file_uploader.upload_file(&abi_path).await?) } None => None,