From 5d2771c09effbcde03a3da61363f5751e8032b19 Mon Sep 17 00:00:00 2001 From: Nico Hinderling Date: Thu, 29 May 2025 21:37:30 -0700 Subject: [PATCH] wip - testing --- .gitignore | 4 + Cargo.lock | 2 +- Cargo.toml | 12 + README_TEST.md | 81 +++++ src/api/data_types/chunking/artifact.rs | 72 +++- src/api/data_types/chunking/mod.rs | 5 +- .../data_types/chunking/upload/capability.rs | 4 + src/api/mod.rs | 26 +- src/lib.rs | 11 + test_full_upload.rs | 339 ++++++++++++++++++ test_mobile_app_upload.rs | 250 +++++++++++++ 11 files changed, 800 insertions(+), 6 deletions(-) create mode 100644 README_TEST.md create mode 100644 src/lib.rs create mode 100644 test_full_upload.rs create mode 100644 test_mobile_app_upload.rs diff --git a/.gitignore b/.gitignore index 52a24dccc9..b02591b333 100644 --- a/.gitignore +++ b/.gitignore @@ -11,3 +11,7 @@ yarn-error.log /sentry-cli.exe .vscode/ + + +HackerNews.xcarchive.zip +HackerNews_arm64 \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index c8106c9c96..be71ecd731 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "addr2line" diff --git a/Cargo.toml b/Cargo.toml index 2684c97521..9831a1049a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,6 +6,18 @@ version = "2.46.0" edition = "2021" rust-version = "1.86" +[lib] +name = "sentry_cli" +path = "src/lib.rs" + +[[bin]] +name = "test_full_upload" +path = "test_full_upload.rs" + +[[bin]] +name = "test_mobile_app_upload" +path = "test_mobile_app_upload.rs" + [dependencies] anylog = "0.6.3" anyhow = { version = "1.0.69", features = ["backtrace"] } diff --git a/README_TEST.md b/README_TEST.md new file mode 100644 index 0000000000..582f27739c --- /dev/null +++ b/README_TEST.md @@ -0,0 +1,81 @@ +# Sentry CLI Upload Tests + +This directory contains test programs that reuse the existing sentry-cli codebase to test different upload flows against a local Sentry service. + +## Test Programs + +### 1. Mobile App Upload Test (`test_mobile_app_upload`) + +Tests uploading mobile app archives (like `.xcarchive.zip` files) to the preprod artifacts endpoint. + +**Build and run:** +```bash +cargo run --bin test_mobile_app_upload +``` + +**What it tests:** +- Authentication with local Sentry service +- Chunk upload capabilities check (specifically `PreprodArtifacts` capability) +- Full chunk upload flow for large mobile app archives +- Assembly via `/projects/{org}/{project}/files/preprodartifacts/assemble/` endpoint + +**Configuration:** +- Target file: `/Users/nicolashinderling/TestUploads/HackerNews.xcarchive.zip` +- Endpoint: `/projects/sentry/internal/files/preprodartifacts/assemble/` + +### 2. Debug Files Upload Test (`test_full_upload`) + +Tests uploading debug files (like dSYM files) to the debug info files endpoint. 
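+
+Both flows follow the same pattern: split the file into fixed-size chunks, upload the
+chunks through the chunk-upload endpoint, then POST an assemble request keyed by the
+file's total SHA1. For reference, the assemble body built from
+`ChunkedDifRequest`/`AssembleDifsRequest` looks roughly like this (a sketch with
+placeholder checksums, not captured from a real run):
+
+```json
+{
+  "3e301f0dbdb12f04b5a46f13bd9d323cb4bc9bf6": {
+    "name": "HackerNews_arm64",
+    "chunks": ["1b8cb42ffe0d5f13dbd92d9ed3afbe125212acce"]
+  }
+}
+```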
+
+**Build and run:**
+```bash
+cargo run --bin test_full_upload
+```
+
+**What it tests:**
+- Authentication with local Sentry service
+- Chunk upload capabilities check
+- Full chunk upload + missing chunk detection flow
+- Assembly via `/projects/{org}/{project}/files/difs/assemble/` endpoint
+
+**Configuration:**
+- Target file: `/Users/nicolashinderling/TestUploads/HackerNews_arm64`
+- Endpoint: `/projects/sentry/internal/files/difs/assemble/`
+
+## Common Configuration
+
+Both tests are configured for:
+- **Base URL:** `http://localhost:8000`
+- **Organization:** `sentry`
+- **Project:** `internal`
+- **Auth Token:** Your provided token (update in each test file)
+
+You can modify these values in the `main()` function of each test file.
+
+## Common Output
+
+Both tests will show:
+- ✅/❌ Status indicators for each step
+- Server capabilities (which ChunkUploadCapability features are supported)
+- Chunk upload progress and statistics
+- Assembly request/response details
+- Detailed error messages if anything fails
+
+## API Endpoints Tested
+
+### Chunk Upload (Common)
+- **GET** `/api/0/organizations/{org}/chunk-upload/` - Server capabilities
+- **POST** `/api/0/organizations/{org}/chunk-upload/` - Upload chunks
+
+### Mobile App Assembly
+- **POST** `/api/0/projects/{org}/{project}/files/preprodartifacts/assemble/`
+
+### Debug Files Assembly
+- **POST** `/api/0/projects/{org}/{project}/files/difs/assemble/`
+
+## Notes
+
+- Both tests reuse the exact same code paths as the real `sentry-cli` commands
+- They provide comprehensive testing of the upload functionality
+- The mobile app test is for the new preprod artifacts endpoint
+- The debug files test uses the existing DIF upload flow
\ No newline at end of file
diff --git a/src/api/data_types/chunking/artifact.rs b/src/api/data_types/chunking/artifact.rs
index d160d9828a..a8b8c07d78 100644
--- a/src/api/data_types/chunking/artifact.rs
+++ b/src/api/data_types/chunking/artifact.rs
@@ -1,5 +1,6 @@
 use serde::{Deserialize, Serialize};
 use sha1_smol::Digest;
+use std::collections::HashMap;
 
 use super::ChunkedFileState;
 
@@ -13,16 +14,85 @@ pub struct ChunkedArtifactRequest<'a> {
     pub version: Option<&'a str>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub dist: Option<&'a str>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub filename: Option<&'a str>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub project_id: Option<u64>,
 }
 
 #[derive(Debug, Deserialize)]
 #[serde(rename_all = "camelCase")]
-pub struct AssembleArtifactsResponse {
+pub struct ChunkedArtifactResponse {
     pub state: ChunkedFileState,
     pub missing_chunks: Vec<Digest>,
     pub detail: Option<String>,
 }
 
+#[derive(Debug, Serialize)]
+#[serde(transparent)]
+pub struct AssembleArtifactsRequest<'a>(HashMap<Digest, ChunkedArtifactRequest<'a>>);
+
+impl<'a, T> FromIterator<T> for AssembleArtifactsRequest<'a>
+where
+    T: Into<ChunkedArtifactRequest<'a>>,
+{
+    fn from_iter<I>(iter: I) -> Self
+    where
+        I: IntoIterator<Item = T>,
+    {
+        Self(
+            iter.into_iter()
+                .map(|obj| obj.into())
+                .map(|r| (r.checksum, r))
+                .collect(),
+        )
+    }
+}
+
+pub type AssembleArtifactsResponse = ChunkedArtifactResponse;
+
+#[derive(Debug, Serialize)]
+pub struct ChunkedPreprodArtifactRequest<'a> {
+    pub checksum: Digest,
+    pub chunks: &'a [Digest],
+    // Optional metadata fields that the server supports
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub build_version: Option<&'a str>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub build_number: Option<i32>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub build_configuration: Option<&'a str>,
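+    // Assumption: `date_built` is an ISO 8601 / RFC 3339 timestamp string; the exact
+    // server-side contract is not pinned down in this patch.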
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub date_built: Option<&'a str>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub extras: Option<serde_json::Value>,
+}
+
+impl<'a> ChunkedPreprodArtifactRequest<'a> {
+    /// Create a new ChunkedPreprodArtifactRequest with the required fields.
+    pub fn new(checksum: Digest, chunks: &'a [Digest]) -> Self {
+        Self {
+            checksum,
+            chunks,
+            build_version: None,
+            build_number: None,
+            build_configuration: None,
+            date_built: None,
+            extras: None,
+        }
+    }
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ChunkedPreprodArtifactResponse {
+    pub state: ChunkedFileState,
+    pub missing_chunks: Vec<Digest>,
+    pub detail: Option<String>,
+}
+
+pub type AssemblePreprodArtifactsResponse = HashMap<Digest, ChunkedPreprodArtifactResponse>;
+
 fn version_is_empty(version: &Option<&str>) -> bool {
     match version {
         Some(v) => v.is_empty(),
diff --git a/src/api/data_types/chunking/mod.rs b/src/api/data_types/chunking/mod.rs
index 0af308c87c..6ed57f0fe7 100644
--- a/src/api/data_types/chunking/mod.rs
+++ b/src/api/data_types/chunking/mod.rs
@@ -8,7 +8,10 @@ mod file_state;
 mod hash_algorithm;
 mod upload;
 
-pub use self::artifact::{AssembleArtifactsResponse, ChunkedArtifactRequest};
+pub use self::artifact::{
+    AssembleArtifactsRequest, AssembleArtifactsResponse, ChunkedArtifactRequest, ChunkedArtifactResponse,
+    ChunkedPreprodArtifactRequest, ChunkedPreprodArtifactResponse,
+};
 pub use self::compression::ChunkCompression;
 pub use self::dif::{AssembleDifsRequest, AssembleDifsResponse, ChunkedDifRequest};
 pub use self::file_state::ChunkedFileState;
diff --git a/src/api/data_types/chunking/upload/capability.rs b/src/api/data_types/chunking/upload/capability.rs
index fe98123f9e..c0426f1871 100644
--- a/src/api/data_types/chunking/upload/capability.rs
+++ b/src/api/data_types/chunking/upload/capability.rs
@@ -30,6 +30,9 @@ pub enum ChunkUploadCapability {
     /// Upload of il2cpp line mappings
     Il2Cpp,
 
+    /// Upload of preprod artifacts (mobile app archives, etc.)
+    PreprodArtifacts,
+
     /// Any other unsupported capability (ignored)
     Unknown,
 }
@@ -49,6 +52,7 @@ impl<'de> Deserialize<'de> for ChunkUploadCapability {
             "sources" => ChunkUploadCapability::Sources,
             "bcsymbolmaps" => ChunkUploadCapability::BcSymbolmap,
             "il2cpp" => ChunkUploadCapability::Il2Cpp,
+            "preprod_artifacts" => ChunkUploadCapability::PreprodArtifacts,
             _ => ChunkUploadCapability::Unknown,
         })
     }
diff --git a/src/api/mod.rs b/src/api/mod.rs
index 89ce9ce10f..28dabad23d 100644
--- a/src/api/mod.rs
+++ b/src/api/mod.rs
@@ -988,6 +988,8 @@ impl<'a> AuthenticatedApi<'a> {
             projects: &[],
             version: None,
             dist: None,
+            filename: None,
+            project_id: None,
         })?
         .send()?
         .convert_rnf(ApiErrorKind::ReleaseNotFound)
@@ -1011,11 +1013,32 @@ impl<'a> AuthenticatedApi<'a> {
             projects,
             version,
             dist,
+            filename: None,
+            project_id: None,
         })?
         .send()?
         .convert_rnf(ApiErrorKind::ReleaseNotFound)
     }
 
+    /// Request preprod artifact assembling and processing from chunks.
+    pub fn assemble_preprod_artifact(
+        &self,
+        org: &str,
+        project: &str,
+        request: &ChunkedPreprodArtifactRequest<'_>,
+    ) -> ApiResult<ChunkedPreprodArtifactResponse> {
+        let url = format!(
+            "/projects/{}/{}/files/preprodartifacts/assemble/",
+            PathArg(org),
+            PathArg(project)
+        );
+
+        self.request(Method::Post, &url)?
+            .with_json_body(request)?
+            .send()?
+            .convert_rnf(ApiErrorKind::ProjectNotFound)
+    }
+
     pub fn associate_proguard_mappings(
         &self,
         org: &str,
@@ -1929,7 +1952,6 @@ pub struct AuthDetails {
 #[derive(Deserialize, Debug)]
 pub struct User {
     pub email: String,
-    #[expect(dead_code)]
     pub id: String,
 }
 
@@ -2011,7 +2033,6 @@ pub struct UpdatedRelease {
 #[derive(Debug, Deserialize)]
 pub struct ReleaseInfo {
     pub version: String,
-    #[expect(dead_code)]
     pub url: Option<String>,
     #[serde(rename = "dateCreated")]
     pub date_created: DateTime<Utc>,
@@ -2077,7 +2098,6 @@ pub struct DebugInfoData {
     #[serde(default, rename = "type")]
     pub kind: Option<ObjectKind>,
     #[serde(default)]
-    #[expect(dead_code)]
     pub features: Vec<String>,
 }
 
diff --git a/src/lib.rs b/src/lib.rs
new file mode 100644
index 0000000000..2dde333aa7
--- /dev/null
+++ b/src/lib.rs
@@ -0,0 +1,11 @@
+#![warn(clippy::allow_attributes)]
+#![warn(clippy::unnecessary_wraps)]
+
+pub mod api;
+pub mod config;
+pub mod constants;
+pub mod utils;
+
+// Re-export commonly used types
+pub use api::{Api, ChunkUploadCapability};
+pub use config::{Auth, Config};
\ No newline at end of file
diff --git a/test_full_upload.rs b/test_full_upload.rs
new file mode 100644
index 0000000000..a0611ae420
--- /dev/null
+++ b/test_full_upload.rs
@@ -0,0 +1,339 @@
+use anyhow::Result;
+
+// Include the sentry-cli library
+extern crate sentry_cli;
+
+use sentry_cli::api::{Api, ChunkUploadCapability};
+use sentry_cli::config::{Auth, Config};
+use sentry_cli::utils::auth_token::AuthToken;
+use sentry_cli::utils::fs::get_sha1_checksums;
+use sentry_cli::utils::chunks::{upload_chunks, Chunk};
+use sentry_cli::utils::progress::ProgressStyle;
+
+fn create_test_config(auth_token: &str, base_url: &str) -> Result<Config> {
+    let mut config = Config::from_cli_config()?;
+
+    // Set the base URL to localhost
+    config.set_base_url(base_url);
+
+    // Set the auth token
+    let token: AuthToken = auth_token.into();
+    config.set_auth(Auth::Token(token));
+
+    Ok(config)
+}
+
+fn test_chunk_upload_options(org: &str) -> Result<()> {
+    println!("🔍 Testing chunk upload options endpoint...");
+
+    let api = Api::current();
+    let authenticated_api = api.authenticated()?;
+
+    match authenticated_api.get_chunk_upload_options(org)? {
+        Some(options) => {
+            println!("✅ Chunk upload supported!");
+            println!("   URL: {}", options.url);
+            println!("   Max chunks per request: {}", options.max_chunks);
+            println!("   Max request size: {}", options.max_size);
+            println!("   Chunk size: {}", options.chunk_size);
+            println!("   Concurrency: {}", options.concurrency);
+
+            println!("   Supported capabilities:");
+            let capabilities = [
+                (ChunkUploadCapability::DebugFiles, "debug_files"),
+                (ChunkUploadCapability::ReleaseFiles, "release_files"),
+                (ChunkUploadCapability::ArtifactBundles, "artifact_bundles"),
+                (ChunkUploadCapability::ArtifactBundlesV2, "artifact_bundles_v2"),
+                (ChunkUploadCapability::Pdbs, "pdbs"),
+                (ChunkUploadCapability::PortablePdbs, "portablepdbs"),
+                (ChunkUploadCapability::Sources, "sources"),
+                (ChunkUploadCapability::BcSymbolmap, "bcsymbolmaps"),
+                (ChunkUploadCapability::Il2Cpp, "il2cpp"),
+            ];
+
+            for (capability, name) in capabilities {
+                let supported = options.supports(capability);
+                let status = if supported { "✅" } else { "❌" };
+                println!("   {status} {name}");
+            }
+
+            Ok(())
+        }
+        None => {
+            println!("❌ Chunk upload not supported by server");
+            Err(anyhow::anyhow!("Chunk upload not supported"))
+        }
+    }
+}
+
+fn test_full_debug_file_upload_flow(org: &str, project: &str, binary_path: &str) -> Result<()> {
+    println!("🚀 Testing full debug file chunk upload flow...");
+
+    // Read the binary file directly
+    let binary_path = std::path::Path::new(binary_path);
+    println!("   🔧 Binary file: {}", binary_path.display());
+
+    // Read the binary file
+    let content = std::fs::read(&binary_path)
+        .map_err(|e| anyhow::anyhow!("Failed to read binary file {}: {}", binary_path.display(), e))?;
+
+    println!("   📄 File size: {} bytes", content.len());
+
+    let api = Api::current();
+    let authenticated_api = api.authenticated()?;
+
+    // Get chunk upload options
+    let chunk_upload_options = authenticated_api.get_chunk_upload_options(org)?
+        .ok_or_else(|| anyhow::anyhow!("Chunk upload not supported by server"))?;
+
+    println!("   📊 Chunk configuration:");
+    println!("     • Chunk size: {} bytes", chunk_upload_options.chunk_size);
+    println!("     • Max chunks per request: {}", chunk_upload_options.max_chunks);
+    println!("     • Max request size: {} bytes", chunk_upload_options.max_size);
+    println!("     • Concurrency: {}", chunk_upload_options.concurrency);
+
+    // Step 1: Prepare data and calculate checksums
+    let data = &content; // Binary data, not string
+    let chunk_size = chunk_upload_options.chunk_size as usize;
+    let (total_checksum, chunk_checksums) = get_sha1_checksums(data, chunk_size)?;
+
+    println!("   🔢 Checksum calculation:");
+    println!("     • Total file SHA1: {}", total_checksum);
+    println!("     • Number of chunks: {}", chunk_checksums.len());
+
+    // Show individual chunk details
+    for (i, checksum) in chunk_checksums.iter().enumerate() {
+        let chunk_start = i * chunk_size;
+        let chunk_end = std::cmp::min(chunk_start + chunk_size, data.len());
+        let chunk_size_actual = chunk_end - chunk_start;
+        println!("     Chunk {}: {} bytes (SHA1: {})", i + 1, chunk_size_actual, checksum);
+    }
+
+    // Step 2: Check which chunks are missing (if any)
+    println!("   🔍 Checking for missing chunks...");
+    let missing_checksums = authenticated_api.find_missing_dif_checksums(
+        org,
+        project,
+        chunk_checksums.iter().copied()
+    )?;
+
+    if missing_checksums.is_empty() {
+        println!("   ✅ All chunks already exist on server");
+    } else {
+        println!("   📤 {} chunks need to be uploaded", missing_checksums.len());
+
+        // Step 3: Upload missing chunks
+        println!("   🚀 Uploading chunks...");
+        let chunks_to_upload: Vec<_> = data.chunks(chunk_size)
+            .zip(chunk_checksums.iter())
+            .filter(|(_, checksum)| missing_checksums.contains(checksum))
+            .map(|(chunk_data, checksum)| (*checksum, chunk_data))
+            .collect();
+
+        if !chunks_to_upload.is_empty() {
+            // Create Chunk objects for upload
+            let chunks: Vec<_> = chunks_to_upload.iter()
+                .map(|(checksum, data)| Chunk((*checksum, *data)))
+                .collect();
+
+            println!("   🌐 Uploading {} chunks to: {}", chunks.len(), chunk_upload_options.url);
+            upload_chunks(&chunks, &chunk_upload_options, ProgressStyle::default_bar())?;
+
+            println!("   ✅ Chunks uploaded successfully!");
+        }
+    }
+
+    // Step 4: Use debug file assembly (correct for dSYM files)
+    println!("   🔧 Assembling debug file...");
+
+    // Create a ChunkedDifRequest
+    let filename = binary_path.file_name().unwrap().to_string_lossy();
+    let dif_request = sentry_cli::api::ChunkedDifRequest::new(
+        filename.into(),
+        &chunk_checksums,
+        total_checksum,
+    );
+
+    // Create the AssembleDifsRequest from the single request
+    let assembly_request: sentry_cli::api::AssembleDifsRequest =
+        std::iter::once(dif_request).collect();
+
+    let assembly_response = authenticated_api.assemble_difs(org, project, &assembly_request)?;
+
+    println!("   📦 Debug file assembly initiated!");
+
+    // The response is a HashMap
+    if let Some(response) = assembly_response.get(&total_checksum) {
+        println!("   State: {:?}", response.state);
+        println!("   Missing chunks: {}", response.missing_chunks.len());
+
+        if let Some(ref detail) = response.detail {
+            println!("   Detail: {}", detail);
+        }
+
+        // Step 5: Check assembly status
+        match response.state {
+            sentry_cli::api::ChunkedFileState::Ok => {
+                println!("   ✅ Assembly completed successfully!");
+                if let Some(ref dif) = response.dif {
+                    println!("   Debug info file created: {}", dif.object_name);
+                }
+            }
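+            // Note: this test only inspects the first assemble response; the real
+            // `sentry-cli debug-files upload` command keeps polling the assemble
+            // endpoint while the state is still Created/Assembling.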
+            sentry_cli::api::ChunkedFileState::NotFound => {
+                println!("   ❌ Assembly failed - bundle not found");
+            }
+            sentry_cli::api::ChunkedFileState::Created => {
+                println!("   ⏳ Assembly created, waiting for processing...");
+            }
+            sentry_cli::api::ChunkedFileState::Assembling => {
+                println!("   ⚙️ Assembly in progress...");
+            }
+            sentry_cli::api::ChunkedFileState::Error => {
+                println!("   ❌ Assembly failed with error");
+                return Err(anyhow::anyhow!("Assembly failed"));
+            }
+        }
+
+        if response.missing_chunks.is_empty() {
+            println!("   🎉 Full debug file upload flow completed successfully!");
+            println!("   📈 Summary:");
+            println!("     • Binary: {}", binary_path.display());
+            println!("     • Total chunks: {}", chunk_checksums.len());
+            println!("     • Chunks uploaded: {}", missing_checksums.len());
+            println!("     • File checksum: {}", total_checksum);
+            println!("     • Organization: {}", org);
+            println!("     • Project: {}", project);
+        } else {
+            println!("   ⚠️ Some chunks are still missing after upload attempt");
+        }
+    } else {
+        println!("   ❌ No response found for file checksum {}", total_checksum);
+        return Err(anyhow::anyhow!("Assembly response missing"));
+    }
+
+    Ok(())
+}
+
+fn test_assembly_only(org: &str, projects: &[String]) -> Result<()> {
+    println!("🔧 Testing artifact bundle assembly API (without chunk upload)...");
+
+    let api = Api::current();
+    let authenticated_api = api.authenticated()?;
+
+    // Create dummy chunk data for testing assembly endpoint
+    let test_data = b"test chunk data for assembly endpoint";
+    let mut hasher = sha1_smol::Sha1::new();
+    hasher.update(test_data);
+    let checksum = hasher.digest();
+
+    let chunks = vec![checksum];
+
+    println!("   🧪 Testing with dummy data:");
+    println!("     • Checksum: {}", checksum);
+    println!("     • Chunks: {}", chunks.len());
+
+    // Test artifact bundle assembly without uploading chunks first
+    match authenticated_api.assemble_artifact_bundle(
+        org,
+        projects,
+        checksum,
+        &chunks,
+        Some("test-release-v1.0.0"),
+        None, // dist
+    ) {
+        Ok(response) => {
+            println!("   ✅ Assembly API responded!");
+            println!("   State: {:?}", response.state);
+            println!("   Missing chunks: {}", response.missing_chunks.len());
+            if let Some(detail) = response.detail {
+                println!("   Detail: {}", detail);
+            }
+
+            match response.state {
+                sentry_cli::api::ChunkedFileState::NotFound => {
+                    println!("   ℹ️ 'NotFound' is expected since we didn't upload chunks first");
+                }
+                _ => {
+                    println!("   🎯 Unexpected state for non-uploaded chunks");
+                }
+            }
+        }
+        Err(e) => {
+            println!("   ❌ Assembly API call failed: {}", e);
+            return Err(e.into());
+        }
+    }
+
+    Ok(())
+}
+
+fn main() -> Result<()> {
+    // Configuration
+    let auth_token = "";
+    let base_url = "http://localhost:8000";
+    let org = "sentry";
+    let project = "internal";
+    let test_file = "./TestUploads/HackerNews_arm64";
+
+    println!("🧪 Sentry CLI Full Upload Test (Debug Files)");
+    println!("=============================================");
+    println!("Base URL: {}", base_url);
+    println!("Organization: {}", org);
+    println!("Project: {}", project);
+    println!("Test binary: {}", test_file);
+    println!();
+
+    // Create and bind config
+    let config = create_test_config(auth_token, base_url)?;
+    config.bind_to_process();
+
+    // Initialize API
+    let api = Api::current();
+
+    // Test authentication
+    println!("🔐 Testing authentication...");
+    match api.authenticated()?.get_auth_info() {
+        Ok(auth_info) => {
+            println!("✅ Authentication successful!");
+            if let Some(user) = auth_info.user {
+                println!("   User: {}", user.email);
+            }
+            if let Some(auth) = auth_info.auth {
+                println!("   Scopes: {:?}", auth.scopes);
+            }
+        }
+        Err(e) => {
+            println!("❌ Authentication failed: {}", e);
+            return Err(e.into());
+        }
+    }
+    println!();
+
+    // // Test 1: Check chunk upload support
+    // if let Err(e) = test_chunk_upload_options(org) {
+    //     println!("❌ Chunk upload not supported, cannot proceed with full test: {}", e);
+    //     return Err(e);
+    // }
+    println!();
+
+    // Test 2: Assembly API only (like the original test) - COMMENTED OUT FOR DSYM TEST
+    // if let Err(e) = test_assembly_only(org, &[project.to_string()]) {
+    //     println!("❌ Assembly-only test failed: {}", e);
+    // }
+    // println!();
+
+    // Test 3: Full chunk upload + assembly flow
+    if std::path::Path::new(test_file).exists() {
+        if let Err(e) = test_full_debug_file_upload_flow(org, project, test_file) {
+            println!("❌ Full upload flow test failed: {}", e);
+            return Err(e);
+        }
+    } else {
+        println!("⚠️ Test file {} not found, skipping full upload test", test_file);
+    }
+
+    println!();
+    println!("🏁 All tests completed!");
+
+    Ok(())
+}
\ No newline at end of file
diff --git a/test_mobile_app_upload.rs b/test_mobile_app_upload.rs
new file mode 100644
index 0000000000..3af77041a1
--- /dev/null
+++ b/test_mobile_app_upload.rs
@@ -0,0 +1,250 @@
+use anyhow::Result;
+
+// Include the sentry-cli library
+extern crate sentry_cli;
+
+use sentry_cli::api::{Api, ChunkUploadCapability, ChunkedPreprodArtifactRequest};
+use sentry_cli::config::{Auth, Config};
+use sentry_cli::utils::auth_token::AuthToken;
+use sentry_cli::utils::fs::get_sha1_checksums;
+use sentry_cli::utils::chunks::{upload_chunks, Chunk};
+use sentry_cli::utils::progress::ProgressStyle;
+
+fn create_test_config(auth_token: &str, base_url: &str) -> Result<Config> {
+    let mut config = Config::from_cli_config()?;
+
+    // Set the base URL to localhost
+    config.set_base_url(base_url);
+
+    // Set the auth token
+    let token: AuthToken = auth_token.into();
+    config.set_auth(Auth::Token(token));
+
+    Ok(config)
+}
+
+fn test_chunk_upload_options(org: &str) -> Result<()> {
+    println!("🔍 Testing chunk upload options endpoint...");
+
+    let api = Api::current();
+    let authenticated_api = api.authenticated()?;
+
+    match authenticated_api.get_chunk_upload_options(org)? {
+        Some(options) => {
+            println!("✅ Chunk upload supported!");
+            println!("   URL: {}", options.url);
+            println!("   Max chunks per request: {}", options.max_chunks);
+            println!("   Max request size: {}", options.max_size);
+            println!("   Chunk size: {}", options.chunk_size);
+            println!("   Concurrency: {}", options.concurrency);
+
+
+            // Check specifically for preprod artifacts capability
+            if !options.supports(ChunkUploadCapability::PreprodArtifacts) {
+                println!("   ⚠️ PreprodArtifacts capability not supported");
+                return Err(anyhow::anyhow!("PreprodArtifacts capability not supported"));
+            }
+
+            Ok(())
+        }
+        None => {
+            println!("❌ Chunk upload not supported by server");
+            Err(anyhow::anyhow!("Chunk upload not supported"))
+        }
+    }
+}
+
+fn test_full_mobile_app_upload_flow(org: &str, project: &str, archive_path: &str) -> Result<()> {
+    println!("🚀 Testing full mobile app archive upload flow...");
+
+    // Read the archive file directly
+    let archive_path = std::path::Path::new(archive_path);
+    println!("   📦 Archive file: {}", archive_path.display());
+
+    // Read the archive file
+    let content = std::fs::read(&archive_path)
+        .map_err(|e| anyhow::anyhow!("Failed to read archive file {}: {}", archive_path.display(), e))?;
+
+    println!("   📄 File size: {} bytes ({:.2} MB)", content.len(), content.len() as f64 / 1024.0 / 1024.0);
+
+    let api = Api::current();
+    let authenticated_api = api.authenticated()?;
+
+    // Get chunk upload options
+    let chunk_upload_options = authenticated_api.get_chunk_upload_options(org)?
+        .ok_or_else(|| anyhow::anyhow!("Chunk upload not supported by server"))?;
+
+    println!("   📊 Chunk configuration:");
+    println!("     • Chunk size: {} bytes", chunk_upload_options.chunk_size);
+    println!("     • Max chunks per request: {}", chunk_upload_options.max_chunks);
+    println!("     • Max request size: {} bytes", chunk_upload_options.max_size);
+    println!("     • Concurrency: {}", chunk_upload_options.concurrency);
+
+    // Step 1: Prepare data and calculate checksums
+    let data = &content;
+    let chunk_size = chunk_upload_options.chunk_size as usize;
+    let (total_checksum, chunk_checksums) = get_sha1_checksums(data, chunk_size)?;
+
+    println!("   🔢 Checksum calculation:");
+    println!("     • Total archive SHA1: {}", total_checksum);
+    println!("     • Number of chunks: {}", chunk_checksums.len());
+
+    // Show individual chunk details (limit for large files)
+    let max_chunks_to_show = 5;
+    for (i, checksum) in chunk_checksums.iter().enumerate().take(max_chunks_to_show) {
+        let chunk_start = i * chunk_size;
+        let chunk_end = std::cmp::min(chunk_start + chunk_size, data.len());
+        let chunk_size_actual = chunk_end - chunk_start;
+        println!("     Chunk {}: {} bytes (SHA1: {})", i + 1, chunk_size_actual, checksum);
+    }
+    if chunk_checksums.len() > max_chunks_to_show {
+        println!("     ... and {} more chunks", chunk_checksums.len() - max_chunks_to_show);
+    }
+
+    // Step 2: Upload all chunks (preprod artifacts likely need all chunks)
+    println!("   🚀 Uploading chunks...");
+    let chunks_to_upload: Vec<_> = data.chunks(chunk_size)
+        .zip(chunk_checksums.iter())
+        .map(|(chunk_data, checksum)| (*checksum, chunk_data))
+        .collect();
+
+    if !chunks_to_upload.is_empty() {
+        // Create Chunk objects for upload
+        let chunks: Vec<_> = chunks_to_upload.iter()
+            .map(|(checksum, data)| Chunk((*checksum, *data)))
+            .collect();
+
+        println!("   🌐 Uploading {} chunks to: {}", chunks.len(), chunk_upload_options.url);
+        upload_chunks(&chunks, &chunk_upload_options, ProgressStyle::default_bar())?;
+
+        println!("   ✅ Chunks uploaded successfully!");
+    }
+
+    // Step 3: Assemble using the preprod artifact endpoint
+    println!("   🔧 Assembling mobile app artifact...");
+
+    // Create the simple preprod artifact request (mirroring DIF request pattern)
+    let assembly_request = ChunkedPreprodArtifactRequest::new(
+        total_checksum,
+        &chunk_checksums,
+    );
+
+    println!("   📡 Request JSON:");
+    println!("{}", serde_json::to_string_pretty(&assembly_request)?);
+
+    let assembly_response = authenticated_api.assemble_preprod_artifact(org, project, &assembly_request)?;
+
+    println!("   📦 Mobile app assembly initiated!");
+    println!("   State: {:?}", assembly_response.state);
+    println!("   Missing chunks: {}", assembly_response.missing_chunks.len());
+
+    if let Some(ref detail) = assembly_response.detail {
+        println!("   Detail: {}", detail);
+    }
+
+    // Step 4: Check assembly status
+    match assembly_response.state {
+        sentry_cli::api::ChunkedFileState::Ok => {
+            println!("   ✅ Assembly completed successfully!");
+        }
+        sentry_cli::api::ChunkedFileState::NotFound => {
+            println!("   ❌ Assembly failed - bundle not found");
+        }
+        sentry_cli::api::ChunkedFileState::Created => {
+            println!("   ⏳ Assembly created, waiting for processing...");
+        }
+        sentry_cli::api::ChunkedFileState::Assembling => {
+            println!("   ⚙️ Assembly in progress...");
+        }
+        sentry_cli::api::ChunkedFileState::Error => {
+            println!("   ❌ Assembly failed with error");
+            return Err(anyhow::anyhow!("Assembly failed"));
+        }
+    }
+
+    if assembly_response.missing_chunks.is_empty() {
+        println!("   🎉 Full mobile app upload flow completed successfully!");
+        println!("   📈 Summary:");
+        println!("     • Archive: {}", archive_path.display());
+        println!("     • Total chunks: {}", chunk_checksums.len());
+        println!("     • File checksum: {}", total_checksum);
+        println!("     • Organization: {}", org);
+        println!("     • Project: {}", project);
+        println!("     • Endpoint: /projects/{}/{}/files/preprodartifacts/assemble/", org, project);
+    } else {
+        println!("   ⚠️ Some chunks are still missing after upload attempt");
+        println!("   Missing: {:?}", assembly_response.missing_chunks);
+    }
+
+    Ok(())
+}
+
+fn main() -> Result<()> {
+    // Configuration
+    let auth_token = "";
+    let base_url = "http://localhost:8000";
+    let org = "sentry";
+    let project = "internal";
+    let test_archive = "./TestUploads/HackerNews.xcarchive.zip";
+
+    println!("📱 Sentry CLI Mobile App Upload Test");
+    println!("====================================");
+    println!("Base URL: {}", base_url);
+    println!("Organization: {}", org);
+    println!("Project: {}", project);
+    println!("Test archive: {}", test_archive);
+    println!();
+
+    // Create and bind config
+    let config = create_test_config(auth_token, base_url)?;
+    config.bind_to_process();
+
+    // Initialize API
+    let api = Api::current();
+
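+    // `Api::current()` picks up the Config bound via `bind_to_process()` above, so the
+    // helper functions below can grab their own handle instead of passing one around.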
+    // Test authentication
+    println!("🔐 Testing authentication...");
+    match api.authenticated()?.get_auth_info() {
+        Ok(auth_info) => {
+            println!("✅ Authentication successful!");
+            if let Some(user) = auth_info.user {
+                println!("   User: {}", user.email);
+            }
+            if let Some(auth) = auth_info.auth {
+                println!("   Scopes: {:?}", auth.scopes);
+            }
+        }
+        Err(e) => {
+            println!("❌ Authentication failed: {}", e);
+            return Err(e.into());
+        }
+    }
+    println!();
+
+    // Test 1: Check chunk upload support
+    if let Err(e) = test_chunk_upload_options(org) {
+        println!("❌ Chunk upload not supported, cannot proceed with test: {}", e);
+        return Err(e);
+    }
+    println!();
+
+    // Test 2: Full chunk upload + assembly flow
+    if std::path::Path::new(test_archive).exists() {
+        if let Err(e) = test_full_mobile_app_upload_flow(org, project, test_archive) {
+            println!("❌ Full mobile app upload flow test failed: {}", e);
+            return Err(e);
+        }
+    } else {
+        println!("⚠️ Test archive {} not found, skipping upload test", test_archive);
+    }
+
+    println!();
+    println!("🏁 Mobile app upload test completed!");
+    println!();
+    println!("📝 Next steps:");
+    println!("   1. Test with different archive types and sizes");
+    println!("   2. Verify server-side processing of the preprodartifact/assemble endpoint");
+    println!("   3. Integration with the mobile_app upload command");
+
+    Ok(())
+}
\ No newline at end of file