diff --git a/.code-samples.meilisearch.yaml b/.code-samples.meilisearch.yaml
index 6bdef025..2976520b 100644
--- a/.code-samples.meilisearch.yaml
+++ b/.code-samples.meilisearch.yaml
@@ -449,6 +449,23 @@ reset_typo_tolerance_1: |-
     .reset_typo_tolerance()
     .await
     .unwrap();
+get_all_batches_1: |-
+  let mut query = meilisearch_sdk::batches::BatchesQuery::new(&client);
+  query.with_limit(20);
+  let batches: meilisearch_sdk::batches::BatchesResults =
+    client.get_batches_with(&query).await.unwrap();
+get_batch_1: |-
+  let uid: u32 = 42;
+  let batch: meilisearch_sdk::batches::Batch = client
+    .get_batch(uid)
+    .await
+    .unwrap();
+get_all_batches_paginating_1: |-
+  let mut query = meilisearch_sdk::batches::BatchesQuery::new(&client);
+  query.with_limit(2);
+  query.with_from(40);
+  let batches: meilisearch_sdk::batches::BatchesResults =
+    client.get_batches_with(&query).await.unwrap();
 get_stop_words_1: |-
   let stop_words: Vec<String> = client
     .index("movies")
diff --git a/src/batches.rs b/src/batches.rs
new file mode 100644
index 00000000..39895768
--- /dev/null
+++ b/src/batches.rs
@@ -0,0 +1,199 @@
+use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
+
+use crate::{client::Client, errors::Error, request::HttpClient};
+
+/// A batch of tasks processed together by Meilisearch.
+///
+/// See: https://www.meilisearch.com/docs/reference/api/batches
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Batch {
+    /// Unique identifier of the batch.
+    pub uid: u32,
+    /// When the batch was enqueued.
+    #[serde(default, with = "time::serde::rfc3339::option")]
+    pub enqueued_at: Option<OffsetDateTime>,
+    /// When the batch started processing.
+    #[serde(default, with = "time::serde::rfc3339::option")]
+    pub started_at: Option<OffsetDateTime>,
+    /// When the batch finished processing.
+    #[serde(default, with = "time::serde::rfc3339::option")]
+    pub finished_at: Option<OffsetDateTime>,
+    /// Index uid related to this batch (if applicable).
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub index_uid: Option<String>,
+    /// The task uids that are part of this batch.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub task_uids: Option<Vec<u32>>,
+    /// The strategy that caused the autobatcher to stop batching tasks.
+    ///
+    /// Introduced in Meilisearch v1.15.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub batch_strategy: Option<BatchStrategy>,
+}
+
+/// Reason why the autobatcher stopped batching tasks.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+#[non_exhaustive]
+pub enum BatchStrategy {
+    /// The batch reached its configured size threshold.
+    SizeLimitReached,
+    /// The batch reached its configured time window threshold.
+    TimeLimitReached,
+    /// Unknown strategy (forward-compatibility).
+    #[serde(other)]
+    Unknown,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct BatchesResults {
+    pub results: Vec<Batch>,
+    pub total: u32,
+    pub limit: u32,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub from: Option<u32>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub next: Option<u32>,
+}
+
+/// Query builder for listing batches.
+#[derive(Debug, Serialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct BatchesQuery<'a, Http: HttpClient> {
+    #[serde(skip_serializing)]
+    client: &'a Client<Http>,
+    /// Maximum number of batches to return.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    limit: Option<u32>,
+    /// The first batch uid that should be returned.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    from: Option<u32>,
+}
+
+impl<'a, Http: HttpClient> BatchesQuery<'a, Http> {
+    #[must_use]
+    pub fn new(client: &'a Client<Http>) -> BatchesQuery<'a, Http> {
+        BatchesQuery {
+            client,
+            limit: None,
+            from: None,
+        }
+    }
+
+    #[must_use]
+    pub fn with_limit(&mut self, limit: u32) -> &mut Self {
+        self.limit = Some(limit);
+        self
+    }
+
+    #[must_use]
+    pub fn with_from(&mut self, from: u32) -> &mut Self {
+        self.from = Some(from);
+        self
+    }
+
+    /// Execute the query and list batches.
+    pub async fn execute(&self) -> Result<BatchesResults, Error> {
+        self.client.get_batches_with(self).await
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::batches::BatchStrategy;
+    use crate::client::Client;
+
+    #[tokio::test]
+    async fn test_get_batches_parses_batch_strategy() {
+        let mut s = mockito::Server::new_async().await;
+        let base = s.url();
+
+        let response_body = serde_json::json!({
+            "results": [
+                {
+                    "uid": 42,
+                    "enqueuedAt": "2024-10-11T11:49:53.000Z",
+                    "startedAt": "2024-10-11T11:49:54.000Z",
+                    "finishedAt": "2024-10-11T11:49:55.000Z",
+                    "indexUid": "movies",
+                    "taskUids": [1, 2, 3],
+                    "batchStrategy": "time_limit_reached"
+                }
+            ],
+            "limit": 20,
+            "from": null,
+            "next": null,
+            "total": 1
+        })
+        .to_string();
+
+        let _m = s
+            .mock("GET", "/batches")
+            .with_status(200)
+            .with_header("content-type", "application/json")
+            .with_body(response_body)
+            .create_async()
+            .await;
+
+        let client = Client::new(base, None::<String>).unwrap();
+        let batches = client.get_batches().await.expect("list batches failed");
+        assert_eq!(batches.results.len(), 1);
+        let b = &batches.results[0];
+        assert_eq!(b.uid, 42);
+        assert_eq!(b.batch_strategy, Some(BatchStrategy::TimeLimitReached));
+    }
+
+    #[tokio::test]
+    async fn test_get_batch_by_uid_parses_batch_strategy() {
+        let mut s = mockito::Server::new_async().await;
+        let base = s.url();
+
+        let response_body = serde_json::json!({
+            "uid": 99,
+            "batchStrategy": "size_limit_reached",
+            "taskUids": [10, 11]
+        })
+        .to_string();
+
+        let _m = s
+            .mock("GET", "/batches/99")
+            .with_status(200)
+            .with_header("content-type", "application/json")
+            .with_body(response_body)
+            .create_async()
+            .await;
+
+        let client = Client::new(base, None::<String>).unwrap();
+        let batch = client.get_batch(99).await.expect("get batch failed");
+        assert_eq!(batch.uid, 99);
+        assert_eq!(batch.batch_strategy, Some(BatchStrategy::SizeLimitReached));
+    }
+
+    #[tokio::test]
+    async fn test_query_serialization_for_batches() {
+        use mockito::Matcher;
+        let mut s = mockito::Server::new_async().await;
+        let base = s.url();
+
+        let _m = s
+            .mock("GET", "/batches")
+            .match_query(Matcher::AllOf(vec![
+                Matcher::UrlEncoded("limit".into(), "2".into()),
+                Matcher::UrlEncoded("from".into(), "40".into()),
+            ]))
+            .with_status(200)
+            .with_header("content-type", "application/json")
+            .with_body(r#"{"results":[],"limit":2,"total":0}"#)
+            .create_async()
+            .await;
+
+        let client = Client::new(base, None::<String>).unwrap();
+        let mut q = crate::batches::BatchesQuery::new(&client);
+        let _ = q.with_limit(2).with_from(40);
+        let res = client.get_batches_with(&q).await.expect("request failed");
+        assert_eq!(res.limit, 2);
+    }
+}
diff --git a/src/client.rs b/src/client.rs
index efb40683..5c79952f 100644
--- a/src/client.rs
+++ b/src/client.rs
@@ -1112,6 +1112,92 @@ impl<Http: HttpClient> Client<Http> {
         Ok(tasks)
     }
 
+    /// List batches using the Batches API.
+    ///
+    /// See: https://www.meilisearch.com/docs/reference/api/batches
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::client::Client;
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let batches = client.get_batches().await.unwrap();
+    /// # let _ = batches;
+    /// # });
+    /// ```
+    pub async fn get_batches(&self) -> Result<crate::batches::BatchesResults, Error> {
+        let res = self
+            .http_client
+            .request::<(), (), crate::batches::BatchesResults>(
+                &format!("{}/batches", self.host),
+                Method::Get { query: () },
+                200,
+            )
+            .await?;
+        Ok(res)
+    }
+
+    /// List batches with pagination filters.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::Client, batches::BatchesQuery};
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let mut query = BatchesQuery::new(&client);
+    /// query.with_limit(1);
+    /// let batches = client.get_batches_with(&query).await.unwrap();
+    /// # let _ = batches;
+    /// # });
+    /// ```
+    pub async fn get_batches_with(
+        &self,
+        query: &crate::batches::BatchesQuery<'_, Http>,
+    ) -> Result<crate::batches::BatchesResults, Error> {
+        let res = self
+            .http_client
+            .request::<&crate::batches::BatchesQuery<'_, Http>, (), crate::batches::BatchesResults>(
+                &format!("{}/batches", self.host),
+                Method::Get { query },
+                200,
+            )
+            .await?;
+        Ok(res)
+    }
+
+    /// Get a single batch by its uid.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::client::Client;
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let uid: u32 = 42;
+    /// let batch = client.get_batch(uid).await.unwrap();
+    /// # let _ = batch;
+    /// # });
+    /// ```
+    pub async fn get_batch(&self, uid: u32) -> Result<crate::batches::Batch, Error> {
+        let res = self
+            .http_client
+            .request::<(), (), crate::batches::Batch>(
+                &format!("{}/batches/{}", self.host, uid),
+                Method::Get { query: () },
+                200,
+            )
+            .await?;
+        Ok(res)
+    }
+
     /// Generates a new tenant token.
     ///
     /// # Example
diff --git a/src/lib.rs b/src/lib.rs
index 19a595c9..3eb4f105 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -230,6 +230,8 @@
 #![warn(clippy::all)]
 #![allow(clippy::needless_doctest_main)]
 
+/// Module to interact with the Batches API.
+pub mod batches;
 /// Module containing the [`Client`](client::Client) struct.
 pub mod client;
 /// Module representing the [documents] structures.
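For reference, a usage sketch (not part of the diff) of the API introduced above: it pages through all batches with BatchesQuery, using the `next` cursor returned in BatchesResults as the `from` value of the following request. The host URL and API key are placeholders, and the sketch assumes a running Meilisearch instance and a tokio runtime.

// Usage sketch only; paths and credentials below are illustrative.
use meilisearch_sdk::{batches::BatchesQuery, client::Client, errors::Error};

#[tokio::main]
async fn main() -> Result<(), Error> {
    // Placeholder host and key; adjust for a real deployment.
    let client = Client::new("http://localhost:7700", Some("masterKey"))?;

    // Request up to 20 batches per page.
    let mut query = BatchesQuery::new(&client);
    query.with_limit(20);

    let mut page = query.execute().await?;
    loop {
        for batch in &page.results {
            println!("batch {}: strategy {:?}", batch.uid, batch.batch_strategy);
        }
        // `next`, when present, is the `from` value for the following page.
        match page.next {
            Some(next) => {
                query.with_from(next);
                page = query.execute().await?;
            }
            None => break,
        }
    }
    Ok(())
}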