diff --git a/cli/Cargo.lock b/cli/Cargo.lock
index 01faf42..afaf82e 100644
--- a/cli/Cargo.lock
+++ b/cli/Cargo.lock
@@ -19,9 +19,9 @@ checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"

 [[package]]
 name = "anstream"
-version = "0.6.17"
+version = "0.6.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "23a1e53f0f5d86382dafe1cf314783b2044280f406e7e1506368220ad11b1338"
+checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b"
 dependencies = [
  "anstyle",
  "anstyle-parse",
@@ -125,9 +125,9 @@ checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da"

 [[package]]
 name = "cc"
-version = "1.1.34"
+version = "1.1.36"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "67b9470d453346108f93a59222a9a1a5724db32d0a4727b7ab7ace4b4d822dc9"
+checksum = "baee610e9452a8f6f0a1b6194ec09ff9e2d85dea54432acdae41aa0761c95d70"
 dependencies = [
  "shlex",
 ]
@@ -405,9 +405,9 @@ dependencies = [

 [[package]]
 name = "hashbrown"
-version = "0.15.0"
+version = "0.15.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb"
+checksum = "3a9bfc1af68b1726ea47d3d5109de126281def866b33970e10fbab11b5dafab3"

 [[package]]
 name = "heck"
@@ -1087,9 +1087,9 @@ checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f"

 [[package]]
 name = "rustix"
-version = "0.38.38"
+version = "0.38.39"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aa260229e6538e52293eeb577aabd09945a09d6d9cc0fc550ed7529056c2e32a"
+checksum = "375116bee2be9ed569afe2154ea6a99dfdffd257f533f187498c2a8f5feaf4ee"
 dependencies = [
  "bitflags",
  "errno",
@@ -1265,7 +1265,7 @@ dependencies = [

 [[package]]
 name = "spider-client"
-version = "0.1.22"
+version = "0.1.23"
 dependencies = [
  "reqwest",
  "serde",
@@ -1276,7 +1276,7 @@ dependencies = [

 [[package]]
 name = "spider-cloud-cli"
-version = "0.1.22"
+version = "0.1.23"
 dependencies = [
  "clap",
  "keyring",
diff --git a/cli/Cargo.toml b/cli/Cargo.toml
index 3dc9249..04ef516 100644
--- a/cli/Cargo.toml
+++ b/cli/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "spider-cloud-cli"
-version = "0.1.22"
+version = "0.1.23"
 edition = "2021"
 authors = [ "j-mendez "]
 description = "The Spider Cloud CLI for web crawling and scraping"
diff --git a/javascript/package-lock.json b/javascript/package-lock.json
index f4a48eb..d3a3106 100644
--- a/javascript/package-lock.json
+++ b/javascript/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "@spider-cloud/spider-client",
-  "version": "0.1.22",
+  "version": "0.1.23",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@spider-cloud/spider-client",
-      "version": "0.1.22",
+      "version": "0.1.23",
       "license": "MIT",
       "devDependencies": {
         "@types/node": "22.7.5",
diff --git a/javascript/package.json b/javascript/package.json
index b37023e..a53ff16 100644
--- a/javascript/package.json
+++ b/javascript/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@spider-cloud/spider-client",
-  "version": "0.1.22",
+  "version": "0.1.23",
   "description": "Isomorphic Javascript SDK for Spider Cloud services",
   "scripts": {
     "test": "node --import tsx --test __tests__/*test.ts",
diff --git a/python/spider/async_spider.py b/python/spider/async_spider.py
index 9e29093..e8e31ba 100644
--- a/python/spider/async_spider.py
+++ b/python/spider/async_spider.py
@@ -430,7 +430,7 @@ def _prepare_headers(
         return {
             "Content-Type": content_type,
             "Authorization": f"Bearer {self.api_key}",
-            "User-Agent": "AsyncSpider-Client/0.1.22",
+            "User-Agent": "AsyncSpider-Client/0.1.23",
         }

     async def _handle_error(self, response: ClientResponse, action: str) -> None:
diff --git a/python/spider/spider.py b/python/spider/spider.py
index dbb8153..26fd241 100644
--- a/python/spider/spider.py
+++ b/python/spider/spider.py
@@ -415,7 +415,7 @@ def _prepare_headers(self, content_type: str = "application/json"):
         return {
             "Content-Type": content_type,
             "Authorization": f"Bearer {self.api_key}",
-            "User-Agent": f"Spider-Client/0.1.22",
+            "User-Agent": f"Spider-Client/0.1.23",
         }

     def _post_request(self, url: str, data, headers, stream=False):
diff --git a/rust/Cargo.lock b/rust/Cargo.lock
index 1ad6576..13a91e8 100644
--- a/rust/Cargo.lock
+++ b/rust/Cargo.lock
@@ -70,9 +70,9 @@ checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da"

 [[package]]
 name = "cc"
-version = "1.1.34"
+version = "1.1.36"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "67b9470d453346108f93a59222a9a1a5724db32d0a4727b7ab7ace4b4d822dc9"
+checksum = "baee610e9452a8f6f0a1b6194ec09ff9e2d85dea54432acdae41aa0761c95d70"
 dependencies = [
  "shlex",
 ]
@@ -276,9 +276,9 @@ dependencies = [

 [[package]]
 name = "hashbrown"
-version = "0.15.0"
+version = "0.15.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb"
+checksum = "3a9bfc1af68b1726ea47d3d5109de126281def866b33970e10fbab11b5dafab3"

 [[package]]
 name = "hermit-abi"
@@ -817,9 +817,9 @@ checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f"

 [[package]]
 name = "rustix"
-version = "0.38.38"
+version = "0.38.39"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aa260229e6538e52293eeb577aabd09945a09d6d9cc0fc550ed7529056c2e32a"
+checksum = "375116bee2be9ed569afe2154ea6a99dfdffd257f533f187498c2a8f5feaf4ee"
 dependencies = [
  "bitflags",
  "errno",
@@ -982,7 +982,7 @@ dependencies = [

 [[package]]
 name = "spider-client"
-version = "0.1.22"
+version = "0.1.23"
 dependencies = [
  "dotenv",
  "lazy_static",
diff --git a/rust/Cargo.toml b/rust/Cargo.toml
index 1cdd30d..d89c2cb 100644
--- a/rust/Cargo.toml
+++ b/rust/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "spider-client"
-version = "0.1.22"
+version = "0.1.23"
 edition = "2021"
 authors = [ "j-mendez "]
 description = "Spider Cloud client"
diff --git a/rust/src/lib.rs b/rust/src/lib.rs
index 2fcc600..3a7062d 100644
--- a/rust/src/lib.rs
+++ b/rust/src/lib.rs
@@ -65,7 +65,6 @@ use reqwest::Client;
 use reqwest::{Error, Response};
 use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
-use std::default;
 use tokio_stream::StreamExt;

 /// Structure representing the Chunking algorithm dictionary.
@@ -104,7 +103,7 @@ pub enum WebAutomation {
     ScrollX { pixels: i32 },
     ScrollY { pixels: i32 },
     Fill { selector: String, value: String },
-    InfiniteScroll { times: u32 }
+    InfiniteScroll { times: u32 },
 }

 #[derive(Default, Serialize, Deserialize, Debug, Clone)]
@@ -112,7 +111,7 @@ pub enum WebAutomation {
 pub enum RedirectPolicy {
     Loose,
     #[default]
-    Strict
+    Strict,
 }

 pub type WebAutomationMap = std::collections::HashMap<String, Vec<WebAutomation>>;
@@ -379,7 +378,7 @@ pub struct RequestParams {
     /// Perform web automated tasks on a url or url path. You need to make your `request` `chrome` or `smart`
     pub automation_scripts: Option<WebAutomationMap>,
     /// The redirect policy for HTTP request. Set the value to Loose to allow all.
-    pub redirect_policy: Option<RedirectPolicy>
+    pub redirect_policy: Option<RedirectPolicy>,
 }

 /// The structure representing request parameters for a search request.
@@ -1160,8 +1159,6 @@ impl Spider {

 #[cfg(test)]
 mod tests {
-    use std::time::Duration;
-
     use super::*;
     use dotenv::dotenv;
     use lazy_static::lazy_static;
@@ -1171,16 +1168,14 @@ mod tests {
     static ref SPIDER_CLIENT: Spider = {
         dotenv().ok();
         let client = ClientBuilder::new();
-        let client = client
-            .tcp_keepalive(Some(Duration::from_secs(5)))
-            .build()
-            .unwrap();
+        let client = client.user_agent("SpiderBot").build().unwrap();

         Spider::new_with_client(None, client).expect("client to build")
     };
 }

-    #[tokio::test(flavor = "multi_thread")]
+    #[tokio::test]
+    #[ignore]
     async fn test_scrape_url() {
         let response = SPIDER_CLIENT
             .scrape_url("https://example.com", None, "application/json")
@@ -1188,7 +1183,7 @@
         assert!(response.is_ok());
     }

-    #[tokio::test(flavor = "multi_thread")]
+    #[tokio::test]
     async fn test_crawl_url() {
         let response = SPIDER_CLIENT
             .crawl_url(
                 "https://example.com",
@@ -1202,7 +1197,8 @@
         assert!(response.is_ok());
     }

-    #[tokio::test(flavor = "multi_thread")]
+    #[tokio::test]
+    #[ignore]
     async fn test_links() {
         let response: Result = SPIDER_CLIENT
             .links("https://example.com", None, false, "application/json")
@@ -1210,7 +1206,8 @@
         assert!(response.is_ok());
     }

-    #[tokio::test(flavor = "multi_thread")]
+    #[tokio::test]
+    #[ignore]
     async fn test_screenshot() {
         let mut params = RequestParams::default();
         params.limit = Some(1);
@@ -1241,7 +1238,8 @@
     // assert!(response.is_ok());
     // }

-    #[tokio::test(flavor = "multi_thread")]
+    #[tokio::test]
+    #[ignore]
     async fn test_transform() {
         let data = vec![HashMap::from([(
             "<html>Transformation</html>".into(),
@@ -1253,7 +1251,8 @@
         assert!(response.is_ok());
     }

-    #[tokio::test(flavor = "multi_thread")]
+    #[tokio::test]
+    #[ignore]
     async fn test_extract_contacts() {
         let response = SPIDER_CLIENT
             .extract_contacts("https://example.com", None, false, "application/json")
@@ -1261,7 +1260,8 @@
         assert!(response.is_ok());
     }

-    #[tokio::test(flavor = "multi_thread")]
+    #[tokio::test]
+    #[ignore]
     async fn test_label() {
         let response = SPIDER_CLIENT
             .label("https://example.com", None, false, "application/json")
@@ -1269,7 +1269,7 @@
         assert!(response.is_ok());
     }

-    #[tokio::test(flavor = "multi_thread")]
+    #[tokio::test]
     async fn test_create_signed_url() {
         let response = SPIDER_CLIENT
             .create_signed_url(Some("example.com"), None)
@@ -1277,7 +1277,7 @@
         assert!(response.is_ok());
     }

-    #[tokio::test(flavor = "multi_thread")]
+    #[tokio::test]
     async fn test_get_crawl_state() {
         let response = SPIDER_CLIENT
             .get_crawl_state("https://example.com", None, "application/json")
@@ -1285,7 +1285,7 @@
         assert!(response.is_ok());
     }

-    #[tokio::test(flavor = "multi_thread")]
+    #[tokio::test]
     async fn test_query() {
         let mut query = QueryRequest::default();
@@ -1295,7 +1295,7 @@
         assert!(response.is_ok());
     }

-    #[tokio::test(flavor = "multi_thread")]
+    #[tokio::test]
     async fn test_get_credits() {
         let response = SPIDER_CLIENT.get_credits().await;
         assert!(response.is_ok());