From 17ad13c45af065665a699d6ac4467e753119eee6 Mon Sep 17 00:00:00 2001
From: j-mendez
Date: Sun, 26 Nov 2023 11:29:35 -0500
Subject: [PATCH] chore(napi): add rust_tls flag

---
 examples/Cargo.toml      |  4 ++--
 spider/Cargo.toml        |  9 +++++----
 spider/README.md         | 16 ++++++++--------
 spider_cli/Cargo.toml    |  4 ++--
 spider_worker/Cargo.toml |  4 ++--
 5 files changed, 19 insertions(+), 18 deletions(-)

diff --git a/examples/Cargo.toml b/examples/Cargo.toml
index 6a9287d8b4..7639f471fb 100644
--- a/examples/Cargo.toml
+++ b/examples/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "spider_examples"
-version = "1.50.3"
+version = "1.50.4"
 authors = ["madeindjs ", "j-mendez "]
 description = "Multithreaded web crawler written in Rust."
 repository = "https://github.com/spider-rs/spider"
@@ -22,7 +22,7 @@ htr = "0.5.27"
 flexbuffers = "2.0.0"
 
 [dependencies.spider]
-version = "1.50.3"
+version = "1.50.4"
 path = "../spider"
 features = ["serde"]
 
diff --git a/spider/Cargo.toml b/spider/Cargo.toml
index add88de3b6..32b1a17d53 100644
--- a/spider/Cargo.toml
+++ b/spider/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "spider"
-version = "1.50.3"
+version = "1.50.4"
 authors = ["madeindjs ", "j-mendez "]
 description = "The fastest web crawler written in Rust."
 repository = "https://github.com/spider-rs/spider"
@@ -15,7 +15,7 @@ edition = "2018"
 maintenance = { status = "as-is" }
 
 [dependencies]
-reqwest = { version = "0.11.22", features = [ "brotli", "gzip", "deflate", "native-tls-alpn", "stream" ] }
+reqwest = { version = "0.11.22", features = [ "brotli", "gzip", "deflate", "stream" ] }
 url = "2.4.0"
 tokio = { version = "1.34.0", features = [ "rt-multi-thread", "macros", "time", "parking_lot" ] }
 tokio-stream = "0.1.14"
@@ -52,7 +52,7 @@ napi = { version = "2", features = ["async", "tokio_rt", "napi4"], optional = tr
 tikv-jemallocator = { version = "0.5.0", optional = true }
 
 [features]
-default = ["sync"]
+default = ["sync", "reqwest/native-tls-alpn"]
 regex = ["dep:regex"]
 glob = ["dep:regex", "dep:itertools"]
 ua_generator = ["dep:ua_generator"]
@@ -76,4 +76,5 @@ chrome_cpu = ["chrome"]
 chrome_stealth = ["chrome"]
 cookies = ["reqwest/cookies"]
 cron = ["dep:chrono", "dep:cron", "dep:async-trait"]
-napi = ["dep:napi"]
\ No newline at end of file
+napi = ["dep:napi"]
+napi_rust_tls = ["dep:napi", "reqwest/rustls-tls"]
\ No newline at end of file
diff --git a/spider/README.md b/spider/README.md
index 6dd964201e..32eb1bfb29 100644
--- a/spider/README.md
+++ b/spider/README.md
@@ -16,7 +16,7 @@ This is a basic async example crawling a web page, add spider to your `Cargo.tom
 
 ```toml
 [dependencies]
-spider = "1.50.3"
+spider = "1.50.4"
 ```
 
 And then the code:
@@ -91,7 +91,7 @@ We have a couple optional feature flags. Regex blacklisting, jemaloc backend, gl
 
 ```toml
 [dependencies]
-spider = { version = "1.50.3", features = ["regex", "ua_generator"] }
+spider = { version = "1.50.4", features = ["regex", "ua_generator"] }
 ```
 
 1. `ua_generator`: Enables auto generating a random real User-Agent.
@@ -122,7 +122,7 @@ Move processing to a worker, drastically increases performance even if worker is
 
 ```toml
 [dependencies]
-spider = { version = "1.50.3", features = ["decentralized"] }
+spider = { version = "1.50.4", features = ["decentralized"] }
 ```
 
 ```sh
@@ -142,7 +142,7 @@ Use the subscribe method to get a broadcast channel.
 
 ```toml
 [dependencies]
-spider = { version = "1.50.3", features = ["sync"] }
+spider = { version = "1.50.4", features = ["sync"] }
 ```
 
 ```rust,no_run
@@ -172,7 +172,7 @@ Allow regex for blacklisting routes
 
 ```toml
 [dependencies]
-spider = { version = "1.50.3", features = ["regex"] }
+spider = { version = "1.50.4", features = ["regex"] }
 ```
 
 ```rust,no_run
@@ -199,7 +199,7 @@ If you are performing large workloads you may need to control the crawler by ena
 
 ```toml
 [dependencies]
-spider = { version = "1.50.3", features = ["control"] }
+spider = { version = "1.50.4", features = ["control"] }
 ```
 
 ```rust
@@ -269,7 +269,7 @@ Use cron jobs to run crawls continuously at anytime.
 
 ```toml
 [dependencies]
-spider = { version = "1.50.3", features = ["sync", "cron"] }
+spider = { version = "1.50.4", features = ["sync", "cron"] }
 ```
 
 ```rust,no_run
@@ -305,7 +305,7 @@ async fn main() {
 
 ```toml
 [dependencies]
-spider = { version = "1.50.3", features = ["chrome"] }
+spider = { version = "1.50.4", features = ["chrome"] }
 ```
 
 You can use `website.crawl_concurrent_raw` to perform a crawl without chromium when needed. Use the feature flag `chrome_headed` to enable headful browser usage if needed to debug.
diff --git a/spider_cli/Cargo.toml b/spider_cli/Cargo.toml
index 7c739df0ff..71e7130b0a 100644
--- a/spider_cli/Cargo.toml
+++ b/spider_cli/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "spider_cli"
-version = "1.50.3"
+version = "1.50.4"
 authors = ["madeindjs ", "j-mendez "]
 description = "The fastest web crawler CLI written in Rust."
 repository = "https://github.com/spider-rs/spider"
@@ -26,7 +26,7 @@ quote = "1.0.18"
 failure_derive = "0.1.8"
 
 [dependencies.spider]
-version = "1.50.3"
+version = "1.50.4"
 path = "../spider"
 
 [[bin]]
diff --git a/spider_worker/Cargo.toml b/spider_worker/Cargo.toml
index 5ea8c7e026..686665aadc 100644
--- a/spider_worker/Cargo.toml
+++ b/spider_worker/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "spider_worker"
-version = "1.50.3"
+version = "1.50.4"
 authors = ["madeindjs ", "j-mendez "]
 description = "The fastest web crawler as a worker or proxy."
 repository = "https://github.com/spider-rs/spider"
@@ -22,7 +22,7 @@ lazy_static = "1.4.0"
 env_logger = "0.10.0"
 
 [dependencies.spider]
-version = "1.50.3"
+version = "1.50.4"
 path = "../spider"
 features = ["serde", "flexbuffers"]
 
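
Note on the new flag: `napi_rust_tls` forwards to reqwest's rustls backend (the reqwest feature is spelled `rustls-tls`; reqwest 0.11 has no `rust-tls` feature). Because this patch also moves `native-tls-alpn` into the crate's `default` feature, a consumer that wants the rustls stack to actually be the default TLS backend should disable default features, otherwise reqwest compiles both stacks and prefers native-tls. A minimal sketch of a hypothetical downstream `Cargo.toml` (not part of this patch), assuming the feature set lands as written above:

```toml
# Hypothetical consumer manifest: opt into the rustls-backed napi build.
[dependencies]
# `default-features = false` drops the new default `reqwest/native-tls-alpn`
# so only rustls (via `napi_rust_tls` -> `reqwest/rustls-tls`) is compiled in;
# `sync` is re-added explicitly because it normally comes from `default`.
spider = { version = "1.50.4", default-features = false, features = ["sync", "napi_rust_tls"] }
```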