Skip to content

Commit

Permalink
chore(napi): add rust_tls flag
Browse files Browse the repository at this point in the history
  • Loading branch information
j-mendez committed Nov 26, 2023
1 parent 4401693 commit 17ad13c
Show file tree
Hide file tree
Showing 5 changed files with 19 additions and 18 deletions.
4 changes: 2 additions & 2 deletions examples/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "spider_examples"
version = "1.50.3"
version = "1.50.4"
authors = ["madeindjs <[email protected]>", "j-mendez <[email protected]>"]
description = "Multithreaded web crawler written in Rust."
repository = "https://github.com/spider-rs/spider"
Expand All @@ -22,7 +22,7 @@ htr = "0.5.27"
flexbuffers = "2.0.0"

[dependencies.spider]
version = "1.50.3"
version = "1.50.4"
path = "../spider"
features = ["serde"]

Expand Down
9 changes: 5 additions & 4 deletions spider/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "spider"
version = "1.50.3"
version = "1.50.4"
authors = ["madeindjs <[email protected]>", "j-mendez <[email protected]>"]
description = "The fastest web crawler written in Rust."
repository = "https://github.com/spider-rs/spider"
Expand All @@ -15,7 +15,7 @@ edition = "2018"
maintenance = { status = "as-is" }

[dependencies]
reqwest = { version = "0.11.22", features = [ "brotli", "gzip", "deflate", "native-tls-alpn", "stream" ] }
reqwest = { version = "0.11.22", features = [ "brotli", "gzip", "deflate", "stream" ] }
url = "2.4.0"
tokio = { version = "1.34.0", features = [ "rt-multi-thread", "macros", "time", "parking_lot" ] }
tokio-stream = "0.1.14"
Expand Down Expand Up @@ -52,7 +52,7 @@ napi = { version = "2", features = ["async", "tokio_rt", "napi4"], optional = tr
tikv-jemallocator = { version = "0.5.0", optional = true }

[features]
default = ["sync"]
default = ["sync", "reqwest/native-tls-alpn"]
regex = ["dep:regex"]
glob = ["dep:regex", "dep:itertools"]
ua_generator = ["dep:ua_generator"]
Expand All @@ -76,4 +76,5 @@ chrome_cpu = ["chrome"]
chrome_stealth = ["chrome"]
cookies = ["reqwest/cookies"]
cron = ["dep:chrono", "dep:cron", "dep:async-trait"]
napi = ["dep:napi"]
napi = ["dep:napi"]
napi_rust_tls = ["dep:napi", "reqwest/rustls-tls"]
16 changes: 8 additions & 8 deletions spider/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ This is a basic async example crawling a web page, add spider to your `Cargo.tom

```toml
[dependencies]
spider = "1.50.3"
spider = "1.50.4"
```

And then the code:
Expand Down Expand Up @@ -91,7 +91,7 @@ We have a couple of optional feature flags. Regex blacklisting, jemalloc backend, gl

```toml
[dependencies]
spider = { version = "1.50.3", features = ["regex", "ua_generator"] }
spider = { version = "1.50.4", features = ["regex", "ua_generator"] }
```

1. `ua_generator`: Enables auto generating a random real User-Agent.
Expand Down Expand Up @@ -122,7 +122,7 @@ Move processing to a worker, drastically increases performance even if worker is

```toml
[dependencies]
spider = { version = "1.50.3", features = ["decentralized"] }
spider = { version = "1.50.4", features = ["decentralized"] }
```

```sh
Expand All @@ -142,7 +142,7 @@ Use the subscribe method to get a broadcast channel.

```toml
[dependencies]
spider = { version = "1.50.3", features = ["sync"] }
spider = { version = "1.50.4", features = ["sync"] }
```

```rust,no_run
Expand Down Expand Up @@ -172,7 +172,7 @@ Allow regex for blacklisting routes

```toml
[dependencies]
spider = { version = "1.50.3", features = ["regex"] }
spider = { version = "1.50.4", features = ["regex"] }
```

```rust,no_run
Expand All @@ -199,7 +199,7 @@ If you are performing large workloads you may need to control the crawler by ena

```toml
[dependencies]
spider = { version = "1.50.3", features = ["control"] }
spider = { version = "1.50.4", features = ["control"] }
```

```rust
Expand Down Expand Up @@ -269,7 +269,7 @@ Use cron jobs to run crawls continuously at anytime.

```toml
[dependencies]
spider = { version = "1.50.3", features = ["sync", "cron"] }
spider = { version = "1.50.4", features = ["sync", "cron"] }
```

```rust,no_run
Expand Down Expand Up @@ -305,7 +305,7 @@ async fn main() {

```toml
[dependencies]
spider = { version = "1.50.3", features = ["chrome"] }
spider = { version = "1.50.4", features = ["chrome"] }
```

You can use `website.crawl_concurrent_raw` to perform a crawl without chromium when needed. Use the feature flag `chrome_headed` to enable headful browser usage if needed to debug.
Expand Down
4 changes: 2 additions & 2 deletions spider_cli/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "spider_cli"
version = "1.50.3"
version = "1.50.4"
authors = ["madeindjs <[email protected]>", "j-mendez <[email protected]>"]
description = "The fastest web crawler CLI written in Rust."
repository = "https://github.com/spider-rs/spider"
Expand All @@ -26,7 +26,7 @@ quote = "1.0.18"
failure_derive = "0.1.8"

[dependencies.spider]
version = "1.50.3"
version = "1.50.4"
path = "../spider"

[[bin]]
Expand Down
4 changes: 2 additions & 2 deletions spider_worker/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "spider_worker"
version = "1.50.3"
version = "1.50.4"
authors = ["madeindjs <[email protected]>", "j-mendez <[email protected]>"]
description = "The fastest web crawler as a worker or proxy."
repository = "https://github.com/spider-rs/spider"
Expand All @@ -22,7 +22,7 @@ lazy_static = "1.4.0"
env_logger = "0.10.0"

[dependencies.spider]
version = "1.50.3"
version = "1.50.4"
path = "../spider"
features = ["serde", "flexbuffers"]

Expand Down

0 comments on commit 17ad13c

Please sign in to comment.