From 5bb054029bba1ac894faa815fcaf9f04b8c77151 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jind=C5=99ich=20B=C3=A4r?=
Date: Wed, 30 Oct 2024 13:19:20 +0100
Subject: [PATCH] fix: remove zwsp characters from markdown docs

---
 docs/guides/docker_images.mdx | 2 +-
 docs/guides/proxy_management.mdx | 2 +-
 docs/guides/session_management.mdx | 2 +-
 website/versioned_docs/version-3.0/guides/docker_images.mdx | 2 +-
 website/versioned_docs/version-3.0/guides/proxy_management.mdx | 2 +-
 .../versioned_docs/version-3.0/guides/session_management.mdx | 2 +-
 website/versioned_docs/version-3.1/guides/docker_images.mdx | 2 +-
 website/versioned_docs/version-3.1/guides/proxy_management.mdx | 2 +-
 .../versioned_docs/version-3.1/guides/session_management.mdx | 2 +-
 website/versioned_docs/version-3.2/guides/docker_images.mdx | 2 +-
 website/versioned_docs/version-3.2/guides/proxy_management.mdx | 2 +-
 .../versioned_docs/version-3.2/guides/session_management.mdx | 2 +-
 12 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/docs/guides/docker_images.mdx b/docs/guides/docker_images.mdx
index a702f814f9..d5af0c5885 100644
--- a/docs/guides/docker_images.mdx
+++ b/docs/guides/docker_images.mdx
@@ -103,7 +103,7 @@ When you use only what you need, you'll be rewarded with reasonable build and st
 
 This is the smallest image we have based on Alpine Linux. It does not include any browsers, and it's therefore best used with `CheerioCrawler`. It benefits from lightning fast builds and container startups.
 
-​`PuppeteerCrawler`, `PlaywrightCrawler` and other browser based features will **NOT** work with this image.
+`PuppeteerCrawler`, `PlaywrightCrawler` and other browser based features will **NOT** work with this image.
 
 ```dockerfile
 FROM apify/actor-node:16
diff --git a/docs/guides/proxy_management.mdx b/docs/guides/proxy_management.mdx
index 597bb24fe0..3b0f011135 100644
--- a/docs/guides/proxy_management.mdx
+++ b/docs/guides/proxy_management.mdx
@@ -80,7 +80,7 @@ Your crawlers will now use the selected proxies for all connections.
 
 ### IP Rotation and session management
 
-​`proxyConfiguration.newUrl()` allows you to pass a `sessionId` parameter. It will then be used to create a `sessionId`-`proxyUrl` pair, and subsequent `newUrl()` calls with the same `sessionId` will always return the same `proxyUrl`. This is extremely useful in scraping, because you want to create the impression of a real user. See the [session management guide](../guides/session-management) and `SessionPool` class for more information on how keeping a real session helps you avoid blocking.
+`proxyConfiguration.newUrl()` allows you to pass a `sessionId` parameter. It will then be used to create a `sessionId`-`proxyUrl` pair, and subsequent `newUrl()` calls with the same `sessionId` will always return the same `proxyUrl`. This is extremely useful in scraping, because you want to create the impression of a real user. See the [session management guide](../guides/session-management) and `SessionPool` class for more information on how keeping a real session helps you avoid blocking.
 
 When no `sessionId` is provided, your proxy URLs are rotated round-robin, whereas Apify Proxy manages their rotation using black magic to get the best performance.
 
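The hunk above edits the paragraph describing `proxyConfiguration.newUrl()`. As a rough illustration of the behaviour that paragraph describes (a sketch only, assuming Crawlee's `ProxyConfiguration` class and made-up proxy URLs), the `sessionId` to proxy URL pairing works roughly like this:

```ts
import { ProxyConfiguration } from 'crawlee';

const proxyConfiguration = new ProxyConfiguration({
    proxyUrls: [
        // Hypothetical proxy URLs, for illustration only.
        'http://proxy-1.example.com:8000',
        'http://proxy-2.example.com:8000',
    ],
});

// Calls that share a sessionId keep returning the same proxy URL,
// so one simulated user keeps one IP address.
const first = await proxyConfiguration.newUrl('session_a');
const second = await proxyConfiguration.newUrl('session_a');
console.log(first === second); // true

// Without a sessionId, the URLs rotate round-robin.
const rotated = await proxyConfiguration.newUrl();
console.log(rotated);
```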
diff --git a/docs/guides/session_management.mdx b/docs/guides/session_management.mdx
index 710fc576fe..9691d2a832 100644
--- a/docs/guides/session_management.mdx
+++ b/docs/guides/session_management.mdx
@@ -6,7 +6,7 @@ title: Session Management
 import ApiLink from '@site/src/components/ApiLink';
 import { CrawleeApiLink } from '@site/src/components/CrawleeLinks';
 
-​`SessionPool` is a class that allows you to handle the rotation of proxy IP addresses along with cookies and other custom settings in Apify SDK.
+`SessionPool` is a class that allows you to handle the rotation of proxy IP addresses along with cookies and other custom settings in Apify SDK.
 
 The main benefit of a Session pool is that you can filter out blocked or non-working proxies, so your actor does not retry requests over known blocked/non-working proxies.
 
diff --git a/website/versioned_docs/version-3.0/guides/docker_images.mdx b/website/versioned_docs/version-3.0/guides/docker_images.mdx
index a702f814f9..d5af0c5885 100644
--- a/website/versioned_docs/version-3.0/guides/docker_images.mdx
+++ b/website/versioned_docs/version-3.0/guides/docker_images.mdx
@@ -103,7 +103,7 @@ When you use only what you need, you'll be rewarded with reasonable build and st
 
 This is the smallest image we have based on Alpine Linux. It does not include any browsers, and it's therefore best used with `CheerioCrawler`. It benefits from lightning fast builds and container startups.
 
-​`PuppeteerCrawler`, `PlaywrightCrawler` and other browser based features will **NOT** work with this image.
+`PuppeteerCrawler`, `PlaywrightCrawler` and other browser based features will **NOT** work with this image.
 
 ```dockerfile
 FROM apify/actor-node:16
diff --git a/website/versioned_docs/version-3.0/guides/proxy_management.mdx b/website/versioned_docs/version-3.0/guides/proxy_management.mdx
index 597bb24fe0..3b0f011135 100644
--- a/website/versioned_docs/version-3.0/guides/proxy_management.mdx
+++ b/website/versioned_docs/version-3.0/guides/proxy_management.mdx
@@ -80,7 +80,7 @@ Your crawlers will now use the selected proxies for all connections.
 
 ### IP Rotation and session management
 
-​`proxyConfiguration.newUrl()` allows you to pass a `sessionId` parameter. It will then be used to create a `sessionId`-`proxyUrl` pair, and subsequent `newUrl()` calls with the same `sessionId` will always return the same `proxyUrl`. This is extremely useful in scraping, because you want to create the impression of a real user. See the [session management guide](../guides/session-management) and `SessionPool` class for more information on how keeping a real session helps you avoid blocking.
+`proxyConfiguration.newUrl()` allows you to pass a `sessionId` parameter. It will then be used to create a `sessionId`-`proxyUrl` pair, and subsequent `newUrl()` calls with the same `sessionId` will always return the same `proxyUrl`. This is extremely useful in scraping, because you want to create the impression of a real user. See the [session management guide](../guides/session-management) and `SessionPool` class for more information on how keeping a real session helps you avoid blocking.
 
 When no `sessionId` is provided, your proxy URLs are rotated round-robin, whereas Apify Proxy manages their rotation using black magic to get the best performance.
 
diff --git a/website/versioned_docs/version-3.0/guides/session_management.mdx b/website/versioned_docs/version-3.0/guides/session_management.mdx
index 710fc576fe..9691d2a832 100644
--- a/website/versioned_docs/version-3.0/guides/session_management.mdx
+++ b/website/versioned_docs/version-3.0/guides/session_management.mdx
@@ -6,7 +6,7 @@ title: Session Management
 import ApiLink from '@site/src/components/ApiLink';
 import { CrawleeApiLink } from '@site/src/components/CrawleeLinks';
 
-​`SessionPool` is a class that allows you to handle the rotation of proxy IP addresses along with cookies and other custom settings in Apify SDK.
+`SessionPool` is a class that allows you to handle the rotation of proxy IP addresses along with cookies and other custom settings in Apify SDK.
 
 The main benefit of a Session pool is that you can filter out blocked or non-working proxies, so your actor does not retry requests over known blocked/non-working proxies.
 
diff --git a/website/versioned_docs/version-3.1/guides/docker_images.mdx b/website/versioned_docs/version-3.1/guides/docker_images.mdx
index a702f814f9..d5af0c5885 100644
--- a/website/versioned_docs/version-3.1/guides/docker_images.mdx
+++ b/website/versioned_docs/version-3.1/guides/docker_images.mdx
@@ -103,7 +103,7 @@ When you use only what you need, you'll be rewarded with reasonable build and st
 
 This is the smallest image we have based on Alpine Linux. It does not include any browsers, and it's therefore best used with `CheerioCrawler`. It benefits from lightning fast builds and container startups.
 
-​`PuppeteerCrawler`, `PlaywrightCrawler` and other browser based features will **NOT** work with this image.
+`PuppeteerCrawler`, `PlaywrightCrawler` and other browser based features will **NOT** work with this image.
 
 ```dockerfile
 FROM apify/actor-node:16
diff --git a/website/versioned_docs/version-3.1/guides/proxy_management.mdx b/website/versioned_docs/version-3.1/guides/proxy_management.mdx
index 597bb24fe0..3b0f011135 100644
--- a/website/versioned_docs/version-3.1/guides/proxy_management.mdx
+++ b/website/versioned_docs/version-3.1/guides/proxy_management.mdx
@@ -80,7 +80,7 @@ Your crawlers will now use the selected proxies for all connections.
 
 ### IP Rotation and session management
 
-​`proxyConfiguration.newUrl()` allows you to pass a `sessionId` parameter. It will then be used to create a `sessionId`-`proxyUrl` pair, and subsequent `newUrl()` calls with the same `sessionId` will always return the same `proxyUrl`. This is extremely useful in scraping, because you want to create the impression of a real user. See the [session management guide](../guides/session-management) and `SessionPool` class for more information on how keeping a real session helps you avoid blocking.
+`proxyConfiguration.newUrl()` allows you to pass a `sessionId` parameter. It will then be used to create a `sessionId`-`proxyUrl` pair, and subsequent `newUrl()` calls with the same `sessionId` will always return the same `proxyUrl`. This is extremely useful in scraping, because you want to create the impression of a real user. See the [session management guide](../guides/session-management) and `SessionPool` class for more information on how keeping a real session helps you avoid blocking.
 
 When no `sessionId` is provided, your proxy URLs are rotated round-robin, whereas Apify Proxy manages their rotation using black magic to get the best performance.
 
diff --git a/website/versioned_docs/version-3.1/guides/session_management.mdx b/website/versioned_docs/version-3.1/guides/session_management.mdx
index b944002b3c..3c634c7d40 100644
--- a/website/versioned_docs/version-3.1/guides/session_management.mdx
+++ b/website/versioned_docs/version-3.1/guides/session_management.mdx
@@ -6,7 +6,7 @@ title: Session Management
 import ApiLink from '@site/src/components/ApiLink';
 import { CrawleeApiLink } from '@site/src/components/CrawleeLinks';
 
-​`SessionPool` is a class that allows you to handle the rotation of proxy IP addresses along with cookies and other custom settings in Apify SDK.
+`SessionPool` is a class that allows you to handle the rotation of proxy IP addresses along with cookies and other custom settings in Apify SDK.
 
 The main benefit of a Session pool is that you can filter out blocked or non-working proxies, so your Actor does not retry requests over known blocked/non-working proxies.
 
diff --git a/website/versioned_docs/version-3.2/guides/docker_images.mdx b/website/versioned_docs/version-3.2/guides/docker_images.mdx
index a702f814f9..d5af0c5885 100644
--- a/website/versioned_docs/version-3.2/guides/docker_images.mdx
+++ b/website/versioned_docs/version-3.2/guides/docker_images.mdx
@@ -103,7 +103,7 @@ When you use only what you need, you'll be rewarded with reasonable build and st
 
 This is the smallest image we have based on Alpine Linux. It does not include any browsers, and it's therefore best used with `CheerioCrawler`. It benefits from lightning fast builds and container startups.
 
-​`PuppeteerCrawler`, `PlaywrightCrawler` and other browser based features will **NOT** work with this image.
+`PuppeteerCrawler`, `PlaywrightCrawler` and other browser based features will **NOT** work with this image.
 
 ```dockerfile
 FROM apify/actor-node:16
diff --git a/website/versioned_docs/version-3.2/guides/proxy_management.mdx b/website/versioned_docs/version-3.2/guides/proxy_management.mdx
index 597bb24fe0..3b0f011135 100644
--- a/website/versioned_docs/version-3.2/guides/proxy_management.mdx
+++ b/website/versioned_docs/version-3.2/guides/proxy_management.mdx
@@ -80,7 +80,7 @@ Your crawlers will now use the selected proxies for all connections.
 
 ### IP Rotation and session management
 
-​`proxyConfiguration.newUrl()` allows you to pass a `sessionId` parameter. It will then be used to create a `sessionId`-`proxyUrl` pair, and subsequent `newUrl()` calls with the same `sessionId` will always return the same `proxyUrl`. This is extremely useful in scraping, because you want to create the impression of a real user. See the [session management guide](../guides/session-management) and `SessionPool` class for more information on how keeping a real session helps you avoid blocking.
+`proxyConfiguration.newUrl()` allows you to pass a `sessionId` parameter. It will then be used to create a `sessionId`-`proxyUrl` pair, and subsequent `newUrl()` calls with the same `sessionId` will always return the same `proxyUrl`. This is extremely useful in scraping, because you want to create the impression of a real user. See the [session management guide](../guides/session-management) and `SessionPool` class for more information on how keeping a real session helps you avoid blocking.
 
 When no `sessionId` is provided, your proxy URLs are rotated round-robin, whereas Apify Proxy manages their rotation using black magic to get the best performance.
 
diff --git a/website/versioned_docs/version-3.2/guides/session_management.mdx b/website/versioned_docs/version-3.2/guides/session_management.mdx
index 710fc576fe..9691d2a832 100644
--- a/website/versioned_docs/version-3.2/guides/session_management.mdx
+++ b/website/versioned_docs/version-3.2/guides/session_management.mdx
@@ -6,7 +6,7 @@ title: Session Management
 import ApiLink from '@site/src/components/ApiLink';
 import { CrawleeApiLink } from '@site/src/components/CrawleeLinks';
 
-​`SessionPool` is a class that allows you to handle the rotation of proxy IP addresses along with cookies and other custom settings in Apify SDK.
+`SessionPool` is a class that allows you to handle the rotation of proxy IP addresses along with cookies and other custom settings in Apify SDK.
 
 The main benefit of a Session pool is that you can filter out blocked or non-working proxies, so your actor does not retry requests over known blocked/non-working proxies.
 
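The `SessionPool` paragraph patched above (in each of its versioned copies) refers to rotating sessions and dropping blocked ones. A minimal sketch of that idea, assuming Crawlee's `CheerioCrawler` and treating an HTTP 403 response as a sign of blocking:

```ts
import { CheerioCrawler } from 'crawlee';

const crawler = new CheerioCrawler({
    // Turn on the session pool and keep cookies per session,
    // so each session behaves like a single returning user.
    useSessionPool: true,
    persistCookiesPerSession: true,
    async requestHandler({ request, response, session }) {
        if (response.statusCode === 403) {
            // A response that looks blocked: retire the session so its
            // proxy/cookie combination is not reused for later requests.
            session?.retire();
            throw new Error(`Blocked on ${request.url}, retiring session.`);
        }
        // ... extract data here ...
    },
});

await crawler.run(['https://crawlee.dev']);
```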