diff --git a/CHANGELOG.md b/CHANGELOG.md index 9edd7e5..fad3ca6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,27 @@ +# [v9.0.0](https://github.com/coder-hxl/x-crawl/compare/v8.3.1..v9.0.0) (2024-03-16) + +### 🚨 Breaking Changes + +- The enableRandomFingerprint attribute of XCrawlConfig configuration is changed to false by default +- Drop support for Node16 + +### ⛓️ Dependencies + +- puppeteer upgraded from 21.6.1 to 22.5.0 +- https-proxy-agent upgraded from 7.0.1 to 7.0.4 + +--- + +### 🚨 重大改变 + +- XCrawlConfig 配置的 enableRandomFingerprint 属性默认改为 false +- 放弃对 Node16 的支持 + +### ⛓️ 依赖关系 + +- puppeteer 从 21.6.1 升至 22.5.0 +- https-proxy-agent 从 7.0.1 升至 7.0.4 + # [v8.3.1](https://github.com/coder-hxl/x-crawl/compare/v8.3.0..v8.3.1) (2023-12-26) ### 🚀 Features diff --git a/package.json b/package.json index d6dedb1..979e0cd 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "private": true, "name": "x-crawl", - "version": "8.3.1", + "version": "9.0.0", "author": "coderHXL", "description": "x-crawl is a flexible Node.js multifunctional crawler library.", "license": "MIT", diff --git a/publish/README.md b/publish/README.md index a26ebfc..dd293d4 100644 --- a/publish/README.md +++ b/publish/README.md @@ -258,7 +258,7 @@ const myXCrawl = xCrawl({ }) ``` -The enableRandomFingerprint option defaults to true. +The enableRandomFingerprint option defaults to false. - true: Enable random device fingerprinting. The fingerprint configuration of the target can be specified through advanced configuration or detailed target configuration. - false: Turns off random device fingerprinting, does not affect the fingerprint configuration specified for the target by advanced configuration or detailed target configuration. 
@@ -1535,7 +1535,7 @@ export interface XCrawlConfig extends CrawlCommonConfig { **Default Value** - mode: 'async' -- enableRandomFingerprint: true +- enableRandomFingerprint: false - baseUrl: undefined - intervalTime: undefined - log: { start: true, process: true, result: true } @@ -2073,7 +2073,7 @@ The crawlPage API has built-in [puppeteer](https://github.com/puppeteer/puppetee ### Using crawlPage API causes the program to crash -If you need to crawl many pages in one crawlPage, it is recommended that after crawling each page, use [onCrawlItemComplete life cycle function] (#onCrawlItemComplete) to process the results of each target and close the page instance. If no shutdown operation is performed, then The program may crash due to too many pages being opened (related to the performance of the device itself). +If you need to crawl many pages in one crawlPage, it is recommended that after crawling each page, use [onCrawlItemComplete life cycle function](#onCrawlItemComplete) to process the results of each target and close the page instance. If no shutdown operation is performed, then the program may crash due to too many pages being opened (related to the performance of the device itself). ```js import xCrawl from 'x-crawl' diff --git a/publish/package.json b/publish/package.json index 92cb144..de47ec4 100644 --- a/publish/package.json +++ b/publish/package.json @@ -1,6 +1,6 @@ { "name": "x-crawl", - "version": "8.3.1", + "version": "9.0.0", "author": "coderHXL", "description": "x-crawl is a flexible Node.js multifunctional crawler library.", "license": "MIT", @@ -35,11 +35,11 @@ } }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" }, "dependencies": { "chalk": "4.1.2", - "https-proxy-agent": "^7.0.1", + "https-proxy-agent": "^7.0.4", "puppeteer": "22.5.0" } }