import * as puppeteer from 'puppeteer';
import { Logger } from '@nestjs/common';

// Simulate human-like mouse movement
async function simulateHumanMouseMovement(page: puppeteer.Page) {
  const viewport = page.viewport();
  if (!viewport) return;

  const movements = 5 + Math.floor(Math.random() * 5); // 5-9 random movements

  for (let i = 0; i < movements; i++) {
    const x = Math.floor(Math.random() * viewport.width);
    const y = Math.floor(Math.random() * viewport.height);
    await page.mouse.move(x, y, {
      steps: 10 + Math.floor(Math.random() * 20), // 10-29 steps for smoother movement
    });
    // Random pause of 100-500ms
    await new Promise((r) => setTimeout(r, 100 + Math.random() * 400));
  }
}

// Simulate human-like scrolling
async function simulateHumanScrolling(page: puppeteer.Page) {
  const scrollCount = 3 + Math.floor(Math.random() * 5); // 3-7 scrolls

  for (let i = 0; i < scrollCount; i++) {
    const scrollDistance = 100 + Math.floor(Math.random() * 400); // 100-500px
    await page.evaluate((distance) => {
      window.scrollBy({
        top: distance,
        behavior: 'smooth',
      });
    }, scrollDistance);
    // Random pause of 500-1500ms
    await new Promise((r) => setTimeout(r, 500 + Math.random() * 1000));
  }

  // Scroll back to the top
  await page.evaluate(() => {
    window.scrollTo({ top: 0, behavior: 'smooth' });
  });
  await new Promise((r) => setTimeout(r, 1000));
}

// Check whether an error is a proxy tunnel connection failure
function isTunnelConnectionFailedError(error: unknown): boolean {
  if (error instanceof Error) {
    return (
      error.message.includes('net::ERR_TUNNEL_CONNECTION_FAILED') ||
      error.message.includes('ERR_TUNNEL_CONNECTION_FAILED')
    );
  }
  return false;
}

// Retry an operation with an increasing delay between attempts
async function delayRetry(
  operation: () => Promise<void>,
  maxRetries: number = 3,
  delayMs: number = 5000,
  logger?: Logger,
): Promise<void> {
  let lastError: Error | unknown;

  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      await operation();
      return;
    } catch (error) {
      lastError = error;

      if (isTunnelConnectionFailedError(error)) {
        if (attempt < maxRetries) {
          const delay = delayMs * attempt; // Increase the delay with each attempt
          logger?.warn(
            `Proxy tunnel connection failed on attempt ${attempt}, retrying in ${delay / 1000}s...`,
          );
          await new Promise((resolve) => setTimeout(resolve, delay));
        } else {
          logger?.error(
            `Proxy tunnel connection failed after the maximum of ${maxRetries} attempts`,
          );
          throw error;
        }
      } else {
        // Not a proxy error; rethrow immediately
        throw error;
      }
    }
  }

  throw lastError;
}

export interface CgnpcResult {
  title: string;
  publishDate: Date;
  url: string;
}

interface CgnpcCrawlerType {
  name: string;
  url: string;
  baseUrl: string;
  extract(html: string): CgnpcResult[];
}

export const CgnpcCrawler = {
  name: '中广核电子商务平台',
  url: 'https://ecp.cgnpc.com.cn/zbgg.html',
  baseUrl: 'https://ecp.cgnpc.com.cn/',

  async crawl(
    this: CgnpcCrawlerType,
    browser: puppeteer.Browser,
  ): Promise<CgnpcResult[]> {
    const logger = new Logger('CgnpcCrawler');
    const page = await browser.newPage();

    const username = process.env.PROXY_USERNAME;
    const password = process.env.PROXY_PASSWORD;
    if (username && password) {
      await page.authenticate({ username, password });
    }

    await page.evaluateOnNewDocument(() => {
      Object.defineProperty(navigator, 'webdriver', { get: () => false });
      Object.defineProperty(navigator, 'language', { get: () => 'zh-CN' });
      Object.defineProperty(navigator, 'plugins', {
        get: () => [1, 2, 3, 4, 5],
      });
    });

    await page.setUserAgent(
      'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/143.0.0.0 Safari/537.36',
    );
    await page.setViewport({ width: 1920, height: 1080 });

    const allResults: CgnpcResult[] = [];
    let currentPage = 1;
    const maxPages = 5;

    try {
      logger.log(`Navigating to ${this.url}...`);
      await delayRetry(
        async () => {
          await page.goto(this.url, { waitUntil: 'networkidle2', timeout: 60000 });
        },
        3,
        5000,
        logger,
      );

      // Simulate human behavior
      logger.log('Simulating human mouse movements...');
      await simulateHumanMouseMovement(page);

      logger.log('Simulating human scrolling...');
      await simulateHumanScrolling(page);

      while (currentPage <= maxPages) {
        logger.log(`Processing page ${currentPage}...`);

        const content = await page.content();
        const pageResults = this.extract(content);

        if (pageResults.length === 0) {
          logger.warn(`No results found on page ${currentPage}, stopping.`);
          break;
        }

        allResults.push(...pageResults);
        logger.log(
          `Extracted ${pageResults.length} items from page ${currentPage}`,
        );

        // Simulate human behavior before pagination
        logger.log('Simulating human mouse movements before pagination...');
        await simulateHumanMouseMovement(page);

        logger.log('Simulating human scrolling before pagination...');
        await simulateHumanScrolling(page);

        // Locate the next-page button in the CGN pagination markup:
        // <button type="button" class="btn-prev" onclick="setPageIndex(2)"><i class="fa fa-angle-right"></i></button>
        const nextButtonSelector = 'button.btn-prev:not([disabled])';
        const nextButton = await page.$(nextButtonSelector);

        if (!nextButton) {
          logger.log('Next page button not found. Reached end of list.');
          break;
        }

        logger.log(`Navigating to page ${currentPage + 1}...`);

        try {
          // Click the next-page button
          await nextButton.click();
          await new Promise((r) => setTimeout(r, 3000)); // Wait for the page to load
        } catch (navError) {
          const navErrorMessage =
            navError instanceof Error ? navError.message : String(navError);
          logger.error(
            `Navigation to page ${currentPage + 1} failed: ${navErrorMessage}`,
          );
          break;
        }

        currentPage++;

        // Simulate human behavior after pagination
        logger.log('Simulating human mouse movements after pagination...');
        await simulateHumanMouseMovement(page);

        logger.log('Simulating human scrolling after pagination...');
        await simulateHumanScrolling(page);

        // Random delay of 1000-3000ms between pages
        const delay = Math.floor(Math.random() * (3000 - 1000 + 1)) + 1000;
        await new Promise((resolve) => setTimeout(resolve, delay));
      }

      return allResults;
    } catch (error) {
      const errorMessage =
        error instanceof Error ? error.message : String(error);
      logger.error(`Failed to crawl ${this.name}: ${errorMessage}`);
      throw error;
    } finally {
      await page.close();
    }
  },

  extract(this: CgnpcCrawlerType, html: string): CgnpcResult[] {
    const results: CgnpcResult[] = [];

    /**
     * Regex capture groups for ecp.cgnpc.com.cn:
     *   1: Title (title attribute)
     *   2: URL (href attribute)
     *   3: Date (文件获取截止时间 / document-acquisition deadline, format: 2026-01-23 17:00)
     *
     * Sample HTML structure:
     * <div class="zbnr">
     *   <div class="zbnr_left" style="width: calc(100% - 290px);">
     *     <a title="中广核新能源新疆公司2026年-2028年各场站线路运维检修服务框架协议"
     *        href="https://ecp.cgnpc.com.cn/Details.html?dataId=xxx&detailId=xxx" target="_blank">
     *       <h2><i>中广核新能源新疆公司2026年-2028年各场站线路运维检修服务框架协议</i></h2>
     *     </a>
     *   </div>
     *   <div class="zbnr_right" style="width: 270px;">
     *     <dl>
     *       <dt><p>文件获取截止时间</p><h2>2026-01-23 17:00</h2></dt>
     *       <dt><p>投标截止时间</p><h2>2026-01-30 09:00</h2></dt>
     *     </dl>
     *   </div>
     * </div>
     */
    const regex =
      /<div class="zbnr">[\s\S]*?<a[^>]*title="([^"]*)"[^>]*href="([^"]*)"[^>]*>[\s\S]*?<dt>[\s\S]*?<p>文件获取截止时间<\/p>[\s\S]*?<h2>\s*(\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2})\s*<\/h2>[\s\S]*?<\/div>/gs;

    let match: RegExpExecArray | null;
    while ((match = regex.exec(html)) !== null) {
      const title = match[1]?.trim() ?? '';
      const url = match[2]?.trim() ?? '';
      const dateStr = match[3]?.trim() ?? '';

      if (title && url) {
        const fullUrl = url.startsWith('http') ? url : this.baseUrl + url;
        results.push({
          title,
          publishDate: dateStr ? new Date(dateStr) : new Date(),
          // Collapse duplicate slashes in the path while preserving the "//" after the protocol
          url: fullUrl.replace(/([^:])\/\//g, '$1/'),
        });
      }
    }

    return results;
  },
};
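
/*
 * Usage sketch (illustrative only, not part of this module). It assumes the file
 * is saved as './cgnpc.crawler' and that an optional PROXY_SERVER environment
 * variable holds the proxy host:port for Chromium's --proxy-server flag; both
 * names are assumptions, not something this file defines. Proxy credentials are
 * still read from PROXY_USERNAME / PROXY_PASSWORD inside crawl().
 *
 *   import * as puppeteer from 'puppeteer';
 *   import { CgnpcCrawler } from './cgnpc.crawler';
 *
 *   async function main() {
 *     // Launch a browser, optionally routed through the proxy
 *     const browser = await puppeteer.launch({
 *       headless: true,
 *       args: process.env.PROXY_SERVER
 *         ? [`--proxy-server=${process.env.PROXY_SERVER}`]
 *         : [],
 *     });
 *     try {
 *       const results = await CgnpcCrawler.crawl(browser);
 *       console.log(`Fetched ${results.length} announcements`);
 *     } finally {
 *       await browser.close();
 *     }
 *   }
 *
 *   void main();
 */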