// bidding_watcher/src/crawler/services/chdtp_target.ts

import * as puppeteer from 'puppeteer';
import { Logger } from '@nestjs/common';
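
/**
 * Move the mouse to a handful of random viewport coordinates, using a
 * randomized number of steps and random pauses between moves, so the session
 * looks less like an automated crawler.
 */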
async function simulateHumanMouseMovement(page: puppeteer.Page) {
  const viewport = page.viewport();
  if (!viewport) return;
  const movements = 5 + Math.floor(Math.random() * 5);
  for (let i = 0; i < movements; i++) {
    const x = Math.floor(Math.random() * viewport.width);
    const y = Math.floor(Math.random() * viewport.height);
    await page.mouse.move(x, y, {
      steps: 10 + Math.floor(Math.random() * 20),
    });
    await new Promise((r) => setTimeout(r, 100 + Math.random() * 400));
  }
}
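
/**
 * Scroll down the page in several random-sized smooth increments with random
 * pauses, then smoothly scroll back to the top.
 */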
async function simulateHumanScrolling(page: puppeteer.Page) {
  const scrollCount = 3 + Math.floor(Math.random() * 5);
  for (let i = 0; i < scrollCount; i++) {
    const scrollDistance = 100 + Math.floor(Math.random() * 400);
    await page.evaluate((distance) => {
      window.scrollBy({
        top: distance,
        behavior: 'smooth',
      });
    }, scrollDistance);
    await new Promise((r) => setTimeout(r, 500 + Math.random() * 1000));
  }
  await page.evaluate(() => {
    window.scrollTo({ top: 0, behavior: 'smooth' });
  });
  await new Promise((r) => setTimeout(r, 1000));
}
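
/** One bid announcement row extracted from the chdtp.com listing page. */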
export interface ChdtpResult {
  title: string;
  publishDate: Date;
  url: string;
}
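
/** Shape of the crawler object, used as the `this` type of crawl/extract. */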
interface ChdtpCrawlerType {
  name: string;
  url: string;
  baseUrl: string;
  extract(html: string): ChdtpResult[];
}

// Check whether an error is a proxy tunnel connection failure
function isTunnelConnectionFailedError(error: unknown): boolean {
  if (error instanceof Error) {
    return (
      error.message.includes('net::ERR_TUNNEL_CONNECTION_FAILED') ||
      error.message.includes('ERR_TUNNEL_CONNECTION_FAILED')
    );
  }
  return false;
}

// Retry an operation with increasing delays; only proxy tunnel failures are retried
async function delayRetry(
  operation: () => Promise<void>,
  maxRetries: number = 3,
  delayMs: number = 5000,
  logger?: Logger,
): Promise<void> {
  let lastError: unknown;
  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      await operation();
      return;
    } catch (error) {
      lastError = error;
      if (isTunnelConnectionFailedError(error)) {
        if (attempt < maxRetries) {
          const delay = delayMs * attempt; // delay grows with each attempt
          logger?.warn(
            `Proxy tunnel connection failed on attempt ${attempt}, retrying in ${delay / 1000}s...`,
          );
          await new Promise((resolve) => setTimeout(resolve, delay));
        } else {
          logger?.error(
            `Proxy tunnel connection failed, giving up after ${maxRetries} attempts`,
          );
          throw error;
        }
      } else {
        // Not a proxy error: rethrow immediately
        throw error;
      }
    }
  }
  throw lastError;
}

export const ChdtpCrawler = {
  name: '华电集团电子商务平台',
  url: 'https://www.chdtp.com/webs/queryWebZbgg.action?zbggType=1',
  baseUrl: 'https://www.chdtp.com/webs/',
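
  /**
   * Open the listing page through the configured proxy (if PROXY_USERNAME /
   * PROXY_PASSWORD are set), then walk up to maxPages result pages, extracting
   * rows from each and simulating human mouse/scroll activity between steps.
   */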
  async crawl(
    this: ChdtpCrawlerType,
    browser: puppeteer.Browser,
  ): Promise<ChdtpResult[]> {
    const logger = new Logger('ChdtpCrawler');
    const page = await browser.newPage();
    const username = process.env.PROXY_USERNAME;
    const password = process.env.PROXY_PASSWORD;
    if (username && password) {
      await page.authenticate({ username, password });
    }
    await page.setUserAgent(
      'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36',
    );

    const allResults: ChdtpResult[] = [];
    let currentPage = 1;
    const maxPages = 5; // Safety limit to prevent infinite loops during testing

    try {
      logger.log(`Navigating to ${this.url}...`);
      await delayRetry(
        async () => {
          await page.goto(this.url, { waitUntil: 'networkidle2', timeout: 60000 });
        },
        3,
        5000,
        logger,
      );

      logger.log('Simulating human mouse movements...');
      await simulateHumanMouseMovement(page);
      logger.log('Simulating human scrolling...');
      await simulateHumanScrolling(page);

      while (currentPage <= maxPages) {
        const content = await page.content();
        const pageResults = this.extract(content);

        if (pageResults.length === 0) {
          logger.warn(`No results found on page ${currentPage}, stopping.`);
          break;
        }
        allResults.push(...pageResults);
        logger.log(
          `Extracted ${pageResults.length} items from page ${currentPage}`,
        );

        logger.log('Simulating human mouse movements before pagination...');
        await simulateHumanMouseMovement(page);
        logger.log('Simulating human scrolling before pagination...');
        await simulateHumanScrolling(page);

        // Find the "Next Page" button
        // Using partial match for src to be robust against path variations
        const nextButtonSelector = 'input[type="image"][src*="page-next.png"]';
        const nextButton = await page.$(nextButtonSelector);
        if (!nextButton) {
          logger.log('Next page button not found. Reached end of list.');
          break;
        }
        logger.log(`Navigating to page ${currentPage + 1}...`);
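
        // Register the navigation wait and the click together so the
        // navigation triggered by the click is not missed.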
        try {
          await Promise.all([
            page.waitForNavigation({
              waitUntil: 'networkidle2',
              timeout: 60000,
            }),
            nextButton.click(),
          ]);
        } catch (navError) {
          const navErrorMessage =
            navError instanceof Error ? navError.message : String(navError);
          logger.error(
            `Navigation to page ${currentPage + 1} failed: ${navErrorMessage}`,
          );
          break;
        }
        currentPage++;

        logger.log('Simulating human mouse movements after pagination...');
        await simulateHumanMouseMovement(page);
        logger.log('Simulating human scrolling after pagination...');
        await simulateHumanScrolling(page);

        // Random delay between pages
        const delay = Math.floor(Math.random() * (3000 - 1000 + 1)) + 1000;
        await new Promise((resolve) => setTimeout(resolve, delay));
      }
      return allResults;
    } catch (error) {
      const errorMessage =
        error instanceof Error ? error.message : String(error);
      logger.error(`Failed to crawl ${this.name}: ${errorMessage}`);
      throw error;
    } finally {
      await page.close();
    }
  },
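
  /**
   * Parse announcement rows out of the listing HTML with a regular expression
   * and build absolute detail-page URLs from baseUrl plus the captured suffix.
   */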
  extract(this: ChdtpCrawlerType, html: string): ChdtpResult[] {
    const results: ChdtpResult[] = [];
    /**
     * Regex groups for chdtp.com:
     * 1: Status
     * 2: URL suffix
     * 3: Title
     * 4: Business Type
     * 5: Date
     */
    const regex =
      /<tr[^>]*>\s*<td class="td_1">.*?<span[^>]*>\s*(.*?)\s*<\/span>.*?<\/td>\s*<td class="td_2">\s*<a[^>]*href="javascript:toGetContent\('(.*?)'\)" title="(.*?)">.*?<\/a><\/td>\s*<td class="td_3">\s*<a[^>]*>\s*(.*?)\s*<\/a>\s*<\/td>\s*<td class="td_4"><span>\[(.*?)\]<\/span><\/td>/gs;

    let match: RegExpExecArray | null;

    while ((match = regex.exec(html)) !== null) {
      const urlSuffix = match[2]?.trim() ?? '';
      const title = match[3]?.trim() ?? '';
      const dateStr = match[5]?.trim() ?? '';

      if (title && urlSuffix) {
        const fullUrl = this.baseUrl + urlSuffix;

        results.push({
          title,
          publishDate: dateStr ? new Date(dateStr) : new Date(),
          // Collapse duplicate slashes without touching the "https://" protocol separator
          url: fullUrl.replace(/(?<!:)\/\//g, '/'),
        });
      }
    }
    return results;
  },
};
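
/*
 * Usage sketch (illustrative only, not part of this module; the launch options
 * and proxy address below are assumptions):
 *
 *   import * as puppeteer from 'puppeteer';
 *   import { ChdtpCrawler } from './chdtp_target';
 *
 *   async function runChdtpCrawl() {
 *     const browser = await puppeteer.launch({
 *       headless: true,
 *       args: ['--proxy-server=http://proxy.example.com:8080'], // hypothetical proxy
 *     });
 *     try {
 *       return await ChdtpCrawler.crawl(browser);
 *     } finally {
 *       await browser.close();
 *     }
 *   }
 */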