// chdtp.crawler.ts — crawler for the 华电集团电子商务平台 tender listing.
//
// Recent change notes:
// - Add timezone support to database module (+08:00)
// - Extract date formatting utilities to shared modules
// - Standardize timezone handling across frontend and backend
// - Improve date formatting consistency in UI components
// - Refactor crawler page.goto options for better readability
import * as puppeteer from 'puppeteer';

import { Logger } from '@nestjs/common';
async function simulateHumanMouseMovement(page: puppeteer.Page) {
|
|
const viewport = page.viewport();
|
|
if (!viewport) return;
|
|
|
|
const movements = 5 + Math.floor(Math.random() * 5);
|
|
|
|
for (let i = 0; i < movements; i++) {
|
|
const x = Math.floor(Math.random() * viewport.width);
|
|
const y = Math.floor(Math.random() * viewport.height);
|
|
|
|
await page.mouse.move(x, y, {
|
|
steps: 10 + Math.floor(Math.random() * 20),
|
|
});
|
|
|
|
await new Promise((r) => setTimeout(r, 100 + Math.random() * 400));
|
|
}
|
|
}
|
|
|
|
async function simulateHumanScrolling(page: puppeteer.Page) {
|
|
const scrollCount = 3 + Math.floor(Math.random() * 5);
|
|
|
|
for (let i = 0; i < scrollCount; i++) {
|
|
const scrollDistance = 100 + Math.floor(Math.random() * 400);
|
|
|
|
await page.evaluate((distance) => {
|
|
window.scrollBy({
|
|
top: distance,
|
|
behavior: 'smooth',
|
|
});
|
|
}, scrollDistance);
|
|
|
|
await new Promise((r) => setTimeout(r, 500 + Math.random() * 1000));
|
|
}
|
|
|
|
await page.evaluate(() => {
|
|
window.scrollTo({ top: 0, behavior: 'smooth' });
|
|
});
|
|
await new Promise((r) => setTimeout(r, 1000));
|
|
}
|
|
|
|
/**
 * One tender announcement row scraped from the chdtp.com listing.
 */
export interface ChdtpResult {
  // Announcement title, taken from the link's `title` attribute.
  title: string;
  // Publish date parsed from the bracketed date column; falls back to
  // the crawl time when the date cell is empty.
  publishDate: Date;
  // Absolute URL of the announcement detail page.
  url: string;
}
/**
 * Shape of the `this` context shared by the ChdtpCrawler methods
 * (used as an explicit `this` parameter on `crawl`/`extract`).
 */
interface ChdtpCrawlerType {
  // Human-readable site name used in log/error messages.
  name: string;
  // Entry URL of the announcement list to crawl.
  url: string;
  // Prefix joined with each relative URL suffix found in the HTML.
  baseUrl: string;
  // Parses one page of listing HTML into result rows.
  extract(html: string): ChdtpResult[];
}
// 检查错误是否为代理隧道连接失败
|
|
function isTunnelConnectionFailedError(error: unknown): boolean {
|
|
if (error instanceof Error) {
|
|
return (
|
|
error.message.includes('net::ERR_TUNNEL_CONNECTION_FAILED') ||
|
|
error.message.includes('ERR_TUNNEL_CONNECTION_FAILED')
|
|
);
|
|
}
|
|
return false;
|
|
}
|
|
|
|
// 延迟重试函数
|
|
async function delayRetry(
|
|
operation: () => Promise<void>,
|
|
maxRetries: number = 3,
|
|
delayMs: number = 5000,
|
|
logger?: Logger,
|
|
): Promise<void> {
|
|
let lastError: Error | unknown;
|
|
|
|
for (let attempt = 1; attempt <= maxRetries; attempt++) {
|
|
try {
|
|
await operation();
|
|
return;
|
|
} catch (error) {
|
|
lastError = error;
|
|
|
|
if (isTunnelConnectionFailedError(error)) {
|
|
if (attempt < maxRetries) {
|
|
const delay = delayMs * attempt; // 递增延迟
|
|
logger?.warn(
|
|
`代理隧道连接失败,第 ${attempt} 次尝试失败,${delay / 1000} 秒后重试...`,
|
|
);
|
|
await new Promise((resolve) => setTimeout(resolve, delay));
|
|
} else {
|
|
logger?.error(
|
|
`代理隧道连接失败,已达到最大重试次数 ${maxRetries} 次`,
|
|
);
|
|
throw error;
|
|
}
|
|
} else {
|
|
// 非代理错误,直接抛出
|
|
throw error;
|
|
}
|
|
}
|
|
}
|
|
|
|
throw lastError;
|
|
}
|
|
|
|
export const ChdtpCrawler = {
|
|
name: '华电集团电子商务平台 ',
|
|
url: 'https://www.chdtp.com/webs/queryWebZbgg.action?zbggType=1',
|
|
baseUrl: 'https://www.chdtp.com/webs/',
|
|
|
|
async crawl(
|
|
this: ChdtpCrawlerType,
|
|
browser: puppeteer.Browser,
|
|
): Promise<ChdtpResult[]> {
|
|
const logger = new Logger('ChdtpCrawler');
|
|
const page = await browser.newPage();
|
|
|
|
const username = process.env.PROXY_USERNAME;
|
|
const password = process.env.PROXY_PASSWORD;
|
|
if (username && password) {
|
|
await page.authenticate({ username, password });
|
|
}
|
|
|
|
await page.setUserAgent(
|
|
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36',
|
|
);
|
|
|
|
const allResults: ChdtpResult[] = [];
|
|
let currentPage = 1;
|
|
const maxPages = 5; // Safety limit to prevent infinite loops during testing
|
|
|
|
try {
|
|
logger.log(`Navigating to ${this.url}...`);
|
|
await delayRetry(
|
|
async () => {
|
|
await page.goto(this.url, {
|
|
waitUntil: 'networkidle2',
|
|
timeout: 60000,
|
|
});
|
|
},
|
|
3,
|
|
5000,
|
|
logger,
|
|
);
|
|
|
|
logger.log('Simulating human mouse movements...');
|
|
await simulateHumanMouseMovement(page);
|
|
|
|
logger.log('Simulating human scrolling...');
|
|
await simulateHumanScrolling(page);
|
|
|
|
while (currentPage <= maxPages) {
|
|
const content = await page.content();
|
|
const pageResults = this.extract(content);
|
|
|
|
if (pageResults.length === 0) {
|
|
logger.warn(`No results found on page ${currentPage}, stopping.`);
|
|
break;
|
|
}
|
|
|
|
allResults.push(...pageResults);
|
|
logger.log(
|
|
`Extracted ${pageResults.length} items from page ${currentPage}`,
|
|
);
|
|
|
|
logger.log('Simulating human mouse movements before pagination...');
|
|
await simulateHumanMouseMovement(page);
|
|
|
|
logger.log('Simulating human scrolling before pagination...');
|
|
await simulateHumanScrolling(page);
|
|
|
|
// Find the "Next Page" button
|
|
// Using partial match for src to be robust against path variations
|
|
const nextButtonSelector = 'input[type="image"][src*="page-next.png"]';
|
|
const nextButton = await page.$(nextButtonSelector);
|
|
|
|
if (!nextButton) {
|
|
logger.log('Next page button not found. Reached end of list.');
|
|
break;
|
|
}
|
|
|
|
logger.log(`Navigating to page ${currentPage + 1}...`);
|
|
|
|
try {
|
|
await Promise.all([
|
|
page.waitForNavigation({
|
|
waitUntil: 'networkidle2',
|
|
timeout: 60000,
|
|
}),
|
|
nextButton.click(),
|
|
]);
|
|
} catch (navError) {
|
|
const navErrorMessage =
|
|
navError instanceof Error ? navError.message : String(navError);
|
|
logger.error(
|
|
`Navigation to page ${currentPage + 1} failed: ${navErrorMessage}`,
|
|
);
|
|
break;
|
|
}
|
|
|
|
currentPage++;
|
|
|
|
logger.log('Simulating human mouse movements after pagination...');
|
|
await simulateHumanMouseMovement(page);
|
|
|
|
logger.log('Simulating human scrolling after pagination...');
|
|
await simulateHumanScrolling(page);
|
|
|
|
// Random delay between pages
|
|
const delay = Math.floor(Math.random() * (3000 - 1000 + 1)) + 1000;
|
|
await new Promise((resolve) => setTimeout(resolve, delay));
|
|
}
|
|
|
|
return allResults;
|
|
} catch (error) {
|
|
const errorMessage =
|
|
error instanceof Error ? error.message : String(error);
|
|
logger.error(`Failed to crawl ${this.name}: ${errorMessage}`);
|
|
throw error;
|
|
} finally {
|
|
await page.close();
|
|
}
|
|
},
|
|
|
|
extract(this: ChdtpCrawlerType, html: string): ChdtpResult[] {
|
|
const results: ChdtpResult[] = [];
|
|
/**
|
|
* Regex groups for chdtp.com:
|
|
* 1: Status
|
|
* 2: URL suffix
|
|
* 3: Title
|
|
* 4: Business Type
|
|
* 5: Date
|
|
*/
|
|
const regex =
|
|
/<tr[^>]*>\s*<td class="td_1">.*?<span[^>]*>\s*(.*?)\s*<\/span>.*?<\/td>\s*<td class="td_2">\s*<a[^>]*href="javascript:toGetContent\('(.*?)'\)" title="(.*?)">.*?<\/a><\/td>\s*<td class="td_3">\s*<a[^>]*>\s*(.*?)\s*<\/a>\s*<\/td>\s*<td class="td_4"><span>\[(.*?)\]<\/span><\/td>/gs;
|
|
|
|
let match: RegExpExecArray | null;
|
|
while ((match = regex.exec(html)) !== null) {
|
|
const urlSuffix = match[2]?.trim() ?? '';
|
|
const title = match[3]?.trim() ?? '';
|
|
const dateStr = match[5]?.trim() ?? '';
|
|
|
|
if (title && urlSuffix) {
|
|
const fullUrl = this.baseUrl + urlSuffix;
|
|
results.push({
|
|
title,
|
|
publishDate: dateStr ? new Date(dateStr) : new Date(),
|
|
url: fullUrl.replace(/\/\//g, '/'),
|
|
});
|
|
}
|
|
}
|
|
return results;
|
|
},
|
|
};
|