import { Injectable, Logger } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import * as puppeteer from 'puppeteer';
import { BidsService } from '../../bids/services/bid.service';
import { ChdtpCrawler } from './chdtp_target';
import { ChngCrawler } from './chng_target';
import { SzecpCrawler } from './szecp_target';
import { CdtCrawler } from './cdt_target';
import { EpsCrawler } from './eps_target';
import { CnncecpCrawler } from './cnncecp_target';
import { CgnpcCrawler } from './cgnpc_target';
import { CeicCrawler } from './ceic_target';
import { EspicCrawler } from './espic_target';
import { PowerbeijingCrawler } from './powerbeijing_target';
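
// Each *_target module is assumed to expose a crawler that is used statically
// below (`crawler.name`, `crawler.crawl(browser)`). A minimal sketch of that
// assumed contract (hypothetical names, not part of the original targets):
//
//   interface CrawledItem {
//     title: string;
//     url: string;
//     publishDate: string; // or Date, depending on what the targets return
//   }
//
//   interface CrawlerTarget {
//     name: string;
//     crawl(browser: puppeteer.Browser): Promise<CrawledItem[]>;
//   }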

@Injectable()
export class BidCrawlerService {
  private readonly logger = new Logger(BidCrawlerService.name);

  constructor(
    private bidsService: BidsService,
    private configService: ConfigService,
  ) {}

  async crawlAll() {
    this.logger.log('Starting crawl task with Puppeteer...');

    // Cap total execution time at 3 hours
    const maxExecutionTime = 3 * 60 * 60 * 1000; // 3 hours in milliseconds
    const startTime = Date.now();

    // Per-source crawl results
    const crawlResults: Record<string, { success: number; error?: string }> = {};

    // Crawlers that returned zero items, queued for a retry pass
    const zeroDataCrawlers: any[] = [];

    // Read proxy configuration from environment variables
    const proxyHost = this.configService.get<string>('PROXY_HOST');
    const proxyPort = this.configService.get<string>('PROXY_PORT');
    const proxyUsername = this.configService.get<string>('PROXY_USERNAME');
    const proxyPassword = this.configService.get<string>('PROXY_PASSWORD');

    // Build Chromium launch arguments
    const args = [
      '--no-sandbox',
      '--disable-setuid-sandbox',
      '--disable-blink-features=AutomationControlled',
      '--disable-infobars',
      '--window-position=0,0',
      '--ignore-certificate-errors',
      '--ignore-certificate-errors-spki-list',
    ];

    if (proxyHost && proxyPort) {
      const proxyUrl = proxyUsername && proxyPassword
        ? `http://${proxyUsername}:${proxyPassword}@${proxyHost}:${proxyPort}`
        : `http://${proxyHost}:${proxyPort}`;
      args.push(`--proxy-server=${proxyUrl}`);
      this.logger.log(`Using proxy: ${proxyHost}:${proxyPort}`);
    }
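
    // Note: Chromium does not honor credentials embedded in a --proxy-server
    // URL, so the username/password above will likely be ignored. If the proxy
    // requires authentication, each page typically needs a call like the
    // following before navigating (a hedged sketch; where pages are opened is
    // up to the individual crawler targets):
    //
    //   await page.authenticate({ username: proxyUsername, password: proxyPassword });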

    const browser = await puppeteer.launch({
      headless: false,
      args,
    });
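
    // Note: headless: false requires a graphical environment. On a bare server
    // this launch fails unless a virtual display is provided (for example by
    // running the process under xvfb-run). The flag is kept as found, on the
    // assumption that some portals block headless browsers.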

    const crawlers = [
      ChdtpCrawler,
      ChngCrawler,
      SzecpCrawler,
      CdtCrawler,
      EpsCrawler,
      CnncecpCrawler,
      CgnpcCrawler,
      CeicCrawler,
      EspicCrawler,
      PowerbeijingCrawler,
    ];

    try {
      for (const crawler of crawlers) {
        this.logger.log(`Crawling: ${crawler.name}`);

        // Stop early if the time budget is exhausted
        const elapsedTime = Date.now() - startTime;
        if (elapsedTime > maxExecutionTime) {
          this.logger.warn(`⚠️ Crawl task exceeded maximum execution time of 3 hours. Stopping...`);
          this.logger.warn(`⚠️ Total elapsed time: ${Math.floor(elapsedTime / 1000 / 60)} minutes`);
          break;
        }

        try {
          const results = await crawler.crawl(browser);
          this.logger.log(`Extracted ${results.length} items from ${crawler.name}`);

          // Record the success count
          crawlResults[crawler.name] = { success: results.length };

          // Queue the crawler for a retry pass if it returned no data
          if (results.length === 0) {
            zeroDataCrawlers.push(crawler);
          }

          for (const item of results) {
            await this.bidsService.createOrUpdate({
              title: item.title,
              url: item.url,
              publishDate: item.publishDate,
              source: crawler.name,
              unit: '',
            });
          }
        } catch (err) {
          this.logger.error(`Error crawling ${crawler.name}: ${err.message}`);
          // Record the failure
          crawlResults[crawler.name] = { success: 0, error: err.message };
        }
      }
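
      // createOrUpdate is assumed to upsert (for example keyed on url), so the
      // retry pass below can re-save items without creating duplicate rows.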

      // Retry crawlers that returned zero items
      if (zeroDataCrawlers.length > 0) {
        this.logger.log(`Retrying ${zeroDataCrawlers.length} crawlers with zero data...`);

        for (const crawler of zeroDataCrawlers) {
          this.logger.log(`Retrying: ${crawler.name}`);

          // Stop early if the time budget is exhausted
          const elapsedTime = Date.now() - startTime;
          if (elapsedTime > maxExecutionTime) {
            this.logger.warn(`⚠️ Crawl task exceeded maximum execution time of 3 hours. Stopping retry...`);
            this.logger.warn(`⚠️ Total elapsed time: ${Math.floor(elapsedTime / 1000 / 60)} minutes`);
            break;
          }

          try {
            const results = await crawler.crawl(browser);
            this.logger.log(`Retry extracted ${results.length} items from ${crawler.name}`);

            // Update the recorded result
            crawlResults[crawler.name] = { success: results.length };

            for (const item of results) {
              await this.bidsService.createOrUpdate({
                title: item.title,
                url: item.url,
                publishDate: item.publishDate,
                source: crawler.name,
                unit: '',
              });
            }
          } catch (err) {
            this.logger.error(`Error retrying ${crawler.name}: ${err.message}`);
            // Record the failure
            crawlResults[crawler.name] = { success: 0, error: err.message };
          }
        }
      }
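
      // The retry pass above mirrors the main loop line for line. A minimal
      // refactor sketch that would remove the duplication (hypothetical
      // helper, not in the original):
      //
      //   private async runCrawler(crawler, browser, crawlResults) {
      //     const results = await crawler.crawl(browser);
      //     crawlResults[crawler.name] = { success: results.length };
      //     for (const item of results) {
      //       await this.bidsService.createOrUpdate({ ...item, source: crawler.name, unit: '' });
      //     }
      //     return results.length;
      //   }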

    } catch (error) {
      this.logger.error(`Crawl task failed: ${error.message}`);
    } finally {
      await browser.close();

      const totalTime = Date.now() - startTime;
      const minutes = Math.floor(totalTime / 1000 / 60);
      this.logger.log(`Crawl task finished. Total time: ${minutes} minutes`);

      if (totalTime > maxExecutionTime) {
        this.logger.warn(`⚠️ Crawl task exceeded maximum execution time of 3 hours.`);
      }

      // Print a summary of the run
      this.logger.log('='.repeat(50));
      this.logger.log('Crawl Summary');
      this.logger.log('='.repeat(50));

      let totalSuccess = 0;
      let errorCount = 0;

      for (const [source, result] of Object.entries(crawlResults)) {
        if (result.error) {
          this.logger.error(`❌ ${source}: failed - ${result.error}`);
          errorCount++;
        } else {
          this.logger.log(`✅ ${source}: extracted ${result.success} items`);
          totalSuccess += result.success;
        }
      }

      this.logger.log('='.repeat(50));
      this.logger.log(`Total: ${totalSuccess} items, ${errorCount} sources failed`);
      this.logger.log('='.repeat(50));
    }
  }
}
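
// A hedged usage sketch: this service is presumably triggered on a schedule.
// With @nestjs/schedule installed, the wiring could look like this
// (hypothetical class and cron expression, not part of the original file):
//
//   import { Cron } from '@nestjs/schedule';
//
//   @Injectable()
//   export class BidCrawlerScheduler {
//     constructor(private crawlerService: BidCrawlerService) {}
//
//     @Cron('0 2 * * *') // every day at 02:00
//     async handleCron() {
//       await this.crawlerService.crawlAll();
//     }
//   }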