bidding_watcher/src/crawler/services/bid-crawler.service.ts
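// BidCrawlerService launches a single Puppeteer browser, runs each registered
// portal crawler in sequence, and upserts the extracted announcements through
// BidsService, with a 1-hour budget for the whole run.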
import { Injectable, Logger } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import * as puppeteer from 'puppeteer';
import { BidsService } from '../../bids/services/bid.service';
import { ChdtpCrawler } from './chdtp_target';
import { ChngCrawler } from './chng_target';
import { SzecpCrawler } from './szecp_target';
import { CdtCrawler } from './cdt_target';
import { EpsCrawler } from './eps_target';
import { CnncecpCrawler } from './cnncecp_target';
import { CgnpcCrawler } from './cgnpc_target';
import { CeicCrawler } from './ceic_target';
import { EspicCrawler } from './espic_target';
import { PowerbeijingCrawler } from './powerbeijing_target';
@Injectable()
export class BidCrawlerService {
  private readonly logger = new Logger(BidCrawlerService.name);

  constructor(
    private bidsService: BidsService,
    private configService: ConfigService,
  ) {}
  async crawlAll() {
    this.logger.log('Starting crawl task with Puppeteer...');

    // Cap total execution time at 1 hour
    const maxExecutionTime = 60 * 60 * 1000; // 1 hour in milliseconds
    const startTime = Date.now();

    // Read proxy configuration from environment variables
    const proxyHost = this.configService.get<string>('PROXY_HOST');
    const proxyPort = this.configService.get<string>('PROXY_PORT');
    const proxyUsername = this.configService.get<string>('PROXY_USERNAME');
    const proxyPassword = this.configService.get<string>('PROXY_PASSWORD');

    // Build browser launch arguments (proxy appended below if configured)
    const args = [
      '--no-sandbox',
      '--disable-setuid-sandbox',
      '--disable-blink-features=AutomationControlled',
      '--disable-infobars',
      '--window-position=0,0',
      '--ignore-certificate-errors',
      '--ignore-certificate-errors-spki-list',
    ];
    if (proxyHost && proxyPort) {
      // Note: Chromium generally ignores credentials embedded in --proxy-server;
      // an authenticated proxy also needs page.authenticate() on each page
      // (see the sketch after the file).
      const proxyUrl = proxyUsername && proxyPassword
        ? `http://${proxyUsername}:${proxyPassword}@${proxyHost}:${proxyPort}`
        : `http://${proxyHost}:${proxyPort}`;
      args.push(`--proxy-server=${proxyUrl}`);
      this.logger.log(`Using proxy: ${proxyHost}:${proxyPort}`);
    }
    const browser = await puppeteer.launch({
      headless: false,
      args,
    });
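
    // Crawl targets, one module per procurement portal. Each appears to expose
    // a static crawl(browser), with crawler.name supplied by the class name
    // itself (see the sketch after the file for the assumed contract).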
    const crawlers = [
      ChdtpCrawler,
      ChngCrawler,
      SzecpCrawler,
      CdtCrawler,
      EpsCrawler,
      CnncecpCrawler,
      CgnpcCrawler,
      CeicCrawler,
      EspicCrawler,
      PowerbeijingCrawler,
    ];
    try {
      for (const crawler of crawlers) {
        this.logger.log(`Crawling: ${crawler.name}`);

        // Stop early if the task has exceeded its time budget
        const elapsedTime = Date.now() - startTime;
        if (elapsedTime > maxExecutionTime) {
          this.logger.warn(`⚠️ Crawl task exceeded maximum execution time of 1 hour. Stopping...`);
          this.logger.warn(`⚠️ Total elapsed time: ${Math.floor(elapsedTime / 1000 / 60)} minutes`);
          break;
        }

        try {
          const results = await crawler.crawl(browser);
          this.logger.log(`Extracted ${results.length} items from ${crawler.name}`);

          for (const item of results) {
            await this.bidsService.createOrUpdate({
              title: item.title,
              url: item.url,
              publishDate: item.publishDate,
              source: crawler.name,
              unit: '',
            });
          }
        } catch (err) {
          // Isolate per-target failures so one broken site doesn't abort the run
          this.logger.error(`Error crawling ${crawler.name}: ${err.message}`);
        }
      }
    } catch (error) {
      this.logger.error(`Crawl task failed: ${error.message}`);
    } finally {
      await browser.close();
      const totalTime = Date.now() - startTime;
      const minutes = Math.floor(totalTime / 1000 / 60);
      this.logger.log(`Crawl task finished. Total time: ${minutes} minutes`);
      if (totalTime > maxExecutionTime) {
        this.logger.warn(`⚠️ Crawl task exceeded maximum execution time of 1 hour.`);
      }
    }
  }
}
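
For reference, here is a minimal sketch of the target-module contract this service assumes: each *_target export (ChdtpCrawler and the rest) is a class whose static crawl(browser) resolves to { title, url, publishDate } items, with crawler.name supplied by the class name. Everything below is illustrative, not code from the real target files: ExampleCrawler, CrawledItem, the listing URL, and the selectors are invented, and the page.authenticate() call is the standard Puppeteer workaround for authenticated proxies, since Chromium ignores credentials embedded in --proxy-server.

import type { Browser } from 'puppeteer';

// Assumed shape of the items consumed by BidCrawlerService.crawlAll()
export interface CrawledItem {
  title: string;
  url: string;
  publishDate: string;
}

export class ExampleCrawler {
  static async crawl(browser: Browser): Promise<CrawledItem[]> {
    const page = await browser.newPage();
    try {
      // Supply proxy credentials per page; --proxy-server alone won't carry them
      const username = process.env.PROXY_USERNAME;
      const password = process.env.PROXY_PASSWORD;
      if (username && password) {
        await page.authenticate({ username, password });
      }

      // Hypothetical listing page and selectors, for illustration only
      await page.goto('https://example.com/bid-announcements', {
        waitUntil: 'networkidle2',
      });
      return await page.$$eval('ul.bid-list > li', (rows) =>
        rows.map((row) => {
          const link = row.querySelector<HTMLAnchorElement>('a');
          return {
            title: link?.textContent?.trim() ?? '',
            url: link?.href ?? '',
            publishDate: row.querySelector('.date')?.textContent?.trim() ?? '',
          };
        }),
      );
    } finally {
      await page.close();
    }
  }
}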