import { Injectable, Logger } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import * as puppeteer from 'puppeteer';
import { BidsService } from '../../bids/services/bid.service';
import { ChdtpCrawler } from './chdtp_target';
import { ChngCrawler } from './chng_target';
import { SzecpCrawler } from './szecp_target';
import { CdtCrawler } from './cdt_target';
import { EpsCrawler } from './eps_target';
import { CnncecpCrawler } from './cnncecp_target';
import { CgnpcCrawler } from './cgnpc_target';
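
/**
 * Crawls bid announcements from each registered target portal with a
 * shared Puppeteer browser and persists the results via BidsService.
 */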
@Injectable()
export class BidCrawlerService {
  private readonly logger = new Logger(BidCrawlerService.name);

  constructor(
    private bidsService: BidsService,
    private configService: ConfigService,
  ) {}
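
  /**
   * Runs one full crawl pass: launches a single browser, hands it to each
   * crawler in turn, and upserts every extracted item.
   */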
  async crawlAll() {
    this.logger.log('Starting crawl task with Puppeteer...');

    // Read the proxy configuration from environment variables
    const proxyHost = this.configService.get<string>('PROXY_HOST');
    const proxyPort = this.configService.get<string>('PROXY_PORT');
    const proxyUsername = this.configService.get<string>('PROXY_USERNAME');
    const proxyPassword = this.configService.get<string>('PROXY_PASSWORD');

    // Build the browser launch arguments: sandbox off for container use,
    // plus flags that reduce the automation fingerprint
    const args = [
      '--no-sandbox',
      '--disable-setuid-sandbox',
      '--disable-blink-features=AutomationControlled',
      '--disable-infobars',
      '--window-position=0,0',
      '--ignore-certificate-errors',
      '--ignore-certificate-errors-spki-list',
    ];

    if (proxyHost && proxyPort) {
      // Chromium ignores credentials embedded in a --proxy-server URL, so
      // only host:port is passed here; PROXY_USERNAME/PROXY_PASSWORD must be
      // applied per page via page.authenticate() inside the crawlers.
      args.push(`--proxy-server=http://${proxyHost}:${proxyPort}`);
      this.logger.log(`Using proxy: ${proxyHost}:${proxyPort}`);
    }
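
    // A minimal sketch of applying those credentials, assuming a crawler
    // opens its own pages (names here are illustrative, not the actual API):
    //
    //   const page = await browser.newPage();
    //   if (proxyUsername && proxyPassword) {
    //     await page.authenticate({ username: proxyUsername, password: proxyPassword });
    //   }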

    const browser = await puppeteer.launch({
      // Headful mode; headless browsers are more readily flagged by anti-bot checks
      headless: false,
      args,
    });
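
    // Each crawler class is expected to expose a static crawl(browser) that
    // resolves to the extracted items; crawler.name (the class name) is
    // recorded as each item's source.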
    const crawlers = [
      ChdtpCrawler,
      ChngCrawler,
      SzecpCrawler,
      CdtCrawler,
      EpsCrawler,
      CnncecpCrawler,
      CgnpcCrawler,
    ];

    try {
      for (const crawler of crawlers) {
        this.logger.log(`Crawling: ${crawler.name}`);
        try {
          const results = await crawler.crawl(browser);
          this.logger.log(`Extracted ${results.length} items from ${crawler.name}`);

          // createOrUpdate is an upsert, so items seen on earlier runs are
          // refreshed rather than duplicated
          for (const item of results) {
            await this.bidsService.createOrUpdate({
              title: item.title,
              url: item.url,
              publishDate: item.publishDate,
              source: crawler.name,
              unit: '',
            });
          }
        } catch (err) {
          // One failing source should not abort the remaining crawlers
          this.logger.error(`Error crawling ${crawler.name}: ${err.message}`);
        }
      }
    } catch (error) {
      this.logger.error(`Crawl task failed: ${error.message}`);
    } finally {
      // Always release the browser, even when the crawl loop throws
      await browser.close();
      this.logger.log('Crawl task finished.');
    }
  }
}
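
// A hypothetical way to run crawlAll() on a schedule (requires the
// @nestjs/schedule package; not part of this service as written):
//
//   @Cron(CronExpression.EVERY_HOUR)
//   async handleCron() {
//     await this.crawlAll();
//   }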