diff --git a/src/bids/services/bid.service.ts b/src/bids/services/bid.service.ts
index da91425..b14e427 100644
--- a/src/bids/services/bid.service.ts
+++ b/src/bids/services/bid.service.ts
@@ -15,7 +15,7 @@ interface SourceResult {
   source: string;
 }
 
-interface CrawlInfoAddStats {
+export interface CrawlInfoAddStats {
   source: string;
   count: number;
   latestUpdate: Date | string;
diff --git a/src/crawler/services/bid-crawler.service.ts b/src/crawler/services/bid-crawler.service.ts
index 4321525..f6ef2ec 100644
--- a/src/crawler/services/bid-crawler.service.ts
+++ b/src/crawler/services/bid-crawler.service.ts
@@ -24,10 +24,7 @@ interface CrawlResult {
   url: string;
 }
 
-interface Crawler {
-  name: string;
-  crawl(browser: puppeteer.Browser): Promise<CrawlResult[]>;
-}
+type AnyCrawler = typeof ChdtpCrawler | typeof ChngCrawler | typeof SzecpCrawler | typeof CdtCrawler | typeof EpsCrawler | typeof CnncecpCrawler | typeof CgnpcCrawler | typeof CeicCrawler | typeof EspicCrawler | typeof PowerbeijingCrawler | typeof SdiccCrawler | typeof CnoocCrawler;
 
 @Injectable()
 export class BidCrawlerService {
@@ -50,7 +47,7 @@ export class BidCrawlerService {
     const crawlResults: Record<string, CrawlResult[]> = {};
 
     // Track crawlers that returned zero items, for retry
-    const zeroDataCrawlers: Crawler[] = [];
+    const zeroDataCrawlers: AnyCrawler[] = [];
     // Read the proxy configuration from environment variables
     const proxyHost = this.configService.get('PROXY_HOST');
     const proxyPort = this.configService.get('PROXY_PORT');
@@ -114,7 +111,7 @@ export class BidCrawlerService {
       }
 
       try {
-        const results = await crawler.crawl(browser);
+        const results = await (crawler as any).crawl(browser);
        this.logger.log(
          `Extracted ${results.length} items from ${crawler.name}`,
        );
@@ -184,7 +181,7 @@ export class BidCrawlerService {
       }
 
       try {
-        const results = await crawler.crawl(browser);
+        const results = await (crawler as any).crawl(browser);
        this.logger.log(
          `Retry extracted ${results.length} items from ${crawler.name}`,
        );
@@ -337,7 +334,7 @@
 
     try {
       this.logger.log(`Crawling: ${targetCrawler.name}`);
-      const results = await targetCrawler.crawl(browser);
+      const results = await (targetCrawler as any).crawl(browser);
      this.logger.log(
        `Extracted ${results.length} items from ${targetCrawler.name}`,
      );
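
Note on the typing change: `AnyCrawler` is a union of the crawler classes' static sides, and every call site now casts to `any` before invoking `crawl` — presumably because the twelve static `crawl` signatures do not line up exactly (a union of mismatched function types is not directly callable in TypeScript). A minimal sketch of an alternative that would keep those calls type-checked is below. It assumes each crawler class exposes a compatible static `crawl`; the `CrawlerClass` contract, the `CrawlResult` fields, and `crawlAll` are illustrative names, not code from this repo.

```typescript
import type * as puppeteer from 'puppeteer';

// Assumed result shape; the real CrawlResult interface lives in
// bid-crawler.service.ts and may carry more fields.
interface CrawlResult {
  title: string;
  url: string;
}

// Hypothetical static-side contract: each crawler class provides a
// static crawl() and inherits the built-in static `name`. Typing the
// crawler list against this contract removes the need for `as any`.
interface CrawlerClass {
  readonly name: string;
  crawl(browser: puppeteer.Browser): Promise<CrawlResult[]>;
}

// The real classes (ChdtpCrawler, ChngCrawler, ...) would be imported
// from the crawler module; they are declared here only to keep the
// sketch self-contained, assuming their static crawl() signatures match.
declare const ChdtpCrawler: CrawlerClass;
declare const ChngCrawler: CrawlerClass;

const crawlers: CrawlerClass[] = [ChdtpCrawler, ChngCrawler];

async function crawlAll(browser: puppeteer.Browser): Promise<void> {
  for (const crawler of crawlers) {
    // No cast needed: crawl() is part of the declared contract.
    const results = await crawler.crawl(browser);
    console.log(`Extracted ${results.length} items from ${crawler.name}`);
  }
}
```

Whether this is worth adopting depends on how far the per-crawler `crawl` signatures actually diverge; if they already agree, the `as any` casts could likely be dropped even with the union type as written.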