feat: 添加单源爬取功能并优化数据库同步
新增单源爬取功能,支持在界面上单独更新每个数据源 添加数据库同步脚本,支持主从数据库结构同步和数据同步 优化华能集团爬虫的页面导航和稳定性 新增系统托盘功能,支持最小化到托盘
This commit is contained in:
@@ -216,6 +216,97 @@ export class BidCrawlerService {
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Crawl a single bid source identified by its display name.
 *
 * Launches a dedicated (non-headless) browser, optionally routed through the
 * proxy configured via PROXY_HOST / PROXY_PORT / PROXY_USERNAME / PROXY_PASSWORD,
 * runs the matching crawler, upserts every extracted item, records crawl
 * statistics, and always closes the browser.
 *
 * @param sourceName display name of the crawler (matched against crawler.name)
 * @returns on success `{ success: true, source, count, latestPublishDate }`;
 *          on crawl failure `{ success: false, source, count: 0, error }`
 * @throws Error when no registered crawler matches sourceName
 */
async crawlSingleSource(sourceName: string) {
  this.logger.log(`Starting single source crawl for: ${sourceName}`);

  // Read proxy settings from the environment (all optional).
  const proxyHost = this.configService.get<string>('PROXY_HOST');
  const proxyPort = this.configService.get<string>('PROXY_PORT');
  const proxyUsername = this.configService.get<string>('PROXY_USERNAME');
  const proxyPassword = this.configService.get<string>('PROXY_PASSWORD');

  // Browser flags that reduce automation fingerprinting.
  // FIX: the original switches were misspelled ("certifcate") and were
  // silently ignored by Chromium; corrected to the real switch names.
  const args = [
    '--no-sandbox',
    '--disable-setuid-sandbox',
    '--disable-blink-features=AutomationControlled',
    '--disable-infobars',
    '--window-position=0,0',
    '--ignore-certificate-errors',
    '--ignore-certificate-errors-spki-list',
  ];

  if (proxyHost && proxyPort) {
    // Embed credentials in the proxy URL only when both are present.
    const proxyUrl = proxyUsername && proxyPassword
      ? `http://${proxyUsername}:${proxyPassword}@${proxyHost}:${proxyPort}`
      : `http://${proxyHost}:${proxyPort}`;
    args.push(`--proxy-server=${proxyUrl}`);
    this.logger.log(`Using proxy: ${proxyHost}:${proxyPort}`);
  }

  // headless: false — presumably because some target sites block headless
  // browsers (TODO confirm; matches the batch-crawl path's configuration).
  const browser = await puppeteer.launch({
    headless: false,
    args,
  });

  const crawlers = [ChdtpCrawler, ChngCrawler, SzecpCrawler, CdtCrawler, EpsCrawler, CnncecpCrawler, CgnpcCrawler, CeicCrawler, EspicCrawler, PowerbeijingCrawler, SdiccCrawler, CnoocCrawler];

  const targetCrawler = crawlers.find(c => c.name === sourceName);

  if (!targetCrawler) {
    // Close the browser before bailing out so we do not leak the process.
    await browser.close();
    throw new Error(`Crawler not found for source: ${sourceName}`);
  }

  try {
    this.logger.log(`Crawling: ${targetCrawler.name}`);

    const results = await targetCrawler.crawl(browser);
    this.logger.log(`Extracted ${results.length} items from ${targetCrawler.name}`);

    // Most recent publish date across all extracted items (null when empty).
    const latestPublishDate = results.length > 0
      ? results.reduce((latest, item) => {
          const itemDate = new Date(item.publishDate);
          return itemDate > latest ? itemDate : latest;
        }, new Date(0))
      : null;

    // Upsert each item so repeated crawls do not create duplicates.
    for (const item of results) {
      await this.bidsService.createOrUpdate({
        title: item.title,
        url: item.url,
        publishDate: item.publishDate,
        source: targetCrawler.name,
      });
    }

    // Persist crawl statistics for this source.
    await this.saveCrawlInfo(targetCrawler.name, results.length, latestPublishDate);

    return {
      success: true,
      source: targetCrawler.name,
      count: results.length,
      latestPublishDate,
    };
  } catch (err) {
    // A thrown value is not guaranteed to be an Error instance; guard
    // the .message access instead of assuming it exists.
    const message = err instanceof Error ? err.message : String(err);
    this.logger.error(`Error crawling ${targetCrawler.name}: ${message}`);

    // Record the failure so callers/UI can surface it.
    await this.saveCrawlInfo(targetCrawler.name, 0, null, message);

    return {
      success: false,
      source: targetCrawler.name,
      count: 0,
      error: message,
    };
  } finally {
    // Always release the browser, on success and on failure alike.
    await browser.close();
  }
}
|
||||
|
||||
private async saveCrawlInfo(
|
||||
source: string,
|
||||
count: number,
|
||||
|
||||
@@ -4,53 +4,75 @@ import { ChdtpResult } from './chdtp_target';
|
||||
|
||||
// 模拟人类鼠标移动
|
||||
async function simulateHumanMouseMovement(page: puppeteer.Page) {
|
||||
const viewport = page.viewport();
|
||||
if (!viewport) return;
|
||||
try {
|
||||
const viewport = page.viewport();
|
||||
if (!viewport) return;
|
||||
|
||||
const movements = 5 + Math.floor(Math.random() * 5); // 5-10次随机移动
|
||||
const movements = 5 + Math.floor(Math.random() * 5); // 5-10次随机移动
|
||||
|
||||
for (let i = 0; i < movements; i++) {
|
||||
const x = Math.floor(Math.random() * viewport.width);
|
||||
const y = Math.floor(Math.random() * viewport.height);
|
||||
|
||||
await page.mouse.move(x, y, {
|
||||
steps: 10 + Math.floor(Math.random() * 20) // 10-30步,使移动更平滑
|
||||
});
|
||||
|
||||
// 随机停顿 100-500ms
|
||||
await new Promise(r => setTimeout(r, 100 + Math.random() * 400));
|
||||
for (let i = 0; i < movements; i++) {
|
||||
// 检查页面是否仍然有效
|
||||
if (page.isClosed()) {
|
||||
console.log('Page was closed during mouse movement simulation');
|
||||
return;
|
||||
}
|
||||
|
||||
const x = Math.floor(Math.random() * viewport.width);
|
||||
const y = Math.floor(Math.random() * viewport.height);
|
||||
|
||||
await page.mouse.move(x, y, {
|
||||
steps: 10 + Math.floor(Math.random() * 20) // 10-30步,使移动更平滑
|
||||
});
|
||||
|
||||
// 随机停顿 100-500ms
|
||||
await new Promise(r => setTimeout(r, 100 + Math.random() * 400));
|
||||
}
|
||||
} catch (error) {
|
||||
console.log('Mouse movement simulation interrupted:', error.message);
|
||||
}
|
||||
}
|
||||
|
||||
// 模拟人类滚动
|
||||
async function simulateHumanScrolling(page: puppeteer.Page) {
|
||||
const scrollCount = 3 + Math.floor(Math.random() * 5); // 3-7次滚动
|
||||
try {
|
||||
const scrollCount = 3 + Math.floor(Math.random() * 5); // 3-7次滚动
|
||||
|
||||
for (let i = 0; i < scrollCount; i++) {
|
||||
const scrollDistance = 100 + Math.floor(Math.random() * 400); // 100-500px
|
||||
|
||||
await page.evaluate((distance) => {
|
||||
window.scrollBy({
|
||||
top: distance,
|
||||
behavior: 'smooth'
|
||||
for (let i = 0; i < scrollCount; i++) {
|
||||
// 检查页面是否仍然有效
|
||||
if (page.isClosed()) {
|
||||
console.log('Page was closed during scrolling simulation');
|
||||
return;
|
||||
}
|
||||
|
||||
const scrollDistance = 100 + Math.floor(Math.random() * 400); // 100-500px
|
||||
|
||||
await page.evaluate((distance) => {
|
||||
window.scrollBy({
|
||||
top: distance,
|
||||
behavior: 'smooth'
|
||||
});
|
||||
}, scrollDistance);
|
||||
|
||||
// 随机停顿 500-1500ms
|
||||
await new Promise(r => setTimeout(r, 500 + Math.random() * 1000));
|
||||
}
|
||||
|
||||
// 滚动回顶部
|
||||
if (!page.isClosed()) {
|
||||
await page.evaluate(() => {
|
||||
window.scrollTo({ top: 0, behavior: 'smooth' });
|
||||
});
|
||||
}, scrollDistance);
|
||||
|
||||
// 随机停顿 500-1500ms
|
||||
await new Promise(r => setTimeout(r, 500 + Math.random() * 1000));
|
||||
await new Promise(r => setTimeout(r, 1000));
|
||||
}
|
||||
} catch (error) {
|
||||
console.log('Scrolling simulation interrupted:', error.message);
|
||||
}
|
||||
|
||||
// 滚动回顶部
|
||||
await page.evaluate(() => {
|
||||
window.scrollTo({ top: 0, behavior: 'smooth' });
|
||||
});
|
||||
await new Promise(r => setTimeout(r, 1000));
|
||||
}
|
||||
|
||||
export const ChngCrawler = {
|
||||
name: '华能集团电子商务平台',
|
||||
url: 'https://ec.chng.com.cn/ecmall/index.html#/purchase/home?top=0',
|
||||
baseUrl: 'https://ec.chng.com.cn/ecmall/index.html',
|
||||
url: 'https://ec.chng.com.cn/channel/home/#/purchase?top=0',
|
||||
baseUrl: 'https://ec.chng.com.cn/channel/home/#',
|
||||
|
||||
async crawl(browser: puppeteer.Browser): Promise<ChdtpResult[]> {
|
||||
const logger = new Logger('ChngCrawler');
|
||||
@@ -106,14 +128,16 @@ export const ChngCrawler = {
|
||||
await page.authenticate({ username, password });
|
||||
}
|
||||
}
|
||||
// 模拟人类行为
|
||||
// 模拟人类行为
|
||||
logger.log('Simulating human mouse movements...');
|
||||
await simulateHumanMouseMovement(page);
|
||||
|
||||
logger.log('Simulating human scrolling...');
|
||||
await simulateHumanScrolling(page);
|
||||
|
||||
await page.waitForNavigation({ waitUntil: 'domcontentloaded' }).catch(() => {});
|
||||
// 等待页面稳定,不强制等待导航
|
||||
await new Promise(r => setTimeout(r, 3000));
|
||||
|
||||
// 模拟人类行为
|
||||
logger.log('Simulating human mouse movements...');
|
||||
await simulateHumanMouseMovement(page);
|
||||
@@ -215,8 +239,24 @@ export const ChngCrawler = {
|
||||
const nextButton = await page.$('svg[data-icon="right"]');
|
||||
if (!nextButton) break;
|
||||
|
||||
// 点击下一页前保存当前页面状态
|
||||
const currentUrl = page.url();
|
||||
|
||||
await nextButton.click();
|
||||
await new Promise(r => setTimeout(r, 5000));
|
||||
|
||||
// 等待页面导航完成
|
||||
try {
|
||||
await page.waitForFunction(
|
||||
(oldUrl) => window.location.href !== oldUrl,
|
||||
{ timeout: 10000 },
|
||||
currentUrl
|
||||
);
|
||||
} catch (e) {
|
||||
logger.warn('Navigation timeout, continuing anyway');
|
||||
}
|
||||
|
||||
// 等待页面内容加载
|
||||
await new Promise(r => setTimeout(r, 15000));
|
||||
currentPage++;
|
||||
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user