chore: update .gitignore and add new files
Add ignore rules for *.png, *.log, *-lock.json, and *.woff2 files to .gitignore, and add an OFL.txt file. Also add the vue.svg icon file to support the frontend display. Update several TypeScript files to improve code formatting and readability.
@@ -11,13 +11,13 @@ async function simulateHumanMouseMovement(page: puppeteer.Page) {
   for (let i = 0; i < movements; i++) {
     const x = Math.floor(Math.random() * viewport.width);
     const y = Math.floor(Math.random() * viewport.height);
 
     await page.mouse.move(x, y, {
-      steps: 10 + Math.floor(Math.random() * 20) // 10-30 steps, for smoother movement
+      steps: 10 + Math.floor(Math.random() * 20), // 10-30 steps, for smoother movement
     });
 
     // Random pause of 100-500ms
-    await new Promise(r => setTimeout(r, 100 + Math.random() * 400));
+    await new Promise((r) => setTimeout(r, 100 + Math.random() * 400));
   }
 }
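Most `+` lines in this file are Prettier-style formatting changes: trailing commas and parentheses around single arrow-function parameters. The inline `new Promise((r) => setTimeout(r, ...))` idiom is a plain promise-based sleep; a minimal standalone sketch (the `sleep` helper name is mine, not from this repo):

```ts
// Hypothetical helper, not part of this commit: a promisified setTimeout.
const sleep = (ms: number): Promise<void> =>
  new Promise((resolve) => setTimeout(resolve, ms));

async function pauseLikeTheCrawler(): Promise<void> {
  // Pause 100-500ms, as the crawler does between mouse movements.
  await sleep(100 + Math.random() * 400);
}
```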
@@ -27,23 +27,23 @@ async function simulateHumanScrolling(page: puppeteer.Page) {
   for (let i = 0; i < scrollCount; i++) {
     const scrollDistance = 100 + Math.floor(Math.random() * 400); // 100-500px
 
     await page.evaluate((distance) => {
       window.scrollBy({
         top: distance,
-        behavior: 'smooth'
+        behavior: 'smooth',
       });
     }, scrollDistance);
 
     // Random pause of 500-1500ms
-    await new Promise(r => setTimeout(r, 500 + Math.random() * 1000));
+    await new Promise((r) => setTimeout(r, 500 + Math.random() * 1000));
   }
 
   // Scroll back to the top
   await page.evaluate(() => {
     window.scrollTo({ top: 0, behavior: 'smooth' });
   });
-  await new Promise(r => setTimeout(r, 1000));
+  await new Promise((r) => setTimeout(r, 1000));
 }
 
 export interface EpsResult {
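For context on the unchanged lines: the `(distance) => { ... }, scrollDistance` pair works because Puppeteer serializes extra arguments to `page.evaluate` and hands them to the callback inside the browser. A small sketch of that mechanism, assuming the same `puppeteer` import this file appears to use:

```ts
import * as puppeteer from 'puppeteer';

// Extra args to page.evaluate cross into the page context by serialization,
// so only serializable values can be passed this way.
async function readTitle(page: puppeteer.Page): Promise<string> {
  return page.evaluate((suffix) => {
    // Runs in the browser, not in Node; `suffix` arrived serialized.
    return document.title + suffix;
  }, ' | crawled');
}
```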
@@ -52,12 +52,22 @@ export interface EpsResult {
   url: string;
 }
 
+interface EpsCrawlerType {
+  name: string;
+  url: string;
+  baseUrl: string;
+  extract(html: string): EpsResult[];
+}
+
 export const EpsCrawler = {
   name: '中国三峡集团电子商务平台',
   url: 'https://eps.ctg.com.cn/cms/channel/1ywgg1/index.htm',
   baseUrl: 'https://eps.ctg.com.cn/',
 
-  async crawl(browser: puppeteer.Browser): Promise<EpsResult[]> {
+  async crawl(
+    this: EpsCrawlerType,
+    browser: puppeteer.Browser,
+  ): Promise<EpsResult[]> {
     const logger = new Logger('EpsCrawler');
     const page = await browser.newPage();
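The new `this: EpsCrawlerType` is a TypeScript `this` parameter: it exists only at compile time (no runtime argument) and tells the checker what `this` must be when the method is called, so accesses such as `this.name` and `this.baseUrl` type-check. A minimal illustration with hypothetical names:

```ts
interface Named {
  name: string;
}

const greeter = {
  name: 'example',
  // `this` parameter: erased from the emitted JavaScript entirely.
  greet(this: Named): string {
    return `hello from ${this.name}`;
  },
};

greeter.greet(); // OK: called on an object whose `name` is a string
// const detached = greeter.greet;
// detached(); // compile error: `this` context does not satisfy `Named`
```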
@@ -69,11 +79,15 @@ export const EpsCrawler = {
     await page.evaluateOnNewDocument(() => {
       Object.defineProperty(navigator, 'webdriver', { get: () => false });
-      Object.defineProperty(navigator, 'language', { get: () => "zh-CN"});
-      Object.defineProperty(navigator, 'plugins', { get: () => [1,2,3,4,5]});
+      Object.defineProperty(navigator, 'language', { get: () => 'zh-CN' });
+      Object.defineProperty(navigator, 'plugins', {
+        get: () => [1, 2, 3, 4, 5],
+      });
     });
 
-    await page.setUserAgent('Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/143.0.0.0 Safari/537.36');
+    await page.setUserAgent(
+      'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/143.0.0.0 Safari/537.36',
+    );
     await page.setViewport({ width: 1920, height: 1080 });
 
     const allResults: EpsResult[] = [];
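`page.evaluateOnNewDocument` runs its callback in every new document before the page's own scripts execute, which is why these `navigator` overrides are already in place when bot-detection code reads them. A sketch for checking the spoofed values, assuming a `page` prepared as in the diff:

```ts
import * as puppeteer from 'puppeteer';

async function checkFingerprint(page: puppeteer.Page): Promise<void> {
  const fingerprint = await page.evaluate(() => ({
    webdriver: navigator.webdriver, // false after the override
    language: navigator.language, // 'zh-CN'
    pluginCount: navigator.plugins.length, // 5, from the fake array
  }));
  console.log(fingerprint);
}
```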
@@ -87,7 +101,7 @@ export const EpsCrawler = {
         // Simulate human behavior
         logger.log('Simulating human mouse movements...');
         await simulateHumanMouseMovement(page);
 
         logger.log('Simulating human scrolling...');
         await simulateHumanScrolling(page);
@@ -103,12 +117,14 @@ export const EpsCrawler = {
         }
 
         allResults.push(...pageResults);
-        logger.log(`Extracted ${pageResults.length} items from page ${currentPage}`);
+        logger.log(
+          `Extracted ${pageResults.length} items from page ${currentPage}`,
+        );
 
         // Simulate human behavior before paginating
         logger.log('Simulating human mouse movements before pagination...');
         await simulateHumanMouseMovement(page);
 
         logger.log('Simulating human scrolling before pagination...');
         await simulateHumanScrolling(page);
@@ -127,9 +143,13 @@ export const EpsCrawler = {
         try {
           // Click the next-page button and wait for the page to update
           await nextButton.click();
-          await new Promise(r => setTimeout(r, 3000)); // wait for the page to load
+          await new Promise((r) => setTimeout(r, 3000)); // wait for the page to load
         } catch (navError) {
-          logger.error(`Navigation to page ${currentPage + 1} failed: ${navError.message}`);
+          const navErrorMessage =
+            navError instanceof Error ? navError.message : String(navError);
+          logger.error(
+            `Navigation to page ${currentPage + 1} failed: ${navErrorMessage}`,
+          );
           break;
         }
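The `instanceof Error ? .message : String(...)` narrowing is needed because catch variables are typed `unknown` under `useUnknownInCatchVariables` (part of `strict` since TypeScript 4.4), so `.message` cannot be read directly. A reusable sketch (the `errorMessage` helper is hypothetical, not in this commit):

```ts
// Hypothetical helper: narrow an unknown catch value to a message string.
function errorMessage(err: unknown): string {
  return err instanceof Error ? err.message : String(err);
}

try {
  throw new Error('boom');
} catch (err) {
  console.log(errorMessage(err)); // "boom"
}
```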
@@ -138,26 +158,27 @@ export const EpsCrawler = {
         // Simulate human behavior after paginating
         logger.log('Simulating human mouse movements after pagination...');
         await simulateHumanMouseMovement(page);
 
         logger.log('Simulating human scrolling after pagination...');
         await simulateHumanScrolling(page);
 
         // Random delay between pages
         const delay = Math.floor(Math.random() * (3000 - 1000 + 1)) + 1000;
-        await new Promise(resolve => setTimeout(resolve, delay));
+        await new Promise((resolve) => setTimeout(resolve, delay));
       }
 
       return allResults;
 
     } catch (error) {
-      logger.error(`Failed to crawl ${this.name}: ${error.message}`);
+      const errorMessage =
+        error instanceof Error ? error.message : String(error);
+      logger.error(`Failed to crawl ${this.name}: ${errorMessage}`);
       return allResults;
     } finally {
       await page.close();
     }
   },
 
-  extract(html: string): EpsResult[] {
+  extract(this: EpsCrawlerType, html: string): EpsResult[] {
     const results: EpsResult[] = [];
     /**
      * Regex groups for eps.ctg.com.cn:
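The delay formula `Math.floor(Math.random() * (3000 - 1000 + 1)) + 1000` draws a uniform integer from 1000 to 3000 inclusive. A generic sketch of the same arithmetic (the `randomInt` helper is hypothetical, not in this commit):

```ts
// Hypothetical helper: uniform random integer in [min, max], both inclusive.
function randomInt(min: number, max: number): number {
  return Math.floor(Math.random() * (max - min + 1)) + min;
}

const delay = randomInt(1000, 3000); // same distribution as the inline formula
```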
@@ -179,24 +200,25 @@ export const EpsCrawler = {
      * </a>
      * </li>
      */
-    const regex = /<li[^>]*name="li_name"[^>]*>[\s\S]*?<a[^>]*href="([^"]*)"[^>]*title="([^"]*)"[^>]*>[\s\S]*?<em>\s*(\d{4}-\d{2}-\d{2})\s*<\/em>[\s\S]*?<\/a>[\s\S]*?<\/li>/gs;
+    const regex =
+      /<li[^>]*name="li_name"[^>]*>[\s\S]*?<a[^>]*href="([^"]*)"[^>]*title="([^"]*)"[^>]*>[\s\S]*?<em>\s*(\d{4}-\d{2}-\d{2})\s*<\/em>[\s\S]*?<\/a>[\s\S]*?<\/li>/gs;
 
-    let match;
+    let match: RegExpExecArray | null;
     while ((match = regex.exec(html)) !== null) {
-      const url = match[1]?.trim();
-      const title = match[2]?.trim();
-      const dateStr = match[3]?.trim();
+      const url = match[1]?.trim() ?? '';
+      const title = match[2]?.trim() ?? '';
+      const dateStr = match[3]?.trim() ?? '';
 
       if (title && url) {
         const fullUrl = url.startsWith('http') ? url : this.baseUrl + url;
         results.push({
           title,
           publishDate: dateStr ? new Date(dateStr) : new Date(),
-          url: fullUrl.replace(/\/\//g, '/')
+          url: fullUrl.replace(/\/\//g, '/'),
         });
       }
     }
 
     return results;
-  }
+  },
 };
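Annotating `match` as `RegExpExecArray | null` matches what `RegExp.prototype.exec` returns, and the `?? ''` fallbacks account for capture groups being possibly `undefined`. A toy sketch of the same exec-loop pattern:

```ts
const linkRegex = /<a href="([^"]*)">([^<]*)<\/a>/g;
const sample = '<a href="/one">One</a><a href="/two">Two</a>';

let m: RegExpExecArray | null;
while ((m = linkRegex.exec(sample)) !== null) {
  // With the g flag, exec advances lastIndex on each call.
  const href = m[1]?.trim() ?? '';
  const text = m[2]?.trim() ?? '';
  console.log(text, href); // "One /one", then "Two /two"
}
```

One caveat the commit does not touch: `fullUrl.replace(/\/\//g, '/')` also collapses the `//` in `https://`. A lookbehind such as `fullUrl.replace(/(?<!:)\/\//g, '/')` would normalize duplicate slashes in the path only; that would be a behavior change and is outside this formatting commit.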