import { CnncecpCrawler } from './cnncecp_target';
import * as puppeteer from 'puppeteer';

// Increase the Jest timeout to 5 minutes for real-site network operations
jest.setTimeout(60000 * 5);

describe('CnncecpCrawler Real Site Test', () => {
  let browser: puppeteer.Browser;

  beforeAll(async () => {
    browser = await puppeteer.launch({
      headless: false, // Run with a visible browser UI; set to true for headless CI runs
      args: ['--no-sandbox', '--disable-setuid-sandbox'],
    });
  });

  afterAll(async () => {
    if (browser) {
      await browser.close();
    }
  });

  it('should visit the website and list all found bid information', async () => {
    console.log(`\nStarting crawl for: ${CnncecpCrawler.name}`);
    console.log(`Target URL: ${CnncecpCrawler.url}`);

    const results = await CnncecpCrawler.crawl(browser);

    console.log(`\nSuccessfully found ${results.length} items:\n`);
    console.log('----------------------------------------');
    results.forEach((item, index) => {
      console.log(`${index + 1}. [${item.publishDate.toLocaleDateString()}] ${item.title}`);
      console.log(`   Link: ${item.url}`);
      console.log('----------------------------------------');
    });

    // Basic assertions to ensure the crawler is working
    expect(results).toBeDefined();
    expect(Array.isArray(results)).toBeTruthy();

    // Warn but don't fail if the site returns 0 items (the list may be empty or the page structure may have changed)
    if (results.length === 0) {
      console.warn('Warning: No items found. Check if the website structure has changed or if the list is currently empty.');
    } else {
      // Check data integrity of the first item
      const firstItem = results[0];
      expect(firstItem.title).toBeTruthy();
      expect(firstItem.url).toMatch(/^https?:\/\//);
      expect(firstItem.publishDate).toBeInstanceOf(Date);
    }
  });
});
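
/*
 * Note: the actual export shape of './cnncecp_target' is not shown in this file.
 * The sketch below is an assumption inferred from how the test uses it
 * (`name`, `url`, and `crawl(browser)` returning items with `title`, `url`,
 * and `publishDate`). Adjust to the real module definitions as needed.
 *
 * interface BidItem {
 *   title: string;       // headline of the bid announcement
 *   url: string;         // absolute link (http/https) to the detail page
 *   publishDate: Date;   // publication date parsed into a Date object
 * }
 *
 * interface Crawler {
 *   name: string;                                            // human-readable crawler name
 *   url: string;                                             // target listing URL
 *   crawl(browser: puppeteer.Browser): Promise<BidItem[]>;   // scrapes the listing page(s)
 * }
 */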