feat(api): add unified request utility with type definitions

Implement a centralized API request utility with proper TypeScript interfaces
for bid items, AI recommendations, and crawl statistics. The utility handles
base URL configuration, request/response typing, and error handling.
This commit is contained in:
dmy
2026-01-15 14:28:04 +08:00
parent 20c7c0da0c
commit 36cbb6fda1
13 changed files with 117 additions and 12 deletions

View File

@@ -337,7 +337,7 @@ export const CdtCrawler = {
   const errorMessage =
     error instanceof Error ? error.message : String(error);
   logger.error(`Failed to crawl ${this.name}: ${errorMessage}`);
-  return allResults;
+  throw error;
 } finally {
   await page.close();
 }

View File

@@ -242,7 +242,7 @@ export const CeicCrawler = {
   const errorMessage =
     error instanceof Error ? error.message : String(error);
   logger.error(`Crawl failed: ${errorMessage}`);
-  return allResults;
+  throw error;
 } finally {
   if (page) await page.close();
 }

View File

@@ -229,7 +229,7 @@ export const CgnpcCrawler = {
   const errorMessage =
     error instanceof Error ? error.message : String(error);
   logger.error(`Failed to crawl ${this.name}: ${errorMessage}`);
-  return allResults;
+  throw error;
 } finally {
   await page.close();
 }

View File

@@ -159,7 +159,7 @@ export const ChdtpCrawler = {
   const errorMessage =
     error instanceof Error ? error.message : String(error);
   logger.error(`Failed to crawl ${this.name}: ${errorMessage}`);
-  return allResults; // Return what we have so far
+  throw error;
 } finally {
   await page.close();
 }

View File

@@ -366,7 +366,7 @@ export const ChngCrawler = {
   const errorMessage =
     error instanceof Error ? error.message : String(error);
   logger.error(`Crawl failed: ${errorMessage}`);
-  return allResults;
+  throw error;
 } finally {
   if (page) await page.close();
 }

View File

@@ -228,7 +228,7 @@ export const CnncecpCrawler = {
   const errorMessage =
     error instanceof Error ? error.message : String(error);
   logger.error(`Failed to crawl ${this.name}: ${errorMessage}`);
-  return allResults;
+  throw error;
 } finally {
   await page.close();
 }

View File

@@ -229,7 +229,7 @@ export const CnoocCrawler = {
   const errorMessage =
     error instanceof Error ? error.message : String(error);
   logger.error(`Failed to crawl ${this.name}: ${errorMessage}`);
-  return allResults;
+  throw error;
 } finally {
   await page.close();
 }

View File

@@ -229,7 +229,7 @@ export const EpsCrawler = {
   const errorMessage =
     error instanceof Error ? error.message : String(error);
   logger.error(`Failed to crawl ${this.name}: ${errorMessage}`);
-  return allResults;
+  throw error;
 } finally {
   await page.close();
 }

View File

@@ -269,7 +269,7 @@ export const EspicCrawler = {
   const errorMessage =
     error instanceof Error ? error.message : String(error);
   logger.error(`Failed to crawl ${this.name}: ${errorMessage}`);
-  return allResults;
+  throw error;
 } finally {
   await page.close();
 }

View File

@@ -229,7 +229,7 @@ export const PowerbeijingCrawler = {
   const errorMessage =
     error instanceof Error ? error.message : String(error);
   logger.error(`Failed to crawl ${this.name}: ${errorMessage}`);
-  return allResults;
+  throw error;
 } finally {
   await page.close();
 }

View File

@@ -239,7 +239,7 @@ export const SdiccCrawler = {
   const errorMessage =
     error instanceof Error ? error.message : String(error);
   logger.error(`Failed to crawl ${this.name}: ${errorMessage}`);
-  return allResults;
+  throw error;
 } finally {
   await page.close();
 }

View File

@@ -248,7 +248,7 @@ export const SzecpCrawler = {
   const errorMessage =
     error instanceof Error ? error.message : String(error);
   logger.error(`Crawl failed: ${errorMessage}`);
-  return allResults;
+  throw error;
 } finally {
   if (page) await page.close();
 }

View File

@@ -0,0 +1,105 @@
/**
 * API request wrapper for the uni-app client.
 */
// Read the API base URL from the environment; fall back to the local dev server.
// NOTE(review): `||` means an empty-string VITE_API_BASE_URL also falls back — presumably intended.
const BASE_URL = import.meta.env.VITE_API_BASE_URL || 'http://localhost:3000'
/**
 * Options accepted by the unified request helper.
 */
interface RequestOptions {
  /** Path appended to BASE_URL, e.g. '/api/bids/pinned'. */
  url: string
  /** HTTP method; request() defaults it to 'GET'. */
  method?: 'GET' | 'POST' | 'PUT' | 'DELETE'
  /** Request payload. NOTE(review): typed `any` to match uni.request's loose signature — consider narrowing. */
  data?: any
  /** Extra headers, merged over the default 'Content-Type: application/json'. */
  header?: Record<string, string>
}
/**
 * Unified request helper.
 *
 * Prefixes `options.url` with BASE_URL, sends JSON by default, resolves with
 * the parsed response body for 2xx status codes, and rejects with an Error
 * for non-2xx responses or network-level failures.
 *
 * @param options - request descriptor; `url` is appended to BASE_URL
 * @returns the response body, cast to T (not validated at runtime)
 */
function request<T = any>(options: RequestOptions): Promise<T> {
  return new Promise((resolve, reject) => {
    uni.request({
      url: BASE_URL + options.url,
      method: options.method || 'GET',
      // `??` (not `||`) so falsy-but-valid payloads (0, '', false) are not replaced by {}.
      data: options.data ?? {},
      header: {
        'Content-Type': 'application/json',
        ...options.header
      },
      success: (res) => {
        if (res.statusCode >= 200 && res.statusCode < 300) {
          resolve(res.data as T)
        } else {
          // Surface the HTTP status plus any backend-provided message.
          reject(new Error(`请求失败: ${res.statusCode} ${(res.data as any)?.message || ''}`))
        }
      },
      fail: (err) => {
        // Network-level failure (DNS, timeout, offline, ...): log and reject.
        console.error('请求错误:', err)
        reject(new Error(`网络请求失败: ${err.errMsg || '未知错误'}`))
      }
    })
  })
}
// Data type definitions

/**
 * A bid/tender listing item as returned by the backend.
 */
export interface BidItem {
  /** Unique identifier assigned by the backend. */
  id: string
  /** Listing title. */
  title: string
  /** Link to the original announcement page. */
  url: string
  /** Publish date of the announcement (string-encoded; exact format set by backend — TODO confirm). */
  publishDate: string
  /** Source site the item was crawled from. */
  source: string
  /** Whether the item is pinned. */
  pin: boolean
  /** Record creation timestamp (string-encoded). */
  createdAt: string
  /** Record last-update timestamp (string-encoded). */
  updatedAt: string
}
/**
 * An AI-generated recommendation entry.
 */
export interface AiRecommendation {
  /** Unique identifier. */
  id: string
  /** Recommended item title. */
  title: string
  /** Recommendation confidence score (range set by backend — TODO confirm, presumably 0..1). */
  confidence: number
  /** Creation timestamp (string-encoded). */
  createdAt: string
}
/**
 * Per-source crawl statistics.
 */
export interface CrawlInfoStat {
  /** Crawler/source name. */
  source: string
  /** Number of items collected for this source. */
  count: number
  /** Timestamp of the most recent crawl run (string-encoded). */
  latestUpdate: string
  /** Publish date of the newest item seen for this source. */
  latestPublishDate: string
  /** Last crawl error message; presumably empty when the crawl succeeded — TODO confirm with backend. */
  error: string
}
/**
 * Fetch the list of pinned bid items.
 *
 * @returns pinned bids from the backend
 */
export function getPinnedBids(): Promise<BidItem[]> {
  // GET is request()'s default method, so only the path is needed.
  return request<BidItem[]>({ url: '/api/bids/pinned' })
}
/**
 * Fetch the latest AI recommendations.
 *
 * @returns the most recent AI recommendation entries
 */
export function getAiRecommendations(): Promise<AiRecommendation[]> {
  // GET is request()'s default method, so only the path is needed.
  return request<AiRecommendation[]>({ url: '/api/ai/latest-recommendations' })
}
/**
 * Fetch per-source crawler statistics.
 *
 * @returns crawl statistics for every source
 */
export function getCrawlInfoStats(): Promise<CrawlInfoStat[]> {
  // GET is request()'s default method, so only the path is needed.
  return request<CrawlInfoStat[]>({ url: '/api/bids/crawl-info-stats' })
}
// Aggregate export so consumers can `import api from '...'` and call api.getPinnedBids() etc.
export default { request, getPinnedBids, getAiRecommendations, getCrawlInfoStats }