feat: 添加Tailwind CSS支持并修复URL拼接问题

添加Tailwind CSS及相关配置
修复多个爬虫服务中的URL拼接问题,避免双斜杠
调整前端导航菜单项顺序
This commit is contained in:
dmy
2026-01-13 18:07:00 +08:00
parent b261ff074c
commit b3d784f1e3
16 changed files with 43 additions and 21 deletions

View File

@@ -17,9 +17,13 @@
"vue": "^3.5.24"
},
"devDependencies": {
"@tailwindcss/postcss": "^4.1.18",
"@types/node": "^24.10.1",
"@vitejs/plugin-vue": "^6.0.1",
"@vue/tsconfig": "^0.8.1",
"autoprefixer": "^10.4.23",
"postcss": "^8.5.6",
"tailwindcss": "^4.1.18",
"typescript": "~5.9.3",
"vite": "^7.2.4",
"vue-tsc": "^3.1.4"

View File

@@ -0,0 +1,6 @@
// PostCSS configuration: run Tailwind CSS v4 first, then autoprefixer.
// NOTE(review): Tailwind v4's PostCSS plugin already handles vendor
// prefixing via Lightning CSS, so autoprefixer may be redundant here —
// kept to match the declared devDependencies; confirm before removing.
const config = {
  plugins: {
    // Tailwind v4 ships its PostCSS integration as a separate package.
    '@tailwindcss/postcss': {},
    // Vendor prefixes per the project's browserslist targets.
    autoprefixer: {},
  },
};

export default config;

View File

@@ -4,18 +4,19 @@
<div class="logo">投标信息一览</div>
<el-menu active-text-color="#ffd04b" background-color="#545c64" class="el-menu-vertical-demo" default-active="1"
text-color="#fff" @select="handleSelect">
<el-menu-item index="1">
<el-icon>
<DataBoard />
</el-icon>
<span>Dashboard</span>
</el-menu-item>
<el-menu-item index="2">
<el-icon>
<MagicStick />
</el-icon>
<span>Dashboard AI</span>
</el-menu-item>
<el-menu-item index="2">
<el-icon>
<DataBoard />
</el-icon>
<span>Dashboard</span>
</el-menu-item>
<el-menu-item index="3">
<el-icon>
<Document />
@@ -43,8 +44,8 @@
</el-header>
<el-main>
<DashboardAI v-if="activeIndex === '2'" :bids="bids" :high-priority-bids="highPriorityBids" />
<Dashboard v-if="activeIndex === '1'" :today-bids="todayBids" :high-priority-bids="highPriorityBids"
<DashboardAI v-if="activeIndex === '1'" :bids="bids" :high-priority-bids="highPriorityBids" />
<Dashboard v-if="activeIndex === '2'" :today-bids="todayBids" :high-priority-bids="highPriorityBids"
:keywords="keywords" :loading="loading" :is-crawling="isCrawling" @refresh="fetchData"
@update-bids="updateBidsByDateRange" />

View File

@@ -1,3 +1,5 @@
@import "tailwindcss";
:root {
font-family: system-ui, Avenir, Helvetica, Arial, sans-serif;
line-height: 1.5;

View File

@@ -55,7 +55,7 @@ export interface CdtResult {
export const CdtCrawler = {
name: '中国大唐集团电子商务平台',
url: 'https://tang.cdt-ec.com/home/index.html',
baseUrl: 'https://tang.cdt-ec.com/',
baseUrl: 'https://tang.cdt-ec.com',
async crawl(browser: puppeteer.Browser): Promise<CdtResult[]> {
const logger = new Logger('CdtCrawler');
@@ -252,10 +252,11 @@ export const CdtCrawler = {
const dateStr = match[3]?.trim();
if (title && url) {
const fullUrl = url.startsWith('http') ? url : this.baseUrl + url;
results.push({
title,
publishDate: dateStr ? new Date(dateStr) : new Date(),
url: url.startsWith('http') ? url : this.baseUrl + url
url: fullUrl.replace(/\/\//g, '/')
});
}
}

View File

@@ -132,7 +132,7 @@ export const CeicCrawler = {
allResults.push(...pageResults.map(r => ({
title: r.title,
publishDate: r.dateStr ? new Date(r.dateStr) : new Date(),
url: r.url
url: r.url.replace(/\/\//g, '/')
})));
logger.log(`Extracted ${pageResults.length} items.`);

View File

@@ -190,10 +190,11 @@ export const CgnpcCrawler = {
const dateStr = match[3]?.trim();
if (title && url) {
const fullUrl = url.startsWith('http') ? url : this.baseUrl + url;
results.push({
title,
publishDate: dateStr ? new Date(dateStr) : new Date(),
url: url.startsWith('http') ? url : this.baseUrl + url
url: fullUrl.replace(/\/\//g, '/')
});
}
}

View File

@@ -105,10 +105,11 @@ export const ChdtpCrawler = {
const dateStr = match[5]?.trim();
if (title && urlSuffix) {
const fullUrl = this.baseUrl + urlSuffix;
results.push({
title,
publishDate: dateStr ? new Date(dateStr) : new Date(),
url: this.baseUrl + urlSuffix
url: fullUrl.replace(/\/\//g, '/')
});
}
}

View File

@@ -206,7 +206,7 @@ export const ChngCrawler = {
allResults.push(...pageResults.map(r => ({
title: r!.title,
publishDate: new Date(r!.dateStr),
url: r!.url
url: r!.url.replace(/\/\//g, '/')
})));
logger.log(`Extracted ${pageResults.length} items.`);

View File

@@ -181,10 +181,11 @@ export const CnncecpCrawler = {
const title = match[3]?.trim();
if (title && url) {
const fullUrl = url.startsWith('http') ? url : this.baseUrl + url;
results.push({
title,
publishDate: dateStr ? new Date(dateStr) : new Date(),
url: url.startsWith('http') ? url : this.baseUrl + url
url: fullUrl.replace(/\/\//g, '/')
});
}
}

View File

@@ -182,10 +182,11 @@ export const CnoocCrawler = {
const dateStr = match[3]?.trim();
if (title && url) {
const fullUrl = url.startsWith('http') ? url : this.baseUrl + url;
results.push({
title,
publishDate: dateStr ? new Date(dateStr) : new Date(),
url: url.startsWith('http') ? url : this.baseUrl + url
url: fullUrl.replace(/\/\//g, '/')
});
}
}

View File

@@ -188,10 +188,11 @@ export const EpsCrawler = {
const dateStr = match[3]?.trim();
if (title && url) {
const fullUrl = url.startsWith('http') ? url : this.baseUrl + url;
results.push({
title,
publishDate: dateStr ? new Date(dateStr) : new Date(),
url: url.startsWith('http') ? url : this.baseUrl + url
url: fullUrl.replace(/\/\//g, '/')
});
}
}

View File

@@ -234,10 +234,11 @@ export const EspicCrawler = {
const dateStr = match[3]?.trim();
if (title && url) {
const fullUrl = url.startsWith('http') ? url : this.baseUrl + url;
results.push({
title,
publishDate: dateStr ? new Date(dateStr) : new Date(),
url: url.startsWith('http') ? url : this.baseUrl + url
url: fullUrl.replace(/\/\//g, '/')
});
}
}

View File

@@ -185,10 +185,11 @@ export const PowerbeijingCrawler = {
const dateStr = match[3]?.trim();
if (title && url) {
const fullUrl = url.startsWith('http') ? url : this.baseUrl + url;
results.push({
title,
publishDate: dateStr ? new Date(dateStr) : new Date(),
url: url.startsWith('http') ? url : this.baseUrl + url
url: fullUrl.replace(/\/\//g, '/')
});
}
}

View File

@@ -190,10 +190,11 @@ export const SdiccCrawler = {
const dateStr = match[4]?.trim();
if (title && ggGuid && gcGuid) {
const fullUrl = `${this.baseUrl}/cgxx/ggDetail?gcGuid=${gcGuid}&ggGuid=${ggGuid}`;
results.push({
title,
publishDate: dateStr ? new Date(dateStr) : new Date(),
url: `${this.baseUrl}/cgxx/ggDetail?gcGuid=${gcGuid}&ggGuid=${ggGuid}`
url: fullUrl.replace(/\/\//g, '/')
});
}
}

View File

@@ -134,7 +134,7 @@ export const SzecpCrawler = {
allResults.push(...pageResults.map(r => ({
title: r!.title,
publishDate: new Date(r!.dateStr),
url: r!.url
url: r!.url.replace(/\/\//g, '/')
})));
logger.log(`Extracted ${pageResults.length} items.`);