Compare commits

2 Commits: 9dc01eeb46 ... 9257c78e72

| Author | SHA1 | Date |
|---|---|---|
|  | 9257c78e72 |  |
|  | e8beeec2b9 |  |
.env (2 changes)

@@ -37,7 +37,7 @@ PROXY_HOST=127.0.0.1
 PROXY_PORT=3211
 
 # Log level (optional): error, warn, info, debug, verbose
-LOG_LEVEL=info
+LOG_LEVEL=debug
 
 # OpenAI API Key (used for AI recommendations)
 ARK_API_KEY=a63d58b6-cf56-434b-8a42-5c781ba0822a
.gitignore (vendored, 3 changes)

@@ -13,4 +13,5 @@ build
 *.woff2
 widget/looker/frontend/src/assets/fonts/OFL.txt
 dist-electron
 unpackage
+.cursor
@@ -7,6 +7,7 @@ import {
   getDaysAgo,
   setStartOfDay,
   setEndOfDay,
+  utcToBeijing,
 } from '../../common/utils/timezone.util';
 
 interface FindAllQuery {
@@ -23,7 +24,7 @@ interface SourceResult {
 export interface CrawlInfoAddStats {
   source: string;
   count: number;
-  latestUpdate: Date | string;
+  latestUpdate: Date | string | null;
   latestPublishDate: Date | string | null;
   error: string | null;
 }
@@ -177,16 +178,26 @@ export class BidsService {
     const results =
       await this.crawlInfoRepository.query<CrawlInfoAddRawResult[]>(query);
 
-    return results.map((item) => ({
-      source: String(item.source),
-      count: Number(item.count),
-      latestUpdate: item.latestUpdate ? item.latestUpdate + 'Z' : null,
-      latestPublishDate: item.latestPublishDate,
-      // Make sure the error field is handled correctly: null or an empty string becomes null, non-empty strings are kept
-      error:
-        item.error && String(item.error).trim() !== ''
-          ? String(item.error)
-          : null,
-    }));
+    return results.map((item) => {
+      // Convert UTC timestamps to Beijing time for display
+      const latestUpdateBeijing = item.latestUpdate
+        ? utcToBeijing(new Date(item.latestUpdate))
+        : null;
+      const latestPublishDateBeijing = item.latestPublishDate
+        ? utcToBeijing(new Date(item.latestPublishDate))
+        : null;
+
+      return {
+        source: String(item.source),
+        count: Number(item.count),
+        latestUpdate: latestUpdateBeijing,
+        latestPublishDate: latestPublishDateBeijing,
+        // Make sure the error field is handled correctly: null or an empty string becomes null, non-empty strings are kept
+        error:
+          item.error && String(item.error).trim() !== ''
+            ? String(item.error)
+            : null,
+      };
+    });
   }
 }
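For orientation (not part of the diff): `latestUpdate` is now typed `Date | string | null`, so consumers of `CrawlInfoAddStats` have to guard against the null case before formatting. A minimal sketch under that assumption; the formatting helper is purely illustrative.

```ts
// Shape mirrored from the CrawlInfoAddStats interface in the hunks above
interface CrawlInfoAddStats {
  source: string;
  count: number;
  latestUpdate: Date | string | null;
  latestPublishDate: Date | string | null;
  error: string | null;
}

// Illustrative helper: guard the null case, then normalize to a Date before formatting
function formatLatestUpdate(stats: CrawlInfoAddStats): string {
  if (!stats.latestUpdate) {
    return 'no data yet';
  }
  const d =
    stats.latestUpdate instanceof Date
      ? stats.latestUpdate
      : new Date(stats.latestUpdate);
  return d.toISOString().replace('T', ' ').slice(0, 19);
}
```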
@@ -5,6 +5,26 @@
 
 const TIMEZONE_OFFSET = 8 * 60 * 60 * 1000;
 
+/**
+ * Convert Beijing time (+8) to UTC.
+ * Used to convert Date objects parsed from crawled Beijing-time strings into UTC for storage.
+ * @param date Date object in Beijing time
+ * @returns Date object in UTC
+ */
+export function beijingToUtc(date: Date): Date {
+  return new Date(date.getTime() - TIMEZONE_OFFSET);
+}
+
+/**
+ * Convert UTC to Beijing time (+8).
+ * Used to convert UTC timestamps from the database into Beijing time for display.
+ * @param date Date object in UTC
+ * @returns Date object in Beijing time
+ */
+export function utcToBeijing(date: Date): Date {
+  return new Date(date.getTime() + TIMEZONE_OFFSET);
+}
+
 /**
  * Get a Date object for the current time in UTC+8
  * @returns Date The current time expressed in UTC+8
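For reference, a minimal sketch (not part of the change set) of how the two new helpers round-trip a timestamp; the sample date is an arbitrary assumption.

```ts
// Helpers as defined in the hunk above
const TIMEZONE_OFFSET = 8 * 60 * 60 * 1000;

function beijingToUtc(date: Date): Date {
  return new Date(date.getTime() - TIMEZONE_OFFSET);
}

function utcToBeijing(date: Date): Date {
  return new Date(date.getTime() + TIMEZONE_OFFSET);
}

// A crawled wall-clock value such as '2024-05-01 10:00' is parsed into a Date,
// then shifted back 8 hours so the stored instant is the true UTC time.
const parsed = new Date('2024-05-01T10:00:00Z'); // wall-clock value treated as Beijing time
const stored = beijingToUtc(parsed);             // 2024-05-01T02:00:00Z goes into the database
const shown = utcToBeijing(stored);              // shifted forward again for display
console.log(stored.toISOString(), shown.toISOString());
```

The design choice here is a fixed +8 offset rather than an IANA timezone lookup, which works for China Standard Time since it observes no daylight saving.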
@@ -4,6 +4,7 @@ import { InjectRepository } from '@nestjs/typeorm';
 import { Repository } from 'typeorm';
 import * as puppeteer from 'puppeteer';
 import { BidsService } from '../../bids/services/bid.service';
+import { beijingToUtc } from '../../common/utils/timezone.util';
 import { CrawlInfoAdd } from '../entities/crawl-info-add.entity';
 import { ChdtpCrawler } from './chdtp_target';
 import { ChngCrawler } from './chng_target';
@@ -146,19 +147,21 @@ export class BidCrawlerService {
        : null;
 
      for (const item of results) {
+       // Convert Beijing time to UTC for storage
+       const publishDateUtc = beijingToUtc(new Date(item.publishDate));
        await this.bidsService.createOrUpdate({
          title: item.title,
          url: item.url,
-         publishDate: item.publishDate,
+         publishDate: publishDateUtc,
          source: crawler.name,
        });
      }
 
-     // Save crawler statistics to the database
+     // Save crawler statistics to the database (convert Beijing time to UTC)
      await this.saveCrawlInfo(
        crawler.name,
        results.length,
-       latestPublishDate,
+       latestPublishDate ? beijingToUtc(latestPublishDate) : null,
      );
    } catch (err) {
      const errorMessage = err instanceof Error ? err.message : String(err);
@@ -219,11 +222,11 @@ export class BidCrawlerService {
        });
      }
 
-     // Update crawler statistics in the database
+     // Update crawler statistics in the database (convert Beijing time to UTC)
      await this.saveCrawlInfo(
        crawler.name,
        results.length,
-       latestPublishDate,
+       latestPublishDate ? beijingToUtc(latestPublishDate) : null,
      );
    } catch (err) {
      const errorMessage =
@@ -361,19 +364,21 @@ export class BidCrawlerService {
        : null;
 
      for (const item of results) {
+       // Convert Beijing time to UTC for storage
+       const publishDateUtc = beijingToUtc(new Date(item.publishDate));
        await this.bidsService.createOrUpdate({
          title: item.title,
          url: item.url,
-         publishDate: item.publishDate,
+         publishDate: publishDateUtc,
          source: targetCrawler.name,
        });
      }
 
-     // Save crawler statistics to the database
+     // Save crawler statistics to the database (convert Beijing time to UTC)
      await this.saveCrawlInfo(
        targetCrawler.name,
        results.length,
-       latestPublishDate,
+       latestPublishDate ? beijingToUtc(latestPublishDate) : null,
      );
 
      return {
@@ -20,7 +20,7 @@ import { ConfigModule, ConfigService } from '@nestjs/config';
        database: configService.get<string>('DATABASE_NAME', 'bidding'),
        entities: [__dirname + '/../**/*.entity{.ts,.js}'],
        synchronize: false,
-       timezone: '+08:00',
+       timezone: 'Z',
      }),
    }),
  ],
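For context, a hedged sketch of what switching `timezone` from '+08:00' to 'Z' amounts to in a standalone TypeORM DataSource, assuming a MySQL-family driver where this option is forwarded to the underlying client; every connection value other than `database`, `entities`, `synchronize`, and `timezone` is a placeholder, not taken from this repository.

```ts
import { DataSource } from 'typeorm';

// Placeholder credentials; only the last four options mirror the diff above.
// With timezone: 'Z' the driver reads and writes DATETIME columns as UTC,
// matching the new convention of storing UTC and converting to Beijing time
// only at the display layer.
export const dataSource = new DataSource({
  type: 'mysql',
  host: '127.0.0.1',
  port: 3306,
  username: 'app',
  password: 'change-me',
  database: 'bidding',
  entities: [__dirname + '/../**/*.entity{.ts,.js}'],
  synchronize: false,
  timezone: 'Z',
});
```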