// bidding_watcher/src/bids/services/bid.service.ts
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository, LessThan } from 'typeorm';
import { BidItem } from '../entities/bid-item.entity';
import { CrawlInfoAdd } from '../../crawler/entities/crawl-info-add.entity';
/** Optional filters accepted by {@link BidsService.findAll}. */
interface FindAllQuery {
  /** 1-based page index; defaults to 1 when omitted. */
  page?: number;
  /** Page size; defaults to 10 when omitted. */
  limit?: number;
  /** Exact-match filter on `bid.source`. */
  source?: string;
  /** Substring filter (SQL LIKE) on `bid.title`. */
  keyword?: string;
}

/** Raw row shape of the DISTINCT-source query in `getSources`. */
interface SourceResult {
  source: string;
}

/** Per-source crawler statistics, normalized for API consumers. */
export interface CrawlInfoAddStats {
  source: string;
  count: number;
  /** `createdAt` of the most recent crawl record for this source. */
  latestUpdate: Date | string;
  latestPublishDate: Date | string | null;
  /** Non-blank error text from the latest run, otherwise null. */
  error: string | null;
}

/** Raw row shape returned by the SQL in `getCrawlInfoAddStats`. */
interface CrawlInfoAddRawResult {
  source: string;
  count: number;
  latestUpdate: Date | string;
  latestPublishDate: Date | string | null;
  error: string | null;
}
@Injectable()
export class BidsService {
constructor(
@InjectRepository(BidItem)
private bidRepository: Repository<BidItem>,
@InjectRepository(CrawlInfoAdd)
private crawlInfoRepository: Repository<CrawlInfoAdd>,
2026-01-09 23:18:52 +08:00
) {}
async findAll(query?: FindAllQuery) {
2026-01-09 23:18:52 +08:00
const { page = 1, limit = 10, source, keyword } = query || {};
const qb = this.bidRepository.createQueryBuilder('bid');
if (source) {
qb.andWhere('bid.source = :source', { source });
}
if (keyword) {
qb.andWhere('bid.title LIKE :keyword', { keyword: `%${keyword}%` });
}
qb.orderBy('bid.publishDate', 'DESC')
.skip((Number(page) - 1) * Number(limit))
.take(Number(limit));
2026-01-09 23:18:52 +08:00
const [items, total] = await qb.getManyAndCount();
return { items, total };
}
async createOrUpdate(data: Partial<BidItem>) {
// Use title or a hash of title to check for duplicates
const item = await this.bidRepository.findOne({
where: { title: data.title },
});
2026-01-09 23:18:52 +08:00
if (item) {
Object.assign(item, data);
return this.bidRepository.save(item);
}
return this.bidRepository.save(data);
}
async cleanOldData() {
const thirtyDaysAgo = new Date();
thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30);
return this.bidRepository.delete({
createdAt: LessThan(thirtyDaysAgo),
});
}
async getSources(): Promise<string[]> {
const result = await this.bidRepository
.createQueryBuilder('bid')
.select('DISTINCT bid.source', 'source')
.where('bid.source IS NOT NULL')
.orderBy('bid.source', 'ASC')
.getRawMany<SourceResult>();
return result.map((item) => item.source);
}
async getRecentBids() {
const thirtyDaysAgo = new Date();
thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30);
thirtyDaysAgo.setHours(0, 0, 0, 0);
return this.bidRepository
.createQueryBuilder('bid')
.where('bid.publishDate >= :thirtyDaysAgo', { thirtyDaysAgo })
.orderBy('bid.publishDate', 'DESC')
.getMany();
}
async getPinnedBids() {
return this.bidRepository
.createQueryBuilder('bid')
.where('bid.pin = :pin', { pin: true })
.orderBy('bid.publishDate', 'DESC')
.getMany();
}
async getBidsByDateRange(
startDate?: string,
endDate?: string,
keywords?: string[],
) {
const qb = this.bidRepository.createQueryBuilder('bid');
if (startDate) {
const start = new Date(startDate);
start.setHours(0, 0, 0, 0);
qb.andWhere('bid.publishDate >= :startDate', { startDate: start });
}
if (endDate) {
const end = new Date(endDate);
end.setHours(23, 59, 59, 999);
qb.andWhere('bid.publishDate <= :endDate', { endDate: end });
}
if (keywords && keywords.length > 0) {
const keywordConditions = keywords
.map((keyword, index) => {
return `bid.title LIKE :keyword${index}`;
})
.join(' OR ');
qb.andWhere(
`(${keywordConditions})`,
keywords.reduce((params, keyword, index) => {
params[`keyword${index}`] = `%${keyword}%`;
return params;
}, {}),
);
}
return qb.orderBy('bid.publishDate', 'DESC').getMany();
}
async updatePin(title: string, pin: boolean) {
const item = await this.bidRepository.findOne({ where: { title } });
if (!item) {
throw new Error('Bid not found');
}
item.pin = pin;
return this.bidRepository.save(item);
}
async getCrawlInfoAddStats(): Promise<CrawlInfoAddStats[]> {
// 获取每个来源的最新一次爬虫记录(按 createdAt 降序)
const query = `
SELECT
source,
count,
latestPublishDate,
error,
createdAt as latestUpdate
FROM crawl_info_add
WHERE (source, createdAt) IN (
SELECT source, MAX(createdAt)
FROM crawl_info_add
GROUP BY source
)
ORDER BY source ASC
`;
const results =
await this.crawlInfoRepository.query<CrawlInfoAddRawResult[]>(query);
return results.map((item) => ({
source: String(item.source),
count: Number(item.count),
latestUpdate: item.latestUpdate,
latestPublishDate: item.latestPublishDate,
// 确保 error 字段正确处理null 或空字符串都转换为 null非空字符串保留
error:
item.error && String(item.error).trim() !== ''
? String(item.error)
: null,
}));
}
2026-01-09 23:18:52 +08:00
}