- Change log level from 'info' to 'debug' for enhanced logging during development. - Add '.cursor' to .gitignore to prevent tracking of cursor files.
193 lines
5.1 KiB
TypeScript
import { Injectable } from '@nestjs/common';
|
||
import { InjectRepository } from '@nestjs/typeorm';
|
||
import { Repository, LessThan } from 'typeorm';
|
||
import { BidItem } from '../entities/bid-item.entity';
|
||
import { CrawlInfoAdd } from '../../crawler/entities/crawl-info-add.entity';
|
||
import {
|
||
getDaysAgo,
|
||
setStartOfDay,
|
||
setEndOfDay,
|
||
} from '../../common/utils/timezone.util';
|
||
|
||
/**
 * Query parameters accepted by BidsService.findAll.
 */
interface FindAllQuery {
  /** 1-based page number; findAll defaults it to 1 when omitted. */
  page?: number;
  /** Page size; findAll defaults it to 10 when omitted. */
  limit?: number;
  /** Exact-match filter on the bid's source column. */
  source?: string;
  /** Substring filter applied to the bid title via SQL LIKE. */
  keyword?: string;
}
|
||
|
||
/** Raw row shape returned by the DISTINCT-source query in getSources(). */
interface SourceResult {
  source: string;
}
|
||
|
||
/**
 * Per-source crawler status row produced by BidsService.getCrawlInfoAddStats
 * after normalization of the raw SQL result.
 */
export interface CrawlInfoAddStats {
  /** Crawler source identifier. */
  source: string;
  /** `count` column of the latest crawl_info_add record for this source. */
  count: number;
  /** createdAt of the latest crawl record, formatted by SQL as a string. */
  latestUpdate: Date | string;
  /** `latestPublishDate` column of the latest crawl record, if any. */
  latestPublishDate: Date | string | null;
  /** Error text of the latest crawl; null when absent or blank. */
  error: string | null;
}
|
||
|
||
/**
 * Raw row shape returned by the SQL in getCrawlInfoAddStats() before the
 * String()/Number()/error normalization is applied.
 */
interface CrawlInfoAddRawResult {
  source: string;
  count: number;
  latestPublishDate: Date | string | null;
  error: string | null;
  latestUpdate: Date | string;
}
|
||
|
||
@Injectable()
|
||
export class BidsService {
|
||
constructor(
|
||
@InjectRepository(BidItem)
|
||
private bidRepository: Repository<BidItem>,
|
||
@InjectRepository(CrawlInfoAdd)
|
||
private crawlInfoRepository: Repository<CrawlInfoAdd>,
|
||
) {}
|
||
|
||
async findAll(query?: FindAllQuery) {
|
||
const { page = 1, limit = 10, source, keyword } = query || {};
|
||
const qb = this.bidRepository.createQueryBuilder('bid');
|
||
|
||
if (source) {
|
||
qb.andWhere('bid.source = :source', { source });
|
||
}
|
||
|
||
if (keyword) {
|
||
qb.andWhere('bid.title LIKE :keyword', { keyword: `%${keyword}%` });
|
||
}
|
||
|
||
qb.orderBy('bid.publishDate', 'DESC')
|
||
.skip((Number(page) - 1) * Number(limit))
|
||
.take(Number(limit));
|
||
|
||
const [items, total] = await qb.getManyAndCount();
|
||
return { items, total };
|
||
}
|
||
|
||
async createOrUpdate(data: Partial<BidItem>) {
|
||
// Use title or a hash of title to check for duplicates
|
||
const item = await this.bidRepository.findOne({
|
||
where: { title: data.title },
|
||
});
|
||
if (item) {
|
||
Object.assign(item, data);
|
||
return this.bidRepository.save(item);
|
||
}
|
||
return this.bidRepository.save(data);
|
||
}
|
||
|
||
async cleanOldData() {
|
||
const thirtyDaysAgo = getDaysAgo(30);
|
||
return this.bidRepository.delete({
|
||
createdAt: LessThan(thirtyDaysAgo),
|
||
});
|
||
}
|
||
|
||
async getSources(): Promise<string[]> {
|
||
const result = await this.bidRepository
|
||
.createQueryBuilder('bid')
|
||
.select('DISTINCT bid.source', 'source')
|
||
.where('bid.source IS NOT NULL')
|
||
.orderBy('bid.source', 'ASC')
|
||
.getRawMany<SourceResult>();
|
||
return result.map((item) => item.source);
|
||
}
|
||
|
||
async getRecentBids() {
|
||
const thirtyDaysAgo = setStartOfDay(getDaysAgo(30));
|
||
|
||
return this.bidRepository
|
||
.createQueryBuilder('bid')
|
||
.where('bid.publishDate >= :thirtyDaysAgo', { thirtyDaysAgo })
|
||
.orderBy('bid.publishDate', 'DESC')
|
||
.getMany();
|
||
}
|
||
|
||
async getPinnedBids() {
|
||
return this.bidRepository
|
||
.createQueryBuilder('bid')
|
||
.where('bid.pin = :pin', { pin: true })
|
||
.orderBy('bid.publishDate', 'DESC')
|
||
.getMany();
|
||
}
|
||
|
||
async getBidsByDateRange(
|
||
startDate?: string,
|
||
endDate?: string,
|
||
keywords?: string[],
|
||
) {
|
||
const qb = this.bidRepository.createQueryBuilder('bid');
|
||
|
||
if (startDate) {
|
||
const start = setStartOfDay(new Date(startDate));
|
||
qb.andWhere('bid.publishDate >= :startDate', { startDate: start });
|
||
}
|
||
|
||
if (endDate) {
|
||
const end = setEndOfDay(new Date(endDate));
|
||
qb.andWhere('bid.publishDate <= :endDate', { endDate: end });
|
||
}
|
||
|
||
if (keywords && keywords.length > 0) {
|
||
const keywordConditions = keywords
|
||
.map((keyword, index) => {
|
||
return `bid.title LIKE :keyword${index}`;
|
||
})
|
||
.join(' OR ');
|
||
qb.andWhere(
|
||
`(${keywordConditions})`,
|
||
keywords.reduce((params, keyword, index) => {
|
||
params[`keyword${index}`] = `%${keyword}%`;
|
||
return params;
|
||
}, {}),
|
||
);
|
||
}
|
||
|
||
return qb.orderBy('bid.publishDate', 'DESC').getMany();
|
||
}
|
||
|
||
async updatePin(title: string, pin: boolean) {
|
||
const item = await this.bidRepository.findOne({ where: { title } });
|
||
if (!item) {
|
||
throw new Error('Bid not found');
|
||
}
|
||
item.pin = pin;
|
||
return this.bidRepository.save(item);
|
||
}
|
||
|
||
async getCrawlInfoAddStats(): Promise<CrawlInfoAddStats[]> {
|
||
// 获取每个来源的最新一次爬虫记录(按 createdAt 降序)
|
||
const query = `
|
||
SELECT
|
||
source,
|
||
count,
|
||
latestPublishDate,
|
||
error,
|
||
strftime('%Y-%m-%d %H:%M:%S', createdAt, '+8 hours') as latestUpdate
|
||
FROM crawl_info_add
|
||
WHERE (source, createdAt) IN (
|
||
SELECT source, MAX(createdAt)
|
||
FROM crawl_info_add
|
||
GROUP BY source
|
||
)
|
||
ORDER BY source ASC
|
||
`;
|
||
|
||
const results =
|
||
await this.crawlInfoRepository.query<CrawlInfoAddRawResult[]>(query);
|
||
|
||
return results.map((item) => ({
|
||
source: String(item.source),
|
||
count: Number(item.count),
|
||
latestUpdate: item.latestUpdate,
|
||
latestPublishDate: item.latestPublishDate,
|
||
// 确保 error 字段正确处理:null 或空字符串都转换为 null,非空字符串保留
|
||
error:
|
||
item.error && String(item.error).trim() !== ''
|
||
? String(item.error)
|
||
: null,
|
||
}));
|
||
}
|
||
}
|