feat: 添加爬虫状态监控功能

新增爬虫统计信息展示组件,包括后端数据查询接口和前端展示界面。同时简化日期显示格式并添加刷新提示功能。
This commit is contained in:
dmy
2026-01-14 09:26:04 +08:00
parent 571eea0f66
commit bcd7af4e69
7 changed files with 325 additions and 76 deletions

View File

@@ -43,6 +43,15 @@ type AiRecommendation struct {
CreatedAt string `json:"createdAt"`
}
// CrawlInfoStat describes crawler statistics for a single data source,
// serialized to the frontend as JSON.
type CrawlInfoStat struct {
	Source            string `json:"source"`            // data-source identifier
	Count             int    `json:"count"`             // number of records crawled for this source
	LatestUpdate      string `json:"latestUpdate"`      // formatted timestamp of the newest crawl run; "" when unknown
	LatestPublishDate string `json:"latestPublishDate"` // formatted timestamp of the newest published item; "" when unknown
	Error             string `json:"error"`             // last error message from the crawler; "" when none
}
// App struct
type App struct {
ctx context.Context
@@ -207,9 +216,9 @@ func (a *App) GetPinnedBidItems() ([]BidItem, error) {
return nil, fmt.Errorf("扫描行失败: %v", err)
}
item.PublishDate = publishDate.Format("2006-01-02 15:04:05")
item.CreatedAt = createdAt.Format("2006-01-02 15:04:05")
item.UpdatedAt = updatedAt.Format("2006-01-02 15:04:05")
item.PublishDate = publishDate.Format("2006-01-02")
item.CreatedAt = createdAt.Format("2006-01-02")
item.UpdatedAt = updatedAt.Format("2006-01-02")
items = append(items, item)
}
@@ -265,7 +274,7 @@ func (a *App) GetAiRecommendations() ([]AiRecommendation, error) {
return nil, fmt.Errorf("扫描行失败: %v", err)
}
item.CreatedAt = createdAt.Format("2006-01-02 15:04:05")
item.CreatedAt = createdAt.Format("2006-01-02")
items = append(items, item)
}
@@ -276,3 +285,87 @@ func (a *App) GetAiRecommendations() ([]AiRecommendation, error) {
return items, nil
}
// GetCrawlInfoStats returns per-source crawler statistics read from the
// crawl_info_add table: for each source, the row with the most recent
// createdAt (record count, latest update time, latest publish date, and
// the last error message, if any).
//
// It returns a non-nil (possibly empty) slice on success so the result
// serializes to JSON as [] rather than null for the frontend. Errors are
// wrapped with %w so callers can unwrap them with errors.Is/errors.As.
func (a *App) GetCrawlInfoStats() ([]CrawlInfoStat, error) {
	dsn := a.GetDatabaseDSN()
	if dsn == "" {
		return nil, fmt.Errorf("数据库配置未加载")
	}
	db, err := sql.Open("mysql", dsn)
	if err != nil {
		return nil, fmt.Errorf("连接数据库失败: %w", err)
	}
	defer db.Close()
	// sql.Open does not dial; verify the database is reachable up front.
	if err := db.Ping(); err != nil {
		return nil, fmt.Errorf("数据库连接测试失败: %w", err)
	}
	// For each source, select the row carrying the newest createdAt via a
	// correlated subquery.
	// NOTE(review): if two rows of one source share the same MAX(createdAt),
	// both are returned — confirm createdAt is unique per source.
	query := `SELECT
		c1.source,
		c1.count,
		c1.createdAt as latestUpdate,
		c1.latestPublishDate,
		c1.error
	FROM crawl_info_add c1
	WHERE c1.createdAt = (
		SELECT MAX(c2.createdAt)
		FROM crawl_info_add c2
		WHERE c2.source = c1.source
	)
	ORDER BY c1.source`
	rows, err := db.Query(query)
	if err != nil {
		return nil, fmt.Errorf("查询失败: %w", err)
	}
	defer rows.Close()
	// Non-nil so an empty result encodes as [] in JSON, not null.
	stats := make([]CrawlInfoStat, 0)
	for rows.Next() {
		var (
			stat CrawlInfoStat
			// NULLable DATETIME columns; scanning into sql.NullTime
			// presumably requires parseTime=true in the DSN — TODO confirm.
			latestUpdate, latestPublished sql.NullTime
			errText                       sql.NullString
		)
		if err := rows.Scan(&stat.Source, &stat.Count, &latestUpdate, &latestPublished, &errText); err != nil {
			return nil, fmt.Errorf("扫描行失败: %w", err)
		}
		// The struct's zero value "" already covers the NULL cases, so only
		// the Valid branches need assignments.
		if latestUpdate.Valid {
			stat.LatestUpdate = latestUpdate.Time.Format("2006-01-02 15:04:05")
		}
		if latestPublished.Valid {
			stat.LatestPublishDate = latestPublished.Time.Format("2006-01-02 15:04:05")
		}
		stat.Error = errText.String // "" when the column is NULL
		stats = append(stats, stat)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("遍历行失败: %w", err)
	}
	return stats, nil
}