<tr[^>]*onclick="urlChange\('([^']+)','([^']+)'\)"[^>]*>[\s\S]*?<td[^>]*><span[^>]*>([^<]+)<\/span><\/td>[\s\S]*?<td[^>]*><span[^>]*>\s*(\d{4}-\d{2}-\d{2})\s*<\/span><\/td>[\s\S]*?<\/tr>/gs;
+ const regex =
+ /<tr[^>]*onclick="urlChange\('([^']+)','([^']+)'\)"[^>]*>[\s\S]*?<td[^>]*><span[^>]*>([^<]+)<\/span><\/td>[\s\S]*?<td[^>]*><span[^>]*>\s*(\d{4}-\d{2}-\d{2})\s*<\/span><\/td>[\s\S]*?<\/tr>/gs;
- let match;
+ let match: RegExpExecArray | null;
while ((match = regex.exec(html)) !== null) {
- const ggGuid = match[1]?.trim();
- const gcGuid = match[2]?.trim();
- const title = match[3]?.trim();
- const dateStr = match[4]?.trim();
+ const ggGuid = match[1]?.trim() ?? '';
+ const gcGuid = match[2]?.trim() ?? '';
+ const title = match[3]?.trim() ?? '';
+ const dateStr = match[4]?.trim() ?? '';
if (title && ggGuid && gcGuid) {
const fullUrl = `${this.baseUrl}/cgxx/ggDetail?gcGuid=${gcGuid}&ggGuid=${ggGuid}`;
results.push({
title,
publishDate: dateStr ? new Date(dateStr) : new Date(),
- url: fullUrl.replace(/\/\//g, '/')
+ url: fullUrl.replace(/\/\//g, '/'),
});
}
}
return results;
- }
+ },
};
diff --git a/src/crawler/services/szecp_target.spec.ts b/src/crawler/services/szecp_target.spec.ts
index 26455f1..a6c3f63 100644
--- a/src/crawler/services/szecp_target.spec.ts
+++ b/src/crawler/services/szecp_target.spec.ts
@@ -29,7 +29,7 @@ describe('SzecpCrawler Real Site Test', () => {
if (proxyArgs.length > 0) {
console.log('Using proxy:', proxyArgs.join(' '));
}
-
+
browser = await puppeteer.launch({
headless: false, // Run in non-headless mode
args: [
@@ -40,14 +40,14 @@ describe('SzecpCrawler Real Site Test', () => {
'--disable-infobars',
...proxyArgs,
],
- defaultViewport: null
+ defaultViewport: null,
});
});
afterAll(async () => {
if (browser) {
// Keep open for a few seconds after test to see result
- await new Promise(r => setTimeout(r, 50000));
+ await new Promise((r) => setTimeout(r, 50000));
await browser.close();
}
});
@@ -56,29 +56,33 @@ describe('SzecpCrawler Real Site Test', () => {
console.log(`
Starting crawl for: ${SzecpCrawler.name}`);
console.log(`Target URL: ${SzecpCrawler.url}`);
-
+
const results = await SzecpCrawler.crawl(browser);
-
+
console.log(`
Successfully found ${results.length} items:
`);
console.log('----------------------------------------');
results.forEach((item, index) => {
- console.log(`${index + 1}. [${item.publishDate.toLocaleDateString()}] ${item.title}`);
+ console.log(
+ `${index + 1}. [${item.publishDate.toLocaleDateString()}] ${item.title}`,
+ );
console.log(` Link: ${item.url}`);
console.log('----------------------------------------');
});
expect(results).toBeDefined();
expect(Array.isArray(results)).toBeTruthy();
-
+
if (results.length === 0) {
- console.warn('Warning: No items found. Observe browser window to see if content is loading or if there is a verification challenge.');
+ console.warn(
+ 'Warning: No items found. Observe browser window to see if content is loading or if there is a verification challenge.',
+ );
} else {
- const firstItem = results[0];
- expect(firstItem.title).toBeTruthy();
- expect(firstItem.url).toMatch(/^https?:\/\//);
- expect(firstItem.publishDate).toBeInstanceOf(Date);
+ const firstItem = results[0];
+ expect(firstItem.title).toBeTruthy();
+ expect(firstItem.url).toMatch(/^https?:\/\//);
+ expect(firstItem.publishDate).toBeInstanceOf(Date);
}
});
});
diff --git a/src/crawler/services/szecp_target.ts b/src/crawler/services/szecp_target.ts
index 498f59f..16b7ce2 100644
--- a/src/crawler/services/szecp_target.ts
+++ b/src/crawler/services/szecp_target.ts
@@ -12,13 +12,13 @@ async function simulateHumanMouseMovement(page: puppeteer.Page) {
for (let i = 0; i < movements; i++) {
const x = Math.floor(Math.random() * viewport.width);
const y = Math.floor(Math.random() * viewport.height);
-
+
await page.mouse.move(x, y, {
- steps: 10 + Math.floor(Math.random() * 20) // 10-30步,使移动更平滑
+ steps: 10 + Math.floor(Math.random() * 20), // 10-30步,使移动更平滑
});
-
+
// 随机停顿 100-500ms
- await new Promise(r => setTimeout(r, 100 + Math.random() * 400));
+ await new Promise((r) => setTimeout(r, 100 + Math.random() * 400));
}
}
@@ -28,23 +28,29 @@ async function simulateHumanScrolling(page: puppeteer.Page) {
for (let i = 0; i < scrollCount; i++) {
const scrollDistance = 100 + Math.floor(Math.random() * 400); // 100-500px
-
+
await page.evaluate((distance) => {
window.scrollBy({
top: distance,
- behavior: 'smooth'
+ behavior: 'smooth',
});
}, scrollDistance);
// 随机停顿 500-1500ms
- await new Promise(r => setTimeout(r, 500 + Math.random() * 1000));
+ await new Promise((r) => setTimeout(r, 500 + Math.random() * 1000));
}
// 滚动回顶部
await page.evaluate(() => {
window.scrollTo({ top: 0, behavior: 'smooth' });
});
- await new Promise(r => setTimeout(r, 1000));
+ await new Promise((r) => setTimeout(r, 1000));
+}
+
+interface SzecpCrawlerType {
+ name: string;
+ url: string;
+ baseUrl: string;
}
export const SzecpCrawler = {
@@ -52,7 +58,10 @@ export const SzecpCrawler = {
url: 'https://www.szecp.com.cn/first_zbgg/index.html',
baseUrl: 'https://www.szecp.com.cn/',
- async crawl(browser: puppeteer.Browser): Promise<ChdtpResult[]> {
+ async crawl(
+ this: SzecpCrawlerType,
+ browser: puppeteer.Browser,
+ ): Promise<ChdtpResult[]> {
const logger = new Logger('SzecpCrawler');
const page = await browser.newPage();
@@ -65,10 +74,14 @@ export const SzecpCrawler = {
await page.evaluateOnNewDocument(() => {
Object.defineProperty(navigator, 'webdriver', { get: () => false });
Object.defineProperty(navigator, 'language', { get: () => 'zh-CN' });
- Object.defineProperty(navigator, 'plugins', { get: () => [1, 2, 3, 4, 5] });
+ Object.defineProperty(navigator, 'plugins', {
+ get: () => [1, 2, 3, 4, 5],
+ });
});
- await page.setUserAgent('Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/143.0.0.0 Safari/537.36');
+ await page.setUserAgent(
+ 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/143.0.0.0 Safari/537.36',
+ );
await page.setViewport({ width: 1920, height: 1080 });
const allResults: ChdtpResult[] = [];
@@ -82,7 +95,7 @@ export const SzecpCrawler = {
// 模拟人类行为
logger.log('Simulating human mouse movements...');
await simulateHumanMouseMovement(page);
-
+
logger.log('Simulating human scrolling...');
await simulateHumanScrolling(page);
@@ -90,52 +103,69 @@ export const SzecpCrawler = {
logger.log('Clicking search button...');
await page.waitForSelector('.szb-zbcgSearch-key-v1', { timeout: 60000 });
await page.click('.szb-zbcgSearch-key-v1');
- await new Promise(r => setTimeout(r, 3000)); // Wait for results to load
+ await new Promise((r) => setTimeout(r, 3000)); // Wait for results to load
while (currentPage <= maxPages) {
logger.log(`Processing page ${currentPage}...`);
// Wait for content to load
- await page.waitForFunction(() => {
- return document.querySelectorAll('.szb-zbcgTable-other').length > 0;
- }, { timeout: 60000 }).catch(() => logger.warn('Content not found. Site might be slow.'));
+ await page
+ .waitForFunction(
+ () => {
+ return (
+ document.querySelectorAll('.szb-zbcgTable-other').length > 0
+ );
+ },
+ { timeout: 60000 },
+ )
+ .catch(() => logger.warn('Content not found. Site might be slow.'));
const pageResults = await page.evaluate((baseUrl) => {
// Extract from table rows
- const items = Array.from(document.querySelectorAll('.szb-zbcgTable-other'));
- return items.map(item => {
- const divs = item.querySelectorAll('div');
- if (divs.length >= 5) {
- const titleLink = divs[1].querySelector('a');
- const title = titleLink?.textContent?.trim() || '';
- const dateStr = divs[4].textContent?.trim() || '';
- const href = titleLink?.getAttribute('href') || '';
+ const items = Array.from(
+ document.querySelectorAll('.szb-zbcgTable-other'),
+ );
+ return items
+ .map((item) => {
+ const divs = item.querySelectorAll('div');
+ if (divs.length >= 5) {
+ const titleLink = divs[1].querySelector('a');
+ const title = titleLink?.textContent?.trim() || '';
+ const dateStr = divs[4].textContent?.trim() || '';
+ const href = titleLink?.getAttribute('href') || '';
- if (title.length < 5) return null; // Filter noise
+ if (title.length < 5) return null; // Filter noise
- // Construct full URL if href is relative
- const url = href.startsWith('http') ? href : `${baseUrl}${href}`;
+ // Construct full URL if href is relative
+ const url = href.startsWith('http')
+ ? href
+ : `${baseUrl}${href}`;
- return {
- title,
- dateStr,
- url
- };
- }
- return null;
- }).filter(i => i !== null);
+ return {
+ title,
+ dateStr,
+ url,
+ };
+ }
+ return null;
+ })
+ .filter((i) => i !== null);
}, this.baseUrl);
if (pageResults.length === 0) {
- logger.warn(`No results found on page ${currentPage}. Extraction failed.`);
+ logger.warn(
+ `No results found on page ${currentPage}. Extraction failed.`,
+ );
break;
}
- allResults.push(...pageResults.map(r => ({
- title: r!.title,
- publishDate: new Date(r!.dateStr),
- url: r!.url.replace(/\/\//g, '/')
- })));
+ allResults.push(
+ ...pageResults.map((r) => ({
+ title: r.title,
+ publishDate: new Date(r.dateStr),
+ url: r.url.replace(/\/\//g, '/'),
+ })),
+ );
logger.log(`Extracted ${pageResults.length} items.`);
@@ -144,27 +174,30 @@ export const SzecpCrawler = {
if (!nextButton) break;
await nextButton.click();
- await new Promise(r => setTimeout(r, 3000));
-
+ await new Promise((r) => setTimeout(r, 3000));
+
// 模拟人类行为
logger.log('Simulating human mouse movements...');
await simulateHumanMouseMovement(page);
-
+
logger.log('Simulating human scrolling...');
await simulateHumanScrolling(page);
-
+
currentPage++;
}
return allResults;
-
} catch (error) {
- logger.error(`Crawl failed: ${error.message}`);
+ const errorMessage =
+ error instanceof Error ? error.message : String(error);
+ logger.error(`Crawl failed: ${errorMessage}`);
return allResults;
} finally {
if (page) await page.close();
}
},
- extract() { return []; }
+ extract() {
+ return [];
+ },
};
diff --git a/src/database/database.module.ts b/src/database/database.module.ts
index ea1112a..a067ee1 100644
--- a/src/database/database.module.ts
+++ b/src/database/database.module.ts
@@ -12,7 +12,11 @@ import { CrawlInfoAdd } from '../crawler/entities/crawl-info-add.entity';
imports: [ConfigModule],
inject: [ConfigService],
useFactory: (configService: ConfigService) => ({
- type: configService.get('DATABASE_TYPE', 'mariadb'),
+ type:
+ (configService.get('DATABASE_TYPE', 'mariadb') as
+ | 'mariadb'
+ | 'mysql'
+ | 'postgres') || 'mariadb',
host: configService.get('DATABASE_HOST', 'localhost'),
port: configService.get('DATABASE_PORT', 3306),
username: configService.get('DATABASE_USERNAME', 'root'),
diff --git a/src/keywords/keyword.entity.ts b/src/keywords/keyword.entity.ts
index 29496df..e79dd96 100644
--- a/src/keywords/keyword.entity.ts
+++ b/src/keywords/keyword.entity.ts
@@ -1,4 +1,10 @@
-import { Entity, PrimaryGeneratedColumn, Column, CreateDateColumn, UpdateDateColumn } from 'typeorm';
+import {
+ Entity,
+ PrimaryGeneratedColumn,
+ Column,
+ CreateDateColumn,
+ UpdateDateColumn,
+} from 'typeorm';
@Entity('keywords')
export class Keyword {
diff --git a/src/main.ts b/src/main.ts
index c5a45be..3d65095 100644
--- a/src/main.ts
+++ b/src/main.ts
@@ -6,19 +6,20 @@ async function bootstrap() {
const app = await NestFactory.create(AppModule, {
bodyParser: true,
});
-
+
// 使用自定义日志服务
const logger = await app.resolve(CustomLogger);
app.useLogger(logger);
-
+
// 增加请求体大小限制(默认 100kb,增加到 50mb)
- const express = require('express');
+ // eslint-disable-next-line @typescript-eslint/no-require-imports
+ const express = require('express') as typeof import('express');
app.use(express.json({ limit: '50mb' }));
app.use(express.urlencoded({ limit: '50mb', extended: true }));
-
+
// 启用 CORS
app.enableCors();
-
+
await app.listen(process.env.PORT ?? 3000);
}
-bootstrap();
+void bootstrap();
diff --git a/src/scripts/ai-recommendations.ts b/src/scripts/ai-recommendations.ts
index f4d145d..b10a7a9 100644
--- a/src/scripts/ai-recommendations.ts
+++ b/src/scripts/ai-recommendations.ts
@@ -16,7 +16,9 @@ async function generateAiRecommendations() {
try {
// 获取 BidItem 的 repository 和 AiService
- const bidItemRepository = app.get<Repository<BidItem>>(getRepositoryToken(BidItem));
+ const bidItemRepository = app.get<Repository<BidItem>>(
+ getRepositoryToken(BidItem),
+ );
const aiService = app.get(AiService);
logger.log('开始查询 bid_items 表...');
@@ -27,11 +29,13 @@ async function generateAiRecommendations() {
threeDaysAgo.setHours(0, 0, 0, 0);
// 使用本地时间格式化输出,避免时区问题
- const localDateStr = threeDaysAgo.toLocaleDateString('zh-CN', {
- year: 'numeric',
- month: '2-digit',
- day: '2-digit'
- }).replace(/\//g, '-');
+ const localDateStr = threeDaysAgo
+ .toLocaleDateString('zh-CN', {
+ year: 'numeric',
+ month: '2-digit',
+ day: '2-digit',
+ })
+ .replace(/\//g, '-');
logger.log(`查询起始日期: ${localDateStr}`);
// 查询起始日期3天前,截止日期不限制的所有记录
@@ -50,8 +54,8 @@ async function generateAiRecommendations() {
}
// 提取 title
- const bidData = bidItems.map(item => ({
- title: item.title
+ const bidData = bidItems.map((item) => ({
+ title: item.title,
}));
logger.log('开始调用 AI 获取推荐...');
diff --git a/src/scripts/crawl.ts b/src/scripts/crawl.ts
index b97a78a..4a4428a 100644
--- a/src/scripts/crawl.ts
+++ b/src/scripts/crawl.ts
@@ -5,19 +5,19 @@ import { CustomLogger } from '../common/logger/logger.service';
async function runCrawler() {
const app = await NestFactory.createApplicationContext(AppModule);
-
+
// 设置自定义 logger,使 NestJS 框架日志也输出到文件
const logger = await app.resolve(CustomLogger);
app.useLogger(logger);
logger.setContext('CrawlScript');
-
+
try {
const crawlerService = await app.resolve(BidCrawlerService);
-
+
logger.log('Starting crawler...');
await crawlerService.crawlAll();
logger.log('Crawler completed successfully');
-
+
await app.close();
process.exit(0);
} catch (error) {
diff --git a/src/scripts/remove-duplicates.ts b/src/scripts/remove-duplicates.ts
index 0a9cec0..9ce63be 100644
--- a/src/scripts/remove-duplicates.ts
+++ b/src/scripts/remove-duplicates.ts
@@ -15,7 +15,9 @@ async function removeDuplicates() {
try {
// 获取 BidItem 的 repository
- const bidItemRepository = app.get<Repository<BidItem>>(getRepositoryToken(BidItem));
+ const bidItemRepository = app.get<Repository<BidItem>>(
+ getRepositoryToken(BidItem),
+ );
logger.log('开始查找重复的title...');
@@ -56,10 +58,12 @@ async function removeDuplicates() {
const itemsToDelete = items.slice(1);
if (itemsToDelete.length > 0) {
- const idsToDelete = itemsToDelete.map(item => item.id);
+ const idsToDelete = itemsToDelete.map((item) => item.id);
const deleteResult = await bidItemRepository.delete(idsToDelete);
totalDeleted += deleteResult.affected || 0;
- logger.log(` 删除了 ${deleteResult.affected} 条重复记录,保留ID: ${items[0].id} (最晚创建)`);
+ logger.log(
+ ` 删除了 ${deleteResult.affected} 条重复记录,保留ID: ${items[0].id} (最晚创建)`,
+ );
}
}
diff --git a/src/scripts/sync.ts b/src/scripts/sync.ts
index 6ca6e92..63ba8e6 100644
--- a/src/scripts/sync.ts
+++ b/src/scripts/sync.ts
@@ -8,7 +8,7 @@ import { CrawlInfoAdd } from '../crawler/entities/crawl-info-add.entity';
// 主数据库配置
const masterDbConfig: DataSourceOptions = {
- type: process.env.DATABASE_TYPE as any || 'mariadb',
+ type: (process.env.DATABASE_TYPE as any) || 'mariadb',
host: process.env.DATABASE_HOST || 'localhost',
port: parseInt(process.env.DATABASE_PORT || '3306'),
username: process.env.DATABASE_USERNAME || 'root',
@@ -20,7 +20,7 @@ const masterDbConfig: DataSourceOptions = {
// Slave 数据库配置
const slaveDbConfig: DataSourceOptions = {
- type: process.env.SLAVE_DATABASE_TYPE as any || 'mariadb',
+ type: (process.env.SLAVE_DATABASE_TYPE as any) || 'mariadb',
host: process.env.SLAVE_DATABASE_HOST || 'localhost',
port: parseInt(process.env.SLAVE_DATABASE_PORT || '3306'),
username: process.env.SLAVE_DATABASE_USERNAME || 'root',
@@ -94,12 +94,17 @@ async function createDatabaseIfNotExists(config: DataSourceOptions) {
password: (config as any).password,
});
- await connection.query(`CREATE DATABASE IF NOT EXISTS \`${(config as any).database}\``);
+ await connection.query(
+ `CREATE DATABASE IF NOT EXISTS \`${(config as any).database}\``,
+ );
await connection.end();
}
// 同步表结构
-async function syncSchema(masterDataSource: DataSource, slaveDataSource: DataSource): Promise<DataSource> {
+async function syncSchema(
+ masterDataSource: DataSource,
+ slaveDataSource: DataSource,
+): Promise<DataSource> {
logger.log('开始同步表结构...');
// 获取主数据库的所有表
@@ -137,8 +142,12 @@ async function syncSchema(masterDataSource: DataSource, slaveDataSource: DataSou
if (tableExists[0].count > 0) {
// 表存在,先备份数据到临时表
logger.log(`备份表 ${tableName} 的数据到 ${tempTableName}...`);
- await slaveDataSource.query(`CREATE TABLE ${tempTableName} AS SELECT * FROM \`${tableName}\``);
- logger.log(`备份完成,共备份 ${await slaveDataSource.query(`SELECT COUNT(*) as count FROM ${tempTableName}`).then(r => r[0].count)} 条记录`);
+ await slaveDataSource.query(
+ `CREATE TABLE ${tempTableName} AS SELECT * FROM \`${tableName}\``,
+ );
+ logger.log(
+ `备份完成,共备份 ${await slaveDataSource.query(`SELECT COUNT(*) as count FROM ${tempTableName}`).then((r) => r[0].count)} 条记录`,
+ );
}
// 删除 slave 数据库中的表(如果存在)
@@ -151,7 +160,7 @@ async function syncSchema(masterDataSource: DataSource, slaveDataSource: DataSou
if (tableExists[0].count > 0) {
try {
logger.log(`从 ${tempTableName} 恢复数据到 ${tableName}...`);
-
+
// 获取临时表的列名
const columns = await slaveDataSource.query(`
SELECT COLUMN_NAME
@@ -159,18 +168,22 @@ async function syncSchema(masterDataSource: DataSource, slaveDataSource: DataSou
WHERE TABLE_SCHEMA = '${(slaveDbConfig as any).database}'
AND TABLE_NAME = '${tempTableName}'
`);
-
- const columnNames = columns.map((c: any) => `\`${c.COLUMN_NAME}\``).join(', ');
-
+
+ const columnNames = columns
+ .map((c: any) => `\`${c.COLUMN_NAME}\``)
+ .join(', ');
+
// 将数据从临时表插入到新表
await slaveDataSource.query(`
INSERT INTO \`${tableName}\` (${columnNames})
SELECT ${columnNames} FROM ${tempTableName}
`);
-
- const restoredCount = await slaveDataSource.query(`SELECT COUNT(*) as count FROM \`${tableName}\``);
+
+ const restoredCount = await slaveDataSource.query(
+ `SELECT COUNT(*) as count FROM \`${tableName}\``,
+ );
logger.log(`数据恢复完成,共恢复 ${restoredCount[0].count} 条记录`);
-
+
// 删除临时表
await slaveDataSource.query(`DROP TABLE IF EXISTS ${tempTableName}`);
} catch (error) {
@@ -181,13 +194,13 @@ async function syncSchema(masterDataSource: DataSource, slaveDataSource: DataSou
}
logger.log('表结构同步完成');
-
+
// 重新初始化 slave 数据库连接以清除 TypeORM 元数据缓存
logger.log('重新初始化 slave 数据库连接...');
await slaveDataSource.destroy();
await slaveDataSource.initialize();
logger.log('Slave 数据库连接重新初始化完成');
-
+
return slaveDataSource;
}
@@ -227,7 +240,12 @@ async function syncDatabase() {
let totalSynced = 0;
for (const table of tables) {
- const count = await syncTable(masterDataSource, slaveDataSource, table.entity, table.name);
+ const count = await syncTable(
+ masterDataSource,
+ slaveDataSource,
+ table.entity,
+ table.name,
+ );
totalSynced += count;
}
diff --git a/src/scripts/update-source.ts b/src/scripts/update-source.ts
index a70f551..94f4caa 100644
--- a/src/scripts/update-source.ts
+++ b/src/scripts/update-source.ts
@@ -15,7 +15,9 @@ async function updateSource() {
try {
// 获取 BidItem 的 repository
- const bidItemRepository = app.get<Repository<BidItem>>(getRepositoryToken(BidItem));
+ const bidItemRepository = app.get<Repository<BidItem>>(
+ getRepositoryToken(BidItem),
+ );
const oldSource = '北京电力交易平台';
const newSource = '北京京能电子商务平台';
diff --git a/widget/looker/sys_run/go.mod b/widget/looker/sys_run/go.mod
new file mode 100644
index 0000000..3fc2a65
--- /dev/null
+++ b/widget/looker/sys_run/go.mod
@@ -0,0 +1,17 @@
+module systray_run
+
+go 1.23
+
+require github.com/getlantern/systray v1.2.2
+
+require (
+ github.com/getlantern/context v0.0.0-20190109183933-c447772a6520 // indirect
+ github.com/getlantern/errors v0.0.0-20190325191628-abdb3e3e36f7 // indirect
+ github.com/getlantern/golog v0.0.0-20190830074920-4ef2e798c2d7 // indirect
+ github.com/getlantern/hex v0.0.0-20190417191902-c6586a6fe0b7 // indirect
+ github.com/getlantern/hidden v0.0.0-20190325191715-f02dbb02be55 // indirect
+ github.com/getlantern/ops v0.0.0-20190325191751-d70cb0d6f85f // indirect
+ github.com/go-stack/stack v1.8.0 // indirect
+ github.com/oxtoacart/bpool v0.0.0-20190530202638-03653db5a59c // indirect
+ golang.org/x/sys v0.1.0 // indirect
+)
diff --git a/widget/looker/sys_run/go.sum b/widget/looker/sys_run/go.sum
new file mode 100644
index 0000000..4c056eb
--- /dev/null
+++ b/widget/looker/sys_run/go.sum
@@ -0,0 +1,32 @@
+github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/getlantern/context v0.0.0-20190109183933-c447772a6520 h1:NRUJuo3v3WGC/g5YiyF790gut6oQr5f3FBI88Wv0dx4=
+github.com/getlantern/context v0.0.0-20190109183933-c447772a6520/go.mod h1:L+mq6/vvYHKjCX2oez0CgEAJmbq1fbb/oNJIWQkBybY=
+github.com/getlantern/errors v0.0.0-20190325191628-abdb3e3e36f7 h1:6uJ+sZ/e03gkbqZ0kUG6mfKoqDb4XMAzMIwlajq19So=
+github.com/getlantern/errors v0.0.0-20190325191628-abdb3e3e36f7/go.mod h1:l+xpFBrCtDLpK9qNjxs+cHU6+BAdlBaxHqikB6Lku3A=
+github.com/getlantern/golog v0.0.0-20190830074920-4ef2e798c2d7 h1:guBYzEaLz0Vfc/jv0czrr2z7qyzTOGC9hiQ0VC+hKjk=
+github.com/getlantern/golog v0.0.0-20190830074920-4ef2e798c2d7/go.mod h1:zx/1xUUeYPy3Pcmet8OSXLbF47l+3y6hIPpyLWoR9oc=
+github.com/getlantern/hex v0.0.0-20190417191902-c6586a6fe0b7 h1:micT5vkcr9tOVk1FiH8SWKID8ultN44Z+yzd2y/Vyb0=
+github.com/getlantern/hex v0.0.0-20190417191902-c6586a6fe0b7/go.mod h1:dD3CgOrwlzca8ed61CsZouQS5h5jIzkK9ZWrTcf0s+o=
+github.com/getlantern/hidden v0.0.0-20190325191715-f02dbb02be55 h1:XYzSdCbkzOC0FDNrgJqGRo8PCMFOBFL9py72DRs7bmc=
+github.com/getlantern/hidden v0.0.0-20190325191715-f02dbb02be55/go.mod h1:6mmzY2kW1TOOrVy+r41Za2MxXM+hhqTtY3oBKd2AgFA=
+github.com/getlantern/ops v0.0.0-20190325191751-d70cb0d6f85f h1:wrYrQttPS8FHIRSlsrcuKazukx/xqO/PpLZzZXsF+EA=
+github.com/getlantern/ops v0.0.0-20190325191751-d70cb0d6f85f/go.mod h1:D5ao98qkA6pxftxoqzibIBBrLSUli+kYnJqrgBf9cIA=
+github.com/getlantern/systray v1.2.2 h1:dCEHtfmvkJG7HZ8lS/sLklTH4RKUcIsKrAD9sThoEBE=
+github.com/getlantern/systray v1.2.2/go.mod h1:pXFOI1wwqwYXEhLPm9ZGjS2u/vVELeIgNMY5HvhHhcE=
+github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk=
+github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
+github.com/lxn/walk v0.0.0-20210112085537-c389da54e794/go.mod h1:E23UucZGqpuUANJooIbHWCufXvOcT6E7Stq81gU+CSQ=
+github.com/lxn/win v0.0.0-20210218163916-a377121e959e/go.mod h1:KxxjdtRkfNoYDCUP5ryK7XJJNTnpC8atvtmTheChOtk=
+github.com/oxtoacart/bpool v0.0.0-20190530202638-03653db5a59c h1:rp5dCmg/yLR3mgFuSOe4oEnDDmGLROTvMragMUXpTQw=
+github.com/oxtoacart/bpool v0.0.0-20190530202638-03653db5a59c/go.mod h1:X07ZCGwUbLaax7L0S3Tw4hpejzu63ZrrQiUe6W0hcy0=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+golang.org/x/sys v0.0.0-20201018230417-eeed37f84f13/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.1.0 h1:kunALQeHf1/185U1i0GOB/fy1IPRDDpuoOOqRReG57U=
+golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+gopkg.in/Knetic/govaluate.v3 v3.0.0/go.mod h1:csKLBORsPbafmSCGTEh3U7Ozmsuq8ZSIlKk1bcqph0E=