chore: add scripts directory
scripts/ollama/simple-parallel-test.js | 297 lines | Normal file
@@ -0,0 +1,297 @@
#!/usr/bin/env node

const { Worker, isMainThread } = require('worker_threads');
const path = require('path');
const fs = require('fs');

// Simple worker code (regex backslashes are doubled so they survive template-literal evaluation)
const workerCode = `
const { parentPort, workerData } = require('worker_threads');
const fs = require('fs');

const { inputPath, outputPath } = workerData;

function simpleConsoleRemover(jsCode) {
  return jsCode.replace(/console\\.(log|error|warn|info|debug)\\s*\\([^)]*\\);?\\s*/g, '');
}

async function processFile() {
  try {
    const originalCode = fs.readFileSync(inputPath, 'utf8');
    const cleanedCode = simpleConsoleRemover(originalCode);

    // Ensure the output directory exists
    const path = require('path');
    const outputDir = path.dirname(outputPath);
    if (!fs.existsSync(outputDir)) {
      fs.mkdirSync(outputDir, { recursive: true });
    }

    fs.writeFileSync(outputPath, cleanedCode, 'utf8');

    parentPort.postMessage({
      success: true,
      inputSize: originalCode.length,
      outputSize: cleanedCode.length,
      reduction: originalCode.length - cleanedCode.length
    });
  } catch (error) {
    parentPort.postMessage({ success: false, error: error.message });
  }
}

processFile();
`;

async function testSimpleParallel() {
  console.log('🧪 Simple Parallel Processing Test');
  console.log('=================================');
  console.log('');

  if (!isMainThread) {
    console.log('⚠️ This test must be run in the main thread');
    return;
  }

  // Set up the test environment
  const testDir = path.join(__dirname, 'simple-test');
  const testOptimizedDir = testDir + '-parallel';

  // Clean up leftover directories from previous runs
  [testDir, testOptimizedDir].forEach(dir => {
    if (fs.existsSync(dir)) {
      fs.rmSync(dir, { recursive: true, force: true });
    }
  });

  fs.mkdirSync(testDir, { recursive: true });

  // Create test files
  console.log('📁 Creating test files...');
  const testFiles = [];

  for (let i = 0; i < 8; i++) {
    const fileName = `test-${i}.js`;
    const filePath = path.join(testDir, fileName);

    const testContent = `
// Test file ${i}
function test${i}() {
  console.log("Starting test ${i}");
  const data = { id: ${i}, value: Math.random() };
  console.info("Data created:", data);
  console.warn("Warning from test ${i}");
  return data;
}

console.log("Module ${i} loaded");
`;

    fs.writeFileSync(filePath, testContent);
    testFiles.push({ fileName, filePath });
  }

  console.log(`✅ Created ${testFiles.length} test files`);
  console.log('');

  // Parallel processing test
  console.log('🚀 Testing Parallel Processing...');
  const parallelStart = Date.now();

  // Create workers
  const workers = [];
  const results = [];

  for (let i = 0; i < testFiles.length; i++) {
    const { fileName, filePath } = testFiles[i];
    const outputPath = path.join(testOptimizedDir, fileName);

    const worker = new Worker(workerCode, {
      eval: true,
      workerData: { inputPath: filePath, outputPath }
    });

    workers.push(worker);

    worker.on('message', (result) => {
      results.push({ file: fileName, ...result });
      console.log(`${result.success ? '✅' : '❌'} ${fileName}`);

      if (result.success) {
        console.log(`  Size: ${result.inputSize} → ${result.outputSize} chars (${result.reduction} removed)`);
      } else {
        console.log(`  Error: ${result.error}`);
      }
    });

    worker.on('error', (error) => {
      results.push({ file: fileName, success: false, error: error.message });
      console.log(`❌ ${fileName}: ${error.message}`);
    });
  }

  // Wait for all workers to exit
  await new Promise((resolve) => {
    let completed = 0;
    const totalWorkers = workers.length;

    workers.forEach(worker => {
      worker.on('exit', () => {
        completed++;
        if (completed === totalWorkers) {
          resolve();
        }
      });
    });
  });

  const parallelTime = Date.now() - parallelStart;

  console.log('');
  console.log('📊 Parallel Results:');
  console.log(`  Processing time: ${parallelTime}ms`);
  console.log(`  Files processed: ${results.filter(r => r.success).length}/${results.length}`);

  const totalReduction = results.reduce((sum, r) => sum + (r.reduction || 0), 0);
  console.log(`  Total reduction: ${totalReduction} characters`);
  console.log(`  Avg time per file: ${(parallelTime / results.length).toFixed(1)}ms`);

  // Verify output
  console.log('');
  console.log('🔍 Verifying output...');
  if (fs.existsSync(testOptimizedDir)) {
    const outputFiles = fs.readdirSync(testOptimizedDir);
    console.log(`✅ Generated ${outputFiles.length} output files`);

    // Spot-check the first output file
    const firstOutputPath = path.join(testOptimizedDir, outputFiles[0]);
    const firstOutputContent = fs.readFileSync(firstOutputPath, 'utf8');
    const hasConsole = firstOutputContent.includes('console.');

    console.log(`  Console removal: ${hasConsole ? '❌ Failed' : '✅ Success'}`);
  }

  // Clean up only the parallel output; keep testDir so testSingleThread can reuse the same files
  console.log('');
  console.log('🧹 Cleaning up parallel output...');
  if (fs.existsSync(testOptimizedDir)) {
    fs.rmSync(testOptimizedDir, { recursive: true, force: true });
  }

  console.log('✅ Test completed!');

  return {
    totalTime: parallelTime,
    successful: results.filter(r => r.success).length,
    total: results.length,
    totalReduction
  };
}

async function testSingleThread() {
  console.log('🐌 Testing Single-Thread Processing...');

  const testDir = path.join(__dirname, 'simple-test');
  const testSingleDir = testDir + '-single';

  // Simple local processing function
  function simpleConsoleRemover(jsCode) {
    return jsCode.replace(/console\.(log|error|warn|info|debug)\s*\([^)]*\);?\s*/g, '');
  }

  const singleStart = Date.now();
  let totalReduction = 0;
  let successful = 0;

  const files = fs.readdirSync(testDir).filter(f => f.endsWith('.js'));

  for (const fileName of files) {
    const inputPath = path.join(testDir, fileName);
    const outputPath = path.join(testSingleDir, fileName);

    try {
      const originalCode = fs.readFileSync(inputPath, 'utf8');
      const cleanedCode = simpleConsoleRemover(originalCode);

      const outputDir = path.dirname(outputPath);
      if (!fs.existsSync(outputDir)) {
        fs.mkdirSync(outputDir, { recursive: true });
      }

      fs.writeFileSync(outputPath, cleanedCode, 'utf8');

      totalReduction += originalCode.length - cleanedCode.length;
      successful++;
    } catch (error) {
      console.log(`❌ ${fileName}: ${error.message}`);
    }
  }

  const singleTime = Date.now() - singleStart;

  console.log(`  Processing time: ${singleTime}ms`);
  console.log(`  Files processed: ${successful}/${files.length}`);
  console.log(`  Total reduction: ${totalReduction} characters`);
  console.log(`  Avg time per file: ${(singleTime / files.length).toFixed(1)}ms`);

  // Final cleanup of the shared test files and the single-thread output
  [testDir, testSingleDir].forEach(dir => {
    if (fs.existsSync(dir)) {
      fs.rmSync(dir, { recursive: true, force: true });
    }
  });

  return {
    totalTime: singleTime,
    successful,
    total: files.length,
    totalReduction
  };
}

// Main test entry point
async function main() {
  console.log('🎯 Worker Thread Parallel Processing Test');
  console.log('========================================');
  console.log('');

  try {
    // Test parallel processing
    const parallelResult = await testSimpleParallel();

    // Test single-thread processing (reuses the same test files)
    const singleResult = await testSingleThread();

    // Performance comparison
    console.log('');
    console.log('🏁 Performance Comparison:');
    console.log('===========================');
    console.log(`Parallel: ${parallelResult.totalTime}ms (${parallelResult.successful} files)`);
    console.log(`Single: ${singleResult.totalTime}ms (${singleResult.successful} files)`);

    if (parallelResult.totalTime > 0 && singleResult.totalTime > 0) {
      const speedup = singleResult.totalTime / parallelResult.totalTime;
      console.log(`Speedup: ${speedup.toFixed(2)}x`);

      if (speedup > 1.5) {
        console.log('🎉 Parallel processing is significantly faster!');
      } else if (speedup > 1.1) {
        console.log('✅ Parallel processing shows improvement');
      } else {
        console.log('⚠️ Overhead may outweigh benefits for small files');
      }
    }

    console.log('');
    console.log('💡 Parallel processing benefits increase with:');
    console.log('  • Larger file sizes');
    console.log('  • More files to process');
    console.log('  • More complex processing logic');
    console.log('  • Using Ollama AI processing');

  } catch (error) {
    console.error('❌ Test failed:', error.message);
    console.error(error.stack);
  }
}

if (require.main === module) {
  main().catch(console.error);
}

module.exports = { testSimpleParallel, testSingleThread };