#!/usr/bin/env python3
"""
Patch Akiflow's app.asar for Linux integration.

Patches applied:
 1. (main process) Write tray title to ~/.cache/akiflow/tray-status.json
 2. (main process) Watch ~/.cache/akiflow/toggle-tray to toggle tray window
 3. (renderer) Remove IS_MAC gate so tray title updates fire on Linux too

Usage: python3 patch-main.py [--dry-run]
"""

import struct
import json
import hashlib
import shutil
import sys
import os
import re
from pathlib import Path

# Prefer an app.asar in the current working directory (manual invocation);
# otherwise fall back to the copy inside this repository's build tree.
if (Path.cwd() / "app.asar").exists():
    ASAR_PATH = Path.cwd() / "app.asar"
else:
    ASAR_PATH = Path(__file__).parent / "src" / "app" / "resources" / "app.asar"
BACKUP_PATH = ASAR_PATH.with_suffix(".asar.bak")

# ── Patches for publish/main.js ──────────────────────────────────────────
#
# Patterns use regex with capture groups so minified variable names (which
# change each release) don't cause patch failures.


def _tray_writer_repl(m: re.Match) -> str:
    """Build the replacement JS for the minified "tray.setTitle" IPC handler.

    *m* is the match produced by the "Tray status JSON writer" pattern below;
    groups 1-3 capture the minified names of the handler's two parameters and
    the tray object. The emitted handler keeps the original behaviour (assign
    the title, call updateTitleAndIcon) and additionally best-effort-writes
    {title, hasEvent, timestamp} to ~/.cache/akiflow/tray-status.json,
    swallowing any filesystem error in a JS try/catch.
    """
    e_var, t_var, x_var = m.group(1), m.group(2), m.group(3)
    return (
        f'ipcMain.handle("tray.setTitle",(({e_var},{t_var})=>{{{x_var}.title={t_var},'
        f'{x_var}.updateTitleAndIcon({x_var}.title);'
        'try{const _d=require("path").join(require("os").homedir(),".cache","akiflow");'
        'require("fs").mkdirSync(_d,{recursive:!0});'
        f'require("fs").writeFileSync(require("path").join(_d,"tray-status.json"),'
        f'JSON.stringify({{title:{t_var}||"",hasEvent:!!{t_var},timestamp:Date.now()}}))'
        '}catch(_e){}}))'
    )


# Each patch dict uses either a regex "pattern" (with a string-or-callable
# "replacement") or a literal "original"/"replacement" pair; "check_applied"
# is a substring used to detect an already-patched file, and "required"
# presumably controls whether a miss aborts patching — confirm against the
# (not visible here) apply loop.
MAIN_PATCHES = [
    {
        "name": "Tray status JSON writer",
        "pattern": r'ipcMain\.handle\("tray\.setTitle",\(\((\w+),(\w+)\)=>\{(\w+)\.title=\2,\3\.updateTitleAndIcon\(\3\.title\)\}\)\)',
        "replacement": _tray_writer_repl,
        "check_applied": "tray-status.json",
        "required": True,
    },
    {
        "name": "Tray window toggle watcher",
        # Uses process.platform instead of a minified IS_LINUX variable.
        # Uses require('electron').screen instead of a minified electron var.
        # The injected JS watches ~/.cache/akiflow/toggle-tray (poll every
        # 500 ms); on change it hides the tray window if visible, otherwise
        # re-positions it near the {x, y} point optionally written into the
        # watched file (clamped to the display's workArea) and shows it.
        "original": 'globalThis.tray.setToolTip("Akiflow")',
        "replacement": (
            'globalThis.tray.setToolTip("Akiflow"),'
            "(()=>{if(process.platform===\"linux\"){const _fs=require(\"fs\"),_tp=require(\"path\").join(require(\"os\").homedir(),"
            '".cache","akiflow","toggle-tray");'
            "try{_fs.mkdirSync(require(\"path\").dirname(_tp),{recursive:!0})}catch(_e){}"
            "_fs.watchFile(_tp,{interval:500},()=>{"
            "try{if(!globalThis.trayWindow||globalThis.trayWindow.isDestroyed())return;"
            "if(globalThis.trayWindow.isVisible()){globalThis.trayWindow._hide(\"fromWidget\");return}"
            "let _pos;try{_pos=JSON.parse(_fs.readFileSync(_tp,\"utf8\"))}catch(_e){}"
            "if(_pos&&_pos.x!=null&&_pos.y!=null){"
            "const _b=globalThis.trayWindow.getBounds(),"
            "_d=require(\"electron\").screen.getDisplayNearestPoint({x:_pos.x,y:_pos.y}),"
            "_nx=Math.max(_d.workArea.x,Math.min(_pos.x-Math.floor(_b.width/2),_d.workArea.x+_d.workArea.width-_b.width)),"
            "_ny=_pos.y-_b.height-8;"
            "globalThis.trayWindow.setBounds({x:_nx,y:_ny<_d.workArea.y?_pos.y+8:_ny,width:_b.width,height:_b.height})}"
            'globalThis.trayWindow._show("fromWidget")'
            "}catch(_e){}})}})()"
        ),
        "required": False,
    },
]

# ── Patches for publish/renderer/main.js ─────────────────────────────────

RENDERER_PATCHES = [
    {
        "name": "Remove IS_MAC gate on tray title init",
        # Matches: X.IS_MAC&&!X.IS_WEB&&(Y.store.sub(Z.atoms.trayTile,...))
        "pattern": r'(\w+)\.IS_MAC&&!\1\.IS_WEB&&(\(\w+\.store\.sub\(\w+\.atoms\.trayTile,\w+\.setTitle\),setTimeout\(\w+\.setTitle,1e3\)\))',
        "replacement": r'(\1.IS_MAC||\1.IS_LINUX)&&!\1.IS_WEB&&\2',
        "check_applied": "IS_LINUX)&&!",
        "required": True,
    },
    {
        "name": "Remove IS_MAC gate on trayTile atom",
        # Matches: trayTile=FUNC((e=>{if(!X.IS_MAC||X.IS_WEB)return null
        "pattern": r'(trayTile=\w+\(\(e=>\{if\()!(\w+)\.IS_MAC\|\|\2\.IS_WEB(\)return null)',
        "replacement": r'\1!(\2.IS_MAC||\2.IS_LINUX)||\2.IS_WEB\3',
        "check_applied": "IS_LINUX)||",
        "required": True,
    },
    {
        "name": "Increase tray title truncation from 15 to 30 chars",
        "original": 't=e.length>15?e.slice(0,15).join("")+"...":e.join("")',
        "replacement": 't=e.length>30?e.slice(0,30).join("")+"...":e.join("")',
        "required": False,
    },
]

# ── Asar helpers ──────────────────────────────────────────────────────────


def read_asar_header(f):
    """Read the asar pickle header from open binary file *f*.

    NOTE(review): this function is cut off mid-statement in the source as
    visible here — see the truncation note below before editing.
    """
    raw = f.read(16)
    if len(raw) < 16:
        raise ValueError("File too small to be a valid asar archive")
    # NOTE(review): TRUNCATED SOURCE. The visible text jumps from the middle
    # of this struct.unpack(...) call (presumably the "<4I" little-endian
    # format string — TODO confirm) straight into the reporting section of a
    # `patch()` entry point whose `def` header is not visible. Missing in
    # between: the rest of this helper, the other asar helpers referenced
    # below (collect_packed_files, set_entry, compute_integrity, build_asar),
    # and the first half of patch() (which evidently defines main_count,
    # renderer_count, patched_files, main_path/main_bytes/main_entry,
    # renderer_path/renderer_bytes/renderer_entry, orig_data_offset, dry_run
    # and total_applied). Recover that span from version control; everything
    # from here down is kept verbatim with reconstructed indentation only.
    pickle_size, header_size, pickle_str_size, json_size = struct.unpack(" 0:
        patched_files[main_path] = main_bytes
        print(f" main.js: {main_entry['size']} -> {len(main_bytes)} bytes")
    if renderer_count > 0:
        patched_files[renderer_path] = renderer_bytes
        print(f" renderer/main.js: {renderer_entry['size']} -> {len(renderer_bytes)} bytes")
    if dry_run:
        print(f"\n[DRY RUN] {total_applied} patches would be applied. No files modified.")
        sys.exit(0)

    print("Rebuilding asar ...")
    # Absolute offset + size of every file as stored in the ORIGINAL archive,
    # so unpatched content can later be streamed through unchanged.
    orig_map = {}
    for pp, entry in packed_files:
        orig_map[pp] = (orig_data_offset + int(entry["offset"]), entry["size"])
    with open(ASAR_PATH, "rb") as f:
        header, _, _ = read_asar_header(f)
    # NOTE(review): statement grouping below is reconstructed from the
    # collapsed source; only the read_asar_header(f) call demonstrably
    # belongs inside the `with` block above.
    packed_files = collect_packed_files(header)
    # Re-lay-out the data section: patched files get a new size/integrity,
    # and every entry gets a fresh sequential offset.
    new_offset = 0
    file_order = []
    for pp, entry in packed_files:
        if pp in patched_files:
            new_content = patched_files[pp]
            new_entry = {
                "size": len(new_content),
                "integrity": compute_integrity(new_content),
                "offset": str(new_offset),
            }
            set_entry(header, list(pp), new_entry)
            file_order.append((pp, True))
            new_offset += len(new_content)
        else:
            entry["offset"] = str(new_offset)
            file_order.append((pp, False))
            new_offset += entry["size"]
    header_bytes, new_data_offset = build_asar(header)
    # One-time backup of the pristine archive (never overwritten).
    if not BACKUP_PATH.exists():
        print(f" Backing up to {BACKUP_PATH}")
        shutil.copy2(ASAR_PATH, BACKUP_PATH)
    tmp_path = ASAR_PATH.with_suffix(".asar.tmp")
    with open(ASAR_PATH, "rb") as src, open(tmp_path, "wb") as dst:
        dst.write(header_bytes)
        for pp, is_patched in file_order:
            if is_patched:
                dst.write(patched_files[pp])
            else:
                # Stream the original bytes across in 8 MiB chunks.
                abs_offset, size = orig_map[pp]
                src.seek(abs_offset)
                remaining = size
                while remaining > 0:
                    chunk = src.read(min(remaining, 8 * 1024 * 1024))
                    if not chunk:
                        break
                    dst.write(chunk)
                    remaining -= len(chunk)
    # Sanity check: the rebuilt size must equal the original adjusted for the
    # header delta and the patched-content deltas, before we atomically
    # replace the archive with os.replace.
    tmp_size = os.path.getsize(tmp_path)
    orig_size = os.path.getsize(ASAR_PATH)
    size_diff = sum(len(v) - orig_map[k][1] for k, v in patched_files.items())
    expected_size = orig_size + (len(header_bytes) - orig_data_offset) + size_diff
    print(f" Original: {orig_size}, New: {tmp_size}, Expected: {expected_size}")
    if tmp_size != expected_size:
        print("ERROR: Size mismatch!")
        sys.exit(1)
    os.replace(tmp_path, ASAR_PATH)
    print(f"\nDone! {total_applied} patches applied to {ASAR_PATH}")
    if BACKUP_PATH.exists():
        print(f"Backup at {BACKUP_PATH}")


if __name__ == "__main__":
    patch()