#!/usr/bin/env python3
"""
Patch Linear's app.asar for Linux integration.

Patches applied:
  1. (main process) Stub the in-app auto-updater on Linux so it doesn't
     spam logs trying to fetch latest-linux.yml (Linear doesn't host one)
     and doesn't fight the AUR package which is the source of truth. The
     exported `updater` object on Linux still has the public surface the
     macOS Application menu expects (checkForUpdates, installUpdate,
     isMenu*, isDownloading*, isUpdateReadyToInstall, menuText), so any
     cross-platform menu code that loses its `if (isMac)` guard in a
     future release won't crash.

Usage:
    python3 patch-main.py [--dry-run]

Run from a directory containing app.asar, OR adjust ASAR_PATH below.
"""

import struct
import json
import hashlib
import shutil
import sys
import os
import re
from pathlib import Path

# Prefer an app.asar sitting in the current working directory; otherwise
# fall back to the in-tree packaging location next to this script.
if (Path.cwd() / "app.asar").exists():
    ASAR_PATH = Path.cwd() / "app.asar"
else:
    ASAR_PATH = Path(__file__).parent / "src" / "app" / "resources" / "app.asar"

BACKUP_PATH = ASAR_PATH.with_suffix(".asar.bak")

# ── Patches for out/main/index.js ────────────────────────────────────────
#
# Linear is built with electron-vite + ToDesktop and ships the compiled
# main process at out/main/index.js. Variable names are minified per
# release so we use regex with capture groups.

# Minified JS object literal that replaces the real AutoUpdater instance
# on Linux: every method is an async no-op and every flag is false (`!1`),
# matching the property surface listed in the module docstring.
_LINUX_UPDATER_STUB = (
    '{checkForUpdates:async()=>{},'
    'installUpdate:async()=>{},'
    'isMenuEnabled:!1,'
    'isDownloadingUpdate:!1,'
    'isUpdateReadyToInstall:!1,'
    'isMenuVisible:!1,'
    'menuText:""}'
)


def _autoupdater_stub_repl(m):
    """re.sub replacement callback for the AutoUpdater pattern below.

    Rebuilds the matched `var X=Y.create({...})` statement as a platform
    ternary: Linux gets the inert stub object, every other platform keeps
    the original wiring with the same minified variable names.
    """
    var_name = m.group(1)          # e.g. "Jj" — the AutoUpdater instance binding
    cls_name = m.group(2)          # e.g. "zA" — the imported AutoUpdater class
    confirmed_unload = m.group(3)
    will_quit = m.group(4)
    main_window_var = m.group(5)
    return (
        f'var {var_name}=process.platform==="linux"?'
        f'{_LINUX_UPDATER_STUB}:'
        f'{cls_name}.create({{beforeUpdate:()=>{{{confirmed_unload}=!0,{will_quit}=!0}},'
        f'getWindow:()=>{main_window_var}}})'
    )


# Each patch: a regex `pattern` applied to out/main/index.js, a
# `replacement` (string or callback), a `check_applied` substring used to
# detect an already-patched build, and a `required` flag.
MAIN_PATCHES = [
    {
        "name": "Stub AutoUpdater on Linux",
        # Original (current build): var Jj=zA.create({beforeUpdate:()=>{Kj=!0,Oj=!0},getWindow:()=>$});
        # The mainWindow var name can be a normal identifier or `$`, so allow both.
        "pattern": (
            r'var (\w+)=(\w+)\.create\(\{'
            r'beforeUpdate:\(\)=>\{(\w+)=!0,(\w+)=!0\},'
            r'getWindow:\(\)=>(\$|\w+)\}\)'
        ),
        "replacement": _autoupdater_stub_repl,
        "check_applied": 'process.platform==="linux"?{checkForUpdates',
        "required": False,  # Optional: app still works if pattern moves
    },
]

# ── Asar helpers ──────────────────────────────────────────────────────────


def read_asar_header(f):
    # Reads the 16-byte asar pickle preamble from an open binary file.
    # Callers below expect it to return (header_dict, ?, ?) — a 3-tuple.
    raw = f.read(16)
    if len(raw) < 16:
        raise ValueError("File too small to be a valid asar archive")
    # NOTE(review): SOURCE IS CORRUPTED FROM HERE. Everything between the
    # '<' of the struct format string (presumably "<4I" — four little-endian
    # uint32s, matching the four names unpacked) and a later '>' was stripped
    # from the file. The lost region contained: the remainder of this
    # function, the helper definitions referenced later (collect_packed_files,
    # compute_integrity, set_entry, build_asar), the `def patch(...)` entry
    # point with its --dry-run CLI handling, and the patch-application loop
    # that defines total_applied, patched_files, dry_run, packed_files,
    # orig_data_offset, main_path, main_bytes and main_entry. Recover this
    # region from the original file — the residue below is left untouched.
    pickle_size, header_size, pickle_str_size, json_size = struct.unpack(" 0: patched_files[main_path] = main_bytes
    # NOTE(review): this print was almost certainly indented under the lost
    # `if total_applied > 0:` guard visible in the residue above.
    print(f" out/main/index.js: {main_entry['size']} -> {len(main_bytes)} bytes")

    # NOTE(review): from here down is the surviving tail of the lost
    # `def patch(...)` body — re-indented one level accordingly.
    if dry_run:
        print(f"\n[DRY RUN] {total_applied} patches would be applied. No files modified.")
        sys.exit(0)

    print("Rebuilding asar ...")

    # Map each packed path -> absolute (offset, size) in the ORIGINAL
    # archive so unchanged payload bytes can be streamed out later.
    orig_map = {}
    for pp, entry in packed_files:
        orig_map[pp] = (orig_data_offset + int(entry["offset"]), entry["size"])

    # Re-read a fresh header, then rewrite every entry's offset for the new
    # sequential layout; patched files also get new size + integrity.
    with open(ASAR_PATH, "rb") as f:
        header, _, _ = read_asar_header(f)
    packed_files = collect_packed_files(header)

    new_offset = 0
    file_order = []
    for pp, entry in packed_files:
        if pp in patched_files:
            new_content = patched_files[pp]
            new_entry = {
                "size": len(new_content),
                "integrity": compute_integrity(new_content),
                # asar stores offsets as decimal strings, not ints
                "offset": str(new_offset),
            }
            set_entry(header, list(pp), new_entry)
            file_order.append((pp, True))
            new_offset += len(new_content)
        else:
            entry["offset"] = str(new_offset)
            file_order.append((pp, False))
            new_offset += entry["size"]

    header_bytes, new_data_offset = build_asar(header)

    # One-time backup: never clobber a .bak left by a previous run, since
    # ASAR_PATH may already be patched by then.
    if not BACKUP_PATH.exists():
        print(f" Backing up to {BACKUP_PATH}")
        shutil.copy2(ASAR_PATH, BACKUP_PATH)

    # Write to a temp file first, then atomically replace the original.
    tmp_path = ASAR_PATH.with_suffix(".asar.tmp")
    with open(ASAR_PATH, "rb") as src, open(tmp_path, "wb") as dst:
        dst.write(header_bytes)
        for pp, is_patched in file_order:
            if is_patched:
                dst.write(patched_files[pp])
            else:
                # Stream unchanged payloads from the original in 8 MiB chunks.
                abs_offset, size = orig_map[pp]
                src.seek(abs_offset)
                remaining = size
                while remaining > 0:
                    chunk = src.read(min(remaining, 8 * 1024 * 1024))
                    if not chunk:
                        break
                    dst.write(chunk)
                    remaining -= len(chunk)

    # Sanity check: new size must equal the original adjusted for the
    # header-size delta plus the net growth of the patched files.
    tmp_size = os.path.getsize(tmp_path)
    orig_size = os.path.getsize(ASAR_PATH)
    size_diff = sum(len(v) - orig_map[k][1] for k, v in patched_files.items())
    expected_size = orig_size + (len(header_bytes) - orig_data_offset) + size_diff
    print(f" Original: {orig_size}, New: {tmp_size}, Expected: {expected_size}")
    if tmp_size != expected_size:
        print("ERROR: Size mismatch!")
        sys.exit(1)

    os.replace(tmp_path, ASAR_PATH)
    print(f"\nDone! {total_applied} patches applied to {ASAR_PATH}")
    if BACKUP_PATH.exists():
        print(f"Backup at {BACKUP_PATH}")


if __name__ == "__main__":
    patch()