8b5beb1d by 柴进

:bug: 补齐遗漏的启动必需模块 (修复 Mac 打包后启动失败)

根因: audit_logger.py / config_util.py / preflight.py 这三个启动
必需模块从未被 git 追踪过. Windows 构建机上这些文件在本地磁盘,
PyInstaller 能找到, 所以 Win 包正常; 但 Mac 拉代码后根目录缺这
三个文件, PyInstaller 的 Analysis 找不到 import 链, 构建要么失败
要么运行时 ImportError.

本次补齐:
- audit_logger.py — 审计日志单例 (NDJSON 本地队列 + 异步 MySQL 上传)
- config_util.py  — 跨平台配置路径解析与安全加载
- preflight.py    — 启动门禁 (config/DB/schema 校验)
- database_schema.sql — 运维参考
- migrations/2026-04-21_add_audit_log_columns.sql — 审计表迁移
- .gitignore      — 屏蔽 .venv/build/dist/logs/__pycache__ 等,
                    防止以后再漏推业务代码时被大量噪声淹没
1 parent 1bbb3c47
# Python bytecode and editor backup files
__pycache__/
*.pyc
*.pyo
*.backup

# Virtual environments
.venv/
venv/

# Build artifacts (PyInstaller / packaging output)
build/
dist/
*.egg-info/

# IDE / tooling state
.idea/
.vscode/
.claude/

# Runtime / user data (machine-local; never commit)
logs/
errorlog/
data/
images/

# OS metadata
.DS_Store
Thumbs.db
1 """
2 审计日志本地队列 + 后台上传 worker。
3
4 核心保证:事件一旦 log_use / log_login 返回,就已经 fsync 到本地 NDJSON 文件。
5 后台 worker 负责把本地队列异步上传到 MySQL;失败指数退避重试,成功后 compaction
6 重写队列文件删除已送达行。应用退出时 flush 一次尽量送达。
7
8 公开接口:
9 - init_audit_logger(db_config, queue_path, logs_dir): 启动单例
10 - get_audit_logger(): 获取单例(未初始化返回 None)
11 - AuditLogger.log_use(...)
12 - AuditLogger.log_login(...)
13 - AuditLogger.shutdown(timeout=5.0)
14 """
15 from __future__ import annotations
16
17 import json
18 import logging
19 import os
20 import threading
21 import time
22 from datetime import datetime
23 from pathlib import Path
24 from typing import Optional
25
26 import pymysql
27 from PySide6.QtCore import QThread
28
29
logger = logging.getLogger(__name__)

# Process-wide singleton; _instance_lock keeps concurrent first callers of
# init_audit_logger() from racing the construction.
_instance: Optional["AuditLogger"] = None
_instance_lock = threading.Lock()


def init_audit_logger(db_config: dict, queue_path: Path, logs_dir: Path) -> "AuditLogger":
    """Create (at most once) and return the global AuditLogger.

    Call only after preflight has passed. Idempotent: subsequent calls
    return the existing instance untouched.
    """
    global _instance
    with _instance_lock:
        if _instance is not None:
            return _instance
        fresh = AuditLogger(db_config, queue_path, logs_dir)
        fresh.start()
        _instance = fresh
        return _instance
44
45
def get_audit_logger() -> Optional["AuditLogger"]:
    """Return the singleton created by init_audit_logger(), or None if not yet initialised."""
    return _instance
48
49
class AuditLogger:
    """
    Public facade for audit logging.

    Deliberately thin — it only:
      1. persists events durably (log_use / log_login return after fsync), and
      2. starts / stops the background upload worker.
    The actual MySQL upload lives in _UploadWorker.
    """

    def __init__(self, db_config: dict, queue_path: Path, logs_dir: Path):
        self._db_config = db_config
        self._queue_path = Path(queue_path)
        self._logs_dir = Path(logs_dir)
        # Shared with the worker: serialises appends vs. compaction rewrites.
        self._file_lock = threading.Lock()
        self._worker = _UploadWorker(
            db_config=db_config,
            queue_path=self._queue_path,
            file_lock=self._file_lock,
        )

    def start(self) -> None:
        """Create the queue directory if needed, then launch the upload worker."""
        self._queue_path.parent.mkdir(parents=True, exist_ok=True)
        self._worker.start()

    def log_use(
        self,
        user_name: str,
        device_name: str,
        prompt: str,
        result_path: Optional[str],
        status: str,
        error_message: Optional[str],
        model: Optional[str],
        duration_ms: Optional[int],
        finish_reason: Optional[str],
    ) -> None:
        """Durably record one generation event (fsynced to disk before return)."""
        self._append({
            "kind": "use_log",
            "ts": datetime.now().isoformat(timespec="seconds"),
            "user_name": user_name or "未知用户",
            "device_name": device_name or "未知设备",
            "prompt": prompt or "",
            "result_path": result_path,
            "status": status,
            "error_message": error_message,
            "model": model,
            "duration_ms": duration_ms,
            "finish_reason": finish_reason,
        })

    def log_login(
        self,
        user_name: str,
        local_ip: Optional[str],
        public_ip: Optional[str],
        device_name: Optional[str],
    ) -> None:
        """Durably record one login event (fsynced to disk before return)."""
        self._append({
            "kind": "login_log",
            "ts": datetime.now().isoformat(timespec="seconds"),
            "user_name": user_name,
            "local_ip": local_ip,
            "public_ip": public_ip,
            "device_name": device_name,
            "login_time": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
        })

    def shutdown(self, timeout: float = 5.0) -> None:
        """Call before application exit; gives the worker a chance to flush."""
        self._worker.stop(timeout)

    def _append(self, event: dict) -> None:
        """Serialise, append to the NDJSON queue, fsync, then wake the worker.

        Never propagates exceptions; failures land in the error log instead
        of being silently swallowed.
        """
        try:
            line = json.dumps(event, ensure_ascii=False, default=str)
        except Exception as e:
            logger.error(f"审计事件序列化失败,已丢弃: {e}; record keys={list(event.keys())}")
            return

        try:
            with self._file_lock:
                with open(self._queue_path, "a", encoding="utf-8") as fh:
                    fh.write(line + "\n")
                    fh.flush()
                    os.fsync(fh.fileno())
                self._worker.wake()
        except Exception as e:
            # Local disk write failing is a genuinely serious fault; degrade
            # to the error log rather than raising into the caller.
            logger.error(f"审计事件落盘失败: {e}; 事件内容已写 error 日志兜底: {line[:200]}")
140
141
class _UploadWorker(QThread):
    """Background thread: loop of drain queue file -> batch INSERT -> compaction.

    Lifecycle: started by AuditLogger.start(); stopped via stop(), which sets
    the stop flag, interrupts any backoff wait, and joins with a bounded wait.

    Durability protocol per cycle (_drain_once):
      1. snapshot-read the queue file up to its current EOF (under file_lock)
      2. upload the snapshot's lines to MySQL over one connection
      3. compaction: rewrite the file as (undelivered lines + any bytes
         appended after the snapshot), again under file_lock
    """

    def __init__(self, db_config: dict, queue_path: Path, file_lock: threading.Lock):
        super().__init__()
        self._db_config = db_config
        self._queue_path = Path(queue_path)
        # Same lock object AuditLogger._append holds while appending; keeps
        # appends and compaction rewrites from interleaving.
        self._file_lock = file_lock
        self._stop_event = threading.Event()
        self._wake_event = threading.Event()
        # Current retry delay in seconds; doubled per failed cycle, capped at 300.
        self._backoff = 1.0

    # --- external control ---

    def wake(self) -> None:
        # Called by AuditLogger._append after each fsynced event so uploads
        # start promptly instead of waiting out the idle poll.
        self._wake_event.set()

    def stop(self, timeout: float = 5.0) -> None:
        # Signal the loop, cut short any wait, then join.
        # QThread.wait() takes milliseconds, hence the conversion.
        self._stop_event.set()
        self._wake_event.set()
        self.wait(int(timeout * 1000))

    # --- main loop ---

    def run(self) -> None:
        logger.info("audit UploadWorker started")
        while not self._stop_event.is_set():
            try:
                sent, unsent = self._drain_once()
            except Exception as e:
                logger.error(f"audit drain 抛出未预期异常: {e}", exc_info=True)
                sent, unsent = 0, 1  # treat as a failed cycle

            if unsent > 0:
                # Exponential backoff while the DB is unreachable, capped at 5 min.
                self._backoff = min(self._backoff * 2, 300.0)
                logger.debug(f"audit: unsent={unsent}, backoff={self._backoff}s")
            else:
                self._backoff = 1.0

            # Skip the wait entirely if stop() was requested during the drain;
            # the final drain below gets one last delivery attempt.
            if self._stop_event.is_set():
                break

            # Failed cycle: wait out the backoff. Idle cycle: re-poll every
            # 60s. Either wait is cut short by wake().
            wait_s = self._backoff if unsent > 0 else 60.0
            self._wake_event.wait(wait_s)
            self._wake_event.clear()

        # Final best-effort drain before exiting.
        try:
            self._drain_once()
        except Exception:
            pass
        logger.info("audit UploadWorker stopped")

    # --- core drain ---

    def _drain_once(self) -> tuple[int, int]:
        """
        Snapshot-read -> batch upload -> compaction.
        Returns (sent_count, unsent_count).
        """
        # 1. Snapshot read: remember EOF at read time so concurrent appends
        #    (which only ever grow the file) can be preserved at step 3.
        with self._file_lock:
            if not self._queue_path.exists():
                return 0, 0
            eof_at_read = self._queue_path.stat().st_size
            if eof_at_read == 0:
                return 0, 0
            with open(self._queue_path, "rb") as f:
                head_bytes = f.read(eof_at_read)

        try:
            head_text = head_bytes.decode("utf-8")
        except UnicodeDecodeError as e:
            logger.error(f"audit 队列文件不是合法 UTF-8,跳过本轮: {e}")
            return 0, 1

        lines = [ln for ln in head_text.split("\n") if ln.strip()]
        if not lines:
            return 0, 0

        # 2. Connect + batch INSERT.
        try:
            conn = pymysql.connect(
                host=self._db_config["host"],
                port=int(self._db_config.get("port", 3306)),
                user=self._db_config["user"],
                password=self._db_config["password"],
                database=self._db_config["database"],
                connect_timeout=5,
                read_timeout=10,
                write_timeout=10,
                charset="utf8mb4",
            )
        except Exception as e:
            logger.warning(f"audit connect 失败,稍后重试: {e}")
            return 0, len(lines)

        sent = 0
        unsent_lines: list[str] = []
        try:
            with conn.cursor() as cursor:
                for i, line in enumerate(lines):
                    try:
                        record = json.loads(line)
                    except json.JSONDecodeError as e:
                        logger.error(f"audit 队列出现坏行,已跳过: {e}; line={line[:120]!r}")
                        # Deliberately NOT kept in unsent: a poison line must
                        # not be retried forever.
                        continue

                    try:
                        self._insert_one(cursor, record)
                        sent += 1
                    except Exception as e:
                        # First failed INSERT: keep this line and everything
                        # after it for the next cycle (preserves order).
                        logger.warning(
                            f"audit INSERT 失败(后续全部留队列): {type(e).__name__}: {e}"
                        )
                        unsent_lines = lines[i:]
                        break
            conn.commit()
        except Exception as e:
            # Commit failed: nothing was durably applied, so the whole
            # snapshot goes back into the queue.
            logger.warning(f"audit commit 失败: {e}")
            unsent_lines = lines
            sent = 0
        finally:
            try:
                conn.close()
            except Exception:
                pass

        # 3. Compaction: rewrite queue file = unsent_lines + tail appended
        #    since the snapshot.
        with self._file_lock:
            try:
                current_size = self._queue_path.stat().st_size
                tail = b""
                if current_size > eof_at_read:
                    with open(self._queue_path, "rb") as f:
                        f.seek(eof_at_read)
                        tail = f.read()

                with open(self._queue_path, "wb") as f:
                    for ln in unsent_lines:
                        f.write((ln + "\n").encode("utf-8"))
                    if tail:
                        f.write(tail)
                    f.flush()
                    os.fsync(f.fileno())
            except Exception as e:
                # Compaction failure is non-fatal: already-sent lines are
                # re-sent on the next drain. Per the original note, the audit
                # tables may then hold duplicate rows once (auto-increment id
                # means re-inserts cannot collide); only audit data — not
                # business data — is affected.
                logger.error(f"audit compaction 失败: {e}", exc_info=True)

        if sent > 0:
            logger.info(f"audit drained: sent={sent}, unsent={len(unsent_lines)}")
        return sent, len(unsent_lines)

    # --- row insertion ---

    def _insert_one(self, cursor, record: dict) -> None:
        # Maps one queue record onto its destination table.
        # NOTE(review): an unknown kind raises ValueError, which the caller
        # treats like an INSERT failure and therefore RETAINS the line —
        # unlike bad-JSON lines, which are dropped. Confirm this asymmetry
        # is intended (an unknown kind will be retried every cycle).
        kind = record.get("kind")
        if kind == "use_log":
            sql = """
            INSERT INTO `nano_banana_user_use_log`
            (`user_name`, `device_name`, `prompt`, `result_path`, `status`,
             `error_message`, `model`, `duration_ms`, `finish_reason`)
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
            """
            cursor.execute(
                sql,
                (
                    record.get("user_name", "未知用户"),
                    record.get("device_name", "未知设备"),
                    record.get("prompt", ""),
                    record.get("result_path"),
                    record.get("status", "unknown"),
                    record.get("error_message"),
                    record.get("model"),
                    record.get("duration_ms"),
                    record.get("finish_reason"),
                ),
            )
        elif kind == "login_log":
            sql = """
            INSERT INTO `nano_banana_user_log`
            (`user_name`, `local_ip`, `public_ip`, `device_name`, `login_time`)
            VALUES (%s, %s, %s, %s, %s)
            """
            # Falls back to "ts" when "login_time" is absent — presumably for
            # older queue entries; confirm against the writer's history.
            login_time_val = record.get("login_time") or record.get("ts")
            cursor.execute(
                sql,
                (
                    record.get("user_name"),
                    record.get("local_ip"),
                    record.get("public_ip"),
                    record.get("device_name"),
                    login_time_val,
                ),
            )
        else:
            raise ValueError(f"未知审计事件 kind={kind!r}")
1 """
2 config.json 安全加载器。
3
4 公开接口:
5 - DEFAULT_CONFIG: 默认配置常量
6 - load_config_safe(path): 区分处理各类 IO / JSON 错误,返回 (config, error_message)
7 - get_config_dir(): 跨平台返回配置目录
8 - get_config_path(): 返回 config.json 绝对路径
9 """
10 from __future__ import annotations
11
12 import json
13 import os
14 import shutil
15 import sys
16 import platform
17 from datetime import datetime
18 from pathlib import Path
19 from typing import Tuple
20
21
# Baseline configuration merged under whatever config.json provides.
# load_config_safe() returns copies based on this mapping; the nested dicts
# document the expected shape of logging_config / history_config.
DEFAULT_CONFIG: dict = {
    "api_key": "",              # empty until the user configures one
    "saved_prompts": [],
    "db_config": None,          # MySQL connection info; preflight rejects None
    "last_user": "",
    "saved_password_hash": "",  # presumably a stored login hash — confirm against login flow
    "logging_config": {
        "enabled": True,
        "level": "INFO",
        "log_to_console": True,
    },
    "history_config": {
        "max_history_count": 100,
    },
}
37
38
def get_config_dir() -> Path:
    """Resolve the per-user configuration directory (mirrors get_config_dir in image_generator.py).

    Frozen (PyInstaller) builds use the platform-conventional location;
    source checkouts use the current working directory. The directory is
    created on first call.
    """
    if not getattr(sys, "frozen", False):
        target = Path(".").resolve()
    else:
        system = platform.system()
        if system == "Darwin":
            target = Path.home() / "Library" / "Application Support" / "ZB100ImageGenerator"
        elif system == "Windows":
            target = Path(os.getenv("APPDATA", str(Path.home()))) / "ZB100ImageGenerator"
        else:
            target = Path.home() / ".config" / "zb100imagegenerator"
    target.mkdir(parents=True, exist_ok=True)
    return target
53
54
def get_config_path() -> Path:
    """Absolute path of config.json inside the per-user config directory."""
    return get_config_dir() / "config.json"
57
58
def load_config_safe(config_path: Path) -> Tuple[dict, str]:
    """
    Safely load config.json.

    Returns (config, error):
    - success: (merged config dict, "")
    - recoverable failure: (deep copy of DEFAULT_CONFIG, error description);
      preflight decides from the error and the config together whether to
      block startup
    - never raises

    Behaviour:
    - file missing            -> (defaults, "")          # preflight decides if OK
    - empty / corrupt JSON    -> backup as .bak.<ts> + defaults + error
    - PermissionError/OSError -> defaults + error (no backup: unreadable)
    - top level not an object -> defaults + error
    - otherwise               -> defaults.update(loaded)  # unknown keys kept for compat
    """
    import copy  # local import: keeps the module's import block untouched

    def _defaults() -> dict:
        # Bug fix: the previous dict(DEFAULT_CONFIG) was a SHALLOW copy, so
        # every returned config aliased the nested logging_config /
        # history_config dicts — a caller mutating them would silently corrupt
        # the shared defaults for all later loads. Deep copy isolates callers.
        return copy.deepcopy(DEFAULT_CONFIG)

    config_path = Path(config_path)

    if not config_path.exists():
        return _defaults(), ""

    try:
        content = config_path.read_text(encoding="utf-8")
    except PermissionError as e:
        return _defaults(), f"permission denied: {e}"
    except OSError as e:
        return _defaults(), f"IO error: {e}"

    if not content.strip():
        _backup(config_path, reason="empty")
        return _defaults(), "config.json was empty, using defaults"

    try:
        loaded = json.loads(content)
    except json.JSONDecodeError as e:
        _backup(config_path, reason="parse-error")
        return _defaults(), f"JSON parse error, backed up to .bak: {e}"

    if not isinstance(loaded, dict):
        return _defaults(), "config.json top-level is not an object"

    # Shallow top-level merge by design (see docstring): loaded values win,
    # unknown keys are preserved for forward compatibility.
    merged = _defaults()
    merged.update(loaded)
    return merged, ""
104
105
106 def _backup(src: Path, reason: str) -> None:
107 """把损坏 / 空的 config 文件备份,不抛异常。"""
108 try:
109 ts = datetime.now().strftime("%Y%m%d_%H%M%S")
110 dst = src.with_suffix(f".json.bak.{reason}.{ts}")
111 shutil.copy2(src, dst)
112 except Exception:
113 pass
-- Nano Banana App Database Schema
-- Login-log and use-log tables for the audit pipeline.
--
-- Tables:
--   1. nano_banana_user_log:     user login events (IPs, device name, login time)
--   2. nano_banana_user_use_log: image-generation events (prompt, result, status, error)
--
-- NOTE: for a fresh install run the CREATE TABLE statements; for an existing
-- install run ONLY the ALTER TABLE migration block — not both.

-- Login log table
CREATE TABLE `nano_banana_user_log` (
  `user_name` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT '用户名',
  `local_ip` varchar(45) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT '局域网IP地址',
  `public_ip` varchar(45) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT '公网IP地址(可为空)',
  `device_name` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT '设备名称',
  -- Fix: the previous definition attached COLLATE utf8mb4_unicode_ci to this
  -- DATETIME column; collations are valid only for string types and MySQL
  -- rejects the CREATE TABLE.
  `login_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT '登录时间',
  INDEX `idx_user_name` (`user_name`),
  INDEX `idx_login_time` (`login_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT='用户登录日志表';

-- Migration for pre-existing installs (skip if the CREATE above was used)
ALTER TABLE `nano_banana_user_log`
  ADD COLUMN `local_ip` varchar(45) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT '局域网IP地址' AFTER `user_name`,
  ADD COLUMN `public_ip` varchar(45) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT '公网IP地址' AFTER `local_ip`,
  MODIFY COLUMN `device_name` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT '设备名称',
  ADD INDEX `idx_user_name` (`user_name`),
  ADD INDEX `idx_login_time` (`login_time`);

-- Use log table
CREATE TABLE `nano_banana_user_use_log` (
  `id` INT AUTO_INCREMENT PRIMARY KEY COMMENT '自增主键',
  `record_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT '记录时间',
  `user_name` VARCHAR(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT '用户名',
  `device_name` VARCHAR(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT '设备名称',
  `prompt` TEXT COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT '用户请求的 Prompt',
  `result_path` VARCHAR(512) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT '返回数据地址(成功时为图片路径)',
  `status` ENUM('success', 'failure') COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT '操作状态',
  `error_message` TEXT COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT '失败时的错误信息',
  INDEX `idx_user_name` (`user_name`),
  INDEX `idx_record_time` (`record_time`),
  INDEX `idx_status` (`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT='用户使用日志表';
-- =====================================================================
-- Migration: add audit log columns (2026-04-21)
-- Change: add-audit-log-reliability
-- Purpose: add model / duration_ms / finish_reason to the use-log table
--          so analysis can slice by model, latency and failure reason.
--
-- Operator: @柴进 (run on RDS; MUST be applied before the new client ships)
-- Afterwards: the new client's preflight verifies these three columns
--             exist and blocks startup otherwise.
-- =====================================================================

USE `saas_user`;

ALTER TABLE `nano_banana_user_use_log`
  ADD COLUMN `model` VARCHAR(64) NULL COMMENT '本次生成使用的 Gemini 模型 ID',
  ADD COLUMN `duration_ms` INT NULL COMMENT 'Worker start 到 emit 的耗时(毫秒)',
  ADD COLUMN `finish_reason` VARCHAR(64) NULL COMMENT 'Gemini 响应 candidates[0].finish_reason';

-- Rollback (dev / canary only; dropping columns in production is discouraged):
-- ALTER TABLE `nano_banana_user_use_log`
--   DROP COLUMN `model`,
--   DROP COLUMN `duration_ms`,
--   DROP COLUMN `finish_reason`;
1 """
2 启动门禁:保证审计日志上传的所有前置条件都成立。
3 任一失败即阻止应用进入主流程,对用户只显示一句"应用启动失败,请联系 @柴进"。
4 详细错误脱敏后写入 logs/preflight_error.log。
5 """
6 from __future__ import annotations
7
8 import logging
9 import re
10 import sys
11 import traceback
12 from datetime import datetime
13 from pathlib import Path
14 from typing import Tuple
15
16 import pymysql
17
18 from config_util import load_config_safe
19
20
21 logger = logging.getLogger(__name__)
22
23
# Fields that must be present (and truthy) in config["db_config"].
REQUIRED_DB_FIELDS = ("host", "port", "user", "password", "database")
# Tables the audit pipeline writes to; preflight probes each with SELECT 1.
REQUIRED_TABLES = ("nano_banana_user_use_log", "nano_banana_user_log")
# Columns required on the use-log table (includes the three columns added by
# migrations/2026-04-21_add_audit_log_columns.sql).
REQUIRED_USE_LOG_COLUMNS = (
    "user_name", "device_name", "prompt", "result_path", "status",
    "error_message", "model", "duration_ms", "finish_reason",
)
# Columns required on the login-log table.
REQUIRED_LOGIN_LOG_COLUMNS = (
    "user_name", "local_ip", "public_ip", "device_name", "login_time",
)
33
34
def preflight_check(config_path: Path, audit_queue_path: Path) -> Tuple[bool, str, dict]:
    """
    Startup gate: verify every precondition for audit-log uploading.

    Returns (ok, error_detail, config):
    - ok=True: everything is ready, the caller may continue startup
    - ok=False: error_detail is the full description (NOT scrubbed;
      handle_preflight_failure scrubs it before writing to disk)
    - config: a usable config dict on success; on failure it may be a
      partially loaded config or DEFAULT_CONFIG
    """
    # 1. config.json must load cleanly.
    try:
        config, load_err = load_config_safe(config_path)
    except Exception as e:
        # load_config_safe promises not to raise; this is pure belt-and-braces.
        return False, f"config load crashed:\n{traceback.format_exc()}", {}

    if load_err:
        return False, f"config load error: {load_err}", config

    # 2. db_config must exist and contain every required field.
    db = config.get("db_config")
    if not db or not isinstance(db, dict):
        return False, "config.json 缺少 db_config 字段或格式错误", config

    missing = [k for k in REQUIRED_DB_FIELDS if not db.get(k)]
    if missing:
        return False, f"db_config 缺少字段: {missing}", config

    # 3. MySQL must be reachable (connect + SELECT 1), with short timeouts so
    #    a dead DB does not hang startup.
    conn = None
    try:
        conn = pymysql.connect(
            host=db["host"],
            port=int(db["port"]),
            user=db["user"],
            password=db["password"],
            database=db["database"],
            connect_timeout=5,
            read_timeout=5,
            write_timeout=5,
            charset="utf8mb4",
        )
    except Exception as e:
        return False, f"MySQL connect 失败: {type(e).__name__}: {e}", config

    try:
        with conn.cursor() as cur:
            cur.execute("SELECT 1")
            cur.fetchone()

            # 4. Both audit tables must be readable. Table names come from the
            #    REQUIRED_TABLES constant, not user input, so the f-string is safe.
            for table in REQUIRED_TABLES:
                try:
                    cur.execute(f"SELECT 1 FROM `{table}` LIMIT 1")
                    cur.fetchone()
                except Exception as e:
                    return False, f"审计表 {table} 不可用: {type(e).__name__}: {e}", config

            # 5. Required columns must exist (incl. the 2026-04-21 migration).
            ok, col_err = _check_columns(cur, db["database"], "nano_banana_user_use_log",
                                         REQUIRED_USE_LOG_COLUMNS)
            if not ok:
                return False, col_err, config
            ok, col_err = _check_columns(cur, db["database"], "nano_banana_user_log",
                                         REQUIRED_LOGIN_LOG_COLUMNS)
            if not ok:
                return False, col_err, config
    finally:
        # The early returns above still pass through here, so the connection
        # is closed on every path.
        try:
            conn.close()
        except Exception:
            pass

    # 6. The local queue directory must be writable (probe-file round trip).
    try:
        audit_queue_path.parent.mkdir(parents=True, exist_ok=True)
        probe = audit_queue_path.parent / ".preflight_probe"
        probe.write_text("ok", encoding="utf-8")
        probe.unlink()
    except Exception as e:
        return False, f"审计队列目录不可写 ({audit_queue_path.parent}): {e}", config

    return True, "", config
115
116
117 def _check_columns(cur, db_name: str, table: str, required: tuple[str, ...]) -> Tuple[bool, str]:
118 cur.execute(
119 "SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS "
120 "WHERE TABLE_SCHEMA=%s AND TABLE_NAME=%s",
121 (db_name, table),
122 )
123 existing = {row[0] for row in cur.fetchall()}
124 missing = [c for c in required if c not in existing]
125 if missing:
126 return False, f"表 {table} 缺少列: {missing}(请运行 migrations/2026-04-21_add_audit_log_columns.sql)"
127 return True, ""
128
129
def handle_preflight_failure(detail: str, logs_dir: Path) -> None:
    """Scrub and persist the failure detail, show a one-line dialog, then exit(1).

    Must be called after QApplication has been created (a fallback app is
    spun up defensively if it is not).
    """
    from PySide6.QtWidgets import QMessageBox, QApplication

    # Persist the scrubbed detail; logging must never break the exit path.
    try:
        logs_dir.mkdir(parents=True, exist_ok=True)
        target = logs_dir / "preflight_error.log"
        stamp = datetime.now().isoformat(timespec="seconds")
        with open(target, "a", encoding="utf-8") as fh:
            fh.write(f"\n===== {stamp} =====\n")
            fh.write(_scrub(detail))
            fh.write("\n")
    except Exception:
        pass

    # User-facing: a single sentence, no technical detail.
    try:
        app = QApplication.instance()
        if app is None:
            # Extreme case: preflight failed before QApplication was created
            # (should not happen).
            app = QApplication(sys.argv)
        dialog = QMessageBox()
        dialog.setIcon(QMessageBox.Critical)
        dialog.setWindowTitle("启动失败")
        dialog.setText("应用启动失败,请联系 @柴进")
        dialog.setStandardButtons(QMessageBox.Ok)
        dialog.exec()
    except Exception:
        # Worst case: not even the dialog can be shown.
        print("应用启动失败,请联系 @柴进", file=sys.stderr)

    sys.exit(1)
165
166
167 _SCRUB_PATTERNS = [
168 (re.compile(r'("password"\s*:\s*)"[^"]*"'), r'\1"***"'),
169 (re.compile(r'("api_key"\s*:\s*)"[^"]*"'), r'\1"***"'),
170 (re.compile(r"(password\s*=\s*)\S+"), r"\1***"),
171 (re.compile(r"(api_key\s*=\s*)\S+"), r"\1***"),
172 ]
173
174
175 def _scrub(detail: str) -> str:
176 """从详情里擦除 password / api_key。"""
177 out = detail
178 for pat, repl in _SCRUB_PATTERNS:
179 out = pat.sub(repl, out)
180 return out