diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a302de8..3c42f0d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -14,6 +14,8 @@ jobs: steps: - name: Checkout uses: actions/checkout@v4 + with: + fetch-depth: 0 - name: Derive version from tag shell: pwsh @@ -26,6 +28,67 @@ jobs: "TAG_NAME=$tag" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8 "VERSION=$version" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8 + - name: Generate release notes from commits + shell: pwsh + run: | + git fetch --force --tags + + $tag = $env:TAG_NAME + if ([string]::IsNullOrWhiteSpace($tag)) { + throw "TAG_NAME is empty" + } + + $repo = "${{ github.repository }}" + + $prev = "" + try { + $commit = (git rev-list -n 1 $tag).Trim() + if (-not [string]::IsNullOrWhiteSpace($commit)) { + $prev = (git describe --tags --abbrev=0 "$commit^" 2>$null).Trim() + } + } catch {} + + if ([string]::IsNullOrWhiteSpace($prev)) { + # Fallback: best-effort previous version tag by semver-ish sorting. + $prev = (git tag --list "v*" --sort=-v:refname | Where-Object { $_ -ne $tag } | Select-Object -First 1) + } + + $range = "" + if (-not [string]::IsNullOrWhiteSpace($prev)) { + $range = "$prev..$tag" + } + + $lines = @() + if (-not [string]::IsNullOrWhiteSpace($range)) { + $lines = @(git log --no-merges --pretty=format:"- %s (%h)" --reverse $range) + } else { + # First release tag / missing history: include a small recent window. 
+ $lines = @(git log --no-merges --pretty=format:"- %s (%h)" --reverse -n 50) + } + + if (-not $lines -or $lines.Count -eq 0) { + $lines = @("- 修复了一些已知问题,提升了稳定性。") + } + + $max = 60 + if ($lines.Count -gt $max) { + $total = $lines.Count + $lines = @($lines | Select-Object -First $max) + $lines += "- ...(共 $total 条提交,更多请查看完整变更链接)" + } + + $body = @() + $body += "## 更新内容 ($tag)" + $body += "" + $body += $lines + + if (-not [string]::IsNullOrWhiteSpace($prev)) { + $body += "" + $body += "完整变更: https://github.com/$repo/compare/$prev...$tag" + } + + ($body -join "`n") | Out-File -FilePath release-notes.md -Encoding utf8 + - name: Setup Node.js uses: actions/setup-node@v4 with: @@ -71,7 +134,8 @@ jobs: with: tag_name: ${{ env.TAG_NAME }} name: ${{ env.TAG_NAME }} - generate_release_notes: true + body_path: release-notes.md files: | desktop/dist/*Setup*.exe desktop/dist/*Setup*.exe.blockmap + desktop/dist/latest.yml diff --git a/.gitignore b/.gitignore index 0d3413b..9fb4c41 100644 --- a/.gitignore +++ b/.gitignore @@ -16,7 +16,13 @@ wheels/ # Local config templates /wechat_db_config_template.json +/wechat_db_config.json .ace-tool/ +pnpm-lock.yaml +/tools/tmp_isaac64_compare.js +/.claude/settings.local.json +.env +.env.* # Local dev repos and data /WxDatDecrypt/ @@ -25,13 +31,23 @@ wheels/ /vue3-wechat-tool/ /wechatDataBackup/ /wx_key/ +/refs/ +/WeFlow/ +/win95/ +/py_wx_key/ # Electron desktop app /desktop/node_modules/ /desktop/dist/ +/desktop/dist-updater-test/ /desktop/build/ /desktop/resources/ui/* !/desktop/resources/ui/.gitkeep /desktop/resources/backend/*.exe +/desktop/resources/backend/native/* +/desktop/resources/backend/pyproject.toml !/desktop/resources/backend/.gitkeep /desktop/resources/icon.ico + +# Local scratch file accidentally generated during development +/bento-summary.html diff --git a/README.md b/README.md index 392cc83..bae913b 100644 --- a/README.md +++ b/README.md @@ -4,50 +4,74 @@

WeChatDataAnalysis - 微信数据库解密与分析工具

-

一个专门用于微信4.x版本数据库解密的工具(支持聊天记录实时更新)

-

特别致谢echotrace(本项目大量功能参考其实现,提供了重要技术支持)

+

微信4.x数据解密并生成年度总结,高仿微信,支持实时更新、导出聊天记录、朋友圈等大量便捷功能

+

特别致谢H3CoF6(密钥与朋友圈等核心内容的技术支持)、echotrace 与 WeFlow(本项目大量功能参考其实现)

Version Stars + Downloads Forks - License + QQ Group Python - FastAPI Vue.js SQLite
+## 年度总结 + + + + + + + + + + + + + + + + + + + + + +
年度总结 Modern
AnnualSummary 1AnnualSummary 2
AnnualSummary 3AnnualSummary 4
AnnualSummary 5AnnualSummary 6
AnnualSummary 7AnnualSummary 8
+ ## 界面预览 - - + - - + - - + - - + - - + - - + - + - + + + + + + + @@ -61,28 +85,23 @@ + + + + + +
首页检测页面聊天记录页面(支持多种消息类型展示,样式尽可能与微信保持一致)
首页微信检测页面聊天记录页面
解密页面图片密钥(填写)修改消息(本地修改,支持恢复)
数据库解密页面图片密钥(填写)修改消息
图片解密页面解密成功页面实时消息同步(点击侧边栏闪电图标后,消息会自动刷新)
图片解密页面解密成功页面实时消息同步
聊天记录页面设置面板(桌面行为、启动偏好、更新、朋友圈缓存策略)
聊天记录页面设置面板
朋友圈(支持查看用户之前朋友圈的背景图及时间;本地查看过的朋友圈即使后续不可见也可以查看)
朋友圈
聊天记录搜索
聊天记录导出
联系人导出
联系人导出
-## 功能特性 - -### 已实现功能 - -- **数据库解密**: 支持微信4.x版本数据库文件的解密 -- **多账户检测**: 自动检测并处理多个微信账户的数据库文件 -- **API接口**: 提供RESTful API接口进行数据库解密操作 -- **Web界面**: 提供现代化的Web操作界面 -- **聊天记录查看**: 支持查看解密后的聊天记录、消息搜索与离线导出 -- **实时更新(SSE)**: 支持开启实时模式,监听 `db_storage` 变更,增量同步新消息并自动刷新会话/消息列表 -- **聊天图片展示**: 支持部分版本图片消息无MD5时通过 file_id 兜底定位本地资源 +## 加入群聊 -### 开发计划 +也欢迎加入下方 QQ 群一起讨论。 -- **数据分析**: 对解密后的数据进行深度分析 -- **数据可视化**: 提供图表、统计报告等可视化展示 -- **聊天记录分析**: 消息频率、活跃时间、关键词分析等 -- **聊天记录优化**: 高级筛选、统计报表等功能 - -> **项目进展**: 查看 [GitHub项目面板](https://github.com/orgs/LifeArchiveProject/projects/1/views/1) 了解当前开发状态和后续功能规划 +

+ + WeChatDataAnalysis 加群二维码 + +

## 快速开始 @@ -135,8 +154,9 @@ npm run dev #### 2.5 访问应用 - 前端界面: http://localhost:3000 -- API服务: http://localhost:8000 -- API文档: http://localhost:8000/docs +- API服务(默认): http://localhost:10392 (可通过环境变量 WECHAT_TOOL_PORT 修改) +- API文档(默认): http://localhost:10392/docs +- 也可在应用内“设置 -> 后端端口”修改(支持“恢复默认”一键回到 10392):网页端会尝试重启本机后端到新端口并刷新(并写入 `output/runtime_settings.json`,开发模式下也会写入项目根目录 `.env` 供 `uv run` 下次启动使用);桌面端会重启内置后端并刷新 ## 打包为 EXE(Windows 桌面端) @@ -172,30 +192,44 @@ npm run dist 3. **数据隐私**: 解密后的数据包含个人隐私信息,请谨慎处理 4. **合法使用**: 请遵守相关法律法规,不得用于非法目的 +## 修改消息 + +支持在聊天页对单条消息进行本地修改(如修改消息文本/字段、修复为我发送、反转本地气泡方向),并在“修改记录”页查看原始与当前对比,支持单条恢复或按会话一键恢复。 + +该功能只修改本机本地数据库(`db_storage` 与解密副本),不会调用远端回写接口。 + +

+ 本地消息修改 +

+ ## 致谢 本项目的开发过程中参考了以下优秀的开源项目和资源: -### 主要参考项目 - 1. **[echotrace](https://github.com/ycccccccy/echotrace)** - 微信数据解析/取证工具 - 本项目大量功能参考并复用其实现思路,提供了重要技术支持 -2. **[wx_key](https://github.com/ycccccccy/wx_key)** - 微信数据库与图片密钥提取工具 +2. **[WeFlow](https://github.com/hicccc77/WeFlow)** - 微信数据分析工具 + - 提供了重要的功能参考和技术支持 + +3. **[wx_key](https://github.com/ycccccccy/wx_key)** - 微信数据库与图片密钥提取工具 - 支持获取微信 4.x 数据库密钥与缓存图片密钥 - 本项目推荐使用此工具获取密钥 -3. **[wechat-dump-rs](https://github.com/0xlane/wechat-dump-rs)** - Rust实现的微信数据库解密工具 +4. **[wechat-dump-rs](https://github.com/0xlane/wechat-dump-rs)** - Rust实现的微信数据库解密工具 - 提供了SQLCipher 4.0解密的正确实现参考 - 本项目的HMAC验证和页面处理逻辑基于此项目的实现 -4. **[oh-my-wechat](https://github.com/chclt/oh-my-wechat)** - 微信聊天记录查看工具 +5. **[oh-my-wechat](https://github.com/chclt/oh-my-wechat)** - 微信聊天记录查看工具 - 提供了优秀的聊天记录界面设计参考 - 本项目的聊天界面风格参考了此项目的实现 -5. **[vue3-wechat-tool](https://github.com/Ele-Cat/vue3-wechat-tool)** - 微信聊天记录工具(Vue3) +6. **[vue3-wechat-tool](https://github.com/Ele-Cat/vue3-wechat-tool)** - 微信聊天记录工具(Vue3) - 提供了聊天记录展示与交互的实现参考 +7. 
**[wx-dat](https://github.com/waaaaashi/wx-dat)** - 微信图片密钥获取工具 + - 实现真正的无头获取图片密钥,不再依赖扫描微信内存与点击朋友圈大图 + ## Star History [![Star History Chart](https://api.star-history.com/svg?repos=LifeArchiveProject/WeChatDataAnalysis&type=Date)](https://www.star-history.com/#LifeArchiveProject/WeChatDataAnalysis&Date) @@ -204,10 +238,7 @@ npm run dist 欢迎提交Issue和Pull Request来改进这个项目。 -## 许可证 - -本项目仅供学习和个人使用。请遵守相关法律法规。 - --- **免责声明**: 本工具仅供学习研究使用,使用者需自行承担使用风险。开发者不对因使用本工具造成的任何损失负责。 + diff --git a/desktop/package-lock.json b/desktop/package-lock.json index 4bb8122..11a8eb8 100644 --- a/desktop/package-lock.json +++ b/desktop/package-lock.json @@ -1,12 +1,15 @@ { "name": "wechat-data-analysis-desktop", - "version": "0.1.0", + "version": "1.3.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "wechat-data-analysis-desktop", - "version": "0.1.0", + "version": "1.3.0", + "dependencies": { + "electron-updater": "^6.7.3" + }, "devDependencies": { "concurrently": "^9.2.1", "cross-env": "^10.1.0", @@ -1105,7 +1108,6 @@ "version": "2.0.1", "resolved": "https://registry.npmmirror.com/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true, "license": "Python-2.0" }, "node_modules/assert-plus": { @@ -1295,7 +1297,6 @@ "version": "9.5.1", "resolved": "https://registry.npmmirror.com/builder-util-runtime/-/builder-util-runtime-9.5.1.tgz", "integrity": "sha512-qt41tMfgHTllhResqM5DcnHyDIWNgzHvuY2jDcYP9iaGpkWxTUzV6GQjDeLnlR1/DtdlcsWQbA7sByMpmJFTLQ==", - "dev": true, "license": "MIT", "dependencies": { "debug": "^4.3.4", @@ -1796,7 +1797,6 @@ "version": "4.4.3", "resolved": "https://registry.npmmirror.com/debug/-/debug-4.4.3.tgz", "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", - "dev": true, "license": "MIT", "dependencies": { "ms": "^2.1.3" @@ -2252,6 +2252,69 @@ "node": ">= 10.0.0" } }, + 
"node_modules/electron-updater": { + "version": "6.7.3", + "resolved": "https://registry.npmmirror.com/electron-updater/-/electron-updater-6.7.3.tgz", + "integrity": "sha512-EgkT8Z9noqXKbwc3u5FkJA+r48jwZ5DTUiOkJMOTEEH//n5Am6wfQGz7nvSFEA2oIAMv9jRzn5JKTyWeSKOPgg==", + "license": "MIT", + "dependencies": { + "builder-util-runtime": "9.5.1", + "fs-extra": "^10.1.0", + "js-yaml": "^4.1.0", + "lazy-val": "^1.0.5", + "lodash.escaperegexp": "^4.1.2", + "lodash.isequal": "^4.5.0", + "semver": "~7.7.3", + "tiny-typed-emitter": "^2.1.0" + } + }, + "node_modules/electron-updater/node_modules/fs-extra": { + "version": "10.1.0", + "resolved": "https://registry.npmmirror.com/fs-extra/-/fs-extra-10.1.0.tgz", + "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/electron-updater/node_modules/jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmmirror.com/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/electron-updater/node_modules/semver": { + "version": "7.7.4", + "resolved": "https://registry.npmmirror.com/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/electron-updater/node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/universalify/-/universalify-2.0.1.tgz", + "integrity": 
"sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, "node_modules/electron-winstaller": { "version": "5.4.0", "resolved": "https://registry.npmmirror.com/electron-winstaller/-/electron-winstaller-5.4.0.tgz", @@ -2816,7 +2879,6 @@ "version": "4.2.11", "resolved": "https://registry.npmmirror.com/graceful-fs/-/graceful-fs-4.2.11.tgz", "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", - "dev": true, "license": "ISC" }, "node_modules/has-flag": { @@ -3139,7 +3201,6 @@ "version": "4.1.1", "resolved": "https://registry.npmmirror.com/js-yaml/-/js-yaml-4.1.1.tgz", "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", - "dev": true, "license": "MIT", "dependencies": { "argparse": "^2.0.1" @@ -3207,7 +3268,6 @@ "version": "1.0.5", "resolved": "https://registry.npmmirror.com/lazy-val/-/lazy-val-1.0.5.tgz", "integrity": "sha512-0/BnGCCfyUMkBpeDgWihanIAF9JmZhHBgUhEqzvf+adhNGLoP6TaiI5oF8oyb3I45P+PcnrqihSf01M0l0G5+Q==", - "dev": true, "license": "MIT" }, "node_modules/lodash": { @@ -3217,6 +3277,19 @@ "dev": true, "license": "MIT" }, + "node_modules/lodash.escaperegexp": { + "version": "4.1.2", + "resolved": "https://registry.npmmirror.com/lodash.escaperegexp/-/lodash.escaperegexp-4.1.2.tgz", + "integrity": "sha512-TM9YBvyC84ZxE3rgfefxUWiQKLilstD6k7PTGt6wfbtXF8ixIJLOL3VYyV/z+ZiPLsVxAsKAFVwWlWeb2Y8Yyw==", + "license": "MIT" + }, + "node_modules/lodash.isequal": { + "version": "4.5.0", + "resolved": "https://registry.npmmirror.com/lodash.isequal/-/lodash.isequal-4.5.0.tgz", + "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==", + "deprecated": "This package is deprecated. 
Use require('node:util').isDeepStrictEqual instead.", + "license": "MIT" + }, "node_modules/log-symbols": { "version": "4.1.0", "resolved": "https://registry.npmmirror.com/log-symbols/-/log-symbols-4.1.0.tgz", @@ -3535,7 +3608,6 @@ "version": "2.1.3", "resolved": "https://registry.npmmirror.com/ms/-/ms-2.1.3.tgz", "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "dev": true, "license": "MIT" }, "node_modules/negotiator": { @@ -4272,7 +4344,6 @@ "version": "1.4.4", "resolved": "https://registry.npmmirror.com/sax/-/sax-1.4.4.tgz", "integrity": "sha512-1n3r/tGXO6b6VXMdFT54SHzT9ytu9yr7TaELowdYpMqY/Ao7EnlQGmAQ1+RatX7Tkkdm6hONI2owqNx2aZj5Sw==", - "dev": true, "license": "BlueOak-1.0.0", "engines": { "node": ">=11.0.0" @@ -4767,6 +4838,12 @@ "semver": "bin/semver" } }, + "node_modules/tiny-typed-emitter": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/tiny-typed-emitter/-/tiny-typed-emitter-2.1.0.tgz", + "integrity": "sha512-qVtvMxeXbVej0cQWKqVSSAHmKZEHAvxdF8HEUBFWts8h+xEo5m/lEiPakuyZ3BnCBjOD8i24kzNOiOLLgsSxhA==", + "license": "MIT" + }, "node_modules/tinyglobby": { "version": "0.2.15", "resolved": "https://registry.npmmirror.com/tinyglobby/-/tinyglobby-0.2.15.tgz", diff --git a/desktop/package.json b/desktop/package.json index 19bd486..2709dce 100644 --- a/desktop/package.json +++ b/desktop/package.json @@ -1,19 +1,23 @@ { "name": "wechat-data-analysis-desktop", "private": true, - "version": "0.1.0", + "version": "1.3.0", "main": "src/main.cjs", "scripts": { - "dev": "concurrently -k -s first \"cd ..\\\\frontend && npm run dev\" \"cross-env ELECTRON_START_URL=http://localhost:3000 electron .\"", - "dev:static": "pushd ..\\\\frontend && npm run generate && popd && cross-env ELECTRON_START_URL=http://127.0.0.1:8000 electron .", + "dev": "node scripts/dev.cjs", + "dev:static": "pushd ..\\\\frontend && npm run generate && popd && cross-env ELECTRON_START_URL=http://127.0.0.1:10392 
electron .", "build:ui": "pushd ..\\\\frontend && npm run generate && popd && node scripts\\\\copy-ui.cjs", "build:backend": "uv sync --extra build && node scripts/build-backend.cjs", "build:icon": "node scripts/build-icon.cjs", - "dist": "npm run build:ui && npm run build:backend && npm run build:icon && electron-builder --win --x64" + "dist": "npm run build:ui && npm run build:backend && npm run build:icon && electron-builder --win --x64 --publish never" + }, + "dependencies": { + "electron-updater": "^6.7.3" }, "build": { "appId": "com.lifearchive.wechatdataanalysis", "productName": "WeChatDataAnalysis", + "artifactName": "${productName}-${version}-Setup.${ext}", "icon": "build/icon.ico", "asar": true, "directories": { @@ -21,7 +25,29 @@ }, "files": [ "src/**/*", - "package.json" + "package.json", + { + "from": "node_modules", + "to": "node_modules", + "filter": [ + "electron-updater/**/*", + "builder-util-runtime/**/*", + "debug/**/*", + "ms/**/*", + "sax/**/*", + "js-yaml/**/*", + "argparse/**/*", + "lazy-val/**/*", + "lodash.escaperegexp/**/*", + "lodash.isequal/**/*", + "tiny-typed-emitter/**/*", + "fs-extra/**/*", + "graceful-fs/**/*", + "jsonfile/**/*", + "universalify/**/*", + "semver/**/*" + ] + } ], "extraResources": [ { @@ -39,6 +65,12 @@ "nsis" ] }, + "publish": { + "provider": "github", + "owner": "LifeArchiveProject", + "repo": "WeChatDataAnalysis", + "releaseType": "release" + }, "nsis": { "oneClick": false, "allowToChangeInstallationDirectory": true, diff --git a/desktop/scripts/build-backend.cjs b/desktop/scripts/build-backend.cjs index 4e32f8a..df7c9bb 100644 --- a/desktop/scripts/build-backend.cjs +++ b/desktop/scripts/build-backend.cjs @@ -13,8 +13,63 @@ fs.mkdirSync(distDir, { recursive: true }); fs.mkdirSync(workDir, { recursive: true }); fs.mkdirSync(specDir, { recursive: true }); +function parseVersionTuple(rawVersion) { + const nums = String(rawVersion || "") + .split(/[^\d]+/) + .map((x) => Number.parseInt(x, 10)) + .filter((n) => 
Number.isInteger(n) && n >= 0); + while (nums.length < 4) nums.push(0); + return nums.slice(0, 4); +} + +function buildVersionInfoText(versionTuple, versionDot) { + const [a, b, c, d] = versionTuple; + return `# UTF-8 +VSVersionInfo( + ffi=FixedFileInfo( + filevers=(${a}, ${b}, ${c}, ${d}), + prodvers=(${a}, ${b}, ${c}, ${d}), + mask=0x3f, + flags=0x0, + OS=0x4, + fileType=0x1, + subtype=0x0, + date=(0, 0) + ), + kids=[ + StringFileInfo([ + StringTable( + '080404B0', + [StringStruct('CompanyName', 'LifeArchiveProject'), + StringStruct('FileDescription', 'WeFlow'), + StringStruct('FileVersion', '${versionDot}'), + StringStruct('InternalName', 'weflow'), + StringStruct('LegalCopyright', 'github.com/hicccc77/WeFlow'), + StringStruct('OriginalFilename', 'weflow.exe'), + StringStruct('ProductName', 'WeFlow'), + StringStruct('ProductVersion', '${versionDot}')]) + ]), + VarFileInfo([VarStruct('Translation', [2052, 1200])]) + ] +) +`; +} + const nativeDir = path.join(repoRoot, "src", "wechat_decrypt_tool", "native"); const addData = `${nativeDir};wechat_decrypt_tool/native`; +const projectToml = path.join(repoRoot, "pyproject.toml"); + +const desktopPackageJsonPath = path.join(repoRoot, "desktop", "package.json"); +let desktopVersion = "1.3.0"; +try { + const pkg = JSON.parse(fs.readFileSync(desktopPackageJsonPath, { encoding: "utf8" })); + const v = String(pkg?.version || "").trim(); + if (v) desktopVersion = v; +} catch {} +const versionTuple = parseVersionTuple(desktopVersion); +const versionDot = versionTuple.join("."); +const versionFilePath = path.join(workDir, "weflow-version.txt"); +fs.writeFileSync(versionFilePath, buildVersionInfoText(versionTuple, versionDot), { encoding: "utf8" }); const args = [ "run", @@ -30,11 +85,42 @@ const args = [ workDir, "--specpath", specDir, + "--version-file", + versionFilePath, "--add-data", addData, entry, ]; const r = spawnSync("uv", args, { cwd: repoRoot, stdio: "inherit" }); -process.exit(r.status ?? 
1); +if ((r.status ?? 1) !== 0) { + process.exit(r.status ?? 1); +} + +// Keep a stable external native folder for packaged runtime to avoid relying on +// onefile temp extraction paths when wcdb_api.dll performs environment checks. +const packagedNativeDir = path.join(distDir, "native"); +try { + fs.rmSync(packagedNativeDir, { recursive: true, force: true }); +} catch {} +fs.mkdirSync(packagedNativeDir, { recursive: true }); + +for (const name of fs.readdirSync(nativeDir)) { + const src = path.join(nativeDir, name); + const dst = path.join(packagedNativeDir, name); + try { + if (fs.statSync(src).isFile()) { + fs.copyFileSync(src, dst); + } + } catch {} +} + +// Provide the project marker next to packaged backend resources. +if (fs.existsSync(projectToml)) { + try { + fs.copyFileSync(projectToml, path.join(distDir, "pyproject.toml")); + } catch {} +} + +process.exit(0); diff --git a/desktop/scripts/dev.cjs b/desktop/scripts/dev.cjs new file mode 100644 index 0000000..1e2f1ab --- /dev/null +++ b/desktop/scripts/dev.cjs @@ -0,0 +1,179 @@ +const http = require("http"); +const net = require("net"); +const path = require("path"); +const { spawn, spawnSync } = require("child_process"); + +const repoRoot = path.resolve(__dirname, "..", ".."); +const frontendDir = path.join(repoRoot, "frontend"); +const desktopDir = path.join(repoRoot, "desktop"); + +function parsePort(value) { + const n = Number.parseInt(String(value || "").trim(), 10); + return Number.isInteger(n) && n >= 1 && n <= 65535 ? 
n : null; +} + +function log(message) { + process.stdout.write(`[dev] ${message}\n`); +} + +function prefixPipe(stream, prefix) { + if (!stream) return; + let pending = ""; + stream.setEncoding("utf8"); + stream.on("data", (chunk) => { + pending += chunk; + const lines = pending.split(/\r?\n/); + pending = lines.pop() || ""; + for (const line of lines) { + process.stdout.write(`${prefix} ${line}\n`); + } + }); + stream.on("end", () => { + const tail = pending.trim(); + if (tail) process.stdout.write(`${prefix} ${tail}\n`); + }); +} + +function isPortAvailable(port, host) { + return new Promise((resolve) => { + const server = net.createServer(); + const done = (ok) => { + try { + server.close(); + } catch {} + resolve(ok); + }; + server.once("error", () => done(false)); + server.once("listening", () => done(true)); + server.listen(port, host); + }); +} + +async function choosePort({ label, envName, preferredPort, host, searchLimit = 20 }) { + if (preferredPort != null) { + const ok = await isPortAvailable(preferredPort, host); + if (!ok) throw new Error(`${label}端口 ${preferredPort} 已被占用,请修改环境变量 ${envName}`); + return preferredPort; + } + + const startPort = envName === "NUXT_PORT" ? 
3000 : 10392; + for (let port = startPort; port <= startPort + searchLimit; port += 1) { + if (await isPortAvailable(port, host)) return port; + } + throw new Error(`未找到可用的${label}端口(起始 ${startPort})`); +} + +function httpReady(url) { + return new Promise((resolve) => { + const req = http.get(url, (res) => { + res.resume(); + resolve(true); + }); + req.on("error", () => resolve(false)); + req.setTimeout(1000, () => { + req.destroy(); + resolve(false); + }); + }); +} + +async function waitForUrl(url, child, timeoutMs) { + const startedAt = Date.now(); + while (Date.now() - startedAt < timeoutMs) { + if (child.exitCode != null) { + throw new Error(`前端进程提前退出,exitCode=${child.exitCode}`); + } + if (await httpReady(url)) return; + await new Promise((resolve) => setTimeout(resolve, 300)); + } + throw new Error(`等待前端启动超时:${url}`); +} + +function killChild(child) { + if (!child || child.killed || child.exitCode != null) return; + if (process.platform === "win32") { + spawnSync("taskkill", ["/pid", String(child.pid), "/t", "/f"], { stdio: "ignore" }); + return; + } + try { + child.kill("SIGTERM"); + } catch {} +} + +function spawnLogged(command, args, options, prefix) { + const child = spawn(command, args, { + ...options, + shell: process.platform === "win32", + stdio: ["inherit", "pipe", "pipe"], + }); + prefixPipe(child.stdout, `${prefix}`); + prefixPipe(child.stderr, `${prefix}`); + return child; +} + +async function main() { + const frontendHost = String(process.env.NUXT_HOST || "127.0.0.1").trim() || "127.0.0.1"; + const requestedFrontendPort = parsePort(process.env.NUXT_PORT); + const requestedBackendPort = parsePort(process.env.WECHAT_TOOL_PORT); + const frontendPort = await choosePort({ + label: "前端", + envName: "NUXT_PORT", + preferredPort: requestedFrontendPort, + host: frontendHost, + }); + const backendPort = await choosePort({ + label: "后端", + envName: "WECHAT_TOOL_PORT", + preferredPort: requestedBackendPort, + host: "127.0.0.1", + }); + const startUrl = 
`http://${frontendHost}:${frontendPort}`; + + log(`frontend=${startUrl}`); + log(`backend=http://127.0.0.1:${backendPort}/api`); + + const sharedEnv = { + ...process.env, + NUXT_HOST: frontendHost, + NUXT_PORT: String(frontendPort), + WECHAT_TOOL_PORT: String(backendPort), + ELECTRON_START_URL: startUrl, + }; + + const npmCommand = "npm"; + const electronCommand = "electron"; + const children = new Set(); + let shuttingDown = false; + + const shutdown = (exitCode) => { + if (shuttingDown) return; + shuttingDown = true; + for (const child of children) killChild(child); + process.exitCode = exitCode; + }; + + process.on("SIGINT", () => shutdown(130)); + process.on("SIGTERM", () => shutdown(143)); + + const frontend = spawnLogged(npmCommand, ["run", "dev"], { cwd: frontendDir, env: sharedEnv }, "[frontend]"); + children.add(frontend); + frontend.once("exit", (code, signal) => { + log(`frontend exited code=${code} signal=${signal}`); + shutdown(code == null ? 1 : code); + }); + + await waitForUrl(startUrl, frontend, 60_000); + log("frontend is ready, starting Electron"); + + const electron = spawnLogged(electronCommand, ["."], { cwd: desktopDir, env: sharedEnv }, "[electron]"); + children.add(electron); + electron.once("exit", (code, signal) => { + log(`electron exited code=${code} signal=${signal}`); + shutdown(code == null ? 0 : code); + }); +} + +main().catch((err) => { + process.stderr.write(`[dev] ${err?.stack || err}\n`); + process.exit(1); +}); diff --git a/desktop/scripts/installer-custom.nsh b/desktop/scripts/installer-custom.nsh index 6dd09fb..33e7240 100644 --- a/desktop/scripts/installer-custom.nsh +++ b/desktop/scripts/installer-custom.nsh @@ -14,6 +14,34 @@ Var WDA_InstallDirPage +!macro customInit + ; Safety: older versions created an `output` junction inside the install directory that points to the + ; per-user AppData `output` folder. Some uninstall/update flows may traverse that junction and delete + ; real user data. 
Remove it as early as possible during install/update. + Call WDA_RemoveLegacyOutputLink +!macroend + +!macro customInstall + ; Provide a safe, non-junction way for users to locate the real per-user output directory. + ; The actual data is NOT stored inside $INSTDIR (it is wiped on update/reinstall). + ; `open-output.cmd` uses %APPDATA% so it works for the current user. + FileOpen $0 "$INSTDIR\output-location.txt" w + FileWrite $0 "WeChatDataAnalysis output folder (per user):$\r$\n%APPDATA%\\${APP_PACKAGE_NAME}\\output$\r$\n" + FileClose $0 + + FileOpen $1 "$INSTDIR\open-output.cmd" w + ; NSIS escaping: use $\" to output a literal quote character into the .cmd file. + FileWrite $1 "@echo off$\r$\nexplorer $\"%APPDATA%\\${APP_PACKAGE_NAME}\\output$\"$\r$\n" + FileClose $1 +!macroend + +Function WDA_RemoveLegacyOutputLink + ; $INSTDIR is usually the full install directory. Be defensive and also try the nested path + ; in case the installer is running before electron-builder appends "\${APP_FILENAME}". + RMDir "$INSTDIR\output" + RMDir "$INSTDIR\${APP_FILENAME}\output" +FunctionEnd + !macro customPageAfterChangeDir ; Add a confirmation page after the directory picker so users clearly see ; the final install location (includes the app sub-folder). @@ -90,6 +118,10 @@ Var /GLOBAL WDA_DeleteUserData !macro customUnInit ; Default: keep user data (also applies to silent uninstall / update uninstall). StrCpy $WDA_DeleteUserData "0" + + ; Safety: if an older build created an `output` junction inside the install dir, remove it early so + ; directory cleanup can't traverse it and delete the real per-user output folder. 
+ RMDir "$INSTDIR\output" !macroend !macro customUnWelcomePage diff --git a/desktop/src/main.cjs b/desktop/src/main.cjs index a2321bc..b01d844 100644 --- a/desktop/src/main.cjs +++ b/desktop/src/main.cjs @@ -7,28 +7,190 @@ const { globalShortcut, dialog, shell, + session, } = require("electron"); -const { spawn } = require("child_process"); +let autoUpdater = null; +let autoUpdaterLoadError = null; +try { + ({ autoUpdater } = require("electron-updater")); +} catch (err) { + autoUpdaterLoadError = err; +} +const { spawn, spawnSync } = require("child_process"); const fs = require("fs"); const http = require("http"); +const net = require("net"); const path = require("path"); -const BACKEND_HOST = process.env.WECHAT_TOOL_HOST || "127.0.0.1"; -const BACKEND_PORT = Number(process.env.WECHAT_TOOL_PORT || "8000"); -const BACKEND_HEALTH_URL = `http://${BACKEND_HOST}:${BACKEND_PORT}/api/health`; +const DEFAULT_BACKEND_HOST = String(process.env.WECHAT_TOOL_HOST || "127.0.0.1").trim() || "127.0.0.1"; +const DEFAULT_BACKEND_PORT = parsePort(process.env.WECHAT_TOOL_PORT) ?? 10392; let backendProc = null; -let backendStdioStream = null; let resolvedDataDir = null; let mainWindow = null; let tray = null; let isQuitting = false; let desktopSettings = null; +let backendPortChangeInProgress = false; + +const gotSingleInstanceLock = app.requestSingleInstanceLock(); +if (!gotSingleInstanceLock) { + // If we allow a second instance to boot it will try to spawn another backend on the same port. + // Quit early to avoid leaving orphan backend processes around. 
+ try { + app.quit(); + } catch {} +} else { + app.on("second-instance", () => { + try { + if (app.isReady()) showMainWindow(); + else app.whenReady().then(() => showMainWindow()); + } catch {} + }); +} function nowIso() { return new Date().toISOString(); } +function parsePort(value) { + if (value == null) return null; + const raw = String(value).trim(); + if (!raw) return null; + const n = Number(raw); + if (!Number.isInteger(n)) return null; + if (n < 1 || n > 65535) return null; + return n; +} + +function formatHostForUrl(host) { + const h = String(host || "").trim(); + if (!h) return "127.0.0.1"; + // IPv6 literals must be wrapped in brackets in URLs. + if (h.includes(":") && !(h.startsWith("[") && h.endsWith("]"))) return `[${h}]`; + return h; +} + +function getBackendBindHost() { + return DEFAULT_BACKEND_HOST; +} + +function getBackendAccessHost() { + // 0.0.0.0 / :: are fine bind hosts, but not a reachable client destination. + const host = String(getBackendBindHost() || "").trim(); + if (host === "0.0.0.0" || host === "::") return "127.0.0.1"; + return host || "127.0.0.1"; +} + +function getBackendPort() { + const envPort = parsePort(process.env.WECHAT_TOOL_PORT); + if (envPort != null) return envPort; + // In dev we intentionally ignore persisted packaged-app settings so the + // launcher can keep Electron, Nuxt devProxy and the backend child aligned. + if (!app.isPackaged) return DEFAULT_BACKEND_PORT; + const settingsPort = parsePort(loadDesktopSettings()?.backendPort); + return settingsPort ?? 
DEFAULT_BACKEND_PORT; +} + +function setBackendPortSetting(nextPort) { + const p = parsePort(nextPort); + if (p == null) throw new Error("端口无效,请输入 1-65535 的整数"); + loadDesktopSettings(); + desktopSettings.backendPort = p; + persistDesktopSettings(); + process.env.WECHAT_TOOL_PORT = String(p); + return p; +} + +function getBackendHealthUrl() { + const host = formatHostForUrl(getBackendAccessHost()); + const port = getBackendPort(); + return `http://${host}:${port}/api/health`; +} + +function getBackendUiUrl() { + const host = formatHostForUrl(getBackendAccessHost()); + const port = getBackendPort(); + return `http://${host}:${port}/`; +} + +function isPortAvailable(port, host) { + return new Promise((resolve) => { + try { + const srv = net.createServer(); + srv.unref(); + srv.once("error", () => resolve(false)); + srv.listen({ port, host }, () => { + srv.close(() => resolve(true)); + }); + } catch { + resolve(false); + } + }); +} + +function getEphemeralPort(host) { + return new Promise((resolve) => { + try { + const srv = net.createServer(); + srv.unref(); + srv.once("error", () => resolve(null)); + srv.listen({ port: 0, host }, () => { + const addr = srv.address(); + const p = addr && typeof addr === "object" ? Number(addr.port) : null; + srv.close(() => resolve(Number.isInteger(p) ? p : null)); + }); + } catch { + resolve(null); + } + }); +} + +async function chooseAvailablePort(preferredPort, host) { + const preferred = parsePort(preferredPort); + if (preferred != null && (await isPortAvailable(preferred, host))) return preferred; + + // Keep the port close to the user's expectation when possible. + if (preferred != null) { + for (let i = 1; i <= 50; i += 1) { + const cand = preferred + i; + if (cand > 65535) break; + if (await isPortAvailable(cand, host)) return cand; + } + } + + // Fall back to an OS-chosen ephemeral port. 
+ const random = await getEphemeralPort(host); + if (random != null && (await isPortAvailable(random, host))) return random; + + return null; +} + +async function ensureBackendPortAvailableOnStartup() { + // Avoid surprising behavior in dev: the frontend dev server expects a stable backend port. + if (!app.isPackaged) return getBackendPort(); + + const bindHost = getBackendBindHost(); + const currentPort = getBackendPort(); + const ok = await isPortAvailable(currentPort, bindHost); + if (ok) return currentPort; + + const chosen = await chooseAvailablePort(currentPort, bindHost); + if (chosen == null) { + logMain(`[main] backend port unavailable: ${currentPort} host=${bindHost}; failed to find a free port`); + return currentPort; + } + + try { + setBackendPortSetting(chosen); + logMain(`[main] backend port ${currentPort} unavailable; switched to ${chosen}`); + } catch (err) { + logMain(`[main] failed to persist backend port ${chosen}: ${err?.message || err}`); + } + + return getBackendPort(); +} + function resolveDataDir() { if (resolvedDataDir) return resolvedDataDir; @@ -59,6 +221,161 @@ function getUserDataDir() { return resolveDataDir(); } +function sanitizeAccountName(account) { + const name = String(account || "").trim(); + if (!name) throw new Error("缺少账号参数"); + if (name === "." 
|| name === "..") throw new Error("账号参数非法"); + if (name.includes("/") || name.includes("\\")) throw new Error("账号参数非法"); + return name; +} + +function listDecryptedAccountsOnDisk(databasesDir) { + try { + if (!fs.existsSync(databasesDir)) return []; + } catch { + return []; + } + + let entries = []; + try { + entries = fs.readdirSync(databasesDir, { withFileTypes: true }); + } catch { + return []; + } + + const accounts = []; + for (const entry of entries) { + try { + if (!entry || !entry.isDirectory()) continue; + const accountDir = path.join(databasesDir, entry.name); + const hasSession = fs.existsSync(path.join(accountDir, "session.db")); + const hasContact = fs.existsSync(path.join(accountDir, "contact.db")); + if (hasSession && hasContact) accounts.push(String(entry.name || "")); + } catch {} + } + accounts.sort((a, b) => a.localeCompare(b)); + return accounts; +} + +function resolveAccountDirInOutput(account) { + const dataDir = resolveDataDir(); + if (!dataDir) throw new Error("无法定位数据目录"); + + const outputDir = path.join(dataDir, "output"); + const databasesDir = path.join(outputDir, "databases"); + const accountName = sanitizeAccountName(account); + + const base = path.resolve(databasesDir); + const accountDir = path.resolve(path.join(databasesDir, accountName)); + if (accountDir !== base && !accountDir.startsWith(base + path.sep)) { + throw new Error("账号路径非法"); + } + + return { + dataDir, + outputDir, + databasesDir, + accountName, + accountDir, + }; +} + +function getAccountInfoFromDisk(account) { + const { accountName, accountDir } = resolveAccountDirInOutput(account); + if (!fs.existsSync(accountDir) || !fs.statSync(accountDir).isDirectory()) { + throw new Error("账号数据不存在"); + } + + let entries = []; + try { + entries = fs.readdirSync(accountDir, { withFileTypes: true }); + } catch {} + const dbFiles = entries + .filter((e) => !!e && e.isFile() && String(e.name || "").toLowerCase().endsWith(".db")) + .map((e) => String(e.name || "")) + .sort((a, b) => 
a.localeCompare(b)); + + let sessionUpdatedAt = 0; + try { + const st = fs.statSync(path.join(accountDir, "session.db")); + sessionUpdatedAt = Math.floor(Number(st?.mtimeMs || 0) / 1000); + } catch {} + + return { + status: "success", + account: accountName, + path: accountDir, + database_count: dbFiles.length, + databases: dbFiles, + session_updated_at: sessionUpdatedAt, + }; +} + +function removeAccountFromKeyStore(dataDir, accountName) { + const keyStorePath = path.join(dataDir, "output", "account_keys.json"); + try { + if (!fs.existsSync(keyStorePath)) return false; + const raw = fs.readFileSync(keyStorePath, { encoding: "utf8" }); + const parsed = JSON.parse(raw || "{}"); + if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) return false; + if (!Object.prototype.hasOwnProperty.call(parsed, accountName)) return false; + delete parsed[accountName]; + fs.writeFileSync(keyStorePath, JSON.stringify(parsed, null, 2), { encoding: "utf8" }); + return true; + } catch { + return false; + } +} + +async function deleteAccountDataFromDisk(account) { + const { dataDir, outputDir, databasesDir, accountName, accountDir } = resolveAccountDirInOutput(account); + if (!fs.existsSync(accountDir) || !fs.statSync(accountDir).isDirectory()) { + throw new Error("账号数据不存在"); + } + + const wasBackendRunning = !!backendProc; + let restartError = null; + let result = null; + + if (wasBackendRunning) { + await stopBackendAndWait({ timeoutMs: 10_000 }); + } + + try { + const exportsDir = path.join(outputDir, "exports", accountName); + try { + fs.rmSync(exportsDir, { recursive: true, force: true }); + } catch {} + + fs.rmSync(accountDir, { recursive: true, force: true }); + const removedKeyCache = removeAccountFromKeyStore(dataDir, accountName); + const accounts = listDecryptedAccountsOnDisk(databasesDir); + result = { + status: "success", + deleted_account: accountName, + accounts, + default_account: accounts.length ? 
accounts[0] : null, + removed_key_cache: removedKeyCache, + }; + } finally { + if (wasBackendRunning) { + try { + startBackend(); + await waitForBackend({ timeoutMs: 30_000 }); + } catch (err) { + restartError = err; + logMain(`[main] failed to restart backend after deleteAccountData: ${err?.message || err}`); + } + } + } + + if (restartError) { + throw new Error(`删除完成,但后端重启失败:${restartError?.message || restartError}`); + } + if (!result) throw new Error("删除账号数据失败"); + return result; +} + function getExeDir() { try { return path.dirname(process.execPath); @@ -69,7 +386,11 @@ function getExeDir() { function ensureOutputLink() { // Users often expect an `output/` folder near the installed exe. We keep the real data - // in the per-user data dir, and (when possible) create a Windows junction next to the exe. + // in the per-user data dir. + // + // NOTE: We intentionally avoid creating a junction/symlink inside the install directory. + // Some uninstall/update flows may traverse reparse points and delete the target directory, + // causing data loss (the install dir is removed on every update/reinstall). if (!app.isPackaged) return; const exeDir = getExeDir(); @@ -77,26 +398,56 @@ function ensureOutputLink() { if (!exeDir || !dataDir) return; const target = path.join(dataDir, "output"); - const linkPath = path.join(exeDir, "output"); + const legacyLinkPath = path.join(exeDir, "output"); - // If the target doesn't exist yet, create it so the link points somewhere real. + // Ensure the real output dir exists. try { fs.mkdirSync(target, { recursive: true }); } catch {} - // If something already exists at linkPath, do not overwrite it. + // Best-effort: remove a legacy junction/symlink at `exeDir/output` so uninstallers can't + // accidentally traverse it and delete the real per-user output directory. 
try { - if (fs.existsSync(linkPath)) return; + const st = fs.lstatSync(legacyLinkPath); + if (st.isSymbolicLink()) { + try { + fs.unlinkSync(legacyLinkPath); + logMain(`[main] removed legacy output link: ${legacyLinkPath}`); + } catch (err) { + logMain(`[main] failed to remove legacy output link: ${err?.message || err}`); + } + } else if (st.isDirectory()) { + const entries = fs.readdirSync(legacyLinkPath); + if (Array.isArray(entries) && entries.length === 0) { + // Remove an empty real directory to reduce confusion (it will be recreated by the backend if needed). + fs.rmdirSync(legacyLinkPath); + } else { + // Do not overwrite non-empty directories to avoid data loss. + // Note: data stored here will be wiped on update/reinstall. + logMain( + `[main] output dir exists in install dir (not a link): ${legacyLinkPath}. real data dir output: ${target}` + ); + } + } else { + logMain(`[main] output path exists and is not a directory/link: ${legacyLinkPath}`); + } } catch { - return; + // Doesn't exist yet. } + // Best-effort: drop a helper file next to the exe so users can find the real data. + // This avoids the data-loss risks of using junctions/symlinks under the install directory. 
try { - fs.symlinkSync(target, linkPath, "junction"); - logMain(`[main] created output link: ${linkPath} -> ${target}`); - } catch (err) { - logMain(`[main] failed to create output link: ${err?.message || err}`); - } + const p = path.join(exeDir, "output-location.txt"); + const text = `WeChatDataAnalysis data directory\n\nOutput folder:\n${target}\n`; + fs.writeFileSync(p, text, { encoding: "utf8" }); + } catch {} + + try { + const p = path.join(exeDir, "open-output.cmd"); + const text = `@echo off\r\nexplorer \"${target}\"\r\n`; + fs.writeFileSync(p, text, { encoding: "utf8" }); + } catch {} } function getMainLogPath() { @@ -120,6 +471,34 @@ function getDesktopSettingsPath() { return path.join(dir, "desktop-settings.json"); } +function getPackagedUiDir() { + if (!app.isPackaged) return null; + try { + return path.join(process.resourcesPath, "ui"); + } catch { + return null; + } +} + +function readPackagedUiBuildId() { + const uiDir = getPackagedUiDir(); + if (!uiDir) return ""; + + try { + const indexPath = path.join(uiDir, "index.html"); + if (!fs.existsSync(indexPath)) return ""; + const html = fs.readFileSync(indexPath, { encoding: "utf8" }); + const match = + html.match(/buildId:"([^"]+)"/) || + html.match(/\/_payload\.json\?([^"'&<>\s]+)/) || + html.match(/data-src="\/_payload\.json\?([^"]+)"/); + return String(match?.[1] || "").trim(); + } catch (err) { + logMain(`[main] failed to read packaged UI build id: ${err?.message || err}`); + return ""; + } +} + function loadDesktopSettings() { if (desktopSettings) return desktopSettings; @@ -127,6 +506,13 @@ function loadDesktopSettings() { // 'tray' (default): closing the window hides it to the system tray. // 'exit': closing the window quits the app. closeBehavior: "tray", + // When set, suppress the auto-update prompt for this exact version. + ignoredUpdateVersion: "", + // Backend (FastAPI) listens on this port. Used in packaged builds. 
+ backendPort: DEFAULT_BACKEND_PORT, + // Tracks the packaged UI build so we can invalidate Chromium's HTTP cache + // after upgrades without wiping user data/localStorage. + lastSeenUiBuildId: "", }; const p = getDesktopSettingsPath(); @@ -143,6 +529,7 @@ function loadDesktopSettings() { const raw = fs.readFileSync(p, { encoding: "utf8" }); const parsed = JSON.parse(raw || "{}"); desktopSettings = { ...defaults, ...(parsed && typeof parsed === "object" ? parsed : {}) }; + desktopSettings.backendPort = parsePort(desktopSettings.backendPort) ?? defaults.backendPort; } catch (err) { desktopSettings = { ...defaults }; logMain(`[main] failed to load settings: ${err?.message || err}`); @@ -177,6 +564,363 @@ function setCloseBehavior(next) { return desktopSettings.closeBehavior; } +function getIgnoredUpdateVersion() { + const v = String(loadDesktopSettings()?.ignoredUpdateVersion || "").trim(); + return v || ""; +} + +function setIgnoredUpdateVersion(version) { + loadDesktopSettings(); + desktopSettings.ignoredUpdateVersion = String(version || "").trim(); + persistDesktopSettings(); + return desktopSettings.ignoredUpdateVersion; +} + +async function refreshRendererCacheForPackagedUi() { + if (!app.isPackaged) return; + + const nextBuildId = readPackagedUiBuildId(); + if (!nextBuildId) return; + + const prevBuildId = String(loadDesktopSettings()?.lastSeenUiBuildId || "").trim(); + if (prevBuildId === nextBuildId) return; + + try { + const ses = session?.defaultSession; + if (ses) { + await ses.clearCache(); + try { + await ses.clearStorageData({ storages: ["serviceworkers"] }); + } catch {} + } + logMain(`[main] cleared renderer cache for UI build change: ${prevBuildId || "(none)"} -> ${nextBuildId}`); + } catch (err) { + logMain(`[main] failed to clear renderer cache for UI build change: ${err?.message || err}`); + } + + loadDesktopSettings(); + desktopSettings.lastSeenUiBuildId = nextBuildId; + persistDesktopSettings(); +} + +function parseEnvBool(value) { + if (value 
== null) return null; + const v = String(value).trim().toLowerCase(); + if (!v) return null; + if (v === "1" || v === "true" || v === "yes" || v === "y" || v === "on") return true; + if (v === "0" || v === "false" || v === "no" || v === "n" || v === "off") return false; + return null; +} + +let autoUpdateEnabledCache = null; +function isAutoUpdateEnabled() { + if (autoUpdateEnabledCache != null) return !!autoUpdateEnabledCache; + + const forced = parseEnvBool(process.env.AUTO_UPDATE_ENABLED); + let enabled = forced != null ? forced : !!app.isPackaged; + if (enabled && !autoUpdater) { + enabled = false; + logMain( + `[main] auto-update disabled: electron-updater unavailable: ${autoUpdaterLoadError?.message || "unknown error"}` + ); + } + + // In packaged builds electron-updater reads update config from app-update.yml. + // If missing, treat auto-update as disabled to avoid noisy errors. + if (enabled && app.isPackaged) { + try { + const updateConfigPath = path.join(process.resourcesPath, "app-update.yml"); + if (!fs.existsSync(updateConfigPath)) { + enabled = false; + logMain(`[main] auto-update disabled: missing ${updateConfigPath}`); + } + } catch (err) { + enabled = false; + logMain(`[main] auto-update disabled: failed to check app-update.yml: ${err?.message || err}`); + } + } + + autoUpdateEnabledCache = enabled; + return enabled; +} + +let autoUpdaterInitialized = false; +let updateDownloadInProgress = false; +let installOnDownload = false; +let updateDownloaded = false; +let lastUpdateInfo = null; + +function sendToRenderer(channel, payload) { + try { + if (!mainWindow || mainWindow.isDestroyed()) return; + mainWindow.webContents.send(channel, payload); + } catch (err) { + logMain(`[main] failed to send ${channel}: ${err?.message || err}`); + } +} + +function setWindowProgressBar(value) { + try { + if (!mainWindow || mainWindow.isDestroyed()) return; + mainWindow.setProgressBar(value); + } catch {} +} + +function looksLikeHtml(input) { + if (!input) return 
false; + const s = String(input); + if (!s.includes("<") || !s.includes(">")) return false; + // Be conservative: only treat the note as HTML if it contains common tags we expect from GitHub-rendered bodies. + return /<(p|div|br|ul|ol|li|a|strong|em|tt|code|pre|h[1-6])\b/i.test(s); +} + +function htmlToPlainText(html) { + if (!html) return ""; + + let text = String(html); + + // Drop script/style blocks entirely. + text = text.replace(/<script[^>]*>[\s\S]*?<\/script>/gi, ""); + text = text.replace(/<style[^>]*>[\s\S]*?<\/style>/gi, ""); + + // Keep links readable after stripping tags. + text = text.replace( + /<a[^>]*href=(["'])([^"']+)\1[^>]*>([\s\S]*?)<\/a>/gi, + (_m, _q, href, inner) => { + const innerText = String(inner).replace(/<[^>]*>/g, "").trim(); + const url = String(href || "").trim(); + if (!url) return innerText; + if (!innerText) return url; + return `${innerText} (${url})`; + } + ); + + // Preserve line breaks / list structure before stripping remaining tags. + text = text.replace(/<\s*br\s*\/?>/gi, "\n"); + text = text.replace(/<\/\s*(p|div|h1|h2|h3|h4|h5|h6)\s*>/gi, "\n"); + text = text.replace(/<\s*li[^>]*>/gi, "- "); + text = text.replace(/<\/\s*li\s*>/gi, "\n"); + text = text.replace(/<\/\s*(ul|ol)\s*>/gi, "\n"); + + // Strip remaining tags. + text = text.replace(/<[^>]*>/g, ""); + + // Decode the handful of entities we commonly see from GitHub-rendered HTML. + const named = { + nbsp: " ", + amp: "&", + lt: "<", + gt: ">", + quot: '"', + apos: "'", + "#39": "'", + }; + text = text.replace(/&([a-z0-9#]+);/gi, (m, name) => { + const key = String(name || "").toLowerCase(); + if (named[key] != null) return named[key]; + + // Numeric entities (decimal / hex). 
+ const decMatch = key.match(/^#(\d+)$/); + if (decMatch) { + const n = Number(decMatch[1]); + if (Number.isFinite(n) && n >= 0 && n <= 0x10ffff) { + try { + return String.fromCodePoint(n); + } catch { + return m; + } + } + return m; + } + + const hexMatch = key.match(/^#x([0-9a-f]+)$/i); + if (hexMatch) { + const n = Number.parseInt(hexMatch[1], 16); + if (Number.isFinite(n) && n >= 0 && n <= 0x10ffff) { + try { + return String.fromCodePoint(n); + } catch { + return m; + } + } + return m; + } + + return m; + }); + + // Normalize whitespace/newlines. + text = text.replace(/\r\n/g, "\n"); + text = text.replace(/\n{3,}/g, "\n\n"); + return text.trim(); +} + +function normalizeReleaseNotes(releaseNotes) { + if (!releaseNotes) return ""; + + const normalizeText = (value) => { + if (value == null) return ""; + const raw = typeof value === "string" ? value : String(value); + const trimmed = raw.trim(); + if (!trimmed) return ""; + if (looksLikeHtml(trimmed)) return htmlToPlainText(trimmed); + return trimmed; + }; + + if (typeof releaseNotes === "string") return normalizeText(releaseNotes); + if (Array.isArray(releaseNotes)) { + const parts = []; + for (const item of releaseNotes) { + const version = item?.version ? String(item.version) : ""; + const note = item?.note; + const noteText = + typeof note === "string" ? note : note != null ? JSON.stringify(note, null, 2) : ""; + const block = [version ? `v${version}` : "", normalizeText(noteText)] + .filter(Boolean) + .join("\n"); + if (block) parts.push(block); + } + return parts.join("\n\n"); + } + try { + return normalizeText(JSON.stringify(releaseNotes, null, 2)); + } catch { + return normalizeText(releaseNotes); + } +} + +function initAutoUpdater() { + if (autoUpdaterInitialized) return; + autoUpdaterInitialized = true; + + // Configure auto-updater (align with WeFlow). + autoUpdater.autoDownload = false; + // Don't install automatically on quit; let the user choose when to restart/install. 
+ autoUpdater.autoInstallOnAppQuit = false; + autoUpdater.disableDifferentialDownload = true; + + autoUpdater.on("download-progress", (progress) => { + sendToRenderer("app:downloadProgress", progress); + const percent = Number(progress?.percent || 0); + if (Number.isFinite(percent) && percent > 0) { + setWindowProgressBar(Math.max(0, Math.min(1, percent / 100))); + } + }); + + autoUpdater.on("update-downloaded", () => { + updateDownloadInProgress = false; + updateDownloaded = true; + installOnDownload = false; + setWindowProgressBar(-1); + + const payload = { + version: lastUpdateInfo?.version ? String(lastUpdateInfo.version) : "", + releaseNotes: normalizeReleaseNotes(lastUpdateInfo?.releaseNotes), + }; + sendToRenderer("app:updateDownloaded", payload); + + try { + // If the window is hidden to tray, show a lightweight hint instead of forcing UI focus. + tray?.displayBalloon?.({ + title: "更新已下载完成", + content: "可在弹窗中选择“立即重启安装”,或稍后再安装。", + }); + } catch {} + }); + + autoUpdater.on("error", (err) => { + updateDownloadInProgress = false; + installOnDownload = false; + updateDownloaded = false; + setWindowProgressBar(-1); + const message = err?.message || String(err); + logMain(`[main] autoUpdater error: ${message}`); + sendToRenderer("app:updateError", { message }); + }); +} + +async function checkForUpdatesInternal() { + const enabled = isAutoUpdateEnabled(); + if (!enabled) return { hasUpdate: false, enabled: false }; + + initAutoUpdater(); + + try { + const result = await autoUpdater.checkForUpdates(); + const updateInfo = result?.updateInfo; + lastUpdateInfo = updateInfo || null; + const latestVersion = updateInfo?.version ? 
String(updateInfo.version) : ""; + const currentVersion = (() => { + try { + return app.getVersion(); + } catch { + return ""; + } + })(); + + if (latestVersion && currentVersion && latestVersion !== currentVersion) { + return { + hasUpdate: true, + enabled: true, + version: latestVersion, + releaseNotes: normalizeReleaseNotes(updateInfo?.releaseNotes), + }; + } + + return { hasUpdate: false, enabled: true }; + } catch (err) { + const message = err?.message || String(err); + logMain(`[main] checkForUpdates failed: ${message}`); + return { hasUpdate: false, enabled: true, error: message }; + } +} + +async function downloadAndInstallInternal() { + if (!isAutoUpdateEnabled()) { + throw new Error("自动更新已禁用"); + } + initAutoUpdater(); + + if (updateDownloadInProgress) { + throw new Error("正在下载更新中,请稍候…"); + } + + updateDownloadInProgress = true; + installOnDownload = true; + updateDownloaded = false; + setWindowProgressBar(0); + + try { + // Ensure update info is up-to-date (downloadUpdate relies on the last check). + await autoUpdater.checkForUpdates(); + await autoUpdater.downloadUpdate(); + return { success: true }; + } catch (err) { + updateDownloadInProgress = false; + installOnDownload = false; + setWindowProgressBar(-1); + throw err; + } +} + +function checkForUpdatesOnStartup() { + if (!isAutoUpdateEnabled()) return; + if (!app.isPackaged) return; // keep dev noise-free by default + + setTimeout(async () => { + const result = await checkForUpdatesInternal(); + if (!result?.hasUpdate) return; + + const ignored = getIgnoredUpdateVersion(); + if (ignored && ignored === result.version) return; + + sendToRenderer("app:updateAvailable", { + version: result.version, + releaseNotes: result.releaseNotes || "", + }); + }, 3000); +} + function getTrayIconPath() { // Prefer an icon shipped in `src/` so it works both in dev and packaged (asar) builds. 
const shipped = path.join(__dirname, "icon.ico"); @@ -238,6 +982,91 @@ function createTray() { label: "显示", click: () => showMainWindow(), }, + { + label: "检查更新...", + click: async () => { + try { + if (!isAutoUpdateEnabled()) { + await dialog.showMessageBox({ + type: "info", + title: "检查更新", + message: "自动更新已禁用(仅打包版本可用)。", + buttons: ["确定"], + noLink: true, + }); + return; + } + + const result = await checkForUpdatesInternal(); + if (result?.error) { + await dialog.showMessageBox({ + type: "error", + title: "检查更新失败", + message: result.error, + buttons: ["确定"], + noLink: true, + }); + return; + } + + if (result?.hasUpdate && result?.version) { + const { response } = await dialog.showMessageBox({ + type: "info", + title: "发现新版本", + message: `发现新版本 ${result.version},是否立即更新?`, + detail: result.releaseNotes ? `更新内容:\n${result.releaseNotes}` : undefined, + buttons: ["立即更新", "稍后", "忽略此版本"], + defaultId: 0, + cancelId: 1, + noLink: true, + }); + + if (response === 0) { + try { + await downloadAndInstallInternal(); + } catch (err) { + const message = err?.message || String(err); + logMain(`[main] downloadAndInstall failed (tray): ${message}`); + await dialog.showMessageBox({ + type: "error", + title: "更新失败", + message, + buttons: ["确定"], + noLink: true, + }); + } + } else if (response === 2) { + try { + setIgnoredUpdateVersion(result.version); + } catch {} + } + + return; + } + + await dialog.showMessageBox({ + type: "info", + title: "检查更新", + message: "当前已是最新版本。", + buttons: ["确定"], + noLink: true, + }); + } catch (err) { + const message = err?.message || String(err); + logMain(`[main] tray check updates failed: ${message}`); + await dialog.showMessageBox({ + type: "error", + title: "检查更新失败", + message, + buttons: ["确定"], + noLink: true, + }); + } + }, + }, + { + type: "separator", + }, { label: "退出", click: () => { @@ -282,20 +1111,20 @@ function attachBackendStdio(proc, logPath) { fs.mkdirSync(path.dirname(logPath), { recursive: true }); } catch {} + let stream = null; 
try { - backendStdioStream = fs.createWriteStream(logPath, { flags: "a" }); - backendStdioStream.write(`[${nowIso()}] [main] backend stdio -> ${logPath}\n`); + stream = fs.createWriteStream(logPath, { flags: "a" }); + stream.write(`[${nowIso()}] [main] backend stdio -> ${logPath}\n`); } catch { - backendStdioStream = null; return; } const write = (prefix, chunk) => { - if (!backendStdioStream) return; + if (!stream) return; try { const text = Buffer.isBuffer(chunk) ? chunk.toString("utf8") : String(chunk); - backendStdioStream.write(`[${nowIso()}] ${prefix} ${text}`); - if (!text.endsWith("\n")) backendStdioStream.write("\n"); + stream.write(`[${nowIso()}] ${prefix} ${text}`); + if (!text.endsWith("\n")) stream.write("\n"); } catch {} }; @@ -305,9 +1134,9 @@ function attachBackendStdio(proc, logPath) { proc.on("close", (code, signal) => { write("[backend:close]", `code=${code} signal=${signal}`); try { - backendStdioStream?.end(); + stream?.end(); } catch {} - backendStdioStream = null; + stream = null; }); } @@ -321,13 +1150,17 @@ function getPackagedBackendPath() { return path.join(process.resourcesPath, "backend", "wechat-backend.exe"); } +function getPackagedWcdbDllPath() { + return path.join(process.resourcesPath, "backend", "native", "wcdb_api.dll"); +} + function startBackend() { if (backendProc) return backendProc; const env = { ...process.env, - WECHAT_TOOL_HOST: BACKEND_HOST, - WECHAT_TOOL_PORT: String(BACKEND_PORT), + WECHAT_TOOL_HOST: getBackendBindHost(), + WECHAT_TOOL_PORT: String(getBackendPort()), // Make sure Python prints UTF-8 to stdout/stderr. PYTHONIOENCODING: process.env.PYTHONIOENCODING || "utf-8", }; @@ -351,8 +1184,17 @@ function startBackend() { `Packaged backend not found: ${backendExe}. 
Build it into desktop/resources/backend/wechat-backend.exe` ); } + const packagedWcdbDll = getPackagedWcdbDllPath(); + if (fs.existsSync(packagedWcdbDll)) { + env.WECHAT_TOOL_WCDB_API_DLL_PATH = packagedWcdbDll; + logMain(`[main] using packaged wcdb_api.dll: ${packagedWcdbDll}`); + } else { + logMain(`[main] packaged wcdb_api.dll not found: ${packagedWcdbDll}`); + } + + const backendCwd = path.dirname(backendExe); backendProc = spawn(backendExe, [], { - cwd: env.WECHAT_TOOL_DATA_DIR, + cwd: backendCwd, env, stdio: ["ignore", "pipe", "pipe"], windowsHide: true, @@ -367,8 +1209,9 @@ function startBackend() { }); } - backendProc.on("exit", (code, signal) => { - backendProc = null; + const proc = backendProc; + proc.on("exit", (code, signal) => { + if (backendProc === proc) backendProc = null; // eslint-disable-next-line no-console console.log(`[backend] exited code=${code} signal=${signal}`); logMain(`[backend] exited code=${code} signal=${signal}`); @@ -380,22 +1223,69 @@ function startBackend() { function stopBackend() { if (!backendProc) return; - try { - if (process.platform === "win32" && backendProc.pid) { - // Ensure child tree is killed on Windows. - spawn("taskkill", ["/pid", String(backendProc.pid), "/T", "/F"], { - stdio: "ignore", - windowsHide: true, - }); - return; + const pid = backendProc.pid; + logMain(`[main] stopBackend pid=${pid || "?"}`); + + // Best-effort: ensure process tree is gone on Windows. Use spawnSync so the kill + // isn't aborted by the app quitting immediately after "before-quit". + if (process.platform === "win32" && pid) { + const systemRoot = process.env.SystemRoot || process.env.WINDIR || "C:\\Windows"; + const taskkillExe = path.join(systemRoot, "System32", "taskkill.exe"); + const args = ["/pid", String(pid), "/T", "/F"]; + + try { + const exe = fs.existsSync(taskkillExe) ? 
taskkillExe : "taskkill"; + const r = spawnSync(exe, args, { stdio: "ignore", windowsHide: true, timeout: 5000 }); + if (r?.error) logMain(`[main] taskkill failed: ${r.error?.message || r.error}`); + else if (typeof r?.status === "number" && r.status !== 0) + logMain(`[main] taskkill exit code=${r.status}`); + } catch (err) { + logMain(`[main] taskkill exception: ${err?.message || err}`); } - } catch {} + } + // Fallback: kill the direct process (taskkill might be missing from PATH in some envs). try { backendProc.kill(); } catch {} } +async function stopBackendAndWait({ timeoutMs = 10_000 } = {}) { + if (!backendProc) return; + const proc = backendProc; + + await new Promise((resolve) => { + let done = false; + const finish = () => { + if (done) return; + done = true; + resolve(); + }; + + const timer = setTimeout(finish, timeoutMs); + + try { + proc.once("exit", () => { + clearTimeout(timer); + finish(); + }); + } catch {} + + try { + stopBackend(); + } catch { + clearTimeout(timer); + finish(); + } + }); +} + +async function restartBackend({ timeoutMs = 30_000 } = {}) { + await stopBackendAndWait({ timeoutMs: 10_000 }); + startBackend(); + await waitForBackend({ timeoutMs }); +} + function httpGet(url) { return new Promise((resolve, reject) => { const req = http.get(url, (res) => { @@ -410,17 +1300,28 @@ function httpGet(url) { }); } -async function waitForBackend({ timeoutMs }) { +async function waitForBackend({ timeoutMs, healthUrl } = {}) { + const url = String(healthUrl || getBackendHealthUrl()).trim(); const startedAt = Date.now(); // eslint-disable-next-line no-constant-condition while (true) { + // If the backend process died, fail fast (otherwise we'd wait for the full timeout). 
+ if (!backendProc) { + throw new Error(`Backend process exited before becoming ready: ${url}`); + } + if (backendProc.exitCode != null) { + throw new Error( + `Backend process exited (code=${backendProc.exitCode} signal=${backendProc.signalCode || "null"}): ${url}` + ); + } + try { - const code = await httpGet(BACKEND_HEALTH_URL); + const code = await httpGet(url); if (code >= 200 && code < 500) return; } catch {} if (Date.now() - startedAt > timeoutMs) { - throw new Error(`Backend did not become ready in ${timeoutMs}ms: ${BACKEND_HEALTH_URL}`); + throw new Error(`Backend did not become ready in ${timeoutMs}ms: ${url}`); } await new Promise((r) => setTimeout(r, 300)); @@ -479,6 +1380,62 @@ function setupRendererConsoleLogging(win) { }); } +function setupRendererLifecycleLogging(win) { + if (!debugEnabled()) return; + + const logRendererLifecycle = (message) => { + logMain(`[renderer] ${message}`); + }; + + logRendererLifecycle(`window-created id=${win.id}`); + + win.webContents.on("did-start-loading", () => { + logRendererLifecycle("did-start-loading"); + }); + + win.webContents.on("dom-ready", () => { + logRendererLifecycle(`dom-ready url=${win.webContents.getURL()}`); + }); + + win.webContents.on("did-stop-loading", () => { + logRendererLifecycle("did-stop-loading"); + }); + + win.webContents.on("did-finish-load", () => { + logRendererLifecycle(`did-finish-load url=${win.webContents.getURL()}`); + }); + + win.webContents.on("did-fail-load", (_event, errorCode, errorDescription, validatedURL, isMainFrame) => { + logRendererLifecycle( + `did-fail-load code=${errorCode} mainFrame=${!!isMainFrame} url=${validatedURL} error=${errorDescription}` + ); + }); + + win.webContents.on("did-navigate", (_event, url, httpResponseCode, httpStatusText) => { + logRendererLifecycle( + `did-navigate url=${url} code=${httpResponseCode || 0} status=${httpStatusText || ""}` + ); + }); + + win.webContents.on("did-navigate-in-page", (_event, url, isMainFrame) => { + 
logRendererLifecycle(`did-navigate-in-page mainFrame=${!!isMainFrame} url=${url}`); + }); + + win.webContents.on("render-process-gone", (_event, details) => { + logRendererLifecycle( + `render-process-gone reason=${details?.reason || ""} exitCode=${details?.exitCode ?? ""}` + ); + }); + + win.on("unresponsive", () => { + logRendererLifecycle("window-unresponsive"); + }); + + win.on("responsive", () => { + logRendererLifecycle("window-responsive"); + }); +} + function createMainWindow() { const win = new BrowserWindow({ width: 1200, @@ -522,18 +1479,26 @@ function createMainWindow() { }); setupRendererConsoleLogging(win); + setupRendererLifecycleLogging(win); return win; } async function loadWithRetry(win, url) { const startedAt = Date.now(); + let attempt = 0; // eslint-disable-next-line no-constant-condition while (true) { + attempt += 1; + logMain(`[main] loadWithRetry attempt=${attempt} url=${url}`); try { await win.loadURL(url); + logMain(`[main] loadWithRetry success attempt=${attempt} elapsedMs=${Date.now() - startedAt} url=${url}`); return; - } catch { + } catch (err) { + logMain( + `[main] loadWithRetry failure attempt=${attempt} elapsedMs=${Date.now() - startedAt} url=${url} error=${err?.message || err}` + ); if (Date.now() - startedAt > 60_000) throw new Error(`Failed to load URL in time: ${url}`); await new Promise((r) => setTimeout(r, 500)); } @@ -601,6 +1566,15 @@ function registerWindowIpc() { } }); + ipcMain.handle("app:isDebugEnabled", () => { + try { + return debugEnabled(); + } catch (err) { + logMain(`[main] app:isDebugEnabled failed: ${err?.message || err}`); + return false; + } + }); + ipcMain.handle("app:setCloseBehavior", (_event, behavior) => { try { const next = setCloseBehavior(behavior); @@ -611,23 +1585,212 @@ function registerWindowIpc() { return getCloseBehavior(); } }); + + ipcMain.handle("backend:getPort", () => { + try { + return getBackendPort(); + } catch (err) { + logMain(`[main] backend:getPort failed: ${err?.message || err}`); 
+ return DEFAULT_BACKEND_PORT; + } + }); + + ipcMain.handle("backend:setPort", async (_event, port) => { + if (backendPortChangeInProgress) throw new Error("端口切换中,请稍后重试"); + if (!app.isPackaged) { + throw new Error("开发模式不支持界面修改端口;请设置 WECHAT_TOOL_PORT 环境变量后重启"); + } + + const nextPort = parsePort(port); + if (nextPort == null) throw new Error("端口无效,请输入 1-65535 的整数"); + + const prevPort = getBackendPort(); + if (nextPort === prevPort) { + return { success: true, changed: false, port: prevPort, uiUrl: getBackendUiUrl() }; + } + + const bindHost = getBackendBindHost(); + const ok = await isPortAvailable(nextPort, bindHost); + if (!ok) throw new Error(`端口 ${nextPort} 已被占用,请换一个端口`); + + backendPortChangeInProgress = true; + try { + setBackendPortSetting(nextPort); + try { + await restartBackend({ timeoutMs: 30_000 }); + } catch (err) { + // Roll back to the previous port so the UI can keep working. + setBackendPortSetting(prevPort); + try { + await restartBackend({ timeoutMs: 30_000 }); + } catch {} + throw err; + } + + const uiUrl = getBackendUiUrl(); + setTimeout(() => { + try { + if (!mainWindow || mainWindow.isDestroyed()) return; + void loadWithRetry(mainWindow, uiUrl); + } catch (err) { + logMain(`[main] failed to reload UI after backend port change: ${err?.message || err}`); + } + }, 50); + + return { success: true, changed: true, port: nextPort, uiUrl }; + } finally { + backendPortChangeInProgress = false; + } + }); + + ipcMain.handle("app:getVersion", () => { + try { + return app.getVersion(); + } catch (err) { + logMain(`[main] getVersion failed: ${err?.message || err}`); + return ""; + } + }); + + ipcMain.handle("app:getOutputDir", () => { + const dir = resolveDataDir(); + if (!dir) return ""; + return path.join(dir, "output"); + }); + + ipcMain.handle("app:openOutputDir", async () => { + const dir = resolveDataDir(); + if (!dir) throw new Error("无法定位数据目录"); + const outDir = path.join(dir, "output"); + try { + fs.mkdirSync(outDir, { recursive: true }); + } 
catch {} + try { + const err = await shell.openPath(outDir); + if (err) throw new Error(err); + return { success: true, path: outDir }; + } catch (e) { + const message = e?.message || String(e); + logMain(`[main] openOutputDir failed: ${message}`); + throw new Error(message); + } + }); + + ipcMain.handle("app:getAccountInfo", async (_event, account) => { + try { + return getAccountInfoFromDisk(account); + } catch (e) { + throw new Error(e?.message || String(e)); + } + }); + + ipcMain.handle("app:deleteAccountData", async (_event, account) => { + try { + return await deleteAccountDataFromDisk(account); + } catch (e) { + throw new Error(e?.message || String(e)); + } + }); + + ipcMain.handle("app:checkForUpdates", async () => { + return await checkForUpdatesInternal(); + }); + + ipcMain.handle("app:downloadAndInstall", async () => { + return await downloadAndInstallInternal(); + }); + + ipcMain.handle("app:installUpdate", async () => { + if (!isAutoUpdateEnabled()) { + throw new Error("自动更新已禁用"); + } + initAutoUpdater(); + if (!updateDownloaded) { + throw new Error("更新尚未下载完成"); + } + + try { + // Safety: remove legacy `output` junctions in the install dir before triggering the NSIS update/uninstall. + // Some uninstall flows may traverse reparse points and delete the real per-user output directory. 
+ try { + ensureOutputLink(); + } catch {} + autoUpdater.quitAndInstall(false, true); + return { success: true }; + } catch (err) { + const message = err?.message || String(err); + logMain(`[main] installUpdate failed: ${message}`); + throw new Error(message); + } + }); + + ipcMain.handle("app:ignoreUpdate", async (_event, version) => { + setIgnoredUpdateVersion(version); + return { success: true }; + }); + + ipcMain.handle("dialog:chooseDirectory", async (_event, options) => { + try { + const result = await dialog.showOpenDialog({ + title: String(options?.title || "选择文件夹"), + properties: ["openDirectory", "createDirectory"], + }); + return { + canceled: !!result?.canceled, + filePaths: Array.isArray(result?.filePaths) ? result.filePaths : [], + }; + } catch (err) { + logMain(`[main] dialog:chooseDirectory failed: ${err?.message || err}`); + return { + canceled: true, + filePaths: [], + }; + } + }); } async function main() { await app.whenReady(); + await refreshRendererCacheForPackagedUi(); Menu.setApplicationMenu(null); registerWindowIpc(); registerDebugShortcuts(); - // Resolve/create the data dir early so we can log reliably and (optionally) place a link + // Resolve/create the data dir early so we can log reliably and place helper files // next to the installed exe for easier access. resolveDataDir(); ensureOutputLink(); + await ensureBackendPortAvailableOnStartup(); logMain(`[main] app.isPackaged=${app.isPackaged} argv=${JSON.stringify(process.argv)}`); startBackend(); - await waitForBackend({ timeoutMs: 30_000 }); + try { + await waitForBackend({ timeoutMs: 30_000 }); + } catch (err) { + // In some environments a specific port may be blocked/reserved (WSAEACCES) or taken. + // Best-effort: pick a new port and retry once so the app can still start. 
+ if (app.isPackaged) { + const prevPort = getBackendPort(); + const bindHost = getBackendBindHost(); + const nextPort = await chooseAvailablePort(prevPort + 1, bindHost); + if (nextPort != null && nextPort !== prevPort) { + logMain(`[main] backend not ready on port ${prevPort}; retrying on ${nextPort}`); + try { + setBackendPortSetting(nextPort); + await restartBackend({ timeoutMs: 30_000 }); + logMain(`[main] backend retry succeeded on port ${nextPort}`); + } catch (retryErr) { + logMain(`[main] backend retry failed: ${retryErr?.stack || String(retryErr)}`); + throw retryErr; + } + } else { + throw err; + } + } else { + throw err; + } + } const win = createMainWindow(); mainWindow = win; @@ -635,10 +1798,14 @@ async function main() { const startUrl = process.env.ELECTRON_START_URL || - (app.isPackaged ? `http://${BACKEND_HOST}:${BACKEND_PORT}/` : "http://localhost:3000"); + (app.isPackaged ? getBackendUiUrl() : "http://localhost:3000"); + logMain(`[main] debugEnabled=${debugEnabled()} startUrl=${startUrl}`); await loadWithRetry(win, startUrl); + // Auto-check updates after the UI has loaded (packaged builds only). + checkForUpdatesOnStartup(); + // If debug mode is enabled, auto-open DevTools so the user doesn't need menu/shortcuts. 
if (debugEnabled()) { try { @@ -664,20 +1831,22 @@ app.on("before-quit", () => { stopBackend(); }); -main().catch((err) => { - // eslint-disable-next-line no-console - console.error(err); - logMain(`[main] fatal: ${err?.stack || String(err)}`); - stopBackend(); - try { - const dir = getUserDataDir(); - if (dir) { - dialog.showErrorBox( - "WeChatDataAnalysis 启动失败", - `启动失败:${err?.message || err}\n\n请查看日志目录:\n${dir}\n\n文件:desktop-main.log / backend-stdio.log / output\\\\logs\\\\...` - ); - shell.openPath(dir); - } - } catch {} - app.quit(); -}); +if (gotSingleInstanceLock) { + main().catch((err) => { + // eslint-disable-next-line no-console + console.error(err); + logMain(`[main] fatal: ${err?.stack || String(err)}`); + stopBackend(); + try { + const dir = getUserDataDir(); + if (dir) { + dialog.showErrorBox( + "WeChatDataAnalysis 启动失败", + `启动失败:${err?.message || err}\n\n请查看日志目录:\n${dir}\n\n文件:desktop-main.log / backend-stdio.log / output\\\\logs\\\\...` + ); + shell.openPath(dir); + } + } catch {} + app.quit(); + }); +} diff --git a/desktop/src/preload.cjs b/desktop/src/preload.cjs index 1f1eea2..501d0e3 100644 --- a/desktop/src/preload.cjs +++ b/desktop/src/preload.cjs @@ -1,14 +1,55 @@ const { contextBridge, ipcRenderer } = require("electron"); contextBridge.exposeInMainWorld("wechatDesktop", { + // Marker used by the frontend to distinguish the Electron desktop shell from the pure web build. 
+ __brand: "WeChatDataAnalysisDesktop", minimize: () => ipcRenderer.invoke("window:minimize"), toggleMaximize: () => ipcRenderer.invoke("window:toggleMaximize"), close: () => ipcRenderer.invoke("window:close"), isMaximized: () => ipcRenderer.invoke("window:isMaximized"), + isDebugEnabled: () => ipcRenderer.invoke("app:isDebugEnabled"), getAutoLaunch: () => ipcRenderer.invoke("app:getAutoLaunch"), setAutoLaunch: (enabled) => ipcRenderer.invoke("app:setAutoLaunch", !!enabled), getCloseBehavior: () => ipcRenderer.invoke("app:getCloseBehavior"), setCloseBehavior: (behavior) => ipcRenderer.invoke("app:setCloseBehavior", String(behavior || "")), + + getBackendPort: () => ipcRenderer.invoke("backend:getPort"), + setBackendPort: (port) => ipcRenderer.invoke("backend:setPort", Number(port)), + + chooseDirectory: (options = {}) => ipcRenderer.invoke("dialog:chooseDirectory", options), + + // Data/output folder helpers + getOutputDir: () => ipcRenderer.invoke("app:getOutputDir"), + openOutputDir: () => ipcRenderer.invoke("app:openOutputDir"), + getAccountInfo: (account) => ipcRenderer.invoke("app:getAccountInfo", String(account || "")), + deleteAccountData: (account) => ipcRenderer.invoke("app:deleteAccountData", String(account || "")), + + // Auto update + getVersion: () => ipcRenderer.invoke("app:getVersion"), + checkForUpdates: () => ipcRenderer.invoke("app:checkForUpdates"), + downloadAndInstall: () => ipcRenderer.invoke("app:downloadAndInstall"), + installUpdate: () => ipcRenderer.invoke("app:installUpdate"), + ignoreUpdate: (version) => ipcRenderer.invoke("app:ignoreUpdate", String(version || "")), + onDownloadProgress: (callback) => { + const handler = (_event, progress) => callback(progress); + ipcRenderer.on("app:downloadProgress", handler); + return () => ipcRenderer.removeListener("app:downloadProgress", handler); + }, + onUpdateAvailable: (callback) => { + const handler = (_event, info) => callback(info); + ipcRenderer.on("app:updateAvailable", handler); + return 
() => ipcRenderer.removeListener("app:updateAvailable", handler); + }, + onUpdateDownloaded: (callback) => { + const handler = (_event, info) => callback(info); + ipcRenderer.on("app:updateDownloaded", handler); + return () => ipcRenderer.removeListener("app:updateDownloaded", handler); + }, + onUpdateError: (callback) => { + const handler = (_event, payload) => callback(payload); + ipcRenderer.on("app:updateError", handler); + return () => ipcRenderer.removeListener("app:updateError", handler); + }, }); diff --git a/frontend/app.vue b/frontend/app.vue index 75ca905..ea71c78 100644 --- a/frontend/app.vue +++ b/frontend/app.vue @@ -1,39 +1,114 @@ diff --git a/frontend/assets/css/chat.css b/frontend/assets/css/chat.css new file mode 100644 index 0000000..231f295 --- /dev/null +++ b/frontend/assets/css/chat.css @@ -0,0 +1,1177 @@ +/* LinkCard:小程序标记与无 URL 降级 */ +.wechat-link-badge { + margin-left: auto; + padding-left: 8px; + font-size: 11px; + color: #b2b2b2; + white-space: nowrap; + flex-shrink: 0; +} + +.wechat-link-cover-badge { + margin-left: auto; + padding-left: 8px; + font-size: 11px; + color: rgba(243, 243, 243, 0.92); + white-space: nowrap; + flex-shrink: 0; +} + +.wechat-link-card.wechat-link-card--disabled, +.wechat-link-card-cover.wechat-link-card--disabled { + cursor: default; +} + +.wechat-link-card.wechat-link-card--disabled:hover, +.wechat-link-card-cover.wechat-link-card--disabled:hover { + background: #fff; +} + +/* 滚动条样式 */ +.overflow-y-auto::-webkit-scrollbar { + width: 6px; +} + +.overflow-y-auto::-webkit-scrollbar-track { + background: #f1f1f1; + border-radius: 3px; +} + +.overflow-y-auto::-webkit-scrollbar-thumb { + background: #c1c1c1; + border-radius: 3px; +} + +.overflow-y-auto::-webkit-scrollbar-thumb:hover { + background: #a1a1a1; +} + +/* 会话列表宽度:按物理像素(px)配置,按 dpr 换算为 CSS px */ +.session-list-panel { + width: calc(var(--session-list-width, 295px) / var(--dpr)); +} + +/* 会话列表拖动条(中间栏右侧) */ +.session-list-resizer { + position: absolute; + top: 
0; + right: -3px; /* 覆盖在 border 上,便于拖动 */ + width: 6px; + height: 100%; + cursor: col-resize; + z-index: 50; +} + +.session-list-resizer::after { + content: ''; + position: absolute; + top: 0; + bottom: 0; + left: 2px; + width: 2px; + background: transparent; + transition: background-color 0.15s ease; +} + +.session-list-resizer:hover::after, +.session-list-resizer-active::after { + background: rgba(0, 0, 0, 0.12); +} + +/* 消息气泡样式 */ +.message-bubble { + border-radius: var(--message-radius); + position: relative; + z-index: 1; +} + +/* 发送的消息(右侧绿色气泡) */ +.sent-message { + background-color: #95EB69 !important; + border-radius: var(--message-radius); +} + +.sent-message::after { + content: ''; + position: absolute; + top: 50%; + right: -4px; + transform: translateY(-50%) rotate(45deg); + width: 10px; + height: 10px; + background-color: #95EB69; + border-radius: 2px; +} + +/* 接收的消息(左侧白色气泡) */ +.received-message { + background-color: white !important; + border-radius: var(--message-radius); +} + +.received-message::before { + content: ''; + position: absolute; + top: 50%; + left: -4px; + transform: translateY(-50%) rotate(45deg); + width: 10px; + height: 10px; + background-color: white; + border-radius: 2px; +} + +/* 聊天标签页样式 */ +.chat-tab { + cursor: pointer; + transition: all 0.2s ease; + color: #606060; +} + +.chat-tab:hover:not(.selected) { + background-color: #E5E5E5; +} + +.chat-tab.selected { + color: #07b75b !important; +} + +.chat-tab:not(.selected):hover { + color: #07b75b; +} + +/* 语音消息样式 */ +.voice-message-wrap { + display: flex; + width: 100%; +} + +.voice-bubble { + border-radius: var(--message-radius); + position: relative; + transition: opacity 0.15s ease; +} + +.voice-bubble:hover { + opacity: 0.85; +} + +.voice-bubble:active { + opacity: 0.7; +} + +.voice-sent { + border-radius: var(--message-radius); +} + +.voice-sent::after { + content: ''; + position: absolute; + top: 50%; + right: -4px; + transform: translateY(-50%) rotate(45deg); + width: 10px; + 
height: 10px; + background-color: #95EC69; + border-radius: 2px; +} + +.voice-received { + border-radius: var(--message-radius); +} + +.voice-received::before { + content: ''; + position: absolute; + top: 50%; + left: -4px; + transform: translateY(-50%) rotate(45deg); + width: 10px; + height: 10px; + background-color: white; + border-radius: 2px; +} + +/* 语音消息样式 - 微信风格 */ +.wechat-voice-wrapper { + display: flex; + width: 100%; + position: relative; +} + +.wechat-voice-bubble { + border-radius: var(--message-radius); + position: relative; + transition: opacity 0.15s ease; + min-width: 80px; + max-width: 200px; +} + +.wechat-voice-bubble:hover { + opacity: 0.85; +} + +.wechat-voice-bubble:active { + opacity: 0.7; +} + +.wechat-voice-sent { + background: #95EC69; +} + +.wechat-voice-sent::after { + content: ''; + position: absolute; + top: 50%; + right: -4px; + transform: translateY(-50%) rotate(45deg); + width: 10px; + height: 10px; + background: #95EC69; + border-radius: 2px; +} + +.wechat-voice-received { + background: white; +} + +.wechat-voice-received::before { + content: ''; + position: absolute; + top: 50%; + left: -4px; + transform: translateY(-50%) rotate(45deg); + width: 10px; + height: 10px; + background: white; + border-radius: 2px; +} + +.wechat-voice-content { + display: flex; + align-items: center; + padding: 8px 12px; + gap: 8px; +} + +/* 语音图标样式 */ +.wechat-voice-icon { + width: 18px; + height: 18px; + flex-shrink: 0; + color: #1a1a1a; +} + +.wechat-quote-voice-icon { + width: 14px; + height: 14px; + color: inherit; +} + +.voice-icon-sent { + transform: scaleX(-1); +} + +/* 播放时的波动动画 */ +.wechat-voice-icon.voice-playing .voice-wave-2 { + animation: voice-wave-2 1s infinite; +} + +.wechat-voice-icon.voice-playing .voice-wave-3 { + animation: voice-wave-3 1s infinite; +} + +@keyframes voice-wave-2 { + 0%, 33% { opacity: 0; } + 34%, 100% { opacity: 1; } +} + +@keyframes voice-wave-3 { + 0%, 66% { opacity: 0; } + 67%, 100% { opacity: 1; } +} + 
+.wechat-voice-duration { + font-size: 14px; + color: #1a1a1a; +} + +.wechat-voice-unread { + position: absolute; + top: 50%; + right: -20px; + transform: translateY(-50%); + width: 8px; + height: 8px; + border-radius: 50%; + background: #e75e58; +} + +/* 音视频通话消息样式 - 微信风格 */ +.wechat-voip-bubble { + border-radius: var(--message-radius); + position: relative; + min-width: 120px; +} + +.wechat-voip-sent { + background: #95EC69; +} + +.wechat-voip-sent::after { + content: ''; + position: absolute; + top: 50%; + right: -4px; + transform: translateY(-50%) rotate(45deg); + width: 10px; + height: 10px; + background: #95EC69; + border-radius: 2px; +} + +.wechat-voip-received { + background: white; +} + +.wechat-voip-received::before { + content: ''; + position: absolute; + top: 50%; + left: -4px; + transform: translateY(-50%) rotate(45deg); + width: 10px; + height: 10px; + background: white; + border-radius: 2px; +} + +.wechat-voip-content { + display: flex; + align-items: center; + padding: 8px 14px; + gap: 8px; +} + +.wechat-voip-icon { + width: 22px; + height: 14px; + flex-shrink: 0; + object-fit: contain; +} + +.wechat-voip-text { + font-size: 14px; + color: #1a1a1a; +} + +/* 统一特殊消息尾巴(红包 / 文件等) */ +.wechat-special-card { + position: relative; + overflow: visible; +} + +.wechat-special-card::after { + content: ''; + position: absolute; + top: 12px; + left: -4px; + width: 12px; + height: 12px; + background-color: inherit; + transform: rotate(45deg); + border-radius: 2px; +} + +.wechat-special-sent-side::after { + left: auto; + right: -4px; +} + +.wechat-chat-history-card { + width: 210px; + background: #ffffff; + border-radius: var(--message-radius); + cursor: pointer; + transition: background-color 0.15s ease; +} + +.wechat-chat-history-card:hover { + background: #f5f5f5; +} + +.wechat-chat-history-body { + padding: 10px 12px; +} + +.wechat-chat-history-title { + font-size: 14px; + font-weight: 400; + color: #161616; + margin-bottom: 6px; +} + 
+.wechat-chat-history-preview { + font-size: 12px; + color: #6b7280; + line-height: 1.4; +} + +.wechat-chat-history-line { + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; +} + +.wechat-chat-history-bottom { + height: 27px; + display: flex; + align-items: center; + padding: 0 12px; + border-top: none; + position: relative; +} + +.wechat-chat-history-bottom::before { + content: ''; + position: absolute; + top: 0; + left: 13px; + right: 13px; + height: 1.5px; + background: #e8e8e8; +} + +.wechat-chat-history-bottom span { + font-size: 12px; + color: #b2b2b2; +} + +/* 转账消息样式 - 微信风格 */ +.wechat-transfer-card { + width: 210px; + background: #f79c46; + border-radius: var(--message-radius); + overflow: visible; + position: relative; +} + +.wechat-transfer-card::after { + content: ''; + position: absolute; + top: 16px; + left: -4px; + width: 10px; + height: 10px; + background: #f79c46; + transform: rotate(45deg); + border-radius: 2px; +} + +.wechat-transfer-sent-side::after { + left: auto; + right: -4px; +} + +.wechat-transfer-content { + display: flex; + align-items: center; + padding: 10px 12px; + min-height: 58px; +} + +.wechat-transfer-icon { + width: 36px; + height: 36px; + flex-shrink: 0; + object-fit: contain; +} + +.wechat-transfer-info { + flex: 1; + margin-left: 10px; + display: flex; + flex-direction: column; + overflow: hidden; +} + +.wechat-transfer-amount { + font-size: 16px; + font-weight: 500; + color: #fff; + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; +} + +.wechat-transfer-status { + font-size: 12px; + color: #fff; + margin-top: 2px; + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; +} + +.wechat-transfer-bottom { + height: 27px; + display: flex; + align-items: center; + padding: 0 12px; + border-top: none; + position: relative; +} + +.wechat-transfer-bottom::before { + content: ''; + position: absolute; + top: 0; + left: 13px; + right: 13px; + height: 1px; + background: 
rgba(255,255,255,0.2); +} + +.wechat-transfer-bottom span { + font-size: 11px; + color: #fff; +} + +/* 已领取的转账样式 */ +.wechat-transfer-received { + background: #FDCE9D; +} + +.wechat-transfer-received::after { + background: #FDCE9D; +} + +.wechat-transfer-received .wechat-transfer-amount, +.wechat-transfer-received .wechat-transfer-status { + color: #fff; +} + +.wechat-transfer-received .wechat-transfer-bottom span { + color: #fff; +} + +/* 退回的转账样式 */ +.wechat-transfer-returned { + background: #fde1c3; +} + +.wechat-transfer-returned::after { + background: #fde1c3; +} + +.wechat-transfer-returned .wechat-transfer-amount, +.wechat-transfer-returned .wechat-transfer-status { + color: #fff; +} + +.wechat-transfer-returned .wechat-transfer-bottom span { + color: #fff; +} + +/* 过期的转账样式 */ +.wechat-transfer-overdue { + background: #E9CFB3; +} + +.wechat-transfer-overdue::after { + background: #E9CFB3; +} + +.wechat-transfer-overdue .wechat-transfer-amount, +.wechat-transfer-overdue .wechat-transfer-status { + color: #fff; +} + +.wechat-transfer-overdue .wechat-transfer-bottom span { + color: #fff; +} + +/* 红包消息样式 - 微信风格 */ +.wechat-redpacket-card { + width: 210px; + background: #fa9d3b; + border-radius: var(--message-radius); + overflow: visible; + position: relative; +} + +.wechat-redpacket-content { + display: flex; + align-items: center; + padding: 10px 12px; + min-height: 58px; +} + +.wechat-redpacket-icon { + width: 32px; + height: 36px; + flex-shrink: 0; + object-fit: contain; +} + +.wechat-redpacket-info { + flex: 1; + margin-left: 10px; + display: flex; + flex-direction: column; + overflow: hidden; +} + +.wechat-redpacket-text { + font-size: 14px; + color: #fff; + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; +} + +.wechat-redpacket-status { + font-size: 12px; + color: #fff; + margin-top: 2px; +} + +.wechat-redpacket-bottom { + height: 27px; + display: flex; + align-items: center; + padding: 0 12px; + border-top: none; + position: relative; +} 
+ +.wechat-redpacket-bottom::before { + content: ''; + position: absolute; + top: 0; + left: 13px; + right: 13px; + height: 1px; + background: rgba(255,255,255,0.2); +} + +.wechat-redpacket-bottom span { + font-size: 11px; + color: #faecda; +} + +/* 已领取的红包样式 */ +.wechat-redpacket-received { + background: #f8e2c6; +} + +.wechat-redpacket-received .wechat-redpacket-text, +.wechat-redpacket-received .wechat-redpacket-status { + color: #b88550; +} + +.wechat-redpacket-received .wechat-redpacket-bottom span { + color: #c9a67a; +} + +/* 文件消息样式 - 基于红包样式覆盖 */ +.wechat-file-card { + width: 210px; + background: #fff; + cursor: pointer; + transition: background-color 0.15s ease; +} + +.wechat-file-card .wechat-redpacket-content { + padding: 10px 12px; + min-height: 58px; +} + +.wechat-file-card .wechat-redpacket-bottom { + height: 27px; + padding: 0 12px; + border-top: none; + position: relative; +} + +.wechat-file-card .wechat-redpacket-bottom::before { + content: ''; + position: absolute; + top: 0; + left: 13px; + right: 13px; + height: 1.5px; + background: #e8e8e8; +} + +.wechat-file-card:hover { + background: #f5f5f5; +} + +.wechat-file-card .wechat-file-info { + margin-left: 0; + margin-right: 10px; +} + +.wechat-file-name { + font-size: 14px; + color: #1a1a1a; + display: -webkit-box; + -webkit-line-clamp: 2; + -webkit-box-orient: vertical; + overflow: hidden; + word-break: break-all; + line-height: 1.4; +} + +.wechat-file-size { + font-size: 12px; + color: #b2b2b2; + margin-top: 4px; +} + +.wechat-file-icon { + width: 40px; + height: 40px; + flex-shrink: 0; + object-fit: contain; +} + +.wechat-file-bottom { + border-top: 1px solid #e8e8e8; +} + +.wechat-file-bottom span { + font-size: 12px; + color: #b2b2b2; +} + +.wechat-file-logo { + width: 18px; + height: 18px; + object-fit: contain; + margin-right: 4px; +} + +/* 链接消息样式 - 微信风格 */ +.wechat-link-card { + width: 210px; + min-width: 210px; + max-width: 210px; + background: #fff; + display: flex; + flex-direction: column; 
+ box-sizing: border-box; + border: none; + box-shadow: none; + outline: none; + cursor: pointer; + text-decoration: none; + transition: background-color 0.15s ease; +} + +.wechat-link-card:hover { + background: #f5f5f5; +} + +.wechat-link-content { + display: flex; + flex-direction: column; + gap: 8px; + box-sizing: border-box; + padding: 10px 10px 8px; + flex: 1 1 auto; +} + +.wechat-link-summary { + display: flex; + align-items: flex-start; + gap: 10px; + min-height: 42px; +} + +.wechat-link-title { + font-size: 14px; + color: #1a1a1a; + display: -webkit-box; + -webkit-line-clamp: 2; + -webkit-box-orient: vertical; + overflow: hidden; + line-height: 1.4; + word-break: break-word; +} + +.wechat-link-desc { + font-size: 12px; + color: #8c8c8c; + display: -webkit-box; + -webkit-line-clamp: 2; + -webkit-box-orient: vertical; + overflow: hidden; + line-height: 1.4; + word-break: break-word; + flex: 1 1 auto; + min-width: 0; +} + +.wechat-link-thumb { + width: 42px; + height: 42px; + flex: 0 0 auto; + border-radius: 0; + overflow: hidden; + background: #f2f2f2; + align-self: flex-start; +} + +.wechat-link-thumb-img { + width: 100%; + height: 100%; + object-fit: cover; + display: block; +} + +.wechat-link-card--mini-program { + max-height: 270px; + height: 270px; +} + +.wechat-link-mini-body { + display: flex; + flex-direction: column; + gap: 10px; + padding: 12px; + box-sizing: border-box; + flex: 1 1 auto; + min-height: 0; +} + +.wechat-link-mini-header { + display: flex; + align-items: center; + gap: 8px; + min-width: 0; +} + +.wechat-link-mini-header-avatar { + width: 20px; + height: 20px; + border-radius: 50%; + background: #14c15f; + color: #fff; + font-size: 11px; + line-height: 20px; + text-align: center; + flex-shrink: 0; + position: relative; + overflow: hidden; +} + +.wechat-link-mini-header-avatar-img { + position: absolute; + inset: 0; + width: 100%; + height: 100%; + object-fit: cover; + display: block; +} + +.wechat-link-mini-header-name { + font-size: 
13px; + color: #7d7d7d; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + min-width: 0; + flex: 1 1 auto; +} + +.wechat-link-mini-title { + font-size: 13px; + line-height: 1.45; + color: #1a1a1a; + display: -webkit-box; + -webkit-line-clamp: 2; + -webkit-box-orient: vertical; + overflow: hidden; + word-break: break-word; +} + +.wechat-link-mini-preview { + width: 100%; + height: auto; + min-height: 0; + flex: 1 1 auto; + overflow: hidden; + background: #f2f2f2; + margin-top: auto; +} + +.wechat-link-mini-preview--empty { + background: #f7f7f7; +} + +.wechat-link-mini-preview-img { + width: 100%; + height: 100%; + object-fit: contain; + object-position: center; + display: block; +} + +.wechat-link-mini-footer { + height: 23px; + display: flex; + align-items: center; + gap: 6px; + padding: 0 12px; + box-sizing: border-box; + position: relative; + flex-shrink: 0; +} + +.wechat-link-mini-footer::before { + content: ''; + position: absolute; + top: 0; + left: 12px; + right: 12px; + height: 1px; + background: #e8e8e8; +} + +.wechat-link-mini-footer-icon { + width: 12px; + height: 12px; + object-fit: contain; + flex-shrink: 0; +} + +.wechat-link-mini-footer-text { + font-size: 10px; + color: #8c8c8c; +} + +.wechat-link-from { + height: 30px; + display: flex; + align-items: center; + gap: 5px; + padding: 0 10px; + position: relative; + flex-shrink: 0; +} + +.wechat-link-from::before { + content: ''; + position: absolute; + top: 0; + left: 11px; + right: 11px; + height: 1.5px; + background: #e8e8e8; +} + +.wechat-link-from-avatar { + width: 16px; + height: 16px; + border-radius: 50%; + background: #111; + color: #fff; + font-size: 11px; + line-height: 16px; + text-align: center; + flex-shrink: 0; + position: relative; + overflow: hidden; +} + +.wechat-link-from-avatar-img { + position: absolute; + inset: 0; + width: 100%; + height: 100%; + object-fit: cover; + display: block; +} + +.wechat-link-from-name { + font-size: 12px; + color: #b2b2b2; + 
overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; +} + +/* 链接封面卡片(170x230 图 + 60 底栏) */ +.wechat-link-card-cover { + width: 137px; + min-width: 137px; + max-width: 137px; + background: #fff; + display: flex; + flex-direction: column; + box-sizing: border-box; + border: none; + box-shadow: none; + outline: none; + cursor: pointer; + text-decoration: none; + transition: background-color 0.15s ease; +} + +.wechat-link-card-cover:hover { + background: #f5f5f5; +} + +.wechat-link-cover-image-wrap { + width: 137px; + height: 180px; + position: relative; + overflow: hidden; + border-radius: 4px 4px 0 0; + background: #f2f2f2; + flex-shrink: 0; +} + +.wechat-link-cover-image { + width: 100%; + height: 100%; + object-fit: cover; + object-position: center; + display: block; +} + +/* 仅公众号封面卡片去掉菱形尖角,其它消息保持原样 */ +.wechat-link-card-cover.wechat-special-card::after { + content: none !important; +} + +.wechat-link-cover-from { + height: 30px; + display: flex; + align-items: center; + gap: 6px; + padding: 0 10px; + box-sizing: border-box; + position: absolute; + left: 0; + right: 0; + bottom: 0; + background: transparent; + flex-shrink: 0; +} + +.wechat-link-cover-from-avatar { + width: 18px; + height: 18px; + border-radius: 50%; + background: #111; + color: #fff; + font-size: 11px; + line-height: 18px; + text-align: center; + flex-shrink: 0; + position: relative; + overflow: hidden; +} + +.wechat-link-cover-from-avatar-img { + position: absolute; + inset: 0; + width: 100%; + height: 100%; + object-fit: cover; + display: block; +} + +.wechat-link-cover-from-name { + font-size: 12px; + color: #f3f3f3; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; +} + +.wechat-link-cover-title { + height: 50px; + padding: 7px 10px 0; + box-sizing: border-box; + font-size: 12px; + line-height: 1.24; + color: #1a1a1a; + display: -webkit-box; + -webkit-line-clamp: 2; + -webkit-box-orient: vertical; + overflow: hidden; + word-break: break-word; + flex-shrink: 0; 
+} + +/* 隐私模式模糊效果 */ +.privacy-blur { + filter: blur(9px); + transition: filter 0.2s ease; +} + +.privacy-blur:hover { + filter: none; +} + +/* 定位引用消息的高亮效果 */ +.message-locate-highlight { + position: relative; + animation: locate-pulse 1.8s ease-out; +} + +.message-locate-highlight::before { + content: ''; + position: absolute; + inset: -4px -8px; + border-radius: 8px; + background: rgba(3, 193, 96, 0.12); + pointer-events: none; + animation: locate-fade 1.8s ease-out forwards; +} + +@keyframes locate-pulse { + 0% { + transform: scale(1.02); + } + 15% { + transform: scale(1); + } + 100% { + transform: scale(1); + } +} + +@keyframes locate-fade { + 0% { + opacity: 1; + background: rgba(3, 193, 96, 0.15); + } + 70% { + opacity: 1; + } + 100% { + opacity: 0; + } +} + +/* 骨架屏加载动画 */ +.skeleton-pulse { + animation: skeleton-loading 1.5s ease-in-out infinite; +} + +@keyframes skeleton-loading { + 0% { + opacity: 0.6; + } + 50% { + opacity: 0.3; + } + 100% { + opacity: 0.6; + } +} diff --git a/frontend/assets/css/tailwind.css b/frontend/assets/css/tailwind.css index 6fb94bf..dc80cee 100644 --- a/frontend/assets/css/tailwind.css +++ b/frontend/assets/css/tailwind.css @@ -10,32 +10,32 @@ --wechat-green-hover: #06ad56; --wechat-green-light: #e6f7f0; --wechat-green-dark: #059341; - + /* 主色调 */ --primary-color: #07c160; --primary-hover: #06ad56; --secondary-color: #4c9e5f; - + /* 危险色 */ --danger-color: #fa5151; --danger-hover: #e94848; - + /* 警告色 */ --warning-color: #ffc300; --warning-hover: #e6ad00; - + /* 背景色 */ --bg-primary: #f7f8fa; --bg-secondary: #ffffff; --bg-gray: #ededed; --bg-dark: #191919; - + /* 文字颜色 */ --text-primary: #191919; --text-secondary: #576b95; --text-light: #888888; --text-white: #ffffff; - + /* 边框颜色 */ --border-color: #e7e7e7; --border-light: #f4f4f4; @@ -80,6 +80,56 @@ /* 统一的消息圆角工具类 */ .msg-radius { border-radius: var(--message-radius); } .msg-bubble { @apply leading-normal break-words text-pretty; border-radius: var(--message-radius); } + + /* 
隐私模式(通用):默认模糊,悬停显示 */ + .privacy-blur { + filter: blur(9px); + transition: filter 0.2s ease; + } + + .privacy-blur:hover { + filter: none; + } + + /* Wrapped 隐私模式:仅模糊“用户名文本”,头像不模糊(避免把头像也 blur 掉) */ + .wrapped-privacy .wrapped-privacy-name { + filter: blur(9px); + transition: filter 0.2s ease; + } + + .wrapped-privacy .wrapped-privacy-name:hover { + filter: none; + } + + /* Wrapped 隐私模式:模糊“消息内容文本”(仅在被标记为 message 的节点上生效) */ + .wrapped-privacy .wrapped-privacy-message { + filter: blur(9px); + transition: filter 0.2s ease; + } + + .wrapped-privacy .wrapped-privacy-message:hover { + filter: none; + } + + /* Wrapped 隐私模式:模糊“词云关键词” */ + .wrapped-privacy .wrapped-privacy-keyword { + filter: blur(9px); + transition: filter 0.2s ease; + } + + .wrapped-privacy .wrapped-privacy-keyword:hover { + filter: none; + } + + /* Wrapped 隐私模式:模糊头像(含 fallback 字符) */ + .wrapped-privacy .wrapped-privacy-avatar { + filter: blur(9px); + transition: filter 0.2s ease; + } + + .wrapped-privacy .wrapped-privacy-avatar:hover { + filter: none; + } /* 按钮样式 */ .btn { @apply px-6 py-3 rounded-full font-medium transition-all duration-200 focus:outline-none focus:ring-2 focus:ring-offset-2 transform active:scale-95; @@ -110,6 +160,48 @@ @apply hover:transform hover:scale-[1.02] transition-all duration-300; } + /* Wrapped (年度总结) 背景纹理 */ + .wrapped-noise { + background-image: url("data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIxNjAiIGhlaWdodD0iMTYwIj4KICA8ZmlsdGVyIGlkPSJuIj4KICAgIDxmZVR1cmJ1bGVuY2UgdHlwZT0iZnJhY3RhbE5vaXNlIiBiYXNlRnJlcXVlbmN5PSIwLjgiIG51bU9jdGF2ZXM9IjQiIHN0aXRjaFRpbGVzPSJzdGl0Y2giLz4KICAgIDxmZUNvbG9yTWF0cml4IHR5cGU9InNhdHVyYXRlIiB2YWx1ZXM9IjAiLz4KICA8L2ZpbHRlcj4KICA8cmVjdCB3aWR0aD0iMTYwIiBoZWlnaHQ9IjE2MCIgZmlsdGVyPSJ1cmwoI24pIiBvcGFjaXR5PSIwLjQ1Ii8+Cjwvc3ZnPg=="); + background-repeat: repeat; + background-size: 320px 320px; + mix-blend-mode: multiply; + } + + /* Wrapped 增强噪点纹理(动态抖动) */ + .wrapped-noise-enhanced { + background-image: 
url("data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIyMDAiIGhlaWdodD0iMjAwIj4KICA8ZmlsdGVyIGlkPSJuIj4KICAgIDxmZVR1cmJ1bGVuY2UgdHlwZT0iZnJhY3RhbE5vaXNlIiBiYXNlRnJlcXVlbmN5PSIwLjkiIG51bU9jdGF2ZXM9IjUiIHN0aXRjaFRpbGVzPSJzdGl0Y2giLz4KICAgIDxmZUNvbG9yTWF0cml4IHR5cGU9InNhdHVyYXRlIiB2YWx1ZXM9IjAiLz4KICA8L2ZpbHRlcj4KICA8cmVjdCB3aWR0aD0iMjAwIiBoZWlnaHQ9IjIwMCIgZmlsdGVyPSJ1cmwoI24pIiBvcGFjaXR5PSIwLjUiLz4KPC9zdmc+"); + background-repeat: repeat; + background-size: 200px 200px; + mix-blend-mode: multiply; + animation: noise-jitter 0.5s steps(3) infinite; + } + + /* Wrapped typography */ + .wrapped-title { + font-weight: 700; + letter-spacing: 0.02em; + } + + .wrapped-title-en { + font-weight: 700; + letter-spacing: 0.04em; + } + + .wrapped-body { + line-height: 1.8; + } + + .wrapped-label { + font-weight: 600; + letter-spacing: 0.15em; + text-transform: uppercase; + } + + .wrapped-number { + font-variant-numeric: tabular-nums; + } + /* 输入框样式 */ .input { @apply w-full px-4 py-3 bg-[#f7f8fa] border border-transparent rounded-xl focus:outline-none focus:ring-2 focus:ring-[#07c160] focus:bg-white focus:border-[#07c160] transition-all duration-200; @@ -557,35 +649,39 @@ } .header-btn { - @apply flex items-center gap-1.5 text-xs px-3 py-1.5 rounded-lg bg-white border border-gray-200 text-gray-700 transition-all duration-200 disabled:opacity-50 disabled:cursor-not-allowed; + @apply flex items-center gap-1.5 text-xs px-3 py-1.5 rounded-md bg-white border border-gray-200 text-gray-700 transition-all duration-150 disabled:opacity-50 disabled:cursor-not-allowed shadow-sm; } .header-btn:hover:not(:disabled) { - @apply bg-gray-50 border-gray-300; + @apply bg-gray-50 border-gray-300 shadow; } .header-btn:active:not(:disabled) { - @apply bg-gray-100; + @apply bg-gray-100 scale-95; + } + + .header-btn svg { + @apply w-3.5 h-3.5; } .header-btn-icon { - @apply w-8 h-8 flex items-center justify-center rounded-lg bg-white border border-gray-200 
text-gray-600 transition-all duration-200; + @apply w-8 h-8 flex items-center justify-center rounded-lg bg-transparent border border-transparent text-gray-600 transition-all duration-200 disabled:opacity-50 disabled:cursor-not-allowed; } .header-btn-icon:hover { - @apply bg-gray-50 border-gray-300 text-gray-800; + @apply bg-transparent border-transparent text-gray-800; } .header-btn-icon-active { - @apply bg-[#03C160]/10 border-[#03C160] text-[#03C160]; + @apply bg-transparent border-transparent text-[#03C160]; } .header-btn-icon-active:hover { - @apply bg-[#03C160]/15; + @apply bg-transparent; } .message-filter-select { - @apply text-xs px-2 py-1.5 rounded-lg bg-white border border-gray-200 text-gray-700 focus:outline-none focus:ring-2 focus:ring-[#03C160]/20 focus:border-[#03C160] transition-all disabled:opacity-50 disabled:cursor-not-allowed; + @apply text-xs px-2 py-1.5 rounded-lg bg-transparent border-0 text-gray-700 focus:outline-none focus:ring-0 transition-all disabled:opacity-50 disabled:cursor-not-allowed; } /* 搜索侧边栏样式 */ @@ -609,6 +705,128 @@ @apply px-3 py-3 border-b border-gray-100; } + /* 时间侧边栏(按日期定位) */ + .time-sidebar { + @apply w-[420px] h-full flex flex-col bg-white border-l border-gray-200 flex-shrink-0; + } + + .time-sidebar-header { + @apply flex items-center justify-between px-4 py-3 border-b border-gray-200 bg-gray-50; + } + + .time-sidebar-title { + @apply flex items-center gap-2 text-sm font-medium text-gray-800; + } + + .time-sidebar-close { + @apply p-1.5 text-gray-500 hover:text-gray-700 hover:bg-gray-200 rounded-md transition-colors; + } + + .time-sidebar-body { + @apply flex-1 overflow-y-auto min-h-0; + } + + .time-sidebar-status { + @apply px-4 py-2 text-xs text-gray-600 border-b border-gray-100; + } + + .time-sidebar-status-error { + @apply text-red-600; + } + + .calendar-header { + @apply flex items-center justify-between px-4 py-3; + } + + .calendar-nav-btn { + @apply p-1.5 text-gray-500 hover:text-gray-700 hover:bg-gray-100 
rounded-md transition-colors disabled:opacity-50 disabled:cursor-not-allowed; + } + + .calendar-month-label { + @apply text-sm font-medium text-gray-800; + } + + .calendar-month-label-selects { + @apply flex items-center gap-2; + } + + .calendar-ym-select { + @apply text-xs px-2 py-1 rounded-md border border-gray-200 bg-white text-gray-800 hover:border-gray-300 focus:outline-none focus:ring-2 focus:ring-[#03C160]/20 disabled:opacity-60 disabled:cursor-not-allowed; + } + + .calendar-weekdays { + @apply grid grid-cols-7 gap-1 px-4 pt-1; + } + + .calendar-weekday { + @apply text-[11px] text-gray-400 text-center py-1; + } + + .calendar-grid { + @apply grid grid-cols-7 gap-1 px-4 pb-4; + } + + .calendar-day { + @apply h-9 rounded-md flex items-center justify-center text-xs font-medium transition-colors border border-gray-200 bg-white disabled:cursor-not-allowed; + } + + .calendar-day-outside { + @apply bg-transparent border-transparent; + } + + .calendar-day-empty { + @apply bg-gray-100 text-gray-400 border-gray-100; + } + + .calendar-day-selected { + /* Keep background as-is (heatmap), but emphasize with a ring/outline. 
*/ + box-shadow: 0 0 0 2px rgba(3, 193, 96, 0.85); + border-color: rgba(3, 193, 96, 0.95) !important; + } + + .calendar-day-l1 { + background: rgba(3, 193, 96, 0.12); + border-color: rgba(3, 193, 96, 0.18); + color: #065f46; + } + + .calendar-day-l2 { + background: rgba(3, 193, 96, 0.24); + border-color: rgba(3, 193, 96, 0.28); + color: #065f46; + } + + .calendar-day-l3 { + background: rgba(3, 193, 96, 0.38); + border-color: rgba(3, 193, 96, 0.40); + color: #064e3b; + } + + .calendar-day-l4 { + background: rgba(3, 193, 96, 0.55); + border-color: rgba(3, 193, 96, 0.55); + color: #053d2e; + } + + .calendar-day-l1:hover, + .calendar-day-l2:hover, + .calendar-day-l3:hover, + .calendar-day-l4:hover { + filter: brightness(0.98); + } + + .calendar-day-number { + @apply select-none; + } + + .time-sidebar-actions { + @apply px-4 pb-4; + } + + .time-sidebar-action-btn { + @apply w-full text-xs px-3 py-2 rounded-md bg-[#03C160] text-white hover:bg-[#02a650] transition-colors disabled:opacity-60 disabled:cursor-not-allowed; + } + /* 整合搜索框样式 */ .search-input-combined { @apply flex items-center bg-white border-2 border-gray-200 rounded-lg overflow-hidden transition-all duration-200; @@ -924,3 +1142,36 @@ opacity: 1; } } + +/* Wrapped 动画关键帧 */ + +@keyframes noise-jitter { + 0% { + transform: translate(0, 0); + } + 33% { + transform: translate(-1px, 1px); + } + 66% { + transform: translate(1px, -1px); + } + 100% { + transform: translate(0, 0); + } +} + +/* Wrapped 入场动画 */ +@keyframes wrapped-fade-in { + 0% { + opacity: 0; + transform: translateY(20px); + } + 100% { + opacity: 1; + transform: translateY(0); + } +} + +.wrapped-animate-in { + animation: wrapped-fade-in 0.6s ease-out forwards; +} diff --git a/frontend/assets/images/wechat/mini-program.svg b/frontend/assets/images/wechat/mini-program.svg new file mode 100644 index 0000000..e24435f --- /dev/null +++ b/frontend/assets/images/wechat/mini-program.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git 
a/frontend/assets/images/wechat/wechat-trans-icon2.png b/frontend/assets/images/wechat/wechat-trans-icon2.png index b9f72da..6d500a2 100644 Binary files a/frontend/assets/images/wechat/wechat-trans-icon2.png and b/frontend/assets/images/wechat/wechat-trans-icon2.png differ diff --git a/frontend/components/ApiStatus.vue b/frontend/components/ApiStatus.vue index d866a1e..5b68826 100644 --- a/frontend/components/ApiStatus.vue +++ b/frontend/components/ApiStatus.vue @@ -8,7 +8,7 @@

API连接问题

{{ appStore.apiMessage || '无法连接到后端服务' }}

-

请确保后端服务正在运行 (端口: 8000)

+

请确保后端服务正在运行

@@ -18,4 +18,4 @@ import { useAppStore } from '~/stores/app' const appStore = useAppStore() - \ No newline at end of file + diff --git a/frontend/components/ChatLocationCard.vue b/frontend/components/ChatLocationCard.vue new file mode 100644 index 0000000..c39c81c --- /dev/null +++ b/frontend/components/ChatLocationCard.vue @@ -0,0 +1,261 @@ + + + + + diff --git a/frontend/components/DesktopUpdateDialog.vue b/frontend/components/DesktopUpdateDialog.vue new file mode 100644 index 0000000..72a5050 --- /dev/null +++ b/frontend/components/DesktopUpdateDialog.vue @@ -0,0 +1,255 @@ + + + diff --git a/frontend/components/EditedMessagePreview.vue b/frontend/components/EditedMessagePreview.vue new file mode 100644 index 0000000..0e8160e --- /dev/null +++ b/frontend/components/EditedMessagePreview.vue @@ -0,0 +1,138 @@ + + + diff --git a/frontend/components/LivePhotoIcon.vue b/frontend/components/LivePhotoIcon.vue new file mode 100644 index 0000000..79ae903 --- /dev/null +++ b/frontend/components/LivePhotoIcon.vue @@ -0,0 +1,29 @@ + + + + diff --git a/frontend/components/SettingsDialog.vue b/frontend/components/SettingsDialog.vue new file mode 100644 index 0000000..fe69abd --- /dev/null +++ b/frontend/components/SettingsDialog.vue @@ -0,0 +1,770 @@ + + + + + diff --git a/frontend/components/SidebarRail.vue b/frontend/components/SidebarRail.vue new file mode 100644 index 0000000..e704e40 --- /dev/null +++ b/frontend/components/SidebarRail.vue @@ -0,0 +1,542 @@ + + + diff --git a/frontend/components/chat/ChatOverlays.vue b/frontend/components/chat/ChatOverlays.vue new file mode 100644 index 0000000..0b8999e --- /dev/null +++ b/frontend/components/chat/ChatOverlays.vue @@ -0,0 +1,1626 @@ + + + diff --git a/frontend/components/chat/ConversationPane.vue b/frontend/components/chat/ConversationPane.vue new file mode 100644 index 0000000..e8bced0 --- /dev/null +++ b/frontend/components/chat/ConversationPane.vue @@ -0,0 +1,116 @@ + + + diff --git 
a/frontend/components/chat/FileTypeIcon.vue b/frontend/components/chat/FileTypeIcon.vue new file mode 100644 index 0000000..69f7732 --- /dev/null +++ b/frontend/components/chat/FileTypeIcon.vue @@ -0,0 +1,33 @@ + + + diff --git a/frontend/components/chat/LinkCard.vue b/frontend/components/chat/LinkCard.vue new file mode 100644 index 0000000..1aa9890 --- /dev/null +++ b/frontend/components/chat/LinkCard.vue @@ -0,0 +1,285 @@ + diff --git a/frontend/components/chat/MessageContent.vue b/frontend/components/chat/MessageContent.vue new file mode 100644 index 0000000..1f1064a --- /dev/null +++ b/frontend/components/chat/MessageContent.vue @@ -0,0 +1,316 @@ + + + diff --git a/frontend/components/chat/MessageItem.vue b/frontend/components/chat/MessageItem.vue new file mode 100644 index 0000000..6f39a86 --- /dev/null +++ b/frontend/components/chat/MessageItem.vue @@ -0,0 +1,148 @@ + + + diff --git a/frontend/components/chat/MessageList.vue b/frontend/components/chat/MessageList.vue new file mode 100644 index 0000000..b2d5640 --- /dev/null +++ b/frontend/components/chat/MessageList.vue @@ -0,0 +1,48 @@ + + + diff --git a/frontend/components/chat/SessionListPanel.vue b/frontend/components/chat/SessionListPanel.vue new file mode 100644 index 0000000..bc4e115 --- /dev/null +++ b/frontend/components/chat/SessionListPanel.vue @@ -0,0 +1,144 @@ + + + diff --git a/frontend/components/wrapped/cards/Card00GlobalOverview.vue b/frontend/components/wrapped/cards/Card00GlobalOverview.vue new file mode 100644 index 0000000..d9c483b --- /dev/null +++ b/frontend/components/wrapped/cards/Card00GlobalOverview.vue @@ -0,0 +1,192 @@ + + + diff --git a/frontend/components/wrapped/cards/Card01CyberSchedule.vue b/frontend/components/wrapped/cards/Card01CyberSchedule.vue new file mode 100644 index 0000000..7bdf2e0 --- /dev/null +++ b/frontend/components/wrapped/cards/Card01CyberSchedule.vue @@ -0,0 +1,346 @@ + + + diff --git a/frontend/components/wrapped/cards/Card02MessageChars.vue 
b/frontend/components/wrapped/cards/Card02MessageChars.vue new file mode 100644 index 0000000..bb5fd0a --- /dev/null +++ b/frontend/components/wrapped/cards/Card02MessageChars.vue @@ -0,0 +1,42 @@ + + + + diff --git a/frontend/components/wrapped/cards/Card03ReplySpeed.vue b/frontend/components/wrapped/cards/Card03ReplySpeed.vue new file mode 100644 index 0000000..4a06a62 --- /dev/null +++ b/frontend/components/wrapped/cards/Card03ReplySpeed.vue @@ -0,0 +1,873 @@ + + + + + diff --git a/frontend/components/wrapped/cards/Card04EmojiUniverse.vue b/frontend/components/wrapped/cards/Card04EmojiUniverse.vue new file mode 100644 index 0000000..9b9fd9b --- /dev/null +++ b/frontend/components/wrapped/cards/Card04EmojiUniverse.vue @@ -0,0 +1,795 @@ + + + + + diff --git a/frontend/components/wrapped/cards/Card04MonthlyBestFriendsWall.vue b/frontend/components/wrapped/cards/Card04MonthlyBestFriendsWall.vue new file mode 100644 index 0000000..e30a888 --- /dev/null +++ b/frontend/components/wrapped/cards/Card04MonthlyBestFriendsWall.vue @@ -0,0 +1,434 @@ + + + + + diff --git a/frontend/components/wrapped/cards/Card06KeywordsWordCloud.vue b/frontend/components/wrapped/cards/Card06KeywordsWordCloud.vue new file mode 100644 index 0000000..427e2ec --- /dev/null +++ b/frontend/components/wrapped/cards/Card06KeywordsWordCloud.vue @@ -0,0 +1,861 @@ + + + + + diff --git a/frontend/components/wrapped/cards/Card07BentoSummary.vue b/frontend/components/wrapped/cards/Card07BentoSummary.vue new file mode 100644 index 0000000..d399942 --- /dev/null +++ b/frontend/components/wrapped/cards/Card07BentoSummary.vue @@ -0,0 +1,2794 @@ + + + + + diff --git a/frontend/components/wrapped/shared/BitsCardSwap.vue b/frontend/components/wrapped/shared/BitsCardSwap.vue new file mode 100644 index 0000000..768e4df --- /dev/null +++ b/frontend/components/wrapped/shared/BitsCardSwap.vue @@ -0,0 +1,291 @@ + + + diff --git a/frontend/components/wrapped/shared/BitsGridMotion.vue 
b/frontend/components/wrapped/shared/BitsGridMotion.vue new file mode 100644 index 0000000..5a09be2 --- /dev/null +++ b/frontend/components/wrapped/shared/BitsGridMotion.vue @@ -0,0 +1,184 @@ + + + + + diff --git a/frontend/components/wrapped/shared/BitsSplitText.vue b/frontend/components/wrapped/shared/BitsSplitText.vue new file mode 100644 index 0000000..df2a139 --- /dev/null +++ b/frontend/components/wrapped/shared/BitsSplitText.vue @@ -0,0 +1,160 @@ + + + diff --git a/frontend/components/wrapped/shared/VueBitsImageTrail.vue b/frontend/components/wrapped/shared/VueBitsImageTrail.vue new file mode 100644 index 0000000..612b5cc --- /dev/null +++ b/frontend/components/wrapped/shared/VueBitsImageTrail.vue @@ -0,0 +1,1225 @@ + + + + + + diff --git a/frontend/components/wrapped/shared/VueBitsStack.vue b/frontend/components/wrapped/shared/VueBitsStack.vue new file mode 100644 index 0000000..c191c7e --- /dev/null +++ b/frontend/components/wrapped/shared/VueBitsStack.vue @@ -0,0 +1,294 @@ + + + + + diff --git a/frontend/components/wrapped/shared/WrappedCardShell.vue b/frontend/components/wrapped/shared/WrappedCardShell.vue new file mode 100644 index 0000000..77274e7 --- /dev/null +++ b/frontend/components/wrapped/shared/WrappedCardShell.vue @@ -0,0 +1,62 @@ + + + diff --git a/frontend/components/wrapped/shared/WrappedControls.vue b/frontend/components/wrapped/shared/WrappedControls.vue new file mode 100644 index 0000000..9cdb4ad --- /dev/null +++ b/frontend/components/wrapped/shared/WrappedControls.vue @@ -0,0 +1,84 @@ + + + diff --git a/frontend/components/wrapped/shared/WrappedDeckBackground.vue b/frontend/components/wrapped/shared/WrappedDeckBackground.vue new file mode 100644 index 0000000..7e621c6 --- /dev/null +++ b/frontend/components/wrapped/shared/WrappedDeckBackground.vue @@ -0,0 +1,21 @@ + diff --git a/frontend/components/wrapped/shared/WrappedHero.vue b/frontend/components/wrapped/shared/WrappedHero.vue new file mode 100644 index 0000000..ae7bf94 --- 
/dev/null +++ b/frontend/components/wrapped/shared/WrappedHero.vue @@ -0,0 +1,403 @@ + + + + + diff --git a/frontend/components/wrapped/shared/WrappedYearSelector.vue b/frontend/components/wrapped/shared/WrappedYearSelector.vue new file mode 100644 index 0000000..f296e3b --- /dev/null +++ b/frontend/components/wrapped/shared/WrappedYearSelector.vue @@ -0,0 +1,102 @@ + + + + + diff --git a/frontend/components/wrapped/visualizations/AnnualCalendarHeatmap.vue b/frontend/components/wrapped/visualizations/AnnualCalendarHeatmap.vue new file mode 100644 index 0000000..21038e0 --- /dev/null +++ b/frontend/components/wrapped/visualizations/AnnualCalendarHeatmap.vue @@ -0,0 +1,417 @@ + + + + + diff --git a/frontend/components/wrapped/visualizations/ChatReplayAnimation.vue b/frontend/components/wrapped/visualizations/ChatReplayAnimation.vue new file mode 100644 index 0000000..3ded96b --- /dev/null +++ b/frontend/components/wrapped/visualizations/ChatReplayAnimation.vue @@ -0,0 +1,311 @@ + + + + + diff --git a/frontend/components/wrapped/visualizations/GlobalOverviewChart.vue b/frontend/components/wrapped/visualizations/GlobalOverviewChart.vue new file mode 100644 index 0000000..cb0bdad --- /dev/null +++ b/frontend/components/wrapped/visualizations/GlobalOverviewChart.vue @@ -0,0 +1,44 @@ + + + diff --git a/frontend/components/wrapped/visualizations/KeywordWordCloud.vue b/frontend/components/wrapped/visualizations/KeywordWordCloud.vue new file mode 100644 index 0000000..6f9a226 --- /dev/null +++ b/frontend/components/wrapped/visualizations/KeywordWordCloud.vue @@ -0,0 +1,484 @@ + + + + + diff --git a/frontend/components/wrapped/visualizations/MessageCharsChart.vue b/frontend/components/wrapped/visualizations/MessageCharsChart.vue new file mode 100644 index 0000000..8a0c423 --- /dev/null +++ b/frontend/components/wrapped/visualizations/MessageCharsChart.vue @@ -0,0 +1,760 @@ + + + + + diff --git a/frontend/components/wrapped/visualizations/WeekdayHourHeatmap.vue 
b/frontend/components/wrapped/visualizations/WeekdayHourHeatmap.vue new file mode 100644 index 0000000..36e5bf4 --- /dev/null +++ b/frontend/components/wrapped/visualizations/WeekdayHourHeatmap.vue @@ -0,0 +1,114 @@ + + + diff --git a/frontend/composables/chat/useChatEditing.js b/frontend/composables/chat/useChatEditing.js new file mode 100644 index 0000000..2d11360 --- /dev/null +++ b/frontend/composables/chat/useChatEditing.js @@ -0,0 +1,592 @@ +import { nextTick, ref, toRaw } from 'vue' + +const CONTEXT_MENU_MARGIN = 8 + +const initialContextMenu = () => ({ + visible: false, + x: 0, + y: 0, + message: null, + kind: '', + disabled: false, + editStatus: null, + editStatusLoading: false +}) + +const initialMessageEditModal = () => ({ + open: false, + loading: false, + saving: false, + error: '', + mode: 'content', + sessionId: '', + messageId: '', + draft: '', + rawRow: null +}) + +const initialMessageFieldsModal = () => ({ + open: false, + loading: false, + saving: false, + error: '', + sessionId: '', + messageId: '', + unsafe: false, + editsJson: '', + rawRow: null +}) + +export const useChatEditing = ({ + api, + selectedAccount, + selectedContact, + refreshSelectedMessages, + normalizeMessage, + allMessages, + locateMessageByServerId +}) => { + const contextMenu = ref(initialContextMenu()) + const contextMenuElement = ref(null) + const messageEditModal = ref(initialMessageEditModal()) + const messageFieldsModal = ref(initialMessageFieldsModal()) + + const closeContextMenu = () => { + contextMenu.value = initialContextMenu() + } + + const repositionContextMenu = () => { + if (!process.client || !contextMenu.value.visible) return + const menuEl = contextMenuElement.value + if (!menuEl) return + + const rect = menuEl.getBoundingClientRect() + const viewportWidth = Math.max(window.innerWidth || 0, document.documentElement?.clientWidth || 0) + const viewportHeight = Math.max(window.innerHeight || 0, document.documentElement?.clientHeight || 0) + if (!viewportWidth || 
!viewportHeight) return + + const maxX = Math.max(CONTEXT_MENU_MARGIN, viewportWidth - rect.width - CONTEXT_MENU_MARGIN) + const maxY = Math.max(CONTEXT_MENU_MARGIN, viewportHeight - rect.height - CONTEXT_MENU_MARGIN) + const currentX = Number(contextMenu.value.x || 0) + const currentY = Number(contextMenu.value.y || 0) + const nextX = Math.min(Math.max(currentX, CONTEXT_MENU_MARGIN), maxX) + const nextY = Math.min(Math.max(currentY, CONTEXT_MENU_MARGIN), maxY) + + if (nextX !== currentX || nextY !== currentY) { + contextMenu.value = { + ...contextMenu.value, + x: nextX, + y: nextY + } + } + } + + const scheduleContextMenuReposition = () => { + if (!process.client) return + void nextTick(() => { + const run = () => repositionContextMenu() + if (typeof window.requestAnimationFrame === 'function') { + window.requestAnimationFrame(run) + } else { + run() + } + }) + } + + const loadContextMenuEditStatus = async (params) => { + if (!process.client) return + const account = String(params?.account || '').trim() + const username = String(params?.username || '').trim() + const messageId = String(params?.message_id || '').trim() + if (!account || !username || !messageId) { + contextMenu.value.editStatusLoading = false + return + } + + try { + const response = await api.getChatEditStatus({ account, username, message_id: messageId }) + const current = String(contextMenu.value?.message?.id || '').trim() + if (contextMenu.value.visible && current === messageId) { + contextMenu.value.editStatus = response || { modified: false } + scheduleContextMenuReposition() + } + } catch { + const current = String(contextMenu.value?.message?.id || '').trim() + if (contextMenu.value.visible && current === messageId) { + contextMenu.value.editStatus = null + scheduleContextMenuReposition() + } + } finally { + const current = String(contextMenu.value?.message?.id || '').trim() + if (contextMenu.value.visible && current === messageId) { + contextMenu.value.editStatusLoading = false + 
scheduleContextMenuReposition() + } + } + } + + const openMediaContextMenu = (event, message, kind) => { + if (!process.client) return + event.preventDefault() + event.stopPropagation() + + let actualKind = kind + let disabled = true + if (kind === 'voice') { + disabled = !(message?.serverIdStr || message?.serverId) + } else if (kind === 'file') { + disabled = !message?.fileMd5 + } else if (kind === 'image') { + disabled = !(message?.imageMd5 || message?.imageFileId) + } else if (kind === 'emoji') { + disabled = !message?.emojiMd5 + } else if (kind === 'video') { + if (message?.videoMd5 || message?.videoFileId) { + disabled = false + actualKind = 'video' + } else if (message?.videoThumbMd5 || message?.videoThumbFileId) { + disabled = false + actualKind = 'video_thumb' + } + } + + contextMenu.value = { + visible: true, + x: event.clientX, + y: event.clientY, + message, + kind: actualKind, + disabled, + editStatus: null, + editStatusLoading: false + } + + try { + const account = String(selectedAccount.value || '').trim() + const username = String(selectedContact.value?.username || '').trim() + const messageId = String(message?.id || '').trim() + if (account && username && messageId) { + contextMenu.value.editStatusLoading = true + void loadContextMenuEditStatus({ account, username, message_id: messageId }) + } + } catch {} + + scheduleContextMenuReposition() + } + + const prettyJson = (value) => { + try { + return JSON.stringify(value ?? null, null, 2) + } catch { + return String(value ?? 
'') + } + } + + const isLikelyTextMessage = (message) => { + if (!message) return false + const renderType = String(message?.renderType || '').trim() + if (renderType && renderType !== 'text') return false + if (message?.imageUrl || message?.emojiUrl || message?.videoUrl || message?.voiceUrl) return false + return true + } + + const closeMessageEditModal = () => { + messageEditModal.value = initialMessageEditModal() + } + + const openMessageEditModal = async ({ message, mode }) => { + if (!process.client) return + const account = String(selectedAccount.value || '').trim() + const sessionId = String(selectedContact.value?.username || '').trim() + const messageId = String(message?.id || '').trim() + if (!account || !sessionId || !messageId) return + + const resolvedMode = mode === 'raw' ? 'raw' : 'content' + const initialDraft = resolvedMode === 'content' + ? (typeof message?.content === 'string' ? message.content : String(message?.content ?? '')) + : '' + + messageEditModal.value = { + open: true, + loading: true, + saving: false, + error: '', + mode: resolvedMode, + sessionId, + messageId, + draft: initialDraft, + rawRow: null + } + + try { + const response = await api.getChatMessageRaw({ account, username: sessionId, message_id: messageId }) + const row = response?.row || null + const rawContent = row?.message_content + const rawDraft = typeof rawContent === 'string' ? rawContent : String(rawContent ?? '') + const draft = resolvedMode === 'raw' ? 
rawDraft : messageEditModal.value.draft + messageEditModal.value = { ...messageEditModal.value, loading: false, rawRow: row, draft } + } catch (error) { + messageEditModal.value = { ...messageEditModal.value, loading: false, error: error?.message || '加载失败' } + } + } + + const saveMessageEditModal = async () => { + if (!process.client) return + if (messageEditModal.value.saving || messageEditModal.value.loading) return + + const account = String(selectedAccount.value || '').trim() + const sessionId = String(messageEditModal.value.sessionId || '').trim() + const messageId = String(messageEditModal.value.messageId || '').trim() + if (!account || !sessionId || !messageId) return + + messageEditModal.value = { ...messageEditModal.value, saving: true, error: '' } + try { + const response = await api.editChatMessage({ + account, + session_id: sessionId, + message_id: messageId, + edits: { + message_content: String(messageEditModal.value.draft ?? '') + }, + unsafe: false + }) + + if (response?.updated_message) { + try { + const updated = normalizeMessage(response.updated_message) + const username = String(selectedContact.value?.username || '').trim() + const list = allMessages.value[username] || [] + const index = list.findIndex((message) => String(message?.id || '') === String(updated?.id || '')) + if (index >= 0) { + const next = [...list] + next[index] = updated + allMessages.value = { ...allMessages.value, [username]: next } + } else { + await refreshSelectedMessages() + } + } catch { + await refreshSelectedMessages() + } + } else { + await refreshSelectedMessages() + } + + closeMessageEditModal() + } catch (error) { + messageEditModal.value = { ...messageEditModal.value, saving: false, error: error?.message || '保存失败' } + return + } finally { + messageEditModal.value = { ...messageEditModal.value, saving: false } + } + } + + const closeMessageFieldsModal = () => { + messageFieldsModal.value = initialMessageFieldsModal() + } + + const openMessageFieldsModal = async 
(message) => { + if (!process.client) return + const account = String(selectedAccount.value || '').trim() + const sessionId = String(selectedContact.value?.username || '').trim() + const messageId = String(message?.id || '').trim() + if (!account || !sessionId || !messageId) return + + messageFieldsModal.value = { + open: true, + loading: true, + saving: false, + error: '', + sessionId, + messageId, + unsafe: false, + editsJson: '', + rawRow: null + } + + try { + const response = await api.getChatMessageRaw({ account, username: sessionId, message_id: messageId }) + const row = response?.row || null + const seed = {} + for (const key of ['message_content', 'local_type', 'create_time', 'server_id', 'origin_source', 'source']) { + if (row && Object.prototype.hasOwnProperty.call(row, key)) seed[key] = row[key] + } + messageFieldsModal.value = { + ...messageFieldsModal.value, + loading: false, + rawRow: row, + editsJson: JSON.stringify(seed, null, 2) + } + } catch (error) { + messageFieldsModal.value = { ...messageFieldsModal.value, loading: false, error: error?.message || '加载失败' } + } + } + + const saveMessageFieldsModal = async () => { + if (!process.client) return + if (messageFieldsModal.value.saving || messageFieldsModal.value.loading) return + + const account = String(selectedAccount.value || '').trim() + const sessionId = String(messageFieldsModal.value.sessionId || '').trim() + const messageId = String(messageFieldsModal.value.messageId || '').trim() + if (!account || !sessionId || !messageId) return + + let edits = null + try { + edits = JSON.parse(String(messageFieldsModal.value.editsJson || '').trim() || 'null') + } catch { + messageFieldsModal.value = { ...messageFieldsModal.value, error: 'JSON 格式错误' } + return + } + if (!edits || typeof edits !== 'object' || Array.isArray(edits)) { + messageFieldsModal.value = { ...messageFieldsModal.value, error: 'edits 必须是 JSON 对象' } + return + } + if (!Object.keys(edits).length) { + messageFieldsModal.value = { 
...messageFieldsModal.value, error: 'edits 不能为空' } + return + } + + messageFieldsModal.value = { ...messageFieldsModal.value, saving: true, error: '' } + try { + await api.editChatMessage({ + account, + session_id: sessionId, + message_id: messageId, + edits, + unsafe: !!messageFieldsModal.value.unsafe + }) + await refreshSelectedMessages() + closeMessageFieldsModal() + } catch (error) { + messageFieldsModal.value = { ...messageFieldsModal.value, saving: false, error: error?.message || '保存失败' } + return + } finally { + messageFieldsModal.value = { ...messageFieldsModal.value, saving: false } + } + } + + const copyTextToClipboard = async (text) => { + if (!process.client) return false + + const value = String(text ?? '').trim() + if (!value) return false + + try { + await navigator.clipboard.writeText(value) + return true + } catch {} + + try { + const element = document.createElement('textarea') + element.value = value + element.setAttribute('readonly', 'true') + element.style.position = 'fixed' + element.style.left = '-9999px' + element.style.top = '-9999px' + document.body.appendChild(element) + element.select() + const ok = document.execCommand('copy') + document.body.removeChild(element) + if (ok) return true + } catch {} + + try { + window.prompt('复制内容:', value) + return true + } catch { + return false + } + } + + const onCopyMessageTextClick = async () => { + if (!process.client) return + const message = contextMenu.value.message + if (!message) return + try { + const text = String(message?.content || '').trim() + if (!text) { + window.alert('该消息没有可复制的文本') + return + } + const ok = await copyTextToClipboard(text) + if (!ok) window.alert('复制失败:无法写入剪贴板') + } catch { + window.alert('复制失败') + } finally { + closeContextMenu() + } + } + + const onCopyMessageJsonClick = async () => { + if (!process.client) return + const message = contextMenu.value.message + if (!message) return + try { + const raw = toRaw(message) || message + const json = JSON.stringify(raw, 
(_key, value) => (typeof value === 'bigint' ? value.toString() : value), 2) + const ok = await copyTextToClipboard(json) + if (!ok) window.alert('复制失败:无法写入剪贴板') + } catch { + window.alert('复制失败') + } finally { + closeContextMenu() + } + } + + const onOpenFolderClick = async () => { + if (contextMenu.value.disabled) return + const message = contextMenu.value.message + const kind = contextMenu.value.kind + + try { + if (!selectedAccount.value || !selectedContact.value?.username) return + + const params = { + account: selectedAccount.value, + username: selectedContact.value.username, + kind + } + + if (kind === 'voice') { + params.server_id = message.serverIdStr || message.serverId + } else if (kind === 'file') { + params.md5 = message.fileMd5 + } else if (kind === 'image') { + if (message.imageMd5) params.md5 = message.imageMd5 + else if (message.imageFileId) params.file_id = message.imageFileId + } else if (kind === 'emoji') { + params.md5 = message.emojiMd5 + } else if (kind === 'video') { + params.md5 = message.videoMd5 + if (message.videoFileId) params.file_id = message.videoFileId + } else if (kind === 'video_thumb') { + params.md5 = message.videoThumbMd5 + if (message.videoThumbFileId) params.file_id = message.videoThumbFileId + } + + await api.openChatMediaFolder(params) + } finally { + closeContextMenu() + } + } + + const onEditMessageClick = async () => { + const message = contextMenu.value.message + if (!message) return + const mode = isLikelyTextMessage(message) ? 
'content' : 'raw' + closeContextMenu() + await openMessageEditModal({ message, mode }) + } + + const onEditMessageFieldsClick = async () => { + const message = contextMenu.value.message + if (!message) return + closeContextMenu() + await openMessageFieldsModal(message) + } + + const onResetEditedMessageClick = async () => { + if (!process.client) return + const message = contextMenu.value.message + const account = String(selectedAccount.value || '').trim() + const sessionId = String(selectedContact.value?.username || '').trim() + const messageId = String(message?.id || '').trim() + if (!message || !account || !sessionId || !messageId) return + + const ok = window.confirm('确认恢复该条消息到首次快照吗?') + if (!ok) return + + try { + await api.resetChatEditedMessage({ account, session_id: sessionId, message_id: messageId }) + closeContextMenu() + await refreshSelectedMessages() + } catch (error) { + window.alert(error?.message || '恢复失败') + } finally { + closeContextMenu() + } + } + + const onRepairMessageSenderAsMeClick = async () => { + if (!process.client) return + const message = contextMenu.value.message + const account = String(selectedAccount.value || '').trim() + const sessionId = String(selectedContact.value?.username || '').trim() + const messageId = String(message?.id || '').trim() + if (!message || !account || !sessionId || !messageId) return + + const ok = window.confirm('确认将该消息修复为“我发送”吗?这会修改 real_sender_id 字段。') + if (!ok) return + + try { + await api.repairChatMessageSender({ account, session_id: sessionId, message_id: messageId, mode: 'me' }) + closeContextMenu() + await refreshSelectedMessages() + } catch (error) { + window.alert(error?.message || '修复失败') + } finally { + closeContextMenu() + } + } + + const onFlipWechatMessageDirectionClick = async () => { + if (!process.client) return + const message = contextMenu.value.message + const account = String(selectedAccount.value || '').trim() + const sessionId = String(selectedContact.value?.username || '').trim() + 
const messageId = String(message?.id || '').trim() + if (!message || !account || !sessionId || !messageId) return + + const ok = window.confirm( + '确认反转该消息在微信客户端的左右气泡位置吗?\n\n这会修改 packed_info_data 字段(有风险)。\n可通过“恢复原消息”撤销。' + ) + if (!ok) return + + try { + await api.flipChatMessageDirection({ account, session_id: sessionId, message_id: messageId }) + closeContextMenu() + await refreshSelectedMessages() + } catch (error) { + window.alert(error?.message || '反转失败') + } finally { + closeContextMenu() + } + } + + const onLocateQuotedMessageClick = async () => { + const message = contextMenu.value.message + if (!message?.quoteServerId) return + closeContextMenu() + const ok = await locateMessageByServerId(message.quoteServerId) + if (!ok && process.client) { + window.alert('定位引用消息失败') + } + } + + return { + contextMenu, + contextMenuElement, + messageEditModal, + messageFieldsModal, + closeContextMenu, + openMediaContextMenu, + prettyJson, + isLikelyTextMessage, + closeMessageEditModal, + openMessageEditModal, + saveMessageEditModal, + closeMessageFieldsModal, + openMessageFieldsModal, + saveMessageFieldsModal, + copyTextToClipboard, + onCopyMessageTextClick, + onCopyMessageJsonClick, + onOpenFolderClick, + onEditMessageClick, + onEditMessageFieldsClick, + onResetEditedMessageClick, + onRepairMessageSenderAsMeClick, + onFlipWechatMessageDirectionClick, + onLocateQuotedMessageClick + } +} diff --git a/frontend/composables/chat/useChatExport.js b/frontend/composables/chat/useChatExport.js new file mode 100644 index 0000000..bad2c48 --- /dev/null +++ b/frontend/composables/chat/useChatExport.js @@ -0,0 +1,460 @@ +import { computed, ref, watch } from 'vue' +import { reportServerErrorFromResponse } from '~/lib/server-error-logging' +import { toUnixSeconds } from '~/lib/chat/formatters' + +export const useChatExport = ({ api, apiBase, contacts, selectedAccount, selectedContact, privacyMode }) => { + const exportModalOpen = ref(false) + const isExportCreating = ref(false) + const 
exportError = ref('') + + const exportScope = ref('current') + const exportFormat = ref('json') + const exportDownloadRemoteMedia = ref(true) + const exportHtmlPageSize = ref(1000) + const exportMessageTypeOptions = [ + { value: 'text', label: '文本' }, + { value: 'image', label: '图片' }, + { value: 'emoji', label: '表情' }, + { value: 'video', label: '视频' }, + { value: 'voice', label: '语音' }, + { value: 'chatHistory', label: '聊天记录' }, + { value: 'transfer', label: '转账' }, + { value: 'redPacket', label: '红包' }, + { value: 'file', label: '文件' }, + { value: 'link', label: '链接' }, + { value: 'quote', label: '引用' }, + { value: 'system', label: '系统' }, + { value: 'voip', label: '通话' } + ] + const exportMessageTypes = ref(exportMessageTypeOptions.map((item) => item.value)) + + const exportStartLocal = ref('') + const exportEndLocal = ref('') + const exportFileName = ref('') + const exportFolder = ref('') + const exportFolderHandle = ref(null) + const exportSaveBusy = ref(false) + const exportSaveMsg = ref('') + const exportAutoSavedFor = ref('') + + const exportSearchQuery = ref('') + const exportListTab = ref('all') + const exportSelectedUsernames = ref([]) + + const exportJob = ref(null) + let exportPollTimer = null + let exportEventSource = null + + const clamp01 = (value) => Math.min(1, Math.max(0, value)) + const asNumber = (value) => { + const next = Number(value) + return Number.isFinite(next) ? next : 0 + } + + const exportOverallPercent = computed(() => { + const job = exportJob.value + const progress = job?.progress || {} + const total = asNumber(progress.conversationsTotal) + const done = asNumber(progress.conversationsDone) + if (total <= 0) return 0 + + const currentTotal = asNumber(progress.currentConversationMessagesTotal) + const currentDone = asNumber(progress.currentConversationMessagesExported) + const currentFraction = currentTotal > 0 ? clamp01(currentDone / currentTotal) : 0 + const overall = clamp01((done + (job?.status === 'running' ? 
currentFraction : 0)) / total) + return Math.round(overall * 100) + }) + + const exportCurrentPercent = computed(() => { + const progress = exportJob.value?.progress || {} + const total = asNumber(progress.currentConversationMessagesTotal) + const done = asNumber(progress.currentConversationMessagesExported) + if (total <= 0) return null + return Math.round(clamp01(done / total) * 100) + }) + + const exportFilteredContacts = computed(() => { + const query = String(exportSearchQuery.value || '').trim().toLowerCase() + let list = Array.isArray(contacts.value) ? contacts.value : [] + + const tab = String(exportListTab.value || 'all') + if (tab === 'groups') list = list.filter((contact) => !!contact?.isGroup) + if (tab === 'singles') list = list.filter((contact) => !contact?.isGroup) + + if (!query) return list + return list.filter((contact) => { + const name = String(contact?.name || '').toLowerCase() + const username = String(contact?.username || '').toLowerCase() + return name.includes(query) || username.includes(query) + }) + }) + + const exportContactCounts = computed(() => { + const list = Array.isArray(contacts.value) ? 
contacts.value : [] + const total = list.length + const groups = list.filter((contact) => !!contact?.isGroup).length + return { total, groups, singles: total - groups } + }) + + const isDesktopExportRuntime = () => { + return !!(process.client && window?.wechatDesktop?.chooseDirectory) + } + + const isWebDirectoryPickerSupported = () => { + return !!(process.client && typeof window.showDirectoryPicker === 'function') + } + + const hasWebExportFolder = computed(() => { + return !!(isWebDirectoryPickerSupported() && exportFolderHandle.value) + }) + + const chooseExportFolder = async () => { + exportError.value = '' + exportSaveMsg.value = '' + try { + if (!process.client) { + exportError.value = '当前环境不支持选择导出目录' + return + } + + if (isDesktopExportRuntime()) { + const result = await window.wechatDesktop.chooseDirectory({ title: '选择导出目录' }) + if (result && !result.canceled && Array.isArray(result.filePaths) && result.filePaths.length > 0) { + exportFolder.value = String(result.filePaths[0] || '').trim() + exportFolderHandle.value = null + } + return + } + + if (isWebDirectoryPickerSupported()) { + const handle = await window.showDirectoryPicker() + if (handle) { + exportFolderHandle.value = handle + exportFolder.value = `浏览器目录:${String(handle.name || '已选择')}` + } + return + } + + exportError.value = '当前浏览器不支持目录选择,请使用桌面端或 Chromium 新版浏览器' + } catch (error) { + exportError.value = error?.message || '选择导出目录失败' + } + } + + const guessExportZipName = (job) => { + const raw = String(job?.zipPath || '').trim() + if (raw) { + const name = raw.replace(/\\/g, '/').split('/').pop() + if (name && name.toLowerCase().endsWith('.zip')) return name + } + const exportId = String(job?.exportId || '').trim() || 'export' + return `wechat_chat_export_${exportId}.zip` + } + + const getExportDownloadUrl = (exportId) => { + return `${apiBase}/chat/exports/${encodeURIComponent(String(exportId || ''))}/download` + } + + const saveExportToSelectedFolder = async (options = {}) => { + const 
autoSave = !!options?.auto + exportError.value = '' + exportSaveMsg.value = '' + if (!process.client || !isWebDirectoryPickerSupported()) { + exportError.value = '当前环境不支持保存到浏览器目录' + return + } + const handle = exportFolderHandle.value + if (!handle || typeof handle.getFileHandle !== 'function') { + exportError.value = '请先选择浏览器导出目录' + return + } + + const exportId = exportJob.value?.exportId + if (!exportId || String(exportJob.value?.status || '') !== 'done') { + exportError.value = '导出任务尚未完成' + return + } + + exportSaveBusy.value = true + try { + const response = await fetch(getExportDownloadUrl(exportId)) + if (!response.ok) { + await reportServerErrorFromResponse(response, { + method: 'GET', + requestUrl: getExportDownloadUrl(exportId), + message: `下载导出文件失败(${response.status})`, + source: 'chat.exportDownload' + }) + throw new Error(`下载导出文件失败(${response.status})`) + } + const blob = await response.blob() + const fileName = guessExportZipName(exportJob.value) + const fileHandle = await handle.getFileHandle(fileName, { create: true }) + const writable = await fileHandle.createWritable() + await writable.write(blob) + await writable.close() + exportAutoSavedFor.value = String(exportId) + exportSaveMsg.value = autoSave + ? 
`已自动保存到已选目录:${fileName}` + : `已保存到已选目录:${fileName}` + } catch (error) { + exportError.value = error?.message || '保存到浏览器目录失败' + } finally { + exportSaveBusy.value = false + } + } + + const stopExportPolling = () => { + if (exportEventSource) { + try { + exportEventSource.close() + } catch {} + exportEventSource = null + } + if (exportPollTimer) { + clearInterval(exportPollTimer) + exportPollTimer = null + } + } + + const startExportHttpPolling = (exportId) => { + if (!exportId) return + exportPollTimer = setInterval(async () => { + try { + const response = await api.getChatExport(exportId) + exportJob.value = response?.job || exportJob.value + const status = String(exportJob.value?.status || '') + if (status === 'done' || status === 'error' || status === 'cancelled') { + stopExportPolling() + } + } catch {} + }, 1200) + } + + const startExportPolling = (exportId) => { + stopExportPolling() + if (!exportId) return + + if (process.client && typeof window !== 'undefined' && typeof EventSource !== 'undefined') { + const url = `${apiBase}/chat/exports/${encodeURIComponent(String(exportId))}/events` + try { + exportEventSource = new EventSource(url) + exportEventSource.onmessage = (event) => { + try { + const next = JSON.parse(String(event.data || '{}')) + exportJob.value = next || exportJob.value + const status = String(exportJob.value?.status || '') + if (status === 'done' || status === 'error' || status === 'cancelled') { + stopExportPolling() + } + } catch {} + } + exportEventSource.onerror = () => { + try { + exportEventSource?.close() + } catch {} + exportEventSource = null + if (!exportPollTimer) startExportHttpPolling(exportId) + } + return + } catch { + exportEventSource = null + } + } + + startExportHttpPolling(exportId) + } + + const openExportModal = () => { + exportModalOpen.value = true + exportError.value = '' + exportSaveMsg.value = '' + exportListTab.value = 'all' + exportStartLocal.value = '' + exportEndLocal.value = '' + exportMessageTypes.value = 
exportMessageTypeOptions.map((item) => item.value) + exportAutoSavedFor.value = '' + exportScope.value = selectedContact.value?.username ? 'current' : 'all' + } + + const closeExportModal = () => { + exportModalOpen.value = false + exportError.value = '' + } + + watch(exportModalOpen, (open) => { + if (!process.client) return + if (!open) { + stopExportPolling() + return + } + + const exportId = exportJob.value?.exportId + const status = String(exportJob.value?.status || '') + if (exportId && (status === 'queued' || status === 'running')) { + startExportPolling(exportId) + } + }) + + watch( + () => ({ + exportId: String(exportJob.value?.exportId || ''), + status: String(exportJob.value?.status || '') + }), + async ({ exportId, status }) => { + if (!process.client || status !== 'done' || !exportId) return + if (!hasWebExportFolder.value) return + if (exportAutoSavedFor.value === exportId) return + if (exportSaveBusy.value) return + await saveExportToSelectedFolder({ auto: true }) + } + ) + + const startChatExport = async () => { + exportError.value = '' + exportSaveMsg.value = '' + if (!selectedAccount.value) { + exportError.value = '未选择账号' + return + } + + let scope = exportScope.value + let usernames = [] + if (scope === 'current') { + scope = 'selected' + if (selectedContact.value?.username) { + usernames = [selectedContact.value.username] + } + } else if (scope === 'selected') { + usernames = Array.isArray(exportSelectedUsernames.value) ? 
exportSelectedUsernames.value.filter(Boolean) : [] + } + + if (scope === 'selected' && (!usernames || usernames.length === 0)) { + exportError.value = '请选择至少一个会话' + return + } + + const hasDesktopFolder = isDesktopExportRuntime() && !!String(exportFolder.value || '').trim() + const hasWebFolder = !isDesktopExportRuntime() && !!exportFolderHandle.value + if (!hasDesktopFolder && !hasWebFolder) { + exportError.value = '请先选择导出目录' + return + } + + const startTime = toUnixSeconds(exportStartLocal.value) + const endTime = toUnixSeconds(exportEndLocal.value) + if (startTime && endTime && startTime > endTime) { + exportError.value = '时间范围不合法:开始时间不能晚于结束时间' + return + } + + const messageTypes = Array.isArray(exportMessageTypes.value) ? exportMessageTypes.value.filter(Boolean) : [] + if (messageTypes.length === 0) { + exportError.value = '请至少勾选一个消息类型' + return + } + + const selectedTypeSet = new Set(messageTypes.map((item) => String(item || '').trim())) + const mediaKindSet = new Set() + if (selectedTypeSet.has('chatHistory')) { + mediaKindSet.add('image') + mediaKindSet.add('emoji') + mediaKindSet.add('video') + mediaKindSet.add('video_thumb') + mediaKindSet.add('voice') + mediaKindSet.add('file') + } + if (selectedTypeSet.has('image')) mediaKindSet.add('image') + if (selectedTypeSet.has('emoji')) mediaKindSet.add('emoji') + if (selectedTypeSet.has('video')) { + mediaKindSet.add('video') + mediaKindSet.add('video_thumb') + } + if (selectedTypeSet.has('voice')) mediaKindSet.add('voice') + if (selectedTypeSet.has('file')) mediaKindSet.add('file') + + const mediaKinds = Array.from(mediaKindSet) + const includeMedia = !privacyMode.value && mediaKinds.length > 0 + + isExportCreating.value = true + exportAutoSavedFor.value = '' + try { + const response = await api.createChatExport({ + account: selectedAccount.value, + scope, + usernames, + format: exportFormat.value, + start_time: startTime, + end_time: endTime, + include_hidden: false, + include_official: false, + message_types: 
messageTypes, + include_media: includeMedia, + media_kinds: mediaKinds, + download_remote_media: exportFormat.value === 'html' && !!exportDownloadRemoteMedia.value, + html_page_size: Math.max(0, Math.floor(Number(exportHtmlPageSize.value || 1000))), + output_dir: isDesktopExportRuntime() ? String(exportFolder.value || '').trim() : null, + privacy_mode: !!privacyMode.value, + file_name: exportFileName.value || null + }) + + exportJob.value = response?.job || null + const exportId = exportJob.value?.exportId + if (exportId) startExportPolling(exportId) + } catch (error) { + exportError.value = error?.message || '创建导出任务失败' + } finally { + isExportCreating.value = false + } + } + + const cancelCurrentExport = async () => { + const exportId = exportJob.value?.exportId + if (!exportId) return + + try { + await api.cancelChatExport(exportId) + const response = await api.getChatExport(exportId) + exportJob.value = response?.job || exportJob.value + } catch (error) { + exportError.value = error?.message || '取消导出失败' + } + } + + return { + exportModalOpen, + isExportCreating, + exportError, + exportScope, + exportFormat, + exportDownloadRemoteMedia, + exportHtmlPageSize, + exportMessageTypeOptions, + exportMessageTypes, + exportStartLocal, + exportEndLocal, + exportFileName, + exportFolder, + exportFolderHandle, + exportSaveBusy, + exportSaveMsg, + exportAutoSavedFor, + exportSearchQuery, + exportListTab, + exportSelectedUsernames, + exportJob, + exportOverallPercent, + exportCurrentPercent, + exportFilteredContacts, + exportContactCounts, + hasWebExportFolder, + chooseExportFolder, + getExportDownloadUrl, + saveExportToSelectedFolder, + openExportModal, + closeExportModal, + startChatExport, + cancelCurrentExport, + stopExportPolling + } +} diff --git a/frontend/composables/chat/useChatHistoryWindows.js b/frontend/composables/chat/useChatHistoryWindows.js new file mode 100644 index 0000000..550a32d --- /dev/null +++ b/frontend/composables/chat/useChatHistoryWindows.js @@ 
-0,0 +1,488 @@ +import { ref } from 'vue' +import { + buildChatHistoryWindowPayload, + createChatHistoryRecordNormalizer, + enhanceChatHistoryRecords, + formatChatHistoryVideoDuration, + getChatHistoryPreviewLines, + isChatHistoryRecordItemIncomplete, + normalizeChatHistoryUrl, + parseChatHistoryRecord, + pickFirstMd5, + stripWeChatInvisible +} from '~/lib/chat/chat-history' + +export const useChatHistoryWindows = ({ + api, + apiBase, + selectedAccount, + selectedContact, + openImagePreview, + openVideoPreview +}) => { + const floatingWindows = ref([]) + let floatingWindowSeq = 0 + let floatingWindowZ = 70 + const floatingDragState = { id: '', offsetX: 0, offsetY: 0 } + + const clampNumber = (value, min, max) => Math.min(max, Math.max(min, value)) + const normalizeRecordItem = createChatHistoryRecordNormalizer({ + apiBase, + getSelectedAccount: () => selectedAccount.value, + getSelectedContact: () => selectedContact.value + }) + + const getFloatingWindowById = (id) => { + const list = Array.isArray(floatingWindows.value) ? floatingWindows.value : [] + return list.find((item) => String(item?.id || '') === String(id || '')) || null + } + + const focusFloatingWindow = (id) => { + const windowItem = getFloatingWindowById(id) + if (!windowItem) return + floatingWindowZ += 1 + windowItem.zIndex = floatingWindowZ + } + + const closeFloatingWindow = (id) => { + const key = String(id || '') + floatingWindows.value = (Array.isArray(floatingWindows.value) ? floatingWindows.value : []).filter((item) => String(item?.id || '') !== key) + if (floatingDragState.id && String(floatingDragState.id) === key) { + floatingDragState.id = '' + } + } + + const closeTopFloatingWindow = () => { + const list = Array.isArray(floatingWindows.value) ? 
floatingWindows.value : [] + if (!list.length) return + const top = [...list].sort((a, b) => Number(b?.zIndex || 0) - Number(a?.zIndex || 0))[0] + if (top?.id) closeFloatingWindow(top.id) + } + + const openFloatingWindow = (payload) => { + if (!process.client || typeof window === 'undefined') return null + floatingWindowSeq += 1 + floatingWindowZ += 1 + const width = clampNumber(Number(payload?.width || 520), 360, Math.max(360, (window.innerWidth || 1200) - 48)) + const height = clampNumber(Number(payload?.height || 420), 320, Math.max(320, (window.innerHeight || 900) - 48)) + const x = clampNumber(Number(payload?.x || Math.round(((window.innerWidth || width) - width) / 2)), 16, Math.max(16, (window.innerWidth || width) - width - 16)) + const y = clampNumber(Number(payload?.y || Math.round(((window.innerHeight || height) - height) / 2)), 16, Math.max(16, (window.innerHeight || height) - height - 16)) + + const windowItem = { + id: `chat-floating-${floatingWindowSeq}`, + kind: String(payload?.kind || 'chatHistory'), + title: String(payload?.title || ''), + info: payload?.info || { isChatRoom: false }, + records: Array.isArray(payload?.records) ? payload.records : [], + url: String(payload?.url || ''), + content: String(payload?.content || ''), + preview: String(payload?.preview || ''), + from: String(payload?.from || ''), + fromAvatar: String(payload?.fromAvatar || ''), + loading: !!payload?.loading, + width, + height, + x, + y, + zIndex: floatingWindowZ + } + floatingWindows.value = [...floatingWindows.value, windowItem] + return windowItem + } + + const startFloatingWindowDrag = (id, event) => { + if (!process.client) return + const windowItem = getFloatingWindowById(id) + if (!windowItem) return + focusFloatingWindow(id) + const point = 'touches' in event ? 
event.touches?.[0] : event + floatingDragState.id = id + floatingDragState.offsetX = Number(point?.clientX || 0) - Number(windowItem.x || 0) + floatingDragState.offsetY = Number(point?.clientY || 0) - Number(windowItem.y || 0) + } + + const onFloatingWindowMouseMove = (event) => { + if (!process.client) return + if (!floatingDragState.id) return + const windowItem = getFloatingWindowById(floatingDragState.id) + if (!windowItem) return + const point = 'touches' in event ? event.touches?.[0] : event + const nextX = Number(point?.clientX || 0) - floatingDragState.offsetX + const nextY = Number(point?.clientY || 0) - floatingDragState.offsetY + windowItem.x = clampNumber(nextX, 8, Math.max(8, (window.innerWidth || nextX) - windowItem.width - 8)) + windowItem.y = clampNumber(nextY, 8, Math.max(8, (window.innerHeight || nextY) - windowItem.height - 8)) + } + + const onFloatingWindowMouseUp = () => { + floatingDragState.id = '' + } + + const chatHistoryModalVisible = ref(false) + const chatHistoryModalTitle = ref('') + const chatHistoryModalRecords = ref([]) + const chatHistoryModalInfo = ref({ isChatRoom: false }) + const chatHistoryModalStack = ref([]) + const goBackChatHistoryModal = () => {} + const closeChatHistoryModal = () => { + chatHistoryModalVisible.value = false + chatHistoryModalTitle.value = '' + chatHistoryModalRecords.value = [] + chatHistoryModalInfo.value = { isChatRoom: false } + chatHistoryModalStack.value = [] + } + + const onChatHistoryVideoThumbError = (record) => { + if (!record) return + const candidates = record._videoThumbCandidates + if (!Array.isArray(candidates) || candidates.length <= 1) { + record._videoThumbError = true + return + } + const current = Math.max(0, Number(record._videoThumbCandidateIndex || 0)) + const next = current + 1 + if (next < candidates.length) { + record._videoThumbCandidateIndex = next + record.videoThumbUrl = candidates[next] + return + } + record._videoThumbError = true + } + + const onChatHistoryLinkPreviewError 
= (record) => { + if (!record) return + const candidates = record._linkPreviewCandidates + if (!Array.isArray(candidates) || candidates.length <= 1) { + record._linkPreviewError = true + return + } + const current = Math.max(0, Number(record._linkPreviewCandidateIndex || 0)) + const next = current + 1 + if (next < candidates.length) { + record._linkPreviewCandidateIndex = next + record.preview = candidates[next] + record._linkPreviewError = false + return + } + record._linkPreviewError = true + } + + const onChatHistoryFromAvatarLoad = (record) => { + try { + if (record) { + record._fromAvatarImgOk = true + record._fromAvatarImgError = false + record._fromAvatarLast = String(record.fromAvatar || '').trim() + } + } catch {} + } + + const onChatHistoryFromAvatarError = (record) => { + try { + if (record) { + record._fromAvatarImgOk = false + record._fromAvatarImgError = true + record._fromAvatarLast = String(record.fromAvatar || '').trim() + } + } catch {} + } + + const onChatHistoryQuoteThumbError = (record) => { + if (!record || !record.quote) return + const candidates = record._quoteThumbCandidates + if (!Array.isArray(candidates) || candidates.length <= 1) { + record._quoteThumbError = true + return + } + const current = Math.max(0, Number(record._quoteThumbCandidateIndex || 0)) + const next = current + 1 + if (next < candidates.length) { + record._quoteThumbCandidateIndex = next + record.quote.thumbUrl = candidates[next] + return + } + record._quoteThumbError = true + } + + const openChatHistoryQuote = (record) => { + if (!process.client) return + const quote = record?.quote + if (!quote) return + const kind = String(quote.kind || '') + const url = String(quote.url || '').trim() + if (!url) return + + if (kind === 'video') { + openVideoPreview(url, quote?.thumbUrl) + return + } + if (kind === 'image' || kind === 'emoji') { + openImagePreview(url) + } + } + + const getChatHistoryLinkFromText = (record) => { + const from = String(record?.from || '').trim() + if 
(from) return from + const url = String(record?.url || '').trim() + if (!url) return '' + try { return new URL(url).hostname || '' } catch { return '' } + } + + const getChatHistoryLinkFromAvatarText = (record) => { + const text = String(getChatHistoryLinkFromText(record) || '').trim() + return text ? (Array.from(text)[0] || '') : '' + } + + const openUrlInBrowser = (url) => { + const next = String(url || '').trim() + if (!next) return + try { window.open(next, '_blank', 'noopener,noreferrer') } catch {} + } + + const resolveChatHistoryLinkRecord = async (record) => { + if (!process.client || !record || !selectedAccount.value) return null + const serverId = String(record?.fromnewmsgid || '').trim() + if (!serverId || record._linkResolving) return null + + record._linkResolving = true + try { + const response = await api.resolveAppMsg({ + account: selectedAccount.value, + server_id: serverId + }) + if (response && typeof response === 'object') { + const title = String(response.title || '').trim() + const content = String(response.content || '').trim() + const url = String(response.url || '').trim() + const from = String(response.from || '').trim() + + const normalizePreviewUrl = (value) => { + const raw = String(value || '').trim() + if (!raw) return '' + if (/^\/api\/chat\/media\//i.test(raw) || /^blob:/i.test(raw) || /^data:/i.test(raw)) return raw + if (!/^https?:\/\//i.test(raw)) return '' + try { + const host = new URL(raw).hostname.toLowerCase() + if (host.endsWith('.qpic.cn') || host.endsWith('.qlogo.cn')) { + return `${apiBase}/chat/media/proxy_image?url=${encodeURIComponent(raw)}` + } + } catch {} + return raw + } + + if (title) record.title = title + if (content && !stripWeChatInvisible(record.content)) record.content = content + if (url) record.url = url + if (from) record.from = from + if (response.linkStyle) record.linkStyle = String(response.linkStyle || '').trim() + if (response.linkType) record.linkType = String(response.linkType || '').trim() + + 
const fromUsername = String(response.fromUsername || '').trim() + if (fromUsername) record.fromUsername = fromUsername + const fromAvatarUrl = fromUsername + ? `${apiBase}/chat/avatar?account=${encodeURIComponent(selectedAccount.value || '')}&username=${encodeURIComponent(fromUsername)}` + : (url ? `${apiBase}/chat/media/favicon?url=${encodeURIComponent(url)}` : '') + if (fromAvatarUrl) { + const last = String(record._fromAvatarLast || '').trim() + record.fromAvatar = fromAvatarUrl + if (String(fromAvatarUrl).trim() !== last) { + record._fromAvatarLast = String(fromAvatarUrl).trim() + record._fromAvatarImgOk = false + record._fromAvatarImgError = false + } + } + + const style = String(response.linkStyle || '').trim() + const thumb = String(response.thumbUrl || '').trim() + const cover = String(response.coverUrl || '').trim() + const picked = style === 'cover' ? (cover || thumb) : (thumb || cover) + const previewResolved = normalizePreviewUrl(picked) + if (previewResolved) { + const currentPreview = String(record.preview || '').trim() + const candidates = Array.isArray(record._linkPreviewCandidates) ? record._linkPreviewCandidates.slice() : [] + if (currentPreview && !candidates.includes(currentPreview)) candidates.push(currentPreview) + if (!candidates.includes(previewResolved)) candidates.push(previewResolved) + record._linkPreviewCandidates = candidates + if (!currentPreview || record._linkPreviewError) { + record.preview = previewResolved + record._linkPreviewCandidateIndex = candidates.indexOf(previewResolved) + record._linkPreviewError = false + } + } + return response + } + } catch {} + finally { + try { record._linkResolving = false } catch {} + } + return null + } + + const resolveChatHistoryLinkRecords = (windowItem) => { + if (!process.client) return + const records = Array.isArray(windowItem?.records) ? 
windowItem.records : [] + const targets = records.filter((record) => { + if (!record) return false + if (String(record.renderType || '') !== 'link') return false + if (!String(record.fromnewmsgid || '').trim()) return false + const fromMissing = String(record.from || '').trim() === '' + const previewMissing = !String(record.preview || '').trim() + const urlMissing = !String(record.url || '').trim() + const fromAvatarMissing = !String(record.fromAvatar || '').trim() + return fromMissing || previewMissing || urlMissing || fromAvatarMissing + }) + if (!targets.length) return + ;(async () => { + for (const target of targets.slice(0, 12)) { + await resolveChatHistoryLinkRecord(target) + } + })() + } + + const openChatHistoryLinkWindow = (record) => { + if (!process.client) return + const title = String(record?.title || record?.content || '链接').trim() + const url = String(record?.url || '').trim() + const preview = String(record?.preview || '').trim() + const from = String(record?.from || '').trim() + const fromAvatar = String(record?.fromAvatar || '').trim() + const needResolve = !!String(record?.fromnewmsgid || '').trim() && (!url || !from || !preview || !fromAvatar) + const windowItem = openFloatingWindow({ + kind: 'link', + title: title || '链接', + url, + content: String(record?.content || '').trim(), + preview, + from, + fromAvatar, + width: 520, + height: 420, + loading: needResolve + }) + if (!windowItem) return + focusFloatingWindow(windowItem.id) + try { + windowItem._linkPreviewCandidates = Array.isArray(record?._linkPreviewCandidates) ? record._linkPreviewCandidates.slice() : (preview ? 
[preview] : []) + windowItem._linkPreviewCandidateIndex = Math.max(0, Number(record?._linkPreviewCandidateIndex || 0)) + windowItem._linkPreviewError = false + windowItem._fromAvatarLast = fromAvatar + windowItem._fromAvatarImgOk = !!record?._fromAvatarImgOk + windowItem._fromAvatarImgError = !!record?._fromAvatarImgError + windowItem.fromnewmsgid = String(record?.fromnewmsgid || '').trim() + } catch {} + if (needResolve) { + ;(async () => { + await resolveChatHistoryLinkRecord(windowItem) + windowItem.loading = false + })() + } + } + + const openChatHistoryModal = (message) => { + if (!process.client) return + const { title0, info0, records0 } = buildChatHistoryWindowPayload(message, normalizeRecordItem) + const windowItem = openFloatingWindow({ + kind: 'chatHistory', + title: title0 || '聊天记录', + info: info0, + records: records0, + width: 560, + height: Math.round(Math.max(420, (window.innerHeight || 700) * 0.78)) + }) + if (!windowItem) return + try { resolveChatHistoryLinkRecords(windowItem) } catch {} + } + + const openNestedChatHistory = (record) => { + if (!process.client) return + const title = String(record?.title || '聊天记录') + const content = String(record?.content || '') + const recordItem = String(record?.recordItem || '').trim() + const serverId = String(record?.fromnewmsgid || '').trim() + + const { info0, records0 } = buildChatHistoryWindowPayload({ title, content, recordItem }, normalizeRecordItem) + const windowItem = openFloatingWindow({ + kind: 'chatHistory', + title: title || '聊天记录', + info: info0, + records: records0, + width: 560, + height: Math.round(Math.max(420, (window.innerHeight || 700) * 0.78)), + loading: false + }) + if (!windowItem) return + try { resolveChatHistoryLinkRecords(windowItem) } catch {} + + if (!serverId || !selectedAccount.value || record?._nestedResolving || !isChatHistoryRecordItemIncomplete(recordItem)) return + record._nestedResolving = true + windowItem.loading = true + + ;(async () => { + try { + const response = 
await api.resolveNestedChatHistory({ + account: selectedAccount.value, + server_id: serverId + }) + const resolved = String(response?.recordItem || '').trim() + if (!resolved) return + windowItem.title = String(response?.title || title || '聊天记录') + const parsed = parseChatHistoryRecord(resolved) + windowItem.info = parsed?.info || { isChatRoom: false, count: 0 } + const items = Array.isArray(parsed?.items) ? parsed.items : [] + windowItem.records = items.length ? enhanceChatHistoryRecords(items.map(normalizeRecordItem)) : [] + if (!windowItem.records.length) { + const lines = String(response?.content || content || '').trim().split(/\r?\n/).map((item) => item.trim()).filter(Boolean) + windowItem.info = { isChatRoom: false, count: 0 } + windowItem.records = lines.map((line, idx) => normalizeRecordItem({ + id: String(idx), + datatype: '1', + sourcename: '', + sourcetime: '', + content: line, + renderType: 'text' + })) + } + try { resolveChatHistoryLinkRecords(windowItem) } catch {} + } catch {} + finally { + windowItem.loading = false + try { record._nestedResolving = false } catch {} + } + })() + } + + return { + floatingWindows, + chatHistoryModalVisible, + chatHistoryModalTitle, + chatHistoryModalRecords, + chatHistoryModalInfo, + chatHistoryModalStack, + goBackChatHistoryModal, + closeChatHistoryModal, + getFloatingWindowById, + focusFloatingWindow, + closeFloatingWindow, + closeTopFloatingWindow, + openFloatingWindow, + startFloatingWindowDrag, + onFloatingWindowMouseMove, + onFloatingWindowMouseUp, + formatChatHistoryVideoDuration, + getChatHistoryPreviewLines, + onChatHistoryVideoThumbError, + onChatHistoryLinkPreviewError, + onChatHistoryFromAvatarLoad, + onChatHistoryFromAvatarError, + onChatHistoryQuoteThumbError, + openChatHistoryQuote, + getChatHistoryLinkFromText, + getChatHistoryLinkFromAvatarText, + openUrlInBrowser, + resolveChatHistoryLinkRecord, + resolveChatHistoryLinkRecords, + openChatHistoryLinkWindow, + openChatHistoryModal, + 
openNestedChatHistory + } +} diff --git a/frontend/composables/chat/useChatMessages.js b/frontend/composables/chat/useChatMessages.js new file mode 100644 index 0000000..7acb0fd --- /dev/null +++ b/frontend/composables/chat/useChatMessages.js @@ -0,0 +1,890 @@ +import { computed, nextTick, onUnmounted, ref, watch } from 'vue' +import { + formatFileSize, + formatTimeDivider, + getVoiceDurationInSeconds, + getVoiceWidth +} from '~/lib/chat/formatters' +import { createMessageNormalizer, dedupeMessagesById } from '~/lib/chat/message-normalizer' + +export const useChatMessages = ({ + api, + apiBase, + selectedAccount, + selectedContact, + realtimeStore, + realtimeEnabled, + desktopAutoRealtime, + privacyMode, + searchContext +}) => { + const messagePageSize = 50 + + const allMessages = ref({}) + const messagesMeta = ref({}) + const isLoadingMessages = ref(false) + const messagesError = ref('') + const messageContainerRef = ref(null) + const activeMessagesFor = ref('') + const showJumpToBottom = ref(false) + let lastRenderMessagesFingerprint = '' + + const isDesktopRenderer = () => { + if (!process.client || typeof window === 'undefined') return false + return !!window.wechatDesktop?.__brand + } + + const logMessagePhase = (phase, details = {}) => { + if (!isDesktopRenderer()) return + console.info(`[chat-messages] ${phase}`, { + account: String(selectedAccount.value || '').trim(), + selectedUsername: String(selectedContact.value?.username || '').trim(), + activeMessagesFor: String(activeMessagesFor.value || '').trim(), + ...details + }) + } + + const previewImageUrl = ref(null) + const previewVideoUrl = ref(null) + const previewVideoPosterUrl = ref('') + const previewVideoError = ref('') + + const voiceRefs = ref({}) + const currentPlayingVoice = ref(null) + const playingVoiceId = ref(null) + + const highlightServerIdStr = ref('') + const highlightMessageId = ref('') + let highlightTimer = null + + const messageTypeFilter = ref('all') + const messageTypeFilterOptions = 
[ + { value: 'all', label: '全部' }, + { value: 'text', label: '文本' }, + { value: 'image', label: '图片' }, + { value: 'emoji', label: '表情' }, + { value: 'video', label: '视频' }, + { value: 'voice', label: '语音' }, + { value: 'file', label: '文件' }, + { value: 'link', label: '链接' }, + { value: 'quote', label: '引用' }, + { value: 'chatHistory', label: '聊天记录' }, + { value: 'transfer', label: '转账' }, + { value: 'redPacket', label: '红包' }, + { value: 'location', label: '位置' }, + { value: 'voip', label: '通话' }, + { value: 'system', label: '系统' } + ] + + const normalizeMessage = createMessageNormalizer({ + apiBase, + getSelectedAccount: () => selectedAccount.value, + getSelectedContact: () => selectedContact.value + }) + + const messages = computed(() => { + if (!selectedContact.value) return [] + return allMessages.value[selectedContact.value.username] || [] + }) + + const hasMoreMessages = computed(() => { + if (!selectedContact.value) return false + const key = selectedContact.value.username + const meta = messagesMeta.value[key] + if (!meta) return false + if (meta.hasMore != null) return !!meta.hasMore + const total = Number(meta.total || 0) + const loaded = messages.value.length + return total > loaded + }) + + const reverseMessageSides = ref(false) + const reverseSidesStorageKey = computed(() => { + const account = String(selectedAccount.value || '').trim() + const username = String(selectedContact.value?.username || '').trim() + if (account && username) return `wechatda:reverse_message_sides:${account}:${username}` + return 'wechatda:reverse_message_sides:global' + }) + + const loadReverseMessageSides = () => { + if (!process.client) return + try { + const value = localStorage.getItem(reverseSidesStorageKey.value) + reverseMessageSides.value = value === '1' + } catch {} + } + + watch(reverseSidesStorageKey, () => loadReverseMessageSides(), { immediate: true }) + watch(reverseMessageSides, (value) => { + if (!process.client) return + try { + 
localStorage.setItem(reverseSidesStorageKey.value, value ? '1' : '0') + } catch {} + }) + + const toggleReverseMessageSides = () => { + reverseMessageSides.value = !reverseMessageSides.value + } + + const renderMessages = computed(() => { + const list = messages.value || [] + const reverseSides = !!reverseMessageSides.value + const fingerprint = `${String(selectedContact.value?.username || '').trim()}:${list.length}:${reverseSides ? '1' : '0'}` + const shouldLogRender = isDesktopRenderer() && fingerprint !== lastRenderMessagesFingerprint + if (shouldLogRender) { + logMessagePhase('renderMessages:start', { + count: list.length, + reverseSides + }) + } + let previousTs = 0 + const rendered = list.map((message) => { + const ts = Number(message.createTime || 0) + const show = !previousTs || (ts && Math.abs(ts - previousTs) >= 300) + if (ts) previousTs = ts + const originalIsSent = !!message?.isSent + return { + ...message, + _originalIsSent: originalIsSent, + isSent: reverseSides ? !originalIsSent : originalIsSent, + showTimeDivider: !!show, + timeDivider: formatTimeDivider(ts) + } + }) + if (shouldLogRender) { + lastRenderMessagesFingerprint = fingerprint + logMessagePhase('renderMessages:end', { + count: rendered.length, + reverseSides + }) + } + return rendered + }) + + const updateJumpToBottomState = () => { + const container = messageContainerRef.value + if (!container) { + showJumpToBottom.value = false + return + } + const distance = container.scrollHeight - container.scrollTop - container.clientHeight + showJumpToBottom.value = distance > 160 + } + + const scrollToBottom = () => { + const container = messageContainerRef.value + if (!container) return + container.scrollTop = container.scrollHeight + updateJumpToBottomState() + } + + const flashMessage = (id) => { + highlightMessageId.value = String(id || '').trim() + if (highlightTimer) clearTimeout(highlightTimer) + highlightTimer = setTimeout(() => { + highlightMessageId.value = '' + highlightServerIdStr.value 
= '' + highlightTimer = null + }, 2200) + } + + const scrollToMessageId = async (id) => { + const target = String(id || '').trim() + if (!target) return false + await nextTick() + const container = messageContainerRef.value + const element = container?.querySelector?.(`[data-msg-id="${CSS.escape(target)}"]`) + if (!element || typeof element.scrollIntoView !== 'function') return false + element.scrollIntoView({ block: 'center', behavior: 'smooth' }) + return true + } + + const openImagePreview = (url) => { + previewImageUrl.value = String(url || '').trim() || null + } + + const closeImagePreview = () => { + previewImageUrl.value = null + } + + const openVideoPreview = (url, poster) => { + previewVideoUrl.value = String(url || '').trim() || null + previewVideoPosterUrl.value = String(poster || '').trim() + previewVideoError.value = '' + } + + const closeVideoPreview = () => { + previewVideoUrl.value = null + previewVideoPosterUrl.value = '' + previewVideoError.value = '' + } + + const onPreviewVideoError = () => { + previewVideoError.value = '视频加载失败,可能是资源不存在或无法访问。' + } + + const setVoiceRef = (id, element) => { + const key = String(id || '').trim() + if (!key) return + if (element) { + voiceRefs.value = { ...voiceRefs.value, [key]: element } + } else if (voiceRefs.value[key]) { + const next = { ...voiceRefs.value } + delete next[key] + voiceRefs.value = next + } + } + + const playVoiceById = async (voiceId) => { + const key = String(voiceId || '').trim() + if (!key) return + const audio = voiceRefs.value[key] + if (!audio) return + + try { + if (currentPlayingVoice.value && currentPlayingVoice.value !== audio) { + currentPlayingVoice.value.pause() + currentPlayingVoice.value.currentTime = 0 + } + } catch {} + + if (currentPlayingVoice.value === audio && !audio.paused) { + try { + audio.pause() + audio.currentTime = 0 + } catch {} + currentPlayingVoice.value = null + playingVoiceId.value = null + return + } + + try { + await audio.play() + currentPlayingVoice.value = 
audio + playingVoiceId.value = key + audio.onended = () => { + if (playingVoiceId.value === key) { + currentPlayingVoice.value = null + playingVoiceId.value = null + } + } + } catch {} + } + + const playVoice = async (message) => { + await playVoiceById(message?.id) + } + + const getQuoteVoiceId = (message) => `quote-${String(message?.quoteServerId || message?.id || '')}` + + const playQuoteVoice = async (message) => { + await playVoiceById(getQuoteVoiceId(message)) + } + + const isQuotedVoice = (message) => String(message?.quoteType || '').trim() === '34' + const isQuotedImage = (message) => { + return !!String(message?.quoteImageUrl || '').trim() || String(message?.quoteContent || '').trim() === '[图片]' + } + const isQuotedLink = (message) => { + return String(message?.quoteType || '').trim() === '5' || !!String(message?.quoteThumbUrl || '').trim() + } + const getQuotedLinkText = (message) => { + const title = String(message?.quoteTitle || '').trim() + const content = String(message?.quoteContent || '').trim() + return content || title || '' + } + + const onQuoteImageError = (message) => { + if (message) message._quoteImageError = true + } + + const onQuoteThumbError = (message) => { + if (message) message._quoteThumbError = true + } + + const onAvatarError = (event, target) => { + try { event?.target && (event.target.style.display = 'none') } catch {} + try { if (target) target.avatar = null } catch {} + } + + const shouldShowEmojiDownload = (message) => { + if (!message?.emojiMd5) return false + const url = String(message?.emojiRemoteUrl || '').trim() + if (!url) return false + if (!/^https?:\/\//i.test(url)) return false + return true + } + + const onEmojiDownloadClick = async (message) => { + if (!process.client) return + if (!message?.emojiMd5) return + if (!selectedAccount.value) return + + const emojiUrl = String(message?.emojiRemoteUrl || '').trim() + if (!emojiUrl) { + window.alert('该表情没有可用的下载地址') + return + } + if (message._emojiDownloading) return + + 
message._emojiDownloading = true + try { + await api.downloadChatEmoji({ + account: selectedAccount.value, + md5: message.emojiMd5, + emoji_url: emojiUrl, + force: false + }) + message._emojiDownloaded = true + if (message.emojiLocalUrl) { + message.emojiUrl = message.emojiLocalUrl + } + } catch (error) { + window.alert(error?.message || '下载失败') + } finally { + message._emojiDownloading = false + } + } + + const onFileClick = async (message) => { + if (!message?.fileMd5) return + try { + if (!selectedAccount.value) return + if (!selectedContact.value?.username) return + await api.openChatMediaFolder({ + account: selectedAccount.value, + username: selectedContact.value.username, + kind: 'file', + md5: message.fileMd5 + }) + } catch (error) { + console.error('打开文件夹失败:', error) + } + } + + const loadMessages = async ({ username, reset }) => { + if (!username || !selectedAccount.value) return + + logMessagePhase('loadMessages:enter', { + username, + reset + }) + messagesError.value = '' + isLoadingMessages.value = true + activeMessagesFor.value = username + + try { + const existing = allMessages.value[username] || [] + const container = messageContainerRef.value + const beforeScrollHeight = container ? container.scrollHeight : 0 + const beforeScrollTop = container ? container.scrollTop : 0 + const offset = reset ? 
0 : existing.length + + const params = { + account: selectedAccount.value, + username, + limit: messagePageSize, + offset, + order: 'asc' + } + if (messageTypeFilter.value && messageTypeFilter.value !== 'all') { + params.render_types = messageTypeFilter.value + } + if (realtimeEnabled.value) { + params.source = 'realtime' + } + logMessagePhase('loadMessages:request:start', { + username, + reset, + offset, + existingCount: existing.length, + renderTypeFilter: messageTypeFilter.value, + realtime: !!realtimeEnabled.value + }) + const response = await api.listChatMessages(params) + logMessagePhase('loadMessages:request:end', { + username, + reset, + rawCount: Array.isArray(response?.messages) ? response.messages.length : 0, + total: Number(response?.total || 0), + hasMore: response?.hasMore + }) + + const raw = response?.messages || [] + logMessagePhase('loadMessages:normalize:start', { + username, + rawCount: raw.length + }) + const mapped = dedupeMessagesById(raw.map(normalizeMessage)) + logMessagePhase('loadMessages:normalize:end', { + username, + mappedCount: mapped.length + }) + + if (activeMessagesFor.value !== username) { + logMessagePhase('loadMessages:abort-stale', { + username, + activeMessagesFor: activeMessagesFor.value + }) + return + } + + logMessagePhase('loadMessages:state-commit:start', { + username, + reset, + mappedCount: mapped.length + }) + if (reset) { + allMessages.value = { ...allMessages.value, [username]: mapped } + } else { + const existingIds = new Set(existing.map((message) => String(message?.id || ''))) + const older = mapped.filter((message) => { + const id = String(message?.id || '') + if (!id) return true + if (existingIds.has(id)) return false + existingIds.add(id) + return true + }) + allMessages.value = { + ...allMessages.value, + [username]: [...older, ...existing] + } + } + logMessagePhase('loadMessages:state-commit:end', { + username, + storedCount: (allMessages.value[username] || []).length + }) + + messagesMeta.value = { + 
...messagesMeta.value, + [username]: { + total: Number(response?.total || 0), + hasMore: response?.hasMore + } + } + logMessagePhase('loadMessages:meta-commit:end', { + username, + total: Number(response?.total || 0), + hasMore: response?.hasMore + }) + + logMessagePhase('loadMessages:nextTick:start', { + username + }) + await nextTick() + logMessagePhase('loadMessages:nextTick:end', { + username, + renderedCount: (allMessages.value[username] || []).length + }) + const nextContainer = messageContainerRef.value + if (nextContainer) { + if (reset) { + nextContainer.scrollTop = nextContainer.scrollHeight + } else { + const afterScrollHeight = nextContainer.scrollHeight + nextContainer.scrollTop = beforeScrollTop + (afterScrollHeight - beforeScrollHeight) + } + } + updateJumpToBottomState() + logMessagePhase('loadMessages:scroll:end', { + username, + hasContainer: !!nextContainer, + scrollTop: nextContainer ? nextContainer.scrollTop : null, + scrollHeight: nextContainer ? nextContainer.scrollHeight : null + }) + } catch (error) { + console.error('[chat-messages] loadMessages:error', { + account: String(selectedAccount.value || '').trim(), + username: String(username || '').trim(), + reset: !!reset, + error + }) + messagesError.value = error?.message || '加载聊天记录失败' + } finally { + isLoadingMessages.value = false + logMessagePhase('loadMessages:exit', { + username, + reset, + loading: isLoadingMessages.value, + error: messagesError.value + }) + } + } + + const loadMoreMessages = async () => { + if (!selectedContact.value) return + if (searchContext.value?.active) return + await loadMessages({ username: selectedContact.value.username, reset: false }) + } + + const refreshSelectedMessages = async () => { + if (!selectedContact.value) return + await loadMessages({ username: selectedContact.value.username, reset: true }) + } + + const refreshRealtimeIncremental = async () => { + if (!realtimeEnabled.value || !selectedAccount.value || !selectedContact.value?.username) return + 
if (searchContext.value?.active || isLoadingMessages.value) return + + const username = selectedContact.value.username + const existing = allMessages.value[username] || [] + if (!existing.length) return + + const container = messageContainerRef.value + const atBottom = !!container && (container.scrollHeight - container.scrollTop - container.clientHeight) < 80 + + const params = { + account: selectedAccount.value, + username, + limit: 30, + offset: 0, + order: 'asc', + source: 'realtime' + } + if (messageTypeFilter.value && messageTypeFilter.value !== 'all') { + params.render_types = messageTypeFilter.value + } + + const response = await api.listChatMessages(params) + if (selectedContact.value?.username !== username) return + + const latest = (response?.messages || []).map(normalizeMessage) + const seenIds = new Set(existing.map((message) => String(message?.id || ''))) + const newOnes = [] + for (const message of latest) { + const id = String(message?.id || '') + if (!id || seenIds.has(id)) continue + seenIds.add(id) + newOnes.push(message) + } + if (!newOnes.length) return + + allMessages.value = { ...allMessages.value, [username]: [...existing, ...newOnes] } + + await nextTick() + const nextContainer = messageContainerRef.value + if (nextContainer && atBottom) { + nextContainer.scrollTop = nextContainer.scrollHeight + } + updateJumpToBottomState() + } + + let realtimeRefreshFuture = null + let realtimeRefreshQueued = false + + const queueRealtimeRefresh = () => { + if (realtimeRefreshFuture) { + realtimeRefreshQueued = true + return + } + + realtimeRefreshFuture = refreshRealtimeIncremental().finally(() => { + realtimeRefreshFuture = null + if (realtimeRefreshQueued) { + realtimeRefreshQueued = false + queueRealtimeRefresh() + } + }) + } + + const tryEnableRealtimeAuto = async () => { + if (!process.client || typeof window === 'undefined') return + if (!desktopAutoRealtime.value || realtimeEnabled.value || !selectedAccount.value) return + try { + await 
realtimeStore.enable({ silent: true }) + } catch {} + } + + const resetMessageState = () => { + allMessages.value = {} + messagesMeta.value = {} + messagesError.value = '' + highlightMessageId.value = '' + highlightServerIdStr.value = '' + } + + const contactProfileCardOpen = ref(false) + const contactProfileCardMessageId = ref('') + const contactProfileLoading = ref(false) + const contactProfileError = ref('') + const contactProfileData = ref(null) + let contactProfileHoverHideTimer = null + + const contactProfileResolvedName = computed(() => { + const profile = contactProfileData.value || {} + const displayName = String(profile?.displayName || '').trim() + if (displayName) return displayName + const contactName = String(selectedContact.value?.name || '').trim() + if (contactName) return contactName + return String(profile?.username || selectedContact.value?.username || '').trim() + }) + + const contactProfileResolvedUsername = computed(() => { + const profile = contactProfileData.value || {} + return String(profile?.username || selectedContact.value?.username || '').trim() + }) + + const contactProfileResolvedNickname = computed(() => String(contactProfileData.value?.nickname || '').trim()) + const contactProfileResolvedAlias = computed(() => String(contactProfileData.value?.alias || '').trim()) + const contactProfileResolvedRegion = computed(() => String(contactProfileData.value?.region || '').trim()) + const contactProfileResolvedRemark = computed(() => String(contactProfileData.value?.remark || '').trim()) + const contactProfileResolvedSignature = computed(() => String(contactProfileData.value?.signature || '').trim()) + const contactProfileResolvedSource = computed(() => String(contactProfileData.value?.source || '').trim()) + const contactProfileResolvedAvatar = computed(() => { + const avatar = String(contactProfileData.value?.avatar || '').trim() + if (avatar) return avatar + return String(selectedContact.value?.avatar || '').trim() + }) + + const 
contactProfileResolvedGender = computed(() => { + const value = contactProfileData.value?.gender + if (value == null || value === '') return '' + const gender = Number(value) + if (!Number.isFinite(gender)) return '' + if (gender === 1) return '男' + if (gender === 2) return '女' + if (gender === 0) return '未知' + return String(gender) + }) + + const contactProfileResolvedSourceScene = computed(() => { + const value = contactProfileData.value?.sourceScene + if (value == null || value === '') return null + const scene = Number(value) + return Number.isFinite(scene) ? scene : null + }) + + const fetchContactProfile = async (options = {}) => { + const username = String(options?.username || contactProfileData.value?.username || selectedContact.value?.username || '').trim() + const displayNameFallback = String(options?.displayName || '').trim() + const avatarFallback = String(options?.avatar || '').trim() + const account = String(selectedAccount.value || '').trim() + if (!username || !account) { + contactProfileData.value = null + return + } + + contactProfileLoading.value = true + contactProfileError.value = '' + try { + const response = await api.listChatContacts({ + account, + include_friends: true, + include_groups: true, + include_officials: true + }) + const list = Array.isArray(response?.contacts) ? 
response.contacts : [] + const matched = list.find((item) => String(item?.username || '').trim() === username) + if (matched) { + const normalized = { ...matched, username } + if (!String(normalized.displayName || '').trim() && displayNameFallback) { + normalized.displayName = displayNameFallback + } + if (!String(normalized.avatar || '').trim() && avatarFallback) { + normalized.avatar = avatarFallback + } + contactProfileData.value = normalized + } else { + contactProfileData.value = { + username, + displayName: displayNameFallback || selectedContact.value?.name || username, + avatar: avatarFallback || selectedContact.value?.avatar || '', + nickname: '', + alias: '', + gender: null, + region: '', + remark: '', + signature: '', + source: '', + sourceScene: null + } + } + } catch (error) { + contactProfileData.value = { + username, + displayName: displayNameFallback || selectedContact.value?.name || username, + avatar: avatarFallback || selectedContact.value?.avatar || '', + nickname: '', + alias: '', + gender: null, + region: '', + remark: '', + signature: '', + source: '', + sourceScene: null + } + contactProfileError.value = error?.message || '加载联系人资料失败' + } finally { + contactProfileLoading.value = false + } + } + + const clearContactProfileHoverHideTimer = () => { + if (contactProfileHoverHideTimer) { + clearTimeout(contactProfileHoverHideTimer) + contactProfileHoverHideTimer = null + } + } + + const closeContactProfileCard = () => { + contactProfileCardOpen.value = false + contactProfileCardMessageId.value = '' + } + + const onMessageAvatarMouseEnter = async (message) => { + if (!!message?.isSent) return + const messageId = String(message?.id ?? 
'').trim() + if (!messageId) return + const username = String(message?.senderUsername || '').trim() + if (!username || username === 'self') return + + const senderName = String(message?.senderDisplayName || message?.sender || '').trim() + const senderAvatar = String(message?.avatar || '').trim() + if (!contactProfileData.value || String(contactProfileData.value?.username || '').trim() !== username) { + contactProfileData.value = { + username, + displayName: senderName || username, + avatar: senderAvatar, + nickname: '', + alias: '', + gender: null, + region: '', + remark: '', + signature: '', + source: '', + sourceScene: null + } + } else { + if (!String(contactProfileData.value?.displayName || '').trim() && senderName) { + contactProfileData.value.displayName = senderName + } + if (!String(contactProfileData.value?.avatar || '').trim() && senderAvatar) { + contactProfileData.value.avatar = senderAvatar + } + } + + clearContactProfileHoverHideTimer() + contactProfileCardMessageId.value = messageId + contactProfileCardOpen.value = true + await fetchContactProfile({ username, displayName: senderName, avatar: senderAvatar }) + } + + const onMessageAvatarMouseLeave = () => { + clearContactProfileHoverHideTimer() + contactProfileHoverHideTimer = setTimeout(() => { + closeContactProfileCard() + }, 120) + } + + const onContactCardMouseEnter = () => { + clearContactProfileHoverHideTimer() + } + + watch( + () => selectedContact.value?.username, + () => { + clearContactProfileHoverHideTimer() + closeContactProfileCard() + contactProfileError.value = '' + contactProfileData.value = null + } + ) + + watch( + () => selectedAccount.value, + () => { + clearContactProfileHoverHideTimer() + closeContactProfileCard() + contactProfileError.value = '' + contactProfileData.value = null + } + ) + + onUnmounted(() => { + if (highlightTimer) clearTimeout(highlightTimer) + highlightTimer = null + clearContactProfileHoverHideTimer() + }) + + return { + allMessages, + messagesMeta, + 
messages, + renderMessages, + hasMoreMessages, + isLoadingMessages, + messagesError, + messageContainerRef, + showJumpToBottom, + messagePageSize, + messageTypeFilter, + messageTypeFilterOptions, + reverseMessageSides, + previewImageUrl, + previewVideoUrl, + previewVideoPosterUrl, + previewVideoError, + voiceRefs, + currentPlayingVoice, + playingVoiceId, + highlightServerIdStr, + highlightMessageId, + contactProfileCardOpen, + contactProfileCardMessageId, + contactProfileLoading, + contactProfileError, + contactProfileData, + contactProfileResolvedName, + contactProfileResolvedUsername, + contactProfileResolvedNickname, + contactProfileResolvedAlias, + contactProfileResolvedGender, + contactProfileResolvedRegion, + contactProfileResolvedRemark, + contactProfileResolvedSignature, + contactProfileResolvedSource, + contactProfileResolvedSourceScene, + contactProfileResolvedAvatar, + normalizeMessage, + updateJumpToBottomState, + scrollToBottom, + flashMessage, + scrollToMessageId, + openImagePreview, + closeImagePreview, + openVideoPreview, + closeVideoPreview, + onPreviewVideoError, + setVoiceRef, + playVoice, + playQuoteVoice, + getQuoteVoiceId, + getVoiceDurationInSeconds, + getVoiceWidth, + isQuotedVoice, + isQuotedImage, + isQuotedLink, + getQuotedLinkText, + onQuoteImageError, + onQuoteThumbError, + onAvatarError, + shouldShowEmojiDownload, + onEmojiDownloadClick, + onFileClick, + toggleReverseMessageSides, + loadMessages, + loadMoreMessages, + refreshSelectedMessages, + refreshRealtimeIncremental, + queueRealtimeRefresh, + tryEnableRealtimeAuto, + resetMessageState, + fetchContactProfile, + clearContactProfileHoverHideTimer, + closeContactProfileCard, + onMessageAvatarMouseEnter, + onMessageAvatarMouseLeave, + onContactCardMouseEnter, + formatFileSize + } +} diff --git a/frontend/composables/chat/useChatSearch.js b/frontend/composables/chat/useChatSearch.js new file mode 100644 index 0000000..8a92e75 --- /dev/null +++ 
b/frontend/composables/chat/useChatSearch.js @@ -0,0 +1,1678 @@ +import { computed, nextTick, onMounted, onUnmounted, ref, watch } from 'vue' +import { + dateToUnixSeconds, + formatMessageFullTime, + highlightKeyword +} from '~/lib/chat/formatters' + +export const createEmptySearchContext = () => ({ + active: false, + kind: 'search', + label: '', + username: '', + anchorId: '', + anchorIndex: -1, + hasMoreBefore: false, + hasMoreAfter: false, + loadingBefore: false, + loadingAfter: false, + savedMessages: null, + savedMeta: null +}) + +export const useChatSearch = ({ + api, + heatColor, + contacts, + selectedAccount, + selectedContact, + privacyMode, + allMessages, + messagesMeta, + messages, + messageContainerRef, + messagePageSize, + hasMoreMessages, + isLoadingMessages, + normalizeMessage, + updateJumpToBottomState, + scrollToMessageId, + flashMessage, + highlightMessageId, + searchContext, + selectContact, + loadMoreMessages +}) => { +const messageSearchOpen = ref(false) +const messageSearchQuery = ref('') +const messageSearchScope = ref('global') // conversation | global +const messageSearchRangeDays = ref('') // empty means no time filter +const messageSearchSessionType = ref('') // empty means all (global only): group | single +const messageSearchSender = ref('') // 发送者筛选 +const messageSearchSenderOptions = ref([]) +const messageSearchSenderLoading = ref(false) +const messageSearchSenderError = ref('') +const messageSearchSenderOptionsKey = ref('') +const messageSearchSenderDropdownOpen = ref(false) +const messageSearchSenderDropdownRef = ref(null) +const messageSearchSenderDropdownInputRef = ref(null) +const messageSearchSenderDropdownQuery = ref('') +const messageSearchStartDate = ref('') // 自定义开始日期 +const messageSearchEndDate = ref('') // 自定义结束日期 +const messageSearchResults = ref([]) +const messageSearchLoading = ref(false) +const messageSearchError = ref('') +const messageSearchBackendStatus = ref('') +const messageSearchIndexInfo = ref(null) +const 
messageSearchHasMore = ref(false) +const messageSearchOffset = ref(0) +const messageSearchLimit = 50 +const messageSearchTotal = ref(0) +const messageSearchSelectedIndex = ref(-1) +const messageSearchInputRef = ref(null) +let messageSearchDebounceTimer = null +let messageSearchIndexPollTimer = null + +// 搜索UI增强 +const searchInputFocused = ref(false) +const showAdvancedFilters = ref(false) +const searchHistory = ref([]) +const SEARCH_HISTORY_KEY = 'wechat_search_history' +const MAX_SEARCH_HISTORY = 10 + +// 加载搜索历史 +const loadSearchHistory = () => { +if (!process.client) return +try { + const saved = localStorage.getItem(SEARCH_HISTORY_KEY) + if (saved) { + searchHistory.value = JSON.parse(saved) || [] + } +} catch (e) { + searchHistory.value = [] +} +} + +// 保存搜索历史 +const saveSearchHistory = (query) => { +if (!process.client) return +if (!query || !query.trim()) return +const q = query.trim() +try { + let history = [...searchHistory.value] + // 移除重复项 + history = history.filter(item => item !== q) + // 添加到开头 + history.unshift(q) + // 限制数量 + if (history.length > MAX_SEARCH_HISTORY) { + history = history.slice(0, MAX_SEARCH_HISTORY) + } + searchHistory.value = history + localStorage.setItem(SEARCH_HISTORY_KEY, JSON.stringify(history)) +} catch (e) { + // ignore +} +} + +// 清空搜索历史 +const clearSearchHistory = () => { +if (!process.client) return +searchHistory.value = [] +try { + localStorage.removeItem(SEARCH_HISTORY_KEY) +} catch (e) { + // ignore +} +} + +// 应用搜索历史 +const applySearchHistory = async (query) => { +messageSearchQuery.value = query +await runMessageSearch({ reset: true }) +} + +const messageSearchIndexExists = computed(() => !!messageSearchIndexInfo.value?.exists) +const messageSearchIndexReady = computed(() => !!messageSearchIndexInfo.value?.ready) +const messageSearchIndexBuildStatus = computed(() => String(messageSearchIndexInfo.value?.build?.status || '')) +const messageSearchIndexBuildIndexed = computed(() => 
Number(messageSearchIndexInfo.value?.build?.indexedMessages || 0)) +const messageSearchIndexMetaCount = computed(() => { +const meta = messageSearchIndexInfo.value?.meta || {} +const v = meta.message_count ?? meta.messageCount ?? meta.message_count ?? 0 +return Number(v || 0) +}) + +const messageSearchIndexProgressText = computed(() => { +if (messageSearchIndexBuildStatus.value !== 'building') return '' +const n = Number(messageSearchIndexBuildIndexed.value || 0) +return n > 0 ? `已索引 ${n.toLocaleString()} 条` : '准备中...' +}) + +const messageSearchIndexText = computed(() => { +if (!messageSearchIndexInfo.value) return '' +if (!messageSearchIndexExists.value) return '索引未建立' +if (messageSearchIndexBuildStatus.value === 'error') return '索引异常' +if (!messageSearchIndexReady.value) return '索引未完成,需重建' +const n = Number(messageSearchIndexMetaCount.value || 0) +return n > 0 ? `索引已就绪(${n.toLocaleString()} 条)` : '索引已就绪' +}) + +const messageSearchIndexActionText = computed(() => { +if (messageSearchIndexBuildStatus.value === 'building') return '建立中' +return messageSearchIndexExists.value ? 
'重建索引' : '建立索引' +}) + +const messageSearchIndexActionDisabled = computed(() => { +return messageSearchIndexBuildStatus.value === 'building' || messageSearchLoading.value +}) + +const formatCount = (n) => { +const v = Number(n || 0) +if (!Number.isFinite(v) || v <= 0) return '' +try { + return v.toLocaleString() +} catch { + return String(v) +} +} + +const messageSearchSenderDisabled = computed(() => { +if (!selectedAccount.value) return true +const scope = String(messageSearchScope.value || 'conversation') +if (scope === 'conversation') { + return !selectedContact.value?.username +} +const q = String(messageSearchQuery.value || '').trim() +if (q.length >= 2) return false +return !String(messageSearchSender.value || '').trim() +}) + +const messageSearchSelectedSenderInfo = computed(() => { +const u = String(messageSearchSender.value || '').trim() +if (!u) return null +const list = Array.isArray(messageSearchSenderOptions.value) ? messageSearchSenderOptions.value : [] +const found = list.find((s) => String(s?.username || '').trim() === u) +if (found) return found +return { username: u, displayName: u, avatar: null, count: null } +}) + +const messageSearchSelectedSenderInitial = computed(() => { +const info = messageSearchSelectedSenderInfo.value +if (!info) return '人' +const n = String(info.displayName || info.username || '').trim() +return n ? n.charAt(0) : '人' +}) + +const messageSearchSenderLabel = computed(() => { +const cur = String(messageSearchSender.value || '').trim() +if (!cur) { + if (String(messageSearchScope.value || '') === 'global' && String(messageSearchQuery.value || '').trim().length < 2) { + return '发送者' + } + return '不限发送者' +} +const info = messageSearchSelectedSenderInfo.value +return String(info?.displayName || info?.username || cur) +}) + +const filteredMessageSearchSenderOptions = computed(() => { +const list = Array.isArray(messageSearchSenderOptions.value) ? 
messageSearchSenderOptions.value : [] +const q = String(messageSearchSenderDropdownQuery.value || '').trim().toLowerCase() +if (!q) return list +return list.filter((s) => { + const u = String(s?.username || '').toLowerCase() + const n = String(s?.displayName || '').toLowerCase() + return u.includes(q) || n.includes(q) +}) +}) + +const closeMessageSearchSenderDropdown = () => { +messageSearchSenderDropdownOpen.value = false +messageSearchSenderDropdownQuery.value = '' +} + +const getMessageSearchSenderFacetKey = () => { +const acc = String(selectedAccount.value || '').trim() +if (!acc) return '' +const scope = String(messageSearchScope.value || 'conversation') +const conv = scope === 'conversation' ? String(selectedContact.value?.username || '') : '' +const q = String(messageSearchQuery.value || '').trim() +const range = String(messageSearchRangeDays.value || '') +const sd = String(messageSearchStartDate.value || '') +const ed = String(messageSearchEndDate.value || '') +const st = scope === 'global' ? 
String(messageSearchSessionType.value || '').trim() : '' +return [acc, scope, conv, q, range, sd, ed, st].join('|') +} + +const ensureMessageSearchSendersLoaded = async () => { +const key = getMessageSearchSenderFacetKey() +if (!key) return +if (messageSearchSenderOptionsKey.value === key && !messageSearchSenderLoading.value) return +const list = await fetchMessageSearchSenders() +messageSearchSenderOptionsKey.value = key +return list +} + +const toggleMessageSearchSenderDropdown = async () => { +if (messageSearchSenderDisabled.value) return +if (messageSearchSenderDropdownOpen.value) { + closeMessageSearchSenderDropdown() + return +} +messageSearchSenderDropdownOpen.value = true +await ensureMessageSearchSendersLoaded() +await nextTick() +try { + messageSearchSenderDropdownInputRef.value?.focus?.() +} catch {} +} + +const selectMessageSearchSender = (username) => { +messageSearchSender.value = String(username || '') +closeMessageSearchSenderDropdown() +} + +const fetchMessageSearchIndexStatus = async () => { +if (!selectedAccount.value) return null +try { + const resp = await api.getChatSearchIndexStatus({ account: selectedAccount.value }) + messageSearchIndexInfo.value = resp?.index || null + return messageSearchIndexInfo.value +} catch (e) { + return null +} +} + +const fetchMessageSearchSenders = async () => { +messageSearchSenderError.value = '' +if (!selectedAccount.value) { + messageSearchSenderOptions.value = [] + messageSearchSenderOptionsKey.value = '' + return [] +} + +const scope = String(messageSearchScope.value || 'conversation') +const msgQ = String(messageSearchQuery.value || '').trim() + +const params = { + account: selectedAccount.value, + limit: 200 +} + +if (scope === 'conversation') { + if (!selectedContact.value?.username) { + messageSearchSenderOptions.value = [] + messageSearchSenderOptionsKey.value = '' + return [] + } + params.username = selectedContact.value.username +} else { + if (msgQ.length < 2) { + messageSearchSenderOptions.value = 
[] + messageSearchSenderOptionsKey.value = '' + return [] + } +} + +if (msgQ) { + params.message_q = msgQ +} + +params.render_types = 'text' + +const range = String(messageSearchRangeDays.value || '') +if (range === 'custom') { + const start = dateToUnixSeconds(messageSearchStartDate.value, false) + const end = dateToUnixSeconds(messageSearchEndDate.value, true) + if (start != null) params.start_time = start + if (end != null) params.end_time = end + if (start != null && end != null && start > end) { + messageSearchSenderError.value = '时间范围不合法:开始日期不能晚于结束日期' + messageSearchSenderOptions.value = [] + messageSearchSenderOptionsKey.value = '' + return [] + } +} else { + const days = Number(range || 0) + if (days > 0 && Number.isFinite(days)) { + const end = Math.floor(Date.now() / 1000) + const start = Math.max(0, end - Math.floor(days * 24 * 3600)) + params.start_time = start + params.end_time = end + } +} + +if (scope === 'global') { + const st = String(messageSearchSessionType.value || '').trim() + if (st) params.session_type = st +} + +messageSearchSenderLoading.value = true +try { + const resp = await api.listChatSearchSenders(params) + const status = String(resp?.status || 'success') + if (status !== 'success') { + if (status !== 'index_building') { + messageSearchSenderError.value = String(resp?.message || '加载发送者失败') + } + messageSearchSenderOptions.value = [] + messageSearchSenderOptionsKey.value = '' + return [] + } + const list = Array.isArray(resp?.senders) ? 
resp.senders : [] + messageSearchSenderOptions.value = list + messageSearchSenderOptionsKey.value = getMessageSearchSenderFacetKey() + const cur = String(messageSearchSender.value || '').trim() + if (cur && !list.some((s) => String(s?.username || '').trim() === cur)) { + messageSearchSender.value = '' + } + return list +} catch (e) { + messageSearchSenderError.value = e?.message || '加载发送者失败' + messageSearchSenderOptions.value = [] + messageSearchSenderOptionsKey.value = '' + return [] +} finally { + messageSearchSenderLoading.value = false +} +} + +const stopMessageSearchIndexPolling = () => { +if (messageSearchIndexPollTimer) clearInterval(messageSearchIndexPollTimer) +messageSearchIndexPollTimer = null +} + +const ensureMessageSearchIndexPolling = () => { +if (messageSearchIndexPollTimer) return +messageSearchIndexPollTimer = setInterval(async () => { + if (!messageSearchOpen.value) { + stopMessageSearchIndexPolling() + return + } + + const info = await fetchMessageSearchIndexStatus() + const exists = !!info?.exists + const ready = !!info?.ready + const bs = String(info?.build?.status || '') + const done = exists && ready && bs !== 'building' + if (done) { + stopMessageSearchIndexPolling() + if (String(messageSearchScope.value || '') === 'conversation') { + await fetchMessageSearchSenders() + } + if (String(messageSearchQuery.value || '').trim()) { + await runMessageSearch({ reset: true }) + } + } +}, 1200) +} + +const onMessageSearchIndexAction = async () => { +if (!selectedAccount.value) return +const rebuild = messageSearchIndexExists.value +try { + const resp = await api.buildChatSearchIndex({ account: selectedAccount.value, rebuild }) + messageSearchIndexInfo.value = resp?.index || null + messageSearchBackendStatus.value = 'index_building' + ensureMessageSearchIndexPolling() +} catch (e) { + messageSearchError.value = e?.message || '建立索引失败' +} +} +const getMessageSearchHitAvatarUrl = (hit) => { +if (!hit) return '' +const scope = 
String(messageSearchScope.value || '') +const url = + scope === 'global' + ? (hit.conversationAvatar || hit.senderAvatar || '') + : (hit.senderAvatar || hit.conversationAvatar || '') +return String(url || '').trim() +} + +const getMessageSearchHitAvatarAlt = (hit) => { +if (!hit) return '头像' +const scope = String(messageSearchScope.value || '') +if (scope === 'global') { + const name = String(hit.conversationName || hit.username || '').trim() + return name ? `${name} 头像` : '头像' +} +let name = String(hit.senderDisplayName || '').trim() +if (!name) { + name = hit.isSent ? '我' : String(hit.senderUsername || '').trim() +} +return name ? `${name} 头像` : '头像' +} + +const getMessageSearchHitAvatarInitial = (hit) => { +if (!hit) return '?' +const scope = String(messageSearchScope.value || '') +let text = '' +if (scope === 'global') { + text = String(hit.conversationName || hit.username || '').trim() +} else { + text = String(hit.senderDisplayName || '').trim() + if (!text) { + text = hit.isSent ? '我' : String(hit.senderUsername || '').trim() + } +} +return (text.charAt(0) || '?').toString() +} +const searchContextBannerText = computed(() => { +if (!searchContext.value?.active) return '' +const kind = String(searchContext.value.kind || 'search') +if (kind === 'date') { + const label = String(searchContext.value.label || '').trim() + return label ? 
`已定位到 ${label}(上下文模式)` : '已定位到指定日期(上下文模式)' +} +if (kind === 'first') { + return '已定位到会话顶部(上下文模式)' +} +return '已定位到搜索结果(上下文模式)' +}) + +// 回到最新按钮 +const showJumpToBottom = ref(false) + +// 时间侧边栏(按日期定位) +const timeSidebarOpen = ref(false) +const timeSidebarYear = ref(null) +const timeSidebarMonth = ref(null) // 1-12 +const timeSidebarCounts = ref({}) // { 'YYYY-MM-DD': count } +const timeSidebarMax = ref(0) +const timeSidebarTotal = ref(0) +const timeSidebarLoading = ref(false) +const timeSidebarError = ref('') +const timeSidebarSelectedDate = ref('') // YYYY-MM-DD (current/selected day) +// Simple in-memory cache per (account|username|YYYY-MM) +const timeSidebarCache = ref({}) +const timeSidebarWeekdays = ['一', '二', '三', '四', '五', '六', '日'] + +const timeSidebarMonthLabel = computed(() => { +const y = Number(timeSidebarYear.value || 0) +const m = Number(timeSidebarMonth.value || 0) +if (!y || !m) return '' +return `${y}年${m}月` +}) + +const timeSidebarYearOptions = computed(() => { +// WeChat history normally starts after 2011, but keep a broader range for safety. +const nowY = new Date().getFullYear() +const minY = 2000 +const maxY = Math.max(nowY, Number(timeSidebarYear.value || 0) || nowY) +const years = [] +for (let y = maxY; y >= minY; y--) years.push(y) +return years +}) + +const timeSidebarActiveDays = computed(() => { +const counts = timeSidebarCounts.value || {} +const keys = Object.keys(counts) +return keys.length +}) + +const _pad2 = (n) => String(n).padStart(2, '0') + +const _dateStrFromEpochSeconds = (ts) => { +const t = Number(ts || 0) +if (!t) return '' +try { + const d = new Date(t * 1000) + return `${d.getFullYear()}-${_pad2(d.getMonth() + 1)}-${_pad2(d.getDate())}` +} catch { + return '' +} +} + +// Calendar heatmap color: reuse Wrapped heat palette, but bucket to Wrapped-like legend levels +// so ">=1 message" is always visibly tinted (instead of being almost white when max is huge). 
+const _calendarHeatColor = (count, maxV) => { +const v = Math.max(0, Number(count || 0)) +const m = Math.max(0, Number(maxV || 0)) +if (!(v > 0)) return '' +if (!(m > 0)) return heatColor(1, 1) +const levels = 6 +const ratio = Math.max(0, Math.min(1, v / m)) +const level = Math.min(levels, Math.max(1, Math.ceil(ratio * levels))) +const valueForLevel = Math.max(1, Math.round(level * (m / levels))) +return heatColor(valueForLevel, m) +} + +const timeSidebarCalendarCells = computed(() => { +const y = Number(timeSidebarYear.value || 0) +const m = Number(timeSidebarMonth.value || 0) // 1-12 +if (!y || !m) return [] + +const daysInMonth = new Date(y, m, 0).getDate() +const firstDow = new Date(y, m - 1, 1).getDay() // 0=Sun..6=Sat +const offset = (firstDow + 6) % 7 // Monday=0 + +const maxV = Math.max(0, Number(timeSidebarMax.value || 0)) +const counts = timeSidebarCounts.value || {} +const selected = String(timeSidebarSelectedDate.value || '').trim() + +const out = [] +for (let i = 0; i < 42; i++) { + const dayNum = i - offset + 1 + const inMonth = dayNum >= 1 && dayNum <= daysInMonth + if (!inMonth) { + out.push({ + key: `e:${y}-${m}:${i}`, + day: '', + dateStr: '', + count: 0, + disabled: true, + className: 'calendar-day-outside', + style: null, + title: '' + }) + continue + } + + const dateStr = `${y}-${_pad2(m)}-${_pad2(dayNum)}` + const count = Math.max(0, Number(counts[dateStr] || 0)) + const disabled = count <= 0 + + const style = !disabled + ? { backgroundColor: _calendarHeatColor(count, Math.max(maxV, count)) } + : null + + const className = [ + disabled ? 'calendar-day-empty' : '', + (selected && dateStr === selected) ? 'calendar-day-selected' : '' + ].filter(Boolean).join(' ') + + out.push({ + key: dateStr, + day: String(dayNum), + dateStr, + count, + disabled, + // NOTE: heatmap bg color is applied via inline style (reusing Wrapped heatmap palette). + // Dynamic class names like `calendar-day-l${level}` may be purged by Tailwind and lead to no bg color. 
+ className, + style, + title: `${dateStr}:${count} 条` + }) +} +return out +}) +const closeMessageSearch = () => { +messageSearchOpen.value = false +closeMessageSearchSenderDropdown() +messageSearchError.value = '' +messageSearchLoading.value = false +messageSearchBackendStatus.value = '' +stopMessageSearchIndexPolling() +if (messageSearchDebounceTimer) clearTimeout(messageSearchDebounceTimer) +messageSearchDebounceTimer = null +} + +let timeSidebarReqId = 0 + +const closeTimeSidebar = () => { +timeSidebarOpen.value = false +timeSidebarError.value = '' +} + +const _timeSidebarCacheKey = ({ account, username, year, month }) => { +const acc = String(account || '').trim() +const u = String(username || '').trim() +const y = Number(year || 0) +const m = Number(month || 0) +return `${acc}|${u}|${y}-${_pad2(m)}` +} + +const _applyTimeSidebarMonthData = (data) => { +const counts = (data && typeof data.counts === 'object' && !Array.isArray(data.counts)) ? data.counts : {} +timeSidebarCounts.value = counts +timeSidebarMax.value = Math.max(0, Number(data?.max || 0)) +timeSidebarTotal.value = Math.max(0, Number(data?.total || 0)) +} + +const loadTimeSidebarMonth = async ({ year, month, force } = {}) => { +if (!selectedAccount.value) return +if (!selectedContact.value?.username) return + +const y = Number(year || timeSidebarYear.value || 0) +const m = Number(month || timeSidebarMonth.value || 0) +if (!y || !m) return + +timeSidebarYear.value = y +timeSidebarMonth.value = m + +const key = _timeSidebarCacheKey({ + account: selectedAccount.value, + username: selectedContact.value.username, + year: y, + month: m +}) + +if (!force) { + const cached = timeSidebarCache.value[key] + if (cached) { + timeSidebarError.value = '' + _applyTimeSidebarMonthData(cached) + return + } +} + +const reqId = ++timeSidebarReqId +timeSidebarLoading.value = true +timeSidebarError.value = '' + +try { + const resp = await api.getChatMessageDailyCounts({ + account: selectedAccount.value, + username: 
selectedContact.value.username, + year: y, + month: m + }) + if (reqId !== timeSidebarReqId) return + if (String(resp?.status || '') !== 'success') { + throw new Error(String(resp?.message || '加载日历失败')) + } + + const data = { + counts: resp?.counts || {}, + max: Number(resp?.max || 0), + total: Number(resp?.total || 0) + } + + _applyTimeSidebarMonthData(data) + timeSidebarCache.value = { ...timeSidebarCache.value, [key]: data } +} catch (e) { + if (reqId !== timeSidebarReqId) return + timeSidebarError.value = e?.message || '加载日历失败' + _applyTimeSidebarMonthData({ counts: {}, max: 0, total: 0 }) +} finally { + if (reqId === timeSidebarReqId) { + timeSidebarLoading.value = false + } +} +} + +const _pickTimeSidebarInitialYearMonth = () => { +const list = messages.value || [] +const last = Array.isArray(list) && list.length ? list[list.length - 1] : null +const ts = Number(last?.createTime || 0) +const d = ts ? new Date(ts * 1000) : new Date() +return { year: d.getFullYear(), month: d.getMonth() + 1 } +} + +const _applyTimeSidebarSelectedDate = async (dateStr, { syncMonth } = {}) => { +const ds = String(dateStr || '').trim() +if (!ds) return +if (timeSidebarSelectedDate.value !== ds) { + timeSidebarSelectedDate.value = ds +} +if (!syncMonth || !timeSidebarOpen.value) return + +const parts = ds.split('-') +const y = Number(parts?.[0] || 0) +const m = Number(parts?.[1] || 0) +if (!y || !m) return + +if (Number(timeSidebarYear.value || 0) !== y || Number(timeSidebarMonth.value || 0) !== m) { + timeSidebarYear.value = y + timeSidebarMonth.value = m + // Fire and forget; request id guard + cache inside loadTimeSidebarMonth will handle racing. 
+ await loadTimeSidebarMonth({ year: y, month: m, force: false }) +} +} + +const toggleTimeSidebar = async () => { +timeSidebarOpen.value = !timeSidebarOpen.value +if (!timeSidebarOpen.value) return +closeMessageSearch() + +const { year, month } = _pickTimeSidebarInitialYearMonth() +timeSidebarYear.value = year +timeSidebarMonth.value = month + +// Default selected day: current viewport's latest loaded message day (usually "latest"). +const list = messages.value || [] +const last = Array.isArray(list) && list.length ? list[list.length - 1] : null +const ds = _dateStrFromEpochSeconds(Number(last?.createTime || 0)) +if (ds) await _applyTimeSidebarSelectedDate(ds, { syncMonth: false }) + +await loadTimeSidebarMonth({ year, month, force: false }) +} + +const prevTimeSidebarMonth = async () => { +const y0 = Number(timeSidebarYear.value || 0) +const m0 = Number(timeSidebarMonth.value || 0) +if (!y0 || !m0) return +const y = m0 === 1 ? (y0 - 1) : y0 +const m = m0 === 1 ? 12 : (m0 - 1) +await loadTimeSidebarMonth({ year: y, month: m, force: false }) +} + +const nextTimeSidebarMonth = async () => { +const y0 = Number(timeSidebarYear.value || 0) +const m0 = Number(timeSidebarMonth.value || 0) +if (!y0 || !m0) return +const y = m0 === 12 ? (y0 + 1) : y0 +const m = m0 === 12 ? 
1 : (m0 + 1) +await loadTimeSidebarMonth({ year: y, month: m, force: false }) +} + +const onTimeSidebarYearMonthChange = async () => { +if (!timeSidebarOpen.value) return +const y = Number(timeSidebarYear.value || 0) +const m = Number(timeSidebarMonth.value || 0) +if (!y || !m) return +await loadTimeSidebarMonth({ year: y, month: m, force: false }) +} + +const ensureMessageSearchScopeValid = () => { +if (messageSearchScope.value === 'conversation' && !selectedContact.value) { + messageSearchScope.value = 'global' +} +} + +const toggleMessageSearch = async () => { +messageSearchOpen.value = !messageSearchOpen.value +ensureMessageSearchScopeValid() +if (!messageSearchOpen.value) return +closeTimeSidebar() +await nextTick() +try { + messageSearchInputRef.value?.focus?.() +} catch {} +await fetchMessageSearchIndexStatus() +await fetchMessageSearchSenders() +if (String(messageSearchQuery.value || '').trim()) { + await runMessageSearch({ reset: true }) +} +} + +let messageSearchReqId = 0 + +const runMessageSearch = async ({ reset } = {}) => { +if (!selectedAccount.value) return +ensureMessageSearchScopeValid() + +const q = String(messageSearchQuery.value || '').trim() +if (!q) { + messageSearchResults.value = [] + messageSearchHasMore.value = false + messageSearchError.value = '' + messageSearchSelectedIndex.value = -1 + messageSearchBackendStatus.value = '' + messageSearchTotal.value = 0 + stopMessageSearchIndexPolling() + return +} + +if (reset) { + messageSearchOffset.value = 0 + messageSearchResults.value = [] + messageSearchSelectedIndex.value = -1 +} + +const reqId = ++messageSearchReqId +messageSearchLoading.value = true +messageSearchError.value = '' +messageSearchBackendStatus.value = '' + +const scope = String(messageSearchScope.value || 'conversation') + +const params = { + account: selectedAccount.value, + q, + limit: messageSearchLimit, + offset: messageSearchOffset.value +} + +params.render_types = 'text' + +const range = String(messageSearchRangeDays.value 
|| '') +if (range === 'custom') { + const start = dateToUnixSeconds(messageSearchStartDate.value, false) + const end = dateToUnixSeconds(messageSearchEndDate.value, true) + if (start != null) params.start_time = start + if (end != null) params.end_time = end + if (start != null && end != null && start > end) { + messageSearchLoading.value = false + messageSearchError.value = '时间范围不合法:开始日期不能晚于结束日期' + return + } +} else { + const days = Number(range || 0) + if (days > 0 && Number.isFinite(days)) { + const end = Math.floor(Date.now() / 1000) + const start = Math.max(0, end - Math.floor(days * 24 * 3600)) + params.start_time = start + params.end_time = end + } +} + +if (scope === 'global') { + const st = String(messageSearchSessionType.value || '').trim() + if (st) params.session_type = st +} + +if (String(messageSearchSender.value || '').trim()) { + params.sender = String(messageSearchSender.value || '').trim() +} + +if (scope === 'conversation') { + if (!selectedContact.value?.username) { + messageSearchLoading.value = false + messageSearchError.value = '请选择一个会话再搜索' + return + } + params.username = selectedContact.value.username +} + +try { + const resp = await api.searchChatMessages(params) + if (reqId !== messageSearchReqId) return + + if (resp?.index) { + messageSearchIndexInfo.value = resp.index + } + + const status = String(resp?.status || 'success') + messageSearchBackendStatus.value = status + + if (status === 'index_building') { + if (reset) { + messageSearchResults.value = [] + messageSearchSelectedIndex.value = -1 + } + messageSearchHasMore.value = false + messageSearchTotal.value = 0 + ensureMessageSearchIndexPolling() + return + } + + if (status === 'index_error') { + if (reset) { + messageSearchResults.value = [] + messageSearchSelectedIndex.value = -1 + } + messageSearchHasMore.value = false + messageSearchTotal.value = 0 + messageSearchError.value = String(resp?.message || '索引错误') + stopMessageSearchIndexPolling() + return + } + + if (status !== 
'success') { + if (reset) { + messageSearchResults.value = [] + messageSearchSelectedIndex.value = -1 + } + messageSearchHasMore.value = false + messageSearchTotal.value = 0 + messageSearchError.value = String(resp?.message || '搜索失败') + stopMessageSearchIndexPolling() + return + } + + const hits = Array.isArray(resp?.hits) ? resp.hits : [] + if (reset) { + messageSearchResults.value = hits + } else { + messageSearchResults.value = [...messageSearchResults.value, ...hits] + } + messageSearchHasMore.value = !!resp?.hasMore + messageSearchTotal.value = Number(resp?.total ?? resp?.totalInScan ?? 0) + stopMessageSearchIndexPolling() + + if (messageSearchSelectedIndex.value < 0 && messageSearchResults.value.length) { + messageSearchSelectedIndex.value = 0 + } + + // 保存搜索历史(仅在有结果时保存) + if (!privacyMode.value && reset && hits.length > 0) { + saveSearchHistory(q) + } +} catch (e) { + if (reqId !== messageSearchReqId) return + messageSearchError.value = e?.message || '搜索失败' +} finally { + if (reqId === messageSearchReqId) { + messageSearchLoading.value = false + } +} +} + +const loadMoreSearchResults = async () => { +if (!messageSearchHasMore.value) return +if (messageSearchLoading.value) return +messageSearchOffset.value = Number(messageSearchOffset.value || 0) + messageSearchLimit +await runMessageSearch({ reset: false }) +} + +const exitSearchContext = async () => { +if (!searchContext.value?.active) return +const u = String(searchContext.value.username || '').trim() +const saved = searchContext.value.savedMessages +const savedMeta = searchContext.value.savedMeta + +if (u && saved) { + allMessages.value = { ...allMessages.value, [u]: saved } +} +if (u && savedMeta) { + messagesMeta.value = { ...messagesMeta.value, [u]: savedMeta } +} + +searchContext.value = { + active: false, + kind: 'search', + label: '', + username: '', + anchorId: '', + anchorIndex: -1, + hasMoreBefore: false, + hasMoreAfter: false, + loadingBefore: false, + loadingAfter: false, + savedMessages: null, 
+ savedMeta: null +} +highlightMessageId.value = '' +await nextTick() +updateJumpToBottomState() +} + +const locateSearchHit = async (hit) => { +if (!process.client) return +if (!selectedAccount.value) return +if (!hit?.id) return + +const targetUsername = String(hit?.username || selectedContact.value?.username || '').trim() +if (!targetUsername) return + +const targetContact = contacts.value.find((c) => c?.username === targetUsername) +if (targetContact && selectedContact.value?.username !== targetUsername) { + await selectContact(targetContact, { skipLoadMessages: true }) +} + +if (searchContext.value?.active && searchContext.value.username !== targetUsername) { + await exitSearchContext() +} + +if (!searchContext.value?.active) { + searchContext.value = { + active: true, + kind: 'search', + label: '', + username: targetUsername, + anchorId: String(hit.id), + anchorIndex: -1, + hasMoreBefore: true, + hasMoreAfter: true, + loadingBefore: false, + loadingAfter: false, + savedMessages: allMessages.value[targetUsername] || [], + savedMeta: messagesMeta.value[targetUsername] || null + } +} else { + searchContext.value.kind = 'search' + searchContext.value.label = '' + searchContext.value.anchorId = String(hit.id) + searchContext.value.hasMoreBefore = true + searchContext.value.hasMoreAfter = true + searchContext.value.loadingBefore = false + searchContext.value.loadingAfter = false +} + +try { + const resp = await api.getChatMessagesAround({ + account: selectedAccount.value, + username: targetUsername, + anchor_id: String(hit.id), + before: 35, + after: 35 + }) + + const raw = resp?.messages || [] + const mapped = raw.map(normalizeMessage) + allMessages.value = { ...allMessages.value, [targetUsername]: mapped } + messagesMeta.value = { ...messagesMeta.value, [targetUsername]: { total: mapped.length, hasMore: false } } + + searchContext.value.anchorId = String(resp?.anchorId || hit.id) + searchContext.value.anchorIndex = Number(resp?.anchorIndex ?? 
-1) + + const ok = await scrollToMessageId(searchContext.value.anchorId) + if (ok) flashMessage(searchContext.value.anchorId) +} catch (e) { + window.alert(e?.message || '定位失败') +} +} + +const locateByAnchorId = async ({ targetUsername, anchorId, kind, label } = {}) => { +if (!process.client) return +if (!selectedAccount.value) return +const u = String(targetUsername || selectedContact.value?.username || '').trim() +const anchor = String(anchorId || '').trim() +if (!u || !anchor) return + +const targetContact = contacts.value.find((c) => c?.username === u) +if (targetContact && selectedContact.value?.username !== u) { + await selectContact(targetContact, { skipLoadMessages: true }) +} + +if (searchContext.value?.active && searchContext.value.username !== u) { + await exitSearchContext() +} + +const kindNorm = String(kind || 'search').trim() || 'search' +const labelNorm = String(label || '').trim() +const hasMoreBeforeInit = kindNorm === 'first' ? false : true + +if (!searchContext.value?.active) { + searchContext.value = { + active: true, + kind: kindNorm, + label: labelNorm, + username: u, + anchorId: anchor, + anchorIndex: -1, + hasMoreBefore: hasMoreBeforeInit, + hasMoreAfter: true, + loadingBefore: false, + loadingAfter: false, + savedMessages: allMessages.value[u] || [], + savedMeta: messagesMeta.value[u] || null + } +} else { + searchContext.value.kind = kindNorm + searchContext.value.label = labelNorm + searchContext.value.anchorId = anchor + searchContext.value.username = u + searchContext.value.hasMoreBefore = hasMoreBeforeInit + searchContext.value.hasMoreAfter = true + searchContext.value.loadingBefore = false + searchContext.value.loadingAfter = false +} + +try { + const resp = await api.getChatMessagesAround({ + account: selectedAccount.value, + username: u, + anchor_id: anchor, + before: 35, + after: 35 + }) + + const raw = resp?.messages || [] + const mapped = raw.map(normalizeMessage) + allMessages.value = { ...allMessages.value, [u]: mapped } + 
messagesMeta.value = { ...messagesMeta.value, [u]: { total: mapped.length, hasMore: false } } + + searchContext.value.anchorId = String(resp?.anchorId || anchor) + searchContext.value.anchorIndex = Number(resp?.anchorIndex ?? -1) + + const ok = await scrollToMessageId(searchContext.value.anchorId) + if (ok) flashMessage(searchContext.value.anchorId) +} catch (e) { + window.alert(e?.message || '定位失败') +} +} + +const locateByDate = async (dateStr) => { +if (!process.client) return +if (!selectedAccount.value) return +if (!selectedContact.value?.username) return + +const ds = String(dateStr || '').trim() +if (!ds) return +await _applyTimeSidebarSelectedDate(ds, { syncMonth: true }) + +try { + const resp = await api.getChatMessageAnchor({ + account: selectedAccount.value, + username: selectedContact.value.username, + kind: 'day', + date: ds + }) + const status = String(resp?.status || '') + const anchorId = String(resp?.anchorId || '').trim() + if (status !== 'success' || !anchorId) { + window.alert('当日暂无聊天记录') + return + } + await locateByAnchorId({ targetUsername: selectedContact.value.username, anchorId, kind: 'date', label: ds }) +} catch (e) { + window.alert(e?.message || '定位失败') +} +} + +const jumpToConversationFirst = async () => { +if (!process.client) return +if (!selectedAccount.value) return +if (!selectedContact.value?.username) return + +try { + const resp = await api.getChatMessageAnchor({ + account: selectedAccount.value, + username: selectedContact.value.username, + kind: 'first' + }) + const status = String(resp?.status || '') + const anchorId = String(resp?.anchorId || '').trim() + if (status !== 'success' || !anchorId) { + window.alert('暂无聊天记录') + return + } + const ds = _dateStrFromEpochSeconds(Number(resp?.createTime || 0)) + if (ds) await _applyTimeSidebarSelectedDate(ds, { syncMonth: true }) + await locateByAnchorId({ targetUsername: selectedContact.value.username, anchorId, kind: 'first', label: '' }) +} catch (e) { + window.alert(e?.message || 
'定位失败') +} +} + +const onTimeSidebarDayClick = async (cell) => { +if (!cell || cell.disabled) return +const ds = String(cell.dateStr || '').trim() +if (!ds) return +await locateByDate(ds) +} + +const _mergeContextMessages = (username, nextList) => { +const u = String(username || '').trim() +if (!u) return +const list = Array.isArray(nextList) ? nextList : [] +allMessages.value = { ...allMessages.value, [u]: list } +// Keep meta aligned; context mode doesn't rely on hasMore from meta. +const prevMeta = messagesMeta.value[u] || null +messagesMeta.value = { + ...messagesMeta.value, + [u]: { + total: Math.max(Number(prevMeta?.total || 0), list.length), + hasMore: false + } +} +} + +const loadMoreSearchContextAfter = async () => { +if (!process.client) return +if (!selectedAccount.value) return +if (!searchContext.value?.active) return +if (searchContext.value.loadingAfter) return +if (!searchContext.value.hasMoreAfter) return + +const u = String(searchContext.value.username || selectedContact.value?.username || '').trim() +if (!u) return +const existing = allMessages.value[u] || [] +const last = Array.isArray(existing) && existing.length ? 
existing[existing.length - 1] : null +const anchorId = String(last?.id || '').trim() +if (!anchorId) { + searchContext.value.hasMoreAfter = false + return +} + +const ctxUsername = u +searchContext.value.loadingAfter = true +try { + const resp = await api.getChatMessagesAround({ + account: selectedAccount.value, + username: ctxUsername, + anchor_id: anchorId, + before: 0, + after: messagePageSize + }) + + if (!searchContext.value?.active || String(searchContext.value.username || '').trim() !== ctxUsername) return + + const raw = resp?.messages || [] + const mapped = raw.map(normalizeMessage) + + const existingIds = new Set(existing.map((m) => String(m?.id || ''))) + const appended = [] + for (const m of mapped) { + const id = String(m?.id || '').trim() + if (!id) continue + if (existingIds.has(id)) continue + existingIds.add(id) + appended.push(m) + } + + if (!appended.length) { + searchContext.value.hasMoreAfter = false + return + } + + _mergeContextMessages(ctxUsername, [...existing, ...appended]) +} catch (e) { + window.alert(e?.message || '加载更多消息失败') +} finally { + if (searchContext.value?.active && String(searchContext.value.username || '').trim() === ctxUsername) { + searchContext.value.loadingAfter = false + } +} +} + +const loadMoreSearchContextBefore = async () => { +if (!process.client) return +if (!selectedAccount.value) return +if (!searchContext.value?.active) return +if (searchContext.value.loadingBefore) return +if (!searchContext.value.hasMoreBefore) return + +const u = String(searchContext.value.username || selectedContact.value?.username || '').trim() +if (!u) return +const existing = allMessages.value[u] || [] +const first = Array.isArray(existing) && existing.length ? existing[0] : null +const anchorId = String(first?.id || '').trim() +if (!anchorId) { + searchContext.value.hasMoreBefore = false + return +} + +const c = messageContainerRef.value +const beforeScrollHeight = c ? c.scrollHeight : 0 +const beforeScrollTop = c ? 
c.scrollTop : 0 + +const ctxUsername = u +searchContext.value.loadingBefore = true +try { + const resp = await api.getChatMessagesAround({ + account: selectedAccount.value, + username: ctxUsername, + anchor_id: anchorId, + before: messagePageSize, + after: 0 + }) + + if (!searchContext.value?.active || String(searchContext.value.username || '').trim() !== ctxUsername) return + + const raw = resp?.messages || [] + const mapped = raw.map(normalizeMessage) + + const existingIds = new Set(existing.map((m) => String(m?.id || ''))) + const prepended = [] + for (const m of mapped) { + const id = String(m?.id || '').trim() + if (!id) continue + if (existingIds.has(id)) continue + existingIds.add(id) + prepended.push(m) + } + + if (!prepended.length) { + searchContext.value.hasMoreBefore = false + return + } + + _mergeContextMessages(ctxUsername, [...prepended, ...existing]) + + await nextTick() + const c2 = messageContainerRef.value + if (c2) { + const afterScrollHeight = c2.scrollHeight + c2.scrollTop = beforeScrollTop + (afterScrollHeight - beforeScrollHeight) + } +} catch (e) { + window.alert(e?.message || '加载更多消息失败') +} finally { + if (searchContext.value?.active && String(searchContext.value.username || '').trim() === ctxUsername) { + searchContext.value.loadingBefore = false + } +} +} + +const onSearchHitClick = async (hit, idx) => { +messageSearchSelectedIndex.value = Number(idx || 0) +await locateSearchHit(hit) +} + +const onSearchNext = async () => { +const q = String(messageSearchQuery.value || '').trim() +if (!q) return + +if (!messageSearchResults.value.length && !messageSearchLoading.value) { + await runMessageSearch({ reset: true }) +} +if (!messageSearchResults.value.length) return + +const cur = Number(messageSearchSelectedIndex.value || 0) +const next = (cur + 1) % messageSearchResults.value.length +messageSearchSelectedIndex.value = next +await locateSearchHit(messageSearchResults.value[next]) +} + +const onSearchPrev = async () => { +const q = 
String(messageSearchQuery.value || '').trim() +if (!q) return + +if (!messageSearchResults.value.length && !messageSearchLoading.value) { + await runMessageSearch({ reset: true }) +} +if (!messageSearchResults.value.length) return + +const cur = Number(messageSearchSelectedIndex.value || 0) +const prev = (cur - 1 + messageSearchResults.value.length) % messageSearchResults.value.length +messageSearchSelectedIndex.value = prev +await locateSearchHit(messageSearchResults.value[prev]) +} +const openMessageSearch = async () => { +closeTimeSidebar() +messageSearchOpen.value = true +ensureMessageSearchScopeValid() +await nextTick() +try { + messageSearchInputRef.value?.focus?.() +} catch {} +await fetchMessageSearchIndexStatus() +} +watch(messageSearchScope, async () => { +if (!messageSearchOpen.value) return +ensureMessageSearchScopeValid() +closeMessageSearchSenderDropdown() +messageSearchSender.value = '' +messageSearchSenderOptions.value = [] +messageSearchSenderOptionsKey.value = '' +await fetchMessageSearchSenders() +messageSearchOffset.value = 0 +messageSearchResults.value = [] +messageSearchSelectedIndex.value = -1 +if (String(messageSearchQuery.value || '').trim()) { + await runMessageSearch({ reset: true }) +} +}) + +watch(messageSearchRangeDays, async () => { +if (!messageSearchOpen.value) return +closeMessageSearchSenderDropdown() +messageSearchOffset.value = 0 +messageSearchResults.value = [] +messageSearchSelectedIndex.value = -1 +if (String(messageSearchQuery.value || '').trim()) { + await runMessageSearch({ reset: true }) +} +}) + +watch(messageSearchSessionType, async () => { +if (!messageSearchOpen.value) return +if (String(messageSearchScope.value || '') !== 'global') return +closeMessageSearchSenderDropdown() +messageSearchSender.value = '' +messageSearchSenderOptions.value = [] +messageSearchSenderOptionsKey.value = '' +await fetchMessageSearchSenders() +messageSearchOffset.value = 0 +messageSearchResults.value = [] +messageSearchSelectedIndex.value = 
-1 +if (String(messageSearchQuery.value || '').trim()) { + await runMessageSearch({ reset: true }) +} +}) + +watch([messageSearchStartDate, messageSearchEndDate], async () => { +if (!messageSearchOpen.value) return +if (String(messageSearchRangeDays.value || '') !== 'custom') return +closeMessageSearchSenderDropdown() +messageSearchOffset.value = 0 +messageSearchResults.value = [] +messageSearchSelectedIndex.value = -1 +if (String(messageSearchQuery.value || '').trim()) { + await runMessageSearch({ reset: true }) +} +}) + +watch(messageSearchSender, async () => { +if (!messageSearchOpen.value) return +messageSearchOffset.value = 0 +messageSearchResults.value = [] +messageSearchSelectedIndex.value = -1 +if (String(messageSearchQuery.value || '').trim()) { + await runMessageSearch({ reset: true }) +} +}) + +watch(messageSearchQuery, () => { +if (!messageSearchOpen.value) return +if (messageSearchDebounceTimer) clearTimeout(messageSearchDebounceTimer) +messageSearchDebounceTimer = null +const q = String(messageSearchQuery.value || '').trim() +if (q.length < 2) return +messageSearchDebounceTimer = setTimeout(() => { + runMessageSearch({ reset: true }) +}, 280) +}) + +watch( +() => selectedContact.value?.username, +async () => { + if (!messageSearchOpen.value) return + if (String(messageSearchScope.value || '') !== 'conversation') return + closeMessageSearchSenderDropdown() + messageSearchSender.value = '' + messageSearchSenderOptions.value = [] + messageSearchSenderOptionsKey.value = '' + await fetchMessageSearchSenders() + if (String(messageSearchQuery.value || '').trim()) { + await runMessageSearch({ reset: true }) + } +} +) + +const autoLoadReady = ref(true) + +let timeSidebarScrollSyncRaf = null +const syncTimeSidebarSelectedDateFromScroll = () => { +if (!process.client) return +if (!timeSidebarOpen.value) return +if (!selectedContact.value) return + +const c = messageContainerRef.value +if (!c) return + +if (timeSidebarScrollSyncRaf) return 
+timeSidebarScrollSyncRaf = requestAnimationFrame(() => { + timeSidebarScrollSyncRaf = null + try { + const containerRect = c.getBoundingClientRect() + const targetY = containerRect.top + 24 + const els = c.querySelectorAll?.('[data-msg-id][data-create-time]') || [] + if (!els || !els.length) return + + let chosen = null + for (const el of els) { + const r = el.getBoundingClientRect?.() + if (!r) continue + if (r.bottom >= targetY) { + chosen = el + break + } + } + if (!chosen) chosen = els[els.length - 1] + const ts = Number(chosen?.getAttribute?.('data-create-time') || 0) + const ds = _dateStrFromEpochSeconds(ts) + if (!ds) return + // Don't await inside rAF; keep scroll handler snappy. + _applyTimeSidebarSelectedDate(ds, { syncMonth: true }) + } catch {} +}) +} + +const contextAutoLoadTopReady = ref(true) +const contextAutoLoadBottomReady = ref(true) + +const onMessageScrollInContextMode = async () => { +const c = messageContainerRef.value +if (!c) return +if (!searchContext.value?.active) return + +const distBottom = c.scrollHeight - c.scrollTop - c.clientHeight + +// Reset "ready" gates when user scrolls away from edges. +if (c.scrollTop > 160) contextAutoLoadTopReady.value = true +if (distBottom > 160) contextAutoLoadBottomReady.value = true + +if (c.scrollTop <= 60 && contextAutoLoadTopReady.value && searchContext.value.hasMoreBefore && !searchContext.value.loadingBefore) { + contextAutoLoadTopReady.value = false + await loadMoreSearchContextBefore() + return +} + +if (distBottom <= 80 && contextAutoLoadBottomReady.value && searchContext.value.hasMoreAfter && !searchContext.value.loadingAfter) { + contextAutoLoadBottomReady.value = false + await loadMoreSearchContextAfter() +} +} + +const onMessageScroll = async () => { +const c = messageContainerRef.value +if (!c) return +updateJumpToBottomState() +if (!selectedContact.value) return + +// Keep the time sidebar selection in sync with the current viewport. 
+syncTimeSidebarSelectedDateFromScroll() + +if (searchContext.value?.active) { + await onMessageScrollInContextMode() + return +} + +if (c.scrollTop > 120) { + autoLoadReady.value = true + return +} + +if (c.scrollTop <= 60 && autoLoadReady.value && hasMoreMessages.value && !isLoadingMessages.value) { + autoLoadReady.value = false + await loadMoreMessages() +} +} + + const resetSearchState = () => { + closeMessageSearch() + closeTimeSidebar() + timeSidebarYear.value = null + timeSidebarMonth.value = null + _applyTimeSidebarMonthData({ counts: {}, max: 0, total: 0 }) + timeSidebarError.value = '' + timeSidebarSelectedDate.value = '' + messageSearchResults.value = [] + messageSearchOffset.value = 0 + messageSearchHasMore.value = false + messageSearchBackendStatus.value = '' + messageSearchTotal.value = 0 + messageSearchIndexInfo.value = null + messageSearchSelectedIndex.value = -1 + searchContext.value = createEmptySearchContext() + highlightMessageId.value = '' + } + + onMounted(() => { + loadSearchHistory() + }) + + onUnmounted(() => { + if (messageSearchDebounceTimer) clearTimeout(messageSearchDebounceTimer) + messageSearchDebounceTimer = null + stopMessageSearchIndexPolling() + if (timeSidebarScrollSyncRaf) { + cancelAnimationFrame(timeSidebarScrollSyncRaf) + timeSidebarScrollSyncRaf = null + } + }) + + return { + messageSearchOpen, + messageSearchQuery, + messageSearchScope, + messageSearchRangeDays, + messageSearchSessionType, + messageSearchSender, + messageSearchSenderOptions, + messageSearchSenderLoading, + messageSearchSenderError, + messageSearchSenderOptionsKey, + messageSearchSenderDropdownOpen, + messageSearchSenderDropdownRef, + messageSearchSenderDropdownInputRef, + messageSearchSenderDropdownQuery, + messageSearchStartDate, + messageSearchEndDate, + messageSearchResults, + messageSearchLoading, + messageSearchError, + messageSearchBackendStatus, + messageSearchIndexInfo, + messageSearchHasMore, + messageSearchOffset, + messageSearchTotal, + 
messageSearchSelectedIndex, + messageSearchInputRef, + searchInputFocused, + showAdvancedFilters, + searchHistory, + messageSearchIndexExists, + messageSearchIndexReady, + messageSearchIndexBuildStatus, + messageSearchIndexBuildIndexed, + messageSearchIndexMetaCount, + messageSearchIndexProgressText, + messageSearchIndexText, + messageSearchIndexActionText, + messageSearchIndexActionDisabled, + messageSearchSenderDisabled, + messageSearchSelectedSenderInfo, + messageSearchSelectedSenderInitial, + messageSearchSenderLabel, + filteredMessageSearchSenderOptions, + searchContextBannerText, + timeSidebarOpen, + timeSidebarYear, + timeSidebarMonth, + timeSidebarCounts, + timeSidebarMax, + timeSidebarTotal, + timeSidebarLoading, + timeSidebarError, + timeSidebarSelectedDate, + timeSidebarWeekdays, + timeSidebarMonthLabel, + timeSidebarYearOptions, + timeSidebarActiveDays, + timeSidebarCalendarCells, + getMessageSearchHitAvatarUrl, + getMessageSearchHitAvatarAlt, + getMessageSearchHitAvatarInitial, + closeMessageSearchSenderDropdown, + ensureMessageSearchSendersLoaded, + toggleMessageSearchSenderDropdown, + selectMessageSearchSender, + fetchMessageSearchIndexStatus, + fetchMessageSearchSenders, + onMessageSearchIndexAction, + closeMessageSearch, + closeTimeSidebar, + loadTimeSidebarMonth, + toggleTimeSidebar, + prevTimeSidebarMonth, + nextTimeSidebarMonth, + onTimeSidebarYearMonthChange, + toggleMessageSearch, + openMessageSearch, + runMessageSearch, + loadMoreSearchResults, + exitSearchContext, + locateSearchHit, + locateByAnchorId, + locateByDate, + jumpToConversationFirst, + onTimeSidebarDayClick, + loadMoreSearchContextAfter, + loadMoreSearchContextBefore, + onSearchHitClick, + onSearchNext, + onSearchPrev, + syncTimeSidebarSelectedDateFromScroll, + onMessageScrollInContextMode, + onMessageScroll, + clearSearchHistory, + applySearchHistory, + ensureMessageSearchScopeValid, + resetSearchState + } +} diff --git a/frontend/composables/chat/useChatSessions.js 
b/frontend/composables/chat/useChatSessions.js new file mode 100644 index 0000000..9010b15 --- /dev/null +++ b/frontend/composables/chat/useChatSessions.js @@ -0,0 +1,289 @@ +import { computed, onMounted, ref } from 'vue' +import { normalizeSessionPreview } from '~/lib/chat/formatters' + +const SESSION_LIST_WIDTH_KEY = 'ui.chat.session_list_width_physical' +const SESSION_LIST_WIDTH_KEY_LEGACY = 'ui.chat.session_list_width' +const SESSION_LIST_WIDTH_DEFAULT = 295 +const SESSION_LIST_WIDTH_MIN = 220 +const SESSION_LIST_WIDTH_MAX = 520 + +export const useChatSessions = ({ chatAccounts, selectedAccount, realtimeEnabled, api }) => { + const showSearchAccountSwitcher = false + + const contacts = ref([]) + const selectedContact = ref(null) + const searchQuery = ref('') + const isLoadingContacts = ref(false) + const contactsError = ref('') + + const sessionListWidth = ref(SESSION_LIST_WIDTH_DEFAULT) + const sessionListResizing = ref(false) + + let sessionListResizeStartX = 0 + let sessionListResizeStartWidth = SESSION_LIST_WIDTH_DEFAULT + let sessionListResizeStartDpr = 1 + let sessionListResizePrevCursor = '' + let sessionListResizePrevUserSelect = '' + + const availableAccounts = computed(() => { + return Array.isArray(chatAccounts?.accounts) ? chatAccounts.accounts : [] + }) + + const clampSessionListWidth = (value) => { + const next = Number.isFinite(value) ? 
value : SESSION_LIST_WIDTH_DEFAULT + return Math.min(SESSION_LIST_WIDTH_MAX, Math.max(SESSION_LIST_WIDTH_MIN, Math.round(next))) + } + + const loadSessionListWidth = () => { + if (!process.client) return + try { + const raw = localStorage.getItem(SESSION_LIST_WIDTH_KEY) + const value = parseInt(String(raw || ''), 10) + if (!Number.isNaN(value)) { + sessionListWidth.value = clampSessionListWidth(value) + return + } + + const legacy = localStorage.getItem(SESSION_LIST_WIDTH_KEY_LEGACY) + const legacyValue = parseInt(String(legacy || ''), 10) + if (!Number.isNaN(legacyValue)) { + const dpr = window.devicePixelRatio || 1 + const converted = clampSessionListWidth(legacyValue * dpr) + sessionListWidth.value = converted + try { + localStorage.setItem(SESSION_LIST_WIDTH_KEY, String(converted)) + localStorage.removeItem(SESSION_LIST_WIDTH_KEY_LEGACY) + } catch {} + } + } catch {} + } + + const saveSessionListWidth = () => { + if (!process.client) return + try { + localStorage.setItem(SESSION_LIST_WIDTH_KEY, String(clampSessionListWidth(sessionListWidth.value))) + } catch {} + } + + const setSessionListResizingActive = (active) => { + if (!process.client) return + try { + const body = document.body + if (!body) return + if (active) { + sessionListResizePrevCursor = body.style.cursor || '' + sessionListResizePrevUserSelect = body.style.userSelect || '' + body.style.cursor = 'col-resize' + body.style.userSelect = 'none' + } else { + body.style.cursor = sessionListResizePrevCursor + body.style.userSelect = sessionListResizePrevUserSelect + sessionListResizePrevCursor = '' + sessionListResizePrevUserSelect = '' + } + } catch {} + } + + const onSessionListResizerPointerMove = (event) => { + if (!sessionListResizing.value) return + const clientX = Number(event?.clientX || 0) + sessionListWidth.value = clampSessionListWidth( + sessionListResizeStartWidth + (clientX - sessionListResizeStartX) * (sessionListResizeStartDpr || 1) + ) + } + + const stopSessionListResize = () => { + if 
(!process.client) return + if (!sessionListResizing.value) return + sessionListResizing.value = false + setSessionListResizingActive(false) + try { + window.removeEventListener('pointermove', onSessionListResizerPointerMove) + } catch {} + saveSessionListWidth() + } + + const onSessionListResizerPointerUp = () => { + stopSessionListResize() + } + + const onSessionListResizerPointerDown = (event) => { + if (!process.client) return + try { + event?.preventDefault?.() + } catch {} + + sessionListResizing.value = true + sessionListResizeStartX = Number(event?.clientX || 0) + sessionListResizeStartWidth = Number(sessionListWidth.value || SESSION_LIST_WIDTH_DEFAULT) + sessionListResizeStartDpr = window.devicePixelRatio || 1 + setSessionListResizingActive(true) + + try { + window.addEventListener('pointermove', onSessionListResizerPointerMove) + window.addEventListener('pointerup', onSessionListResizerPointerUp, { once: true }) + } catch {} + } + + const resetSessionListWidth = () => { + sessionListWidth.value = SESSION_LIST_WIDTH_DEFAULT + saveSessionListWidth() + } + + onMounted(() => { + loadSessionListWidth() + }) + + const filteredContacts = computed(() => { + const query = String(searchQuery.value || '').trim().toLowerCase() + if (!query) return contacts.value + return contacts.value.filter((contact) => { + const name = String(contact?.name || '').toLowerCase() + const username = String(contact?.username || '').toLowerCase() + return name.includes(query) || username.includes(query) + }) + }) + + const mapSessions = (sessions) => { + return sessions.map((session) => ({ + id: session.id, + name: session.name || session.username || session.id, + avatar: session.avatar || null, + lastMessage: normalizeSessionPreview(session.lastMessage || ''), + lastMessageTime: session.lastMessageTime || '', + unreadCount: session.unreadCount || 0, + isGroup: !!session.isGroup, + isTop: !!session.isTop, + username: session.username + })) + } + + const clearContactsState = (errorMessage 
= '') => { + contacts.value = [] + selectedContact.value = null + contactsError.value = errorMessage + } + + const loadSessionsForSelectedAccount = async () => { + if (!selectedAccount.value) { + clearContactsState('') + return [] + } + + const fetchSessions = async (source) => { + const params = { + account: selectedAccount.value, + limit: 400, + include_hidden: false, + include_official: false + } + if (source) params.source = source + return api.listChatSessions(params) + } + + let sessionsResp = null + if (realtimeEnabled?.value) { + try { + sessionsResp = await fetchSessions('realtime') + } catch { + sessionsResp = null + } + } + if (!sessionsResp) { + sessionsResp = await fetchSessions('') + } + + const sessions = Array.isArray(sessionsResp?.sessions) ? sessionsResp.sessions : [] + contacts.value = mapSessions(sessions) + contactsError.value = '' + return contacts.value + } + + const refreshSessionsForSelectedAccount = async ({ sourceOverride } = {}) => { + if (!process.client || typeof window === 'undefined') return + if (!selectedAccount.value) return + if (isLoadingContacts.value) return + + const previousUsername = selectedContact.value?.username || '' + const desiredSource = (sourceOverride != null) + ? String(sourceOverride || '').trim() + : (realtimeEnabled?.value ? 'realtime' : '') + + const params = { + account: selectedAccount.value, + limit: 400, + include_hidden: false, + include_official: false + } + + let sessionsResp = null + if (desiredSource) { + try { + sessionsResp = await api.listChatSessions({ ...params, source: desiredSource }) + } catch { + sessionsResp = null + } + } + if (!sessionsResp) { + try { + sessionsResp = await api.listChatSessions(params) + } catch { + return + } + } + + const sessions = Array.isArray(sessionsResp?.sessions) ? 
sessionsResp.sessions : [] + const nextContacts = mapSessions(sessions) + contacts.value = nextContacts + + if (previousUsername) { + const matched = nextContacts.find((contact) => contact.username === previousUsername) + if (matched) selectedContact.value = matched + } + } + + const loadContacts = async () => { + if (contacts.value.length && !isLoadingContacts.value) { + return { usedPrefetched: true } + } + + isLoadingContacts.value = true + contactsError.value = '' + try { + await chatAccounts.ensureLoaded() + + if (!selectedAccount.value) { + clearContactsState(chatAccounts.error || '未检测到已解密账号,请先解密数据库。') + return { usedPrefetched: false } + } + + await loadSessionsForSelectedAccount() + return { usedPrefetched: false } + } catch (error) { + clearContactsState(error?.message || '加载联系人失败') + return { usedPrefetched: false } + } finally { + isLoadingContacts.value = false + } + } + + return { + showSearchAccountSwitcher, + availableAccounts, + contacts, + selectedContact, + searchQuery, + filteredContacts, + isLoadingContacts, + contactsError, + sessionListWidth, + sessionListResizing, + clearContactsState, + loadContacts, + loadSessionsForSelectedAccount, + refreshSessionsForSelectedAccount, + onSessionListResizerPointerDown, + stopSessionListResize, + resetSessionListWidth + } +} diff --git a/frontend/composables/useApi.js b/frontend/composables/useApi.js index 9c4aa57..5d73eb4 100644 --- a/frontend/composables/useApi.js +++ b/frontend/composables/useApi.js @@ -1,20 +1,28 @@ +import { reportServerError } from '~/lib/server-error-logging' + // API请求组合式函数 export const useApi = () => { - const config = useRuntimeConfig() + const baseURL = useApiBase() // 基础请求函数 const request = async (url, options = {}) => { try { - // 在客户端使用完整的API路径 - const baseURL = process.client ? 
'http://localhost:8000/api' : '/api' - const response = await $fetch(url, { baseURL, ...options, - onResponseError({ response }) { + async onResponseError({ response }) { if (response.status === 400) { throw new Error(response._data?.detail || '请求参数错误') - } else if (response.status === 500) { + } else if (response.status >= 500) { + await reportServerError({ + status: response.status, + method: options?.method || 'GET', + requestUrl: url, + message: '服务器错误,请稍后重试', + backendDetail: response._data?.detail || '', + source: 'useApi', + apiBase: baseURL, + }) throw new Error('服务器错误,请稍后重试') } } @@ -63,6 +71,22 @@ export const useApi = () => { return await request('/chat/accounts') } + const getChatAccountInfo = async (params = {}) => { + const query = new URLSearchParams() + if (params && params.account) query.set('account', params.account) + const url = '/chat/account_info' + (query.toString() ? `?${query.toString()}` : '') + return await request(url) + } + + const deleteChatAccount = async (params = {}) => { + const account = String(params?.account || '').trim() + if (!account) throw new Error('Missing account') + const query = new URLSearchParams() + query.set('account', account) + const url = '/chat/account' + (query.toString() ? `?${query.toString()}` : '') + return await request(url, { method: 'DELETE' }) + } + const listChatSessions = async (params = {}) => { const query = new URLSearchParams() if (params && params.account) query.set('account', params.account) @@ -87,6 +111,75 @@ export const useApi = () => { return await request(url) } + const getChatMessageRaw = async (params = {}) => { + const query = new URLSearchParams() + if (params && params.account) query.set('account', params.account) + if (params && params.username) query.set('username', params.username) + if (params && params.message_id) query.set('message_id', params.message_id) + const url = '/chat/messages/raw' + (query.toString() ? 
`?${query.toString()}` : '') + return await request(url) + } + + const editChatMessage = async (payload = {}) => { + return await request('/chat/messages/edit', { + method: 'POST', + body: payload + }) + } + + const repairChatMessageSender = async (payload = {}) => { + return await request('/chat/messages/repair_sender', { + method: 'POST', + body: payload + }) + } + + // Flip message direction in the WeChat client by swapping packed_info_data (unsafe, but undoable via reset). + const flipChatMessageDirection = async (payload = {}) => { + return await request('/chat/messages/flip_direction', { + method: 'POST', + body: payload + }) + } + + const listChatEditedSessions = async (params = {}) => { + const query = new URLSearchParams() + if (params && params.account) query.set('account', params.account) + const url = '/chat/edits/sessions' + (query.toString() ? `?${query.toString()}` : '') + return await request(url) + } + + const listChatEditedMessages = async (params = {}) => { + const query = new URLSearchParams() + if (params && params.account) query.set('account', params.account) + if (params && params.username) query.set('username', params.username) + const url = '/chat/edits/messages' + (query.toString() ? `?${query.toString()}` : '') + return await request(url) + } + + const getChatEditStatus = async (params = {}) => { + const query = new URLSearchParams() + if (params && params.account) query.set('account', params.account) + if (params && params.username) query.set('username', params.username) + if (params && params.message_id) query.set('message_id', params.message_id) + const url = '/chat/edits/message_status' + (query.toString() ? 
`?${query.toString()}` : '') + return await request(url) + } + + const resetChatEditedMessage = async (payload = {}) => { + return await request('/chat/edits/reset_message', { + method: 'POST', + body: payload + }) + } + + const resetChatEditedSession = async (payload = {}) => { + return await request('/chat/edits/reset_session', { + method: 'POST', + body: payload + }) + } + const getChatRealtimeStatus = async (params = {}) => { const query = new URLSearchParams() if (params && params.account) query.set('account', params.account) @@ -99,6 +192,7 @@ export const useApi = () => { if (params && params.account) query.set('account', params.account) if (params && params.username) query.set('username', params.username) if (params && params.max_scan != null) query.set('max_scan', String(params.max_scan)) + if (params && params.backfill_limit != null) query.set('backfill_limit', String(params.backfill_limit)) const url = '/chat/realtime/sync' + (query.toString() ? `?${query.toString()}` : '') return await request(url, { method: 'POST' }) } @@ -179,6 +273,96 @@ export const useApi = () => { return await request(url) } + // 聊天记录日历热力图:某月每日消息数 + const getChatMessageDailyCounts = async (params = {}) => { + const query = new URLSearchParams() + if (params && params.account) query.set('account', params.account) + if (params && params.username) query.set('username', params.username) + if (params && params.year != null) query.set('year', String(params.year)) + if (params && params.month != null) query.set('month', String(params.month)) + const url = '/chat/messages/daily_counts' + (query.toString() ? 
`?${query.toString()}` : '') + return await request(url) + } + + // 聊天记录定位锚点:某日第一条 / 会话最早一条 + const getChatMessageAnchor = async (params = {}) => { + const query = new URLSearchParams() + if (params && params.account) query.set('account', params.account) + if (params && params.username) query.set('username', params.username) + if (params && params.kind) query.set('kind', String(params.kind)) + if (params && params.date) query.set('date', String(params.date)) + const url = '/chat/messages/anchor' + (query.toString() ? `?${query.toString()}` : '') + return await request(url) + } + + // 解析嵌套合并转发聊天记录(通过 server_id) + const resolveNestedChatHistory = async (params = {}) => { + const query = new URLSearchParams() + if (params && params.account) query.set('account', params.account) + if (params && params.server_id != null) query.set('server_id', String(params.server_id)) + const url = '/chat/chat_history/resolve' + (query.toString() ? `?${query.toString()}` : '') + return await request(url) + } + + // 解析卡片/小程序等 App 消息(通过 server_id) + const resolveAppMsg = async (params = {}) => { + const query = new URLSearchParams() + if (params && params.account) query.set('account', params.account) + if (params && params.server_id != null) query.set('server_id', String(params.server_id)) + const url = '/chat/appmsg/resolve' + (query.toString() ? 
`?${query.toString()}` : '') + return await request(url) + } + + // 朋友圈时间线 + const listSnsTimeline = async (params = {}) => { + const query = new URLSearchParams() + if (params && params.account) query.set('account', params.account) + if (params && params.limit != null) query.set('limit', String(params.limit)) + if (params && params.offset != null) query.set('offset', String(params.offset)) + if (params && params.usernames && Array.isArray(params.usernames) && params.usernames.length > 0) { + query.set('usernames', params.usernames.join(',')) + } else if (params && params.usernames && typeof params.usernames === 'string') { + query.set('usernames', params.usernames) + } + if (params && params.keyword) query.set('keyword', params.keyword) + const url = '/sns/timeline' + (query.toString() ? `?${query.toString()}` : '') + return await request(url) + } + + // 朋友圈联系人列表(按发圈数统计) + const listSnsUsers = async (params = {}) => { + const query = new URLSearchParams() + if (params && params.account) query.set('account', params.account) + if (params && params.keyword) query.set('keyword', String(params.keyword)) + if (params && params.limit != null) query.set('limit', String(params.limit)) + const url = '/sns/users' + (query.toString() ? `?${query.toString()}` : '') + return await request(url) + } + + // 朋友圈图片本地缓存候选(用于错图时手动选择) + const listSnsMediaCandidates = async (params = {}) => { + const query = new URLSearchParams() + if (params && params.account) query.set('account', params.account) + if (params && params.create_time != null) query.set('create_time', String(params.create_time)) + if (params && params.width != null) query.set('width', String(params.width)) + if (params && params.height != null) query.set('height', String(params.height)) + if (params && params.limit != null) query.set('limit', String(params.limit)) + if (params && params.offset != null) query.set('offset', String(params.offset)) + const url = '/sns/media_candidates' + (query.toString() ? 
`?${query.toString()}` : '') + return await request(url) + } + + // 保存朋友圈图片手动匹配结果(本机) + const saveSnsMediaPicks = async (data = {}) => { + return await request('/sns/media_picks', { + method: 'POST', + body: { + account: data.account || null, + picks: (data && data.picks && typeof data.picks === 'object' && !Array.isArray(data.picks)) ? data.picks : {} + } + }) + } + const openChatMediaFolder = async (params = {}) => { const query = new URLSearchParams() if (params && params.account) query.set('account', params.account) @@ -251,7 +435,10 @@ export const useApi = () => { message_types: Array.isArray(data.message_types) ? data.message_types : [], include_media: data.include_media == null ? true : !!data.include_media, media_kinds: Array.isArray(data.media_kinds) ? data.media_kinds : ['image', 'emoji', 'video', 'video_thumb', 'voice', 'file'], + output_dir: data.output_dir == null ? null : String(data.output_dir || '').trim(), allow_process_key_extract: !!data.allow_process_key_extract, + download_remote_media: !!data.download_remote_media, + html_page_size: data.html_page_size != null ? Number(data.html_page_size) : 1000, privacy_mode: !!data.privacy_mode, file_name: data.file_name || null } @@ -271,15 +458,128 @@ export const useApi = () => { if (!exportId) throw new Error('Missing exportId') return await request(`/chat/exports/${encodeURIComponent(String(exportId))}`, { method: 'DELETE' }) } - + + // 朋友圈导出(离线 HTML zip) + const createSnsExport = async (data = {}) => { + return await request('/sns/exports', { + method: 'POST', + body: { + account: data.account || null, + scope: data.scope || 'selected', + usernames: Array.isArray(data.usernames) ? data.usernames : [], + use_cache: data.use_cache == null ? true : !!data.use_cache, + output_dir: data.output_dir == null ? 
null : String(data.output_dir || '').trim(), + file_name: data.file_name || null + } + }) + } + + const getSnsExport = async (exportId) => { + if (!exportId) throw new Error('Missing exportId') + return await request(`/sns/exports/${encodeURIComponent(String(exportId))}`) + } + + const cancelSnsExport = async (exportId) => { + if (!exportId) throw new Error('Missing exportId') + return await request(`/sns/exports/${encodeURIComponent(String(exportId))}`, { method: 'DELETE' }) + } + + // 联系人 + const listChatContacts = async (params = {}) => { + const query = new URLSearchParams() + if (params && params.account) query.set('account', params.account) + if (params && params.keyword) query.set('keyword', params.keyword) + if (params && params.include_friends != null) query.set('include_friends', String(!!params.include_friends)) + if (params && params.include_groups != null) query.set('include_groups', String(!!params.include_groups)) + if (params && params.include_officials != null) query.set('include_officials', String(!!params.include_officials)) + const url = '/chat/contacts' + (query.toString() ? `?${query.toString()}` : '') + return await request(url) + } + + const exportChatContacts = async (payload = {}) => { + return await request('/chat/contacts/export', { + method: 'POST', + body: { + account: payload.account || null, + output_dir: payload.output_dir || '', + format: payload.format || 'json', + include_avatar_link: payload.include_avatar_link == null ? true : !!payload.include_avatar_link, + keyword: payload.keyword || null, + contact_types: { + friends: payload?.contact_types?.friends == null ? true : !!payload.contact_types.friends, + groups: payload?.contact_types?.groups == null ? true : !!payload.contact_types.groups, + officials: payload?.contact_types?.officials == null ? 
true : !!payload.contact_types.officials, + } + } + }) + } + + // WeChat Wrapped(年度总结) + const getWrappedAnnual = async (params = {}) => { + const query = new URLSearchParams() + if (params && params.year != null) query.set('year', String(params.year)) + if (params && params.account) query.set('account', String(params.account)) + if (params && params.refresh != null) query.set('refresh', String(!!params.refresh)) + const url = '/wrapped/annual' + (query.toString() ? `?${query.toString()}` : '') + return await request(url) + } + + // WeChat Wrapped(年度总结)- 目录/元信息(轻量,用于按页懒加载) + const getWrappedAnnualMeta = async (params = {}) => { + const query = new URLSearchParams() + if (params && params.year != null) query.set('year', String(params.year)) + if (params && params.account) query.set('account', String(params.account)) + if (params && params.refresh != null) query.set('refresh', String(!!params.refresh)) + const url = '/wrapped/annual/meta' + (query.toString() ? `?${query.toString()}` : '') + return await request(url) + } + + // WeChat Wrapped(年度总结)- 单张卡片(按页加载) + const getWrappedAnnualCard = async (cardId, params = {}) => { + if (cardId == null) throw new Error('Missing cardId') + const query = new URLSearchParams() + if (params && params.year != null) query.set('year', String(params.year)) + if (params && params.account) query.set('account', String(params.account)) + if (params && params.refresh != null) query.set('refresh', String(!!params.refresh)) + const safeId = encodeURIComponent(String(cardId)) + const url = `/wrapped/annual/cards/${safeId}` + (query.toString() ? 
`?${query.toString()}` : '') + return await request(url) + } + + // 获取微信进程状态 + const getWxStatus = async () => { + return await request('/wechat/status') + } + + // 获取数据库密钥 + const getKeys = async () => { + return await request('/get_keys') + } + + // 获取图片密钥 + const getImageKey = async () => { + return await request('/get_image_key') + } + return { detectWechat, detectCurrentAccount, decryptDatabase, healthCheck, listChatAccounts, + getChatAccountInfo, + deleteChatAccount, listChatSessions, listChatMessages, + getChatMessageRaw, + editChatMessage, + repairChatMessageSender, + flipChatMessageDirection, + listChatEditedSessions, + listChatEditedMessages, + getChatEditStatus, + resetChatEditedMessage, + resetChatEditedSession, getChatRealtimeStatus, syncChatRealtimeMessages, syncChatRealtimeAll, @@ -288,6 +588,14 @@ export const useApi = () => { buildChatSearchIndex, listChatSearchSenders, getChatMessagesAround, + getChatMessageDailyCounts, + getChatMessageAnchor, + resolveNestedChatHistory, + resolveAppMsg, + listSnsTimeline, + listSnsUsers, + listSnsMediaCandidates, + saveSnsMediaPicks, openChatMediaFolder, downloadChatEmoji, saveMediaKeys, @@ -296,6 +604,17 @@ export const useApi = () => { createChatExport, getChatExport, listChatExports, - cancelChatExport + cancelChatExport, + createSnsExport, + getSnsExport, + cancelSnsExport, + listChatContacts, + exportChatContacts, + getWrappedAnnual, + getWrappedAnnualMeta, + getWrappedAnnualCard, + getKeys, + getImageKey, + getWxStatus, } } diff --git a/frontend/composables/useApiBase.js b/frontend/composables/useApiBase.js new file mode 100644 index 0000000..f18dbc0 --- /dev/null +++ b/frontend/composables/useApiBase.js @@ -0,0 +1,45 @@ +import { normalizeApiBase, readApiBaseOverride } from '~/lib/api-settings' + +// Client-side cache so that useApiBase() can be called safely outside +// the Nuxt composable context (e.g. inside async callbacks / onMounted chains). 
+let _clientCache = '' + +const shouldIgnoreStoredOverride = () => { + if (!process.client || !import.meta.dev) return false + return typeof window !== 'undefined' && !!window.wechatDesktop?.__brand +} + +export const useApiBase = () => { + if (process.client && _clientCache) return _clientCache + + // useRuntimeConfig() requires the Nuxt app context, which is only + // guaranteed during synchronous setup. On the client we cache the + // result so later (context-less) calls still work. + let config + try { + config = useRuntimeConfig() + } catch { + // Context unavailable – fall back to cached value or default. + return _clientCache || '/api' + } + + // Default to same-origin `/api` so Nuxt devProxy / backend-mounted UI both work. + // Override priority: + // 1) Local UI setting (web + desktop) + // 2) NUXT_PUBLIC_API_BASE env/runtime config + // 3) `/api` + const override = process.client && !shouldIgnoreStoredOverride() ? readApiBaseOverride() : '' + const runtime = String(config?.public?.apiBase || '').trim() + const result = normalizeApiBase(override || runtime || '/api') + + if (process.client) _clientCache = result + return result +} + +/** + * Call this when the user changes the API base override in settings + * so the cached value is refreshed. 
+ */ +export const invalidateApiBaseCache = () => { + _clientCache = '' +} diff --git a/frontend/composables/useDesktopUpdate.js b/frontend/composables/useDesktopUpdate.js new file mode 100644 index 0000000..676682e --- /dev/null +++ b/frontend/composables/useDesktopUpdate.js @@ -0,0 +1,236 @@ +let listenersInitialized = false; +let removeListeners = []; + +const getDesktopApi = () => { + if (!process.client) return null; + if (typeof window === "undefined") return null; + return window?.wechatDesktop || null; +}; + +const isDesktopShell = () => !!getDesktopApi(); + +const isUpdaterSupported = () => { + const api = getDesktopApi(); + if (!api) return false; + + // If the bridge exposes a brand marker, ensure it's our Electron shell. + if (api.__brand && api.__brand !== "WeChatDataAnalysisDesktop") return false; + + // Require updater IPC to avoid showing update UI in the pure web build. + return ( + typeof api.getVersion === "function" && + typeof api.checkForUpdates === "function" && + typeof api.downloadAndInstall === "function" + ); +}; + +export const useDesktopUpdate = () => { + const info = useState("desktopUpdate.info", () => null); + const open = useState("desktopUpdate.open", () => false); + const isDownloading = useState("desktopUpdate.isDownloading", () => false); + const readyToInstall = useState("desktopUpdate.readyToInstall", () => false); + const progress = useState("desktopUpdate.progress", () => ({ percent: 0 })); + const error = useState("desktopUpdate.error", () => ""); + const currentVersion = useState("desktopUpdate.currentVersion", () => ""); + + const manualCheckLoading = useState("desktopUpdate.manualCheckLoading", () => false); + const lastCheckMessage = useState("desktopUpdate.lastCheckMessage", () => ""); + const lastCheckAt = useState("desktopUpdate.lastCheckAt", () => 0); + + const setUpdateInfo = (payload) => { + if (!payload) return; + const version = String(payload?.version || "").trim(); + const releaseNotes = 
String(payload?.releaseNotes || ""); + if (!version) return; + info.value = { version, releaseNotes }; + readyToInstall.value = false; + }; + + const dismiss = () => { + open.value = false; + }; + + const refreshVersion = async () => { + if (!isUpdaterSupported()) return ""; + try { + const v = await getDesktopApi()?.getVersion?.(); + currentVersion.value = String(v || ""); + return currentVersion.value; + } catch { + return currentVersion.value || ""; + } + }; + + const initListeners = async () => { + if (!isUpdaterSupported()) return; + if (listenersInitialized) return; + listenersInitialized = true; + + await refreshVersion(); + + const unsubs = []; + + const unUpdate = window.wechatDesktop?.onUpdateAvailable?.((payload) => { + error.value = ""; + isDownloading.value = false; + readyToInstall.value = false; + progress.value = { percent: 0 }; + setUpdateInfo(payload); + open.value = true; + }); + if (typeof unUpdate === "function") unsubs.push(unUpdate); + + const unProgress = window.wechatDesktop?.onDownloadProgress?.((p) => { + progress.value = p || { percent: 0 }; + const percent = Number(progress.value?.percent || 0); + if (Number.isFinite(percent) && percent > 0) { + isDownloading.value = true; + } + }); + if (typeof unProgress === "function") unsubs.push(unProgress); + + const unDownloaded = window.wechatDesktop?.onUpdateDownloaded?.((payload) => { + // Download finished. Keep the dialog open and let the user decide when to install. 
+ setUpdateInfo(payload || info.value || {}); + isDownloading.value = false; + readyToInstall.value = true; + progress.value = { ...(progress.value || {}), percent: 100 }; + open.value = true; + }); + if (typeof unDownloaded === "function") unsubs.push(unDownloaded); + + const unError = window.wechatDesktop?.onUpdateError?.((payload) => { + const msg = String(payload?.message || ""); + if (msg) error.value = msg; + isDownloading.value = false; + readyToInstall.value = false; + }); + if (typeof unError === "function") unsubs.push(unError); + + removeListeners = unsubs; + }; + + const startUpdate = async () => { + if (!isUpdaterSupported()) return; + + error.value = ""; + isDownloading.value = true; + readyToInstall.value = false; + progress.value = { percent: 0 }; + + try { + await getDesktopApi()?.downloadAndInstall?.(); + } catch (e) { + const msg = e?.message || String(e); + error.value = msg; + isDownloading.value = false; + } + }; + + const installUpdate = async () => { + if (!isUpdaterSupported()) return; + if (!getDesktopApi()?.installUpdate) return; + + error.value = ""; + try { + await getDesktopApi()?.installUpdate?.(); + } catch (e) { + const msg = e?.message || String(e); + error.value = msg; + } + }; + + const ignore = async () => { + if (!isUpdaterSupported()) return; + const version = String(info.value?.version || "").trim(); + if (!version) return; + + try { + await getDesktopApi()?.ignoreUpdate?.(version); + } catch (e) { + const msg = e?.message || String(e); + error.value = msg; + } finally { + // Hide the dialog locally; startup auto-check will also respect the ignore. 
+ open.value = false; + info.value = null; + } + }; + + const manualCheck = async () => { + if (!isDesktopShell()) { + lastCheckMessage.value = "仅桌面端可用。"; + return { hasUpdate: false }; + } + if (!isUpdaterSupported()) { + lastCheckMessage.value = "当前桌面端版本不支持自动更新。"; + return { hasUpdate: false }; + } + + manualCheckLoading.value = true; + error.value = ""; + lastCheckMessage.value = ""; + + try { + await refreshVersion(); + + const res = await getDesktopApi()?.checkForUpdates?.(); + lastCheckAt.value = Date.now(); + + if (res?.enabled === false) { + lastCheckMessage.value = "自动更新已禁用(仅打包版本可用)。"; + return res; + } + + if (res?.error) { + lastCheckMessage.value = `检查更新失败:${String(res.error)}`; + return res; + } + + if (res?.hasUpdate && res?.version) { + setUpdateInfo({ version: res.version, releaseNotes: res.releaseNotes || "" }); + open.value = true; + lastCheckMessage.value = `发现新版本:${String(res.version)}`; + return res; + } + + lastCheckMessage.value = "当前已是最新版本。"; + return res; + } catch (e) { + const msg = e?.message || String(e); + lastCheckMessage.value = `检查更新失败:${msg}`; + return { hasUpdate: false, error: msg }; + } finally { + manualCheckLoading.value = false; + } + }; + + const cleanup = () => { + try { + for (const fn of removeListeners) fn?.(); + } catch {} + removeListeners = []; + listenersInitialized = false; + }; + + return { + info, + open, + isDownloading, + readyToInstall, + progress, + error, + currentVersion, + manualCheckLoading, + lastCheckMessage, + lastCheckAt, + initListeners, + refreshVersion, + manualCheck, + startUpdate, + installUpdate, + ignore, + dismiss, + cleanup, + }; +}; diff --git a/frontend/composables/useSettingsDialog.js b/frontend/composables/useSettingsDialog.js new file mode 100644 index 0000000..6727c56 --- /dev/null +++ b/frontend/composables/useSettingsDialog.js @@ -0,0 +1,17 @@ +export const useSettingsDialog = () => { + const open = useState('settings-dialog-open', () => false) + + const openDialog = () => { + 
open.value = true + } + + const closeDialog = () => { + open.value = false + } + + return { + open, + openDialog, + closeDialog, + } +} diff --git a/frontend/lib/api-settings.js b/frontend/lib/api-settings.js new file mode 100644 index 0000000..a333d5d --- /dev/null +++ b/frontend/lib/api-settings.js @@ -0,0 +1,35 @@ +export const API_BASE_OVERRIDE_KEY = 'ui.apiBaseOverride' + +export const readApiBaseOverride = () => { + if (!process.client) return '' + try { + const raw = localStorage.getItem(API_BASE_OVERRIDE_KEY) + return String(raw || '').trim() + } catch { + return '' + } +} + +export const writeApiBaseOverride = (value) => { + if (!process.client) return + try { + const v = String(value || '').trim() + if (!v) localStorage.removeItem(API_BASE_OVERRIDE_KEY) + else localStorage.setItem(API_BASE_OVERRIDE_KEY, v) + } catch {} +} + +export const normalizeApiBase = (value) => { + const raw = String(value || '').trim() + if (!raw) return '/api' + + let v = raw.replace(/\/$/, '') + + // If a full origin is provided, auto-append `/api` when missing. 
+ if (/^https?:\/\//i.test(v) && !/\/api$/i.test(v)) { + v = `${v}/api` + } + + return v.replace(/\/$/, '') +} + diff --git a/frontend/lib/chat/chat-history.js b/frontend/lib/chat/chat-history.js new file mode 100644 index 0000000..6d1d388 --- /dev/null +++ b/frontend/lib/chat/chat-history.js @@ -0,0 +1,474 @@ +import { getChatHistoryPreviewLines } from '~/lib/chat/formatters' + +export const isMaybeMd5 = (value) => /^[0-9a-f]{32}$/i.test(String(value || '').trim()) + +export const pickFirstMd5 = (...values) => { + for (const value of values) { + const text = String(value || '').trim() + if (isMaybeMd5(text)) return text.toLowerCase() + } + return '' +} + +export const normalizeChatHistoryUrl = (value) => String(value || '').trim().replace(/\s+/g, '') + +export const stripWeChatInvisible = (value) => { + return String(value || '').replace(/[\u3164\u2800]/g, '').trim() +} + +export const parseChatHistoryRecord = (recordItemXml) => { + if (!process.client) return { info: null, items: [] } + const xml = String(recordItemXml || '').trim() + if (!xml) return { info: null, items: [] } + + const normalized = xml + .replace(/ /g, ' ') + .replace(/[\u0000-\u0008\u000B\u000C\u000E-\u001F]/g, '') + .replace(/&(?!amp;|lt;|gt;|quot;|apos;|#\d+;|#x[\da-fA-F]+;)/g, '&') + + let doc + try { + doc = new DOMParser().parseFromString(normalized, 'text/xml') + } catch { + return { info: null, items: [] } + } + + const parserErrors = doc.getElementsByTagName('parsererror') + if (parserErrors && parserErrors.length) return { info: null, items: [] } + + const getText = (node, tag) => { + try { + if (!node) return '' + const elements = Array.from(node.getElementsByTagName(tag) || []) + const direct = elements.find((el) => el && el.parentNode === node) + const target = direct || elements[0] + return String(target?.textContent || '').trim() + } catch { + return '' + } + } + + const getDirectChildXml = (node, tag) => { + try { + if (!node) return '' + const children = Array.from(node.children 
|| []) + const target = children.find((child) => String(child?.tagName || '').toLowerCase() === String(tag || '').toLowerCase()) + if (!target) return '' + const raw = String(target.textContent || '').trim() + if (raw && raw.startsWith('<') && raw.endsWith('>')) return raw + if (typeof XMLSerializer !== 'undefined') { + return new XMLSerializer().serializeToString(target) + } + } catch {} + return '' + } + + const getAnyXml = (node, tag) => { + try { + if (!node) return '' + const elements = Array.from(node.getElementsByTagName(tag) || []) + const direct = elements.find((el) => el && el.parentNode === node) + const target = direct || elements[0] + if (!target) return '' + const raw = String(target.textContent || '').trim() + if (raw && raw.startsWith('<') && raw.endsWith('>')) return raw + if (typeof XMLSerializer !== 'undefined') return new XMLSerializer().serializeToString(target) + } catch {} + return '' + } + + const sameTag = (element, tag) => String(element?.tagName || '').toLowerCase() === String(tag || '').toLowerCase() + + const closestAncestorByTag = (node, tag) => { + const lower = String(tag || '').toLowerCase() + let current = node + while (current) { + if (current.nodeType === 1 && String(current.tagName || '').toLowerCase() === lower) return current + current = current.parentNode + } + return null + } + + const root = doc?.documentElement + const isChatRoom = String(getText(root, 'isChatRoom') || '').trim() === '1' + const title = getText(root, 'title') + const desc = getText(root, 'desc') || getText(root, 'info') + + const datalist = (() => { + try { + const all = Array.from(doc.getElementsByTagName('datalist') || []) + const top = root ? 
all.find((el) => closestAncestorByTag(el, 'recorditem') === root) : null + return top || all[0] || null + } catch { + return null + } + })() + + const datalistCount = (() => { + try { + if (!datalist) return 0 + const value = String(datalist.getAttribute('count') || '').trim() + return Math.max(0, parseInt(value, 10) || 0) + } catch { + return 0 + } + })() + + const itemNodes = (() => { + if (datalist) return Array.from(datalist.children || []).filter((el) => sameTag(el, 'dataitem')) + return Array.from(root?.children || []).filter((el) => sameTag(el, 'dataitem')) + })() + + const parsed = itemNodes.map((node, idx) => { + const datatype = String(node.getAttribute('datatype') || getText(node, 'datatype') || '').trim() + const dataid = String(node.getAttribute('dataid') || getText(node, 'dataid') || '').trim() || String(idx) + + const sourcename = getText(node, 'sourcename') + const sourcetime = getText(node, 'sourcetime') + const sourceheadurl = normalizeChatHistoryUrl(getText(node, 'sourceheadurl')) + const datatitle = getText(node, 'datatitle') + const datadesc = getText(node, 'datadesc') + const link = normalizeChatHistoryUrl(getText(node, 'link') || getText(node, 'dataurl') || getText(node, 'url')) + const datafmt = getText(node, 'datafmt') + const duration = getText(node, 'duration') + + const fullmd5 = getText(node, 'fullmd5') + const thumbfullmd5 = getText(node, 'thumbfullmd5') + const md5 = getText(node, 'md5') || getText(node, 'emoticonmd5') || getText(node, 'emojiMd5') + const fromnewmsgid = getText(node, 'fromnewmsgid') + const srcMsgLocalid = getText(node, 'srcMsgLocalid') || getText(node, 'srcMsgLocalId') + const srcMsgCreateTime = getText(node, 'srcMsgCreateTime') + const cdnurlstring = normalizeChatHistoryUrl(getText(node, 'cdnurlstring')) + const encrypturlstring = normalizeChatHistoryUrl(getText(node, 'encrypturlstring')) + const externurl = normalizeChatHistoryUrl(getText(node, 'externurl')) + const aeskey = getText(node, 'aeskey') + const 
nestedRecordItem = getAnyXml(node, 'recorditem') || getDirectChildXml(node, 'recorditem') || getText(node, 'recorditem') + + let content = datatitle || datadesc + if (!content) { + if (datatype === '4') content = '[视频]' + else if (datatype === '2' || datatype === '3') content = '[图片]' + else if (datatype === '47' || datatype === '37') content = '[表情]' + else if (datatype) content = `[消息 ${datatype}]` + else content = '[消息]' + } + + const fmt = String(datafmt || '').trim().toLowerCase().replace(/^\./, '') + const imageFormats = new Set(['jpg', 'jpeg', 'png', 'gif', 'webp', 'bmp', 'heic', 'heif']) + + let renderType = 'text' + if (datatype === '17') { + renderType = 'chatHistory' + } else if (datatype === '5' || link) { + renderType = 'link' + } else if (datatype === '4' || String(duration || '').trim() || fmt === 'mp4') { + renderType = 'video' + } else if (datatype === '47' || datatype === '37') { + renderType = 'emoji' + } else if ( + datatype === '2' + || datatype === '3' + || imageFormats.has(fmt) + || (datatype !== '1' && isMaybeMd5(fullmd5)) + ) { + renderType = 'image' + } else if (isMaybeMd5(md5) && /表情/.test(String(content || ''))) { + renderType = 'emoji' + } + + let outTitle = '' + let outUrl = '' + let recordItem = '' + if (renderType === 'chatHistory') { + outTitle = datatitle || content || '聊天记录' + content = datadesc || '' + recordItem = nestedRecordItem + } else if (renderType === 'link') { + outTitle = datatitle || content || '' + outUrl = link || externurl || '' + const cleanDesc = stripWeChatInvisible(datadesc) + const cleanTitle = stripWeChatInvisible(outTitle) + if (!cleanDesc || (cleanTitle && cleanDesc === cleanTitle)) { + content = '' + } else { + content = String(datadesc || '').trim() + } + } + + return { + id: dataid, + datatype, + sourcename, + sourcetime, + sourceheadurl, + datafmt, + duration, + fullmd5, + thumbfullmd5, + md5, + fromnewmsgid, + srcMsgLocalid, + srcMsgCreateTime, + cdnurlstring, + encrypturlstring, + externurl, + aeskey, 
+ renderType, + title: outTitle, + recordItem, + url: outUrl, + content + } + }) + + return { + info: { isChatRoom, title, desc, count: datalistCount }, + items: parsed + } +} + +export const formatChatHistoryVideoDuration = (value) => { + const total = Math.max(0, parseInt(String(value || '').trim(), 10) || 0) + const minutes = Math.floor(total / 60) + const seconds = total % 60 + if (minutes <= 0) return `0:${String(seconds).padStart(2, '0')}` + return `${minutes}:${String(seconds).padStart(2, '0')}` +} + +export const createChatHistoryRecordNormalizer = ({ apiBase, getSelectedAccount, getSelectedContact }) => { + return (record) => { + const account = encodeURIComponent(String(getSelectedAccount?.() || '').trim()) + const username = encodeURIComponent(String(getSelectedContact?.()?.username || '').trim()) + const output = { ...(record || {}) } + + output.senderDisplayName = String(output.sourcename || '').trim() + output.senderAvatar = normalizeChatHistoryUrl(output.sourceheadurl) + output.fullTime = String(output.sourcetime || '').trim() + + if (output.renderType === 'link') { + const linkUrl = String(output.url || output.externurl || '').trim() + output.url = linkUrl + output.from = String(output.from || '').trim() + const previewCandidates = [] + const fileId = (() => { + const localId = parseInt(String(output.srcMsgLocalid || '').trim(), 10) || 0 + const createTime = parseInt(String(output.srcMsgCreateTime || '').trim(), 10) || 0 + if (localId > 0 && createTime > 0) return `${localId}_${createTime}` + return '' + })() + if (fileId) { + previewCandidates.push( + `${apiBase}/chat/media/image?account=${account}&file_id=${encodeURIComponent(fileId)}&username=${username}` + ) + } + + output.previewMd5 = pickFirstMd5(output.fullmd5, output.thumbfullmd5, output.md5) + const srcServerId = String(output.fromnewmsgid || '').trim() + if (output.previewMd5) { + const previewParts = [ + `account=${account}`, + `md5=${encodeURIComponent(output.previewMd5)}`, + srcServerId 
? `server_id=${encodeURIComponent(srcServerId)}` : '', + `username=${username}` + ].filter(Boolean) + previewCandidates.push(`${apiBase}/chat/media/image?${previewParts.join('&')}`) + } + + output._linkPreviewCandidates = previewCandidates + output._linkPreviewCandidateIndex = 0 + output._linkPreviewError = false + output.preview = previewCandidates[0] || '' + + const fromUsername = String(output.fromUsername || '').trim() + output.fromUsername = fromUsername + output.fromAvatar = fromUsername + ? `${apiBase}/chat/avatar?account=${account}&username=${encodeURIComponent(fromUsername)}` + : (linkUrl ? `${apiBase}/chat/media/favicon?url=${encodeURIComponent(linkUrl)}` : '') + output._fromAvatarLast = output.fromAvatar + output._fromAvatarImgOk = false + output._fromAvatarImgError = false + } else if (output.renderType === 'video') { + output.videoMd5 = pickFirstMd5(output.fullmd5, output.md5) + output.videoThumbMd5 = pickFirstMd5(output.thumbfullmd5) + output.videoDuration = String(output.duration || '').trim() + const thumbCandidates = [] + if (output.videoMd5) { + thumbCandidates.push(`${apiBase}/chat/media/video_thumb?account=${account}&md5=${encodeURIComponent(output.videoMd5)}&username=${username}`) + } + if (output.videoThumbMd5 && output.videoThumbMd5 !== output.videoMd5) { + thumbCandidates.push(`${apiBase}/chat/media/video_thumb?account=${account}&md5=${encodeURIComponent(output.videoThumbMd5)}&username=${username}`) + } + output._videoThumbCandidates = thumbCandidates + output._videoThumbCandidateIndex = 0 + output._videoThumbError = false + output.videoThumbUrl = thumbCandidates[0] || '' + output.videoUrl = output.videoMd5 + ? 
`${apiBase}/chat/media/video?account=${account}&md5=${encodeURIComponent(output.videoMd5)}&username=${username}` + : '' + if (!output.content || /^\[.+\]$/.test(String(output.content || '').trim())) output.content = '[视频]' + } else if (output.renderType === 'emoji') { + output.emojiMd5 = pickFirstMd5(output.md5, output.fullmd5, output.thumbfullmd5) + const remoteEmojiUrl = String(output.cdnurlstring || output.externurl || output.encrypturlstring || '').trim() + const remoteAesKey = String(output.aeskey || '').trim() + output.emojiRemoteUrl = remoteEmojiUrl + output.emojiUrl = output.emojiMd5 + ? `${apiBase}/chat/media/emoji?account=${account}&md5=${encodeURIComponent(output.emojiMd5)}&username=${username}${remoteEmojiUrl ? `&emoji_url=${encodeURIComponent(remoteEmojiUrl)}` : ''}${remoteAesKey ? `&aes_key=${encodeURIComponent(remoteAesKey)}` : ''}` + : '' + if (!output.content || /^\[.+\]$/.test(String(output.content || '').trim())) output.content = '[表情]' + } else if (output.renderType === 'image') { + output.imageMd5 = pickFirstMd5(output.fullmd5, output.thumbfullmd5, output.md5) + const srcServerId = String(output.fromnewmsgid || '').trim() + const imageParts = [ + `account=${account}`, + output.imageMd5 ? `md5=${encodeURIComponent(output.imageMd5)}` : '', + srcServerId ? `server_id=${encodeURIComponent(srcServerId)}` : '', + `username=${username}` + ].filter(Boolean) + output.imageUrl = imageParts.length ? `${apiBase}/chat/media/image?${imageParts.join('&')}` : '' + if (!output.content || /^\[.+\]$/.test(String(output.content || '').trim())) output.content = '[图片]' + } + + return output + } +} + +export const enhanceChatHistoryRecords = (records) => { + const list = Array.isArray(records) ? 
records : [] + const videoByThumbMd5 = new Map() + const videoByMd5 = new Map() + const imageByMd5 = new Map() + const emojiByMd5 = new Map() + + for (const record of list) { + if (!record) continue + if (record.renderType === 'video' && record.videoThumbMd5) { + videoByThumbMd5.set(String(record.videoThumbMd5).toLowerCase(), record) + } + if (record.renderType === 'video' && record.videoMd5) { + videoByMd5.set(String(record.videoMd5).toLowerCase(), record) + } + if (record.renderType === 'image') { + const keys = [ + pickFirstMd5(record.imageMd5), + pickFirstMd5(record.fullmd5), + pickFirstMd5(record.thumbfullmd5) + ].filter(Boolean) + for (const key of keys) imageByMd5.set(key, record) + } + if (record.renderType === 'emoji') { + const keys = [ + pickFirstMd5(record.emojiMd5), + pickFirstMd5(record.md5), + pickFirstMd5(record.fullmd5), + pickFirstMd5(record.thumbfullmd5) + ].filter(Boolean) + for (const key of keys) emojiByMd5.set(key, record) + } + } + + for (const record of list) { + if (!record || String(record.renderType || '') !== 'text') continue + + const refKey = pickFirstMd5(record.thumbfullmd5) || pickFirstMd5(record.fullmd5) + if (!refKey) continue + + const video = videoByThumbMd5.get(refKey) || videoByMd5.get(refKey) + if (video) { + const quoteThumbCandidates = Array.isArray(video._videoThumbCandidates) ? 
video._videoThumbCandidates.slice() : [] + record._quoteThumbCandidates = quoteThumbCandidates + record._quoteThumbCandidateIndex = 0 + record._quoteThumbError = false + const quoteThumbUrl = quoteThumbCandidates[0] || video.videoThumbUrl || '' + record.renderType = 'quote' + record.quote = { + kind: 'video', + thumbUrl: quoteThumbUrl, + url: video.videoUrl || '', + duration: video.videoDuration || '', + label: video.content || '[视频]', + targetId: video.id || '' + } + record.quoteMedia = { + videoMd5: video.videoMd5, + videoThumbMd5: video.videoThumbMd5, + videoUrl: video.videoUrl, + videoThumbUrl: quoteThumbUrl + } + continue + } + + const image = imageByMd5.get(refKey) + if (image) { + record.renderType = 'quote' + record.quote = { + kind: 'image', + thumbUrl: image.imageUrl || '', + url: image.imageUrl || '', + label: image.content || '[图片]', + targetId: image.id || '' + } + record.quoteMedia = { + imageMd5: image.imageMd5, + imageUrl: image.imageUrl + } + continue + } + + const emoji = emojiByMd5.get(refKey) + if (emoji) { + record.renderType = 'quote' + record.quote = { + kind: 'emoji', + thumbUrl: emoji.emojiUrl || '', + url: emoji.emojiUrl || '', + label: emoji.content || '[表情]', + targetId: emoji.id || '' + } + record.quoteMedia = { + emojiMd5: emoji.emojiMd5, + emojiUrl: emoji.emojiUrl + } + } + } + + return list +} + +export const isChatHistoryRecordItemIncomplete = (recordItemXml) => { + const recordItem = String(recordItemXml || '').trim() + if (!recordItem) return true + try { + const parsed = parseChatHistoryRecord(recordItem) + const got = Array.isArray(parsed?.items) ? 
parsed.items.length : 0 + const expected = Math.max(0, parseInt(String(parsed?.info?.count || '0'), 10) || 0) + if (expected > 0 && got < expected) return true + if (got <= 0) return true + } catch { + return true + } + return false +} + +export const buildChatHistoryWindowPayload = (payload, normalizeRecordItem) => { + const title0 = String(payload?.title || '聊天记录') + const content0 = String(payload?.content || '') + const recordItem0 = String(payload?.recordItem || '').trim() + const parsed = parseChatHistoryRecord(recordItem0) + const info0 = parsed?.info || { isChatRoom: false, count: 0 } + const items = Array.isArray(parsed?.items) ? parsed.items : [] + let records0 = items.length ? enhanceChatHistoryRecords(items.map(normalizeRecordItem)) : [] + if (!records0.length) { + const lines = content0.trim().split(/\r?\n/).map((item) => item.trim()).filter(Boolean) + records0 = lines.map((line, idx) => normalizeRecordItem({ + id: String(idx), + datatype: '1', + sourcename: '', + sourcetime: '', + content: line, + renderType: 'text' + })) + } + return { title0, content0, recordItem0, info0, records0 } +} + +export { getChatHistoryPreviewLines } diff --git a/frontend/lib/chat/file-icons.js b/frontend/lib/chat/file-icons.js new file mode 100644 index 0000000..090fc82 --- /dev/null +++ b/frontend/lib/chat/file-icons.js @@ -0,0 +1,50 @@ +import zipIconUrl from '~/assets/images/wechat/zip.png' +import pdfIconUrl from '~/assets/images/wechat/pdf.png' +import wordIconUrl from '~/assets/images/wechat/word.png' +import excelIconUrl from '~/assets/images/wechat/excel.png' + +export const getFileIconKind = (fileName) => { + if (!fileName) return 'default' + const ext = String(fileName).split('.').pop()?.toLowerCase() || '' + switch (ext) { + case 'pdf': + return 'pdf' + case 'zip': + case 'rar': + case '7z': + case 'tar': + case 'gz': + return 'zip' + case 'doc': + case 'docx': + return 'doc' + case 'xls': + case 'xlsx': + case 'csv': + return 'xls' + case 'ppt': + case 'pptx': 
+ return 'ppt' + case 'txt': + case 'md': + case 'log': + return 'txt' + default: + return 'default' + } +} + +export const getFileIconUrl = (fileName) => { + switch (getFileIconKind(fileName)) { + case 'pdf': + return pdfIconUrl + case 'doc': + return wordIconUrl + case 'xls': + return excelIconUrl + case 'zip': + return zipIconUrl + default: + return '' + } +} diff --git a/frontend/lib/chat/formatters.js b/frontend/lib/chat/formatters.js new file mode 100644 index 0000000..1d85f3d --- /dev/null +++ b/frontend/lib/chat/formatters.js @@ -0,0 +1,211 @@ +export const normalizeSessionPreview = (value) => { + const text = String(value || '').trim() + if (/^\[location\]/i.test(text)) return text.replace(/^\[location\]/i, '[位置]') + if (/:\s*\[location\]$/i.test(text)) return text.replace(/\[location\]$/i, '[位置]') + return text +} + +export const formatSmartTime = (ts) => { + if (!ts) return '' + try { + const date = new Date(Number(ts) * 1000) + const now = new Date() + const hh = String(date.getHours()).padStart(2, '0') + const mm = String(date.getMinutes()).padStart(2, '0') + const timeStr = `${hh}:${mm}` + + const todayStart = new Date(now.getFullYear(), now.getMonth(), now.getDate()) + const targetStart = new Date(date.getFullYear(), date.getMonth(), date.getDate()) + const dayDiff = Math.floor((todayStart - targetStart) / (1000 * 60 * 60 * 24)) + + if (dayDiff === 0) return timeStr + if (dayDiff === 1) return `昨天 ${timeStr}` + if (dayDiff >= 2 && dayDiff <= 6) { + const weekdays = ['星期日', '星期一', '星期二', '星期三', '星期四', '星期五', '星期六'] + return `${weekdays[date.getDay()]} ${timeStr}` + } + + const month = date.getMonth() + 1 + const day = date.getDate() + if (date.getFullYear() === now.getFullYear()) { + return `${month}月${day}日 ${timeStr}` + } + + return `${date.getFullYear()}年${month}月${day}日 ${timeStr}` + } catch { + return '' + } +} + +export const formatTimeDivider = (ts) => formatSmartTime(ts) + +export const formatMessageTime = (ts) => { + if (!ts) return '' + try 
{ + const date = new Date(Number(ts) * 1000) + const hh = String(date.getHours()).padStart(2, '0') + const mm = String(date.getMinutes()).padStart(2, '0') + return `${hh}:${mm}` + } catch { + return '' + } +} + +export const formatMessageFullTime = (ts) => { + if (!ts) return '' + try { + const date = new Date(Number(ts) * 1000) + const yyyy = String(date.getFullYear()) + const MM = String(date.getMonth() + 1).padStart(2, '0') + const dd = String(date.getDate()).padStart(2, '0') + const hh = String(date.getHours()).padStart(2, '0') + const mm = String(date.getMinutes()).padStart(2, '0') + const ss = String(date.getSeconds()).padStart(2, '0') + return `${yyyy}-${MM}-${dd} ${hh}:${mm}:${ss}` + } catch { + return '' + } +} + +export const formatFileSize = (size) => { + if (!size) return '' + const text = String(size).trim() + const value = parseFloat(text) + if (Number.isNaN(value)) return text + if (value < 1024) return `${value} B` + if (value < 1024 * 1024) return `${(value / 1024).toFixed(2)} KB` + return `${(value / 1024 / 1024).toFixed(2)} MB` +} + +export const formatTransferAmount = (amount) => { + const text = String(amount ?? '').trim() + if (!text) return '' + return text.replace(/[¥¥]/g, '').trim() +} + +export const getRedPacketText = (message) => { + const text = String(message?.content ?? 
'').trim() + if (!text || text === '[Red Packet]') return '恭喜发财,大吉大利' + return text +} + +export const isTransferReturned = (message) => { + const paySubType = String(message?.paySubType || '').trim() + if (paySubType === '4' || paySubType === '9') return true + const status = String(message?.transferStatus || '').trim() + const content = String(message?.content || '').trim() + const text = `${status} ${content}`.trim() + if (!text) return false + return text.includes('退回') || text.includes('退还') +} + +export const isTransferOverdue = (message) => { + const paySubType = String(message?.paySubType || '').trim() + if (paySubType === '10') return true + const status = String(message?.transferStatus || '').trim() + const content = String(message?.content || '').trim() + const text = `${status} ${content}`.trim() + if (!text) return false + return text.includes('过期') +} + +export const getTransferTitle = (message) => { + const paySubType = String(message?.paySubType || '').trim() + if (message?.transferStatus) return message.transferStatus + switch (paySubType) { + case '1': + return '转账' + case '3': + return message?.isSent ? 
'已被接收' : '已收款' + case '8': + return '发起转账' + case '4': + return '已退还' + case '9': + return '已被退还' + case '10': + return '已过期' + default: + break + } + if (message?.content && message.content !== '转账' && message.content !== '[转账]') { + return message.content + } + return '转账' +} + +export const formatCount = (count) => { + const value = Number(count || 0) + if (!Number.isFinite(value) || value <= 0) return '' + try { + return value.toLocaleString() + } catch { + return String(value) + } +} + +export const escapeHtml = (value) => { + if (!value) return '' + return String(value) + .replace(/&/g, '&amp;') + .replace(/</g, '&lt;') + .replace(/>/g, '&gt;') + .replace(/"/g, '&quot;') + .replace(/'/g, '&#39;') +} + +export const highlightKeyword = (text, keyword) => { + if (!text || !keyword) return escapeHtml(text || '') + const escaped = escapeHtml(text) + const kw = String(keyword || '').trim() + if (!kw) return escaped + try { + const escapedKw = kw.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') + const regex = new RegExp(`(${escapedKw})`, 'gi') + return escaped.replace(regex, '<mark>$1</mark>') + } catch { + return escaped + } +} + +export const getVoiceDurationInSeconds = (durationMs) => { + const value = Number(durationMs || 0) + if (!Number.isFinite(value) || value <= 0) return 0 + return Math.max(1, Math.round(value / 1000)) +} + +export const getVoiceWidth = (durationMs) => { + const seconds = getVoiceDurationInSeconds(durationMs) + const clamped = Math.min(60, Math.max(1, seconds)) + return `${80 + clamped * 4}px` +} + +export const toUnixSeconds = (datetimeLocal) => { + const value = String(datetimeLocal || '').trim() + if (!value) return null + const date = new Date(value) + const ms = date.getTime() + if (Number.isNaN(ms)) return null + return Math.floor(ms / 1000) +} + +export const dateToUnixSeconds = (dateStr, endOfDay = false) => { + const value = String(dateStr || '').trim() + if (!value) return null + const matched = value.match(/^(\d{4})-(\d{2})-(\d{2})$/) + if (!matched) return null + const year = 
Number(matched[1]) + const month = Number(matched[2]) + const day = Number(matched[3]) + if (!Number.isFinite(year) || !Number.isFinite(month) || !Number.isFinite(day)) return null + const date = new Date(year, month - 1, day, endOfDay ? 23 : 0, endOfDay ? 59 : 0, endOfDay ? 59 : 0) + const ms = date.getTime() + if (Number.isNaN(ms)) return null + return Math.floor(ms / 1000) +} + +export const getChatHistoryPreviewLines = (message) => { + const raw = String(message?.content || '').trim() + if (!raw) return [] + return raw.split(/\r?\n/).map((item) => item.trim()).filter(Boolean).slice(0, 4) +} diff --git a/frontend/lib/chat/message-normalizer.js b/frontend/lib/chat/message-normalizer.js new file mode 100644 index 0000000..5ac676c --- /dev/null +++ b/frontend/lib/chat/message-normalizer.js @@ -0,0 +1,265 @@ +import { formatMessageFullTime, formatMessageTime } from '~/lib/chat/formatters' + +const normalizeMaybeUrl = (value) => (typeof value === 'string' ? value.trim() : '') + +const isUsableMediaUrl = (value) => { + const text = normalizeMaybeUrl(value) + if (!text) return false + return ( + /^https?:\/\//i.test(text) + || /^blob:/i.test(text) + || /^data:/i.test(text) + || /^\/api\/chat\/media\//i.test(text) + ) +} + +const buildAccountMediaUrl = (apiBase, path, parts) => { + return `${apiBase}${path}?${parts.filter(Boolean).join('&')}` +} + +export const createMessageNormalizer = ({ apiBase, getSelectedAccount, getSelectedContact }) => { + return (msg) => { + const account = String(getSelectedAccount?.() || '').trim() + const contact = getSelectedContact?.() || null + const username = String(contact?.username || '').trim() + const isSent = !!msg.isSent + const sender = isSent ? '我' : (msg.senderDisplayName || msg.senderUsername || contact?.name || '') + const fallbackAvatar = (!isSent && !contact?.isGroup) ? 
(contact?.avatar || null) : null + + const normalizedThumbUrl = (() => { + const candidates = [msg.thumbUrl, msg.preview] + for (const candidate of candidates) { + if (isUsableMediaUrl(candidate)) return normalizeMaybeUrl(candidate) + } + return '' + })() + + const normalizedLinkPreviewUrl = (() => { + const url = normalizedThumbUrl + if (!url) return '' + if (/^\/api\/chat\/media\//i.test(url) || /^blob:/i.test(url) || /^data:/i.test(url)) return url + if (!/^https?:\/\//i.test(url)) return url + try { + const host = new URL(url).hostname.toLowerCase() + if (host.endsWith('.qpic.cn') || host.endsWith('.qlogo.cn')) { + return `${apiBase}/chat/media/proxy_image?url=${encodeURIComponent(url)}` + } + } catch {} + return url + })() + + const fromUsername = String(msg.fromUsername || '').trim() + const fromAvatar = fromUsername + ? `${apiBase}/chat/avatar?account=${encodeURIComponent(account)}&username=${encodeURIComponent(fromUsername)}` + : (() => { + const href = String(msg.url || '').trim() + return href ? `${apiBase}/chat/media/favicon?url=${encodeURIComponent(href)}` : '' + })() + + const localEmojiUrl = msg.emojiMd5 + ? `${apiBase}/chat/media/emoji?account=${encodeURIComponent(account)}&md5=${encodeURIComponent(msg.emojiMd5)}&username=${encodeURIComponent(username)}` + : '' + + const localImageUrl = (() => { + if (!msg.imageMd5 && !msg.imageFileId) return '' + return buildAccountMediaUrl(apiBase, '/chat/media/image', [ + `account=${encodeURIComponent(account)}`, + msg.imageMd5 ? `md5=${encodeURIComponent(msg.imageMd5)}` : '', + msg.imageFileId ? `file_id=${encodeURIComponent(msg.imageFileId)}` : '', + `username=${encodeURIComponent(username)}` + ]) + })() + + const normalizedImageUrl = (() => { + const current = isUsableMediaUrl(msg.imageUrl) ? 
normalizeMaybeUrl(msg.imageUrl) : '' + if (current && /\/api\/chat\/media\/image\b/i.test(current) && localImageUrl) { + return localImageUrl + } + return current || localImageUrl || '' + })() + + const normalizedEmojiUrl = msg.emojiUrl || localEmojiUrl + + const localVideoThumbUrl = (() => { + if (!msg.videoThumbMd5 && !msg.videoThumbFileId) return '' + return buildAccountMediaUrl(apiBase, '/chat/media/video_thumb', [ + `account=${encodeURIComponent(account)}`, + msg.videoThumbMd5 ? `md5=${encodeURIComponent(msg.videoThumbMd5)}` : '', + msg.videoThumbFileId ? `file_id=${encodeURIComponent(msg.videoThumbFileId)}` : '', + `username=${encodeURIComponent(username)}` + ]) + })() + + const localVideoUrl = (() => { + if (!msg.videoMd5 && !msg.videoFileId) return '' + return buildAccountMediaUrl(apiBase, '/chat/media/video', [ + `account=${encodeURIComponent(account)}`, + msg.videoMd5 ? `md5=${encodeURIComponent(msg.videoMd5)}` : '', + msg.videoFileId ? `file_id=${encodeURIComponent(msg.videoFileId)}` : '', + `username=${encodeURIComponent(username)}` + ]) + })() + + const normalizedVideoThumbUrl = (isUsableMediaUrl(msg.videoThumbUrl) ? normalizeMaybeUrl(msg.videoThumbUrl) : '') || localVideoThumbUrl + const normalizedVideoUrl = (isUsableMediaUrl(msg.videoUrl) ? normalizeMaybeUrl(msg.videoUrl) : '') || localVideoUrl + const serverIdStr = String(msg.serverIdStr || (msg.serverId != null ? 
String(msg.serverId) : '')).trim() + const normalizedVoiceUrl = (() => { + if (msg.voiceUrl) return msg.voiceUrl + if (!serverIdStr) return '' + if (String(msg.renderType || '') !== 'voice') return '' + return `${apiBase}/chat/media/voice?account=${encodeURIComponent(account)}&server_id=${encodeURIComponent(serverIdStr)}` + })() + + const remoteFromServer = ( + typeof msg.emojiRemoteUrl === 'string' + && /^https?:\/\//i.test(msg.emojiRemoteUrl) + && !/\/api\/chat\/media\/emoji\b/i.test(msg.emojiRemoteUrl) + && !/\blocalhost\b/i.test(msg.emojiRemoteUrl) + && !/\b127\.0\.0\.1\b/i.test(msg.emojiRemoteUrl) + ) ? msg.emojiRemoteUrl : '' + + const remoteFromEmojiUrl = ( + typeof msg.emojiUrl === 'string' + && /^https?:\/\//i.test(msg.emojiUrl) + && !/\/api\/chat\/media\/emoji\b/i.test(msg.emojiUrl) + && !/\blocalhost\b/i.test(msg.emojiUrl) + && !/\b127\.0\.0\.1\b/i.test(msg.emojiUrl) + ) ? msg.emojiUrl : '' + + const emojiRemoteUrl = remoteFromServer || remoteFromEmojiUrl + const emojiIsLocal = typeof normalizedEmojiUrl === 'string' && /\/api\/chat\/media\/emoji\b/i.test(normalizedEmojiUrl) + const emojiDownloaded = !!emojiRemoteUrl && !!emojiIsLocal + + const replyText = String(msg.content || '').trim() + let quoteContent = String(msg.quoteContent || '') + const trimmedQuoteContent = quoteContent.trim() + if (replyText && trimmedQuoteContent) { + if (trimmedQuoteContent === replyText) { + quoteContent = '' + } else { + const lines = trimmedQuoteContent.split(/\r?\n/).map((item) => item.trim()) + if (lines.length && (lines[0] === replyText || lines[0] === replyText.split(/\r?\n/)[0]?.trim())) { + quoteContent = trimmedQuoteContent.split(/\r?\n/).slice(1).join('\n').trim() + } else if (trimmedQuoteContent.startsWith(replyText)) { + quoteContent = trimmedQuoteContent.slice(replyText.length).trim() + } + } + } + + const quoteServerIdStr = String(msg.quoteServerId || '').trim() + const quoteTypeStr = String(msg.quoteType || '').trim() + const quoteVoiceUrl = quoteServerIdStr 
+ ? `${apiBase}/chat/media/voice?account=${encodeURIComponent(account)}&server_id=${encodeURIComponent(quoteServerIdStr)}` + : '' + + const quoteImageUrl = (() => { + if (!quoteServerIdStr) return '' + if (quoteTypeStr !== '3' && String(msg.quoteContent || '').trim() !== '[图片]') return '' + return buildAccountMediaUrl(apiBase, '/chat/media/image', [ + `account=${encodeURIComponent(account)}`, + `server_id=${encodeURIComponent(quoteServerIdStr)}`, + username ? `username=${encodeURIComponent(username)}` : '' + ]) + })() + + const quoteThumbUrl = (() => { + const raw = isUsableMediaUrl(msg.quoteThumbUrl) ? normalizeMaybeUrl(msg.quoteThumbUrl) : '' + if (!raw) return '' + if (/^\/api\/chat\/media\//i.test(raw) || /^blob:/i.test(raw) || /^data:/i.test(raw)) return raw + if (!/^https?:\/\//i.test(raw)) return raw + try { + const host = new URL(raw).hostname.toLowerCase() + if (host.endsWith('.qpic.cn') || host.endsWith('.qlogo.cn')) { + return `${apiBase}/chat/media/proxy_image?url=${encodeURIComponent(raw)}` + } + } catch {} + return raw + })() + + return { + id: msg.id, + serverId: msg.serverId || 0, + serverIdStr, + sender, + senderUsername: msg.senderUsername || '', + senderDisplayName: msg.senderDisplayName || '', + content: msg.content || '', + time: formatMessageTime(msg.createTime), + fullTime: formatMessageFullTime(msg.createTime), + createTime: Number(msg.createTime || 0), + isSent, + type: 'text', + renderType: msg.renderType || 'text', + voipType: msg.voipType || '', + title: msg.title || '', + url: msg.url || '', + recordItem: msg.recordItem || '', + imageMd5: msg.imageMd5 || '', + imageFileId: msg.imageFileId || '', + emojiMd5: msg.emojiMd5 || '', + emojiUrl: normalizedEmojiUrl || '', + emojiLocalUrl: localEmojiUrl || '', + emojiRemoteUrl, + _emojiDownloaded: !!emojiDownloaded, + thumbUrl: msg.thumbUrl || '', + imageUrl: normalizedImageUrl || '', + videoMd5: msg.videoMd5 || '', + videoThumbMd5: msg.videoThumbMd5 || '', + videoFileId: msg.videoFileId || '', 
+ videoThumbFileId: msg.videoThumbFileId || '', + videoThumbUrl: normalizedVideoThumbUrl || '', + videoUrl: normalizedVideoUrl || '', + quoteTitle: msg.quoteTitle || '', + quoteContent, + quoteUsername: msg.quoteUsername || '', + quoteServerId: quoteServerIdStr, + quoteType: quoteTypeStr, + quoteVoiceLength: msg.quoteVoiceLength || '', + quoteVoiceUrl, + quoteImageUrl: quoteImageUrl || '', + quoteThumbUrl: quoteThumbUrl || '', + _quoteImageError: false, + _quoteThumbError: false, + amount: msg.amount || '', + coverUrl: msg.coverUrl || '', + fileSize: msg.fileSize || '', + fileMd5: msg.fileMd5 || '', + paySubType: msg.paySubType || '', + transferStatus: msg.transferStatus || '', + transferReceived: msg.paySubType === '3' || msg.transferStatus === '已收款' || msg.transferStatus === '已被接收', + voiceUrl: normalizedVoiceUrl || '', + voiceDuration: msg.voiceLength || msg.voiceDuration || '', + locationLat: msg.locationLat ?? null, + locationLng: msg.locationLng ?? null, + locationPoiname: String(msg.locationPoiname || '').trim(), + locationLabel: String(msg.locationLabel || '').trim(), + preview: normalizedLinkPreviewUrl || '', + linkType: String(msg.linkType || '').trim(), + linkStyle: String(msg.linkStyle || '').trim(), + linkCardVariant: String(msg.linkStyle || '').trim() === 'cover' ? 'cover' : 'default', + from: String(msg.from || '').trim(), + fromUsername, + fromAvatar, + isGroup: !!contact?.isGroup, + avatar: msg.senderAvatar || msg.avatar || fallbackAvatar || null, + avatarColor: null + } + } +} + +export const dedupeMessagesById = (list) => { + const input = Array.isArray(list) ? 
list : [] + const seen = new Set() + const output = [] + for (const item of input) { + const id = String(item?.id || '') + if (!id) { + output.push(item) + continue + } + if (seen.has(id)) continue + seen.add(id) + output.push(item) + } + return output +} diff --git a/frontend/lib/desktop-settings.js b/frontend/lib/desktop-settings.js new file mode 100644 index 0000000..81bf607 --- /dev/null +++ b/frontend/lib/desktop-settings.js @@ -0,0 +1,22 @@ +export const DESKTOP_SETTING_AUTO_REALTIME_KEY = 'desktop.settings.autoRealtime' +export const DESKTOP_SETTING_DEFAULT_TO_CHAT_KEY = 'desktop.settings.defaultToChatWhenData' +// 朋友圈图片:是否允许使用缓存(默认开启)。关闭后会尽量每次都走下载+解密流程。 +export const SNS_SETTING_USE_CACHE_KEY = 'sns.settings.useCache' + +export const readLocalBoolSetting = (key, fallback = false) => { + if (!process.client) return !!fallback + try { + const raw = localStorage.getItem(String(key || '')) + if (raw == null) return !!fallback + return String(raw).toLowerCase() === 'true' + } catch { + return !!fallback + } +} + +export const writeLocalBoolSetting = (key, value) => { + if (!process.client) return + try { + localStorage.setItem(String(key || ''), value ? 'true' : 'false') + } catch {} +} diff --git a/frontend/lib/privacy-mode.js b/frontend/lib/privacy-mode.js new file mode 100644 index 0000000..85c7ce7 --- /dev/null +++ b/frontend/lib/privacy-mode.js @@ -0,0 +1,20 @@ +export const PRIVACY_MODE_KEY = 'ui.privacy_mode' + +export const readPrivacyMode = (fallback = false) => { + if (!process.client) return !!fallback + try { + const raw = localStorage.getItem(PRIVACY_MODE_KEY) + if (raw == null) return !!fallback + const normalized = String(raw).trim().toLowerCase() + return normalized === '1' || normalized === 'true' + } catch { + return !!fallback + } +} + +export const writePrivacyMode = (enabled) => { + if (!process.client) return + try { + localStorage.setItem(PRIVACY_MODE_KEY, enabled ? 
'1' : '0') + } catch {} +} diff --git a/frontend/lib/server-error-logging.js b/frontend/lib/server-error-logging.js new file mode 100644 index 0000000..04c9276 --- /dev/null +++ b/frontend/lib/server-error-logging.js @@ -0,0 +1,206 @@ +import { useApiBase } from '~/composables/useApiBase' + +const FRONTEND_SERVER_ERROR_ENDPOINT = '/admin/log-frontend-server-error' + +const normalizeStatus = (value) => { + const n = Number(value) + if (!Number.isInteger(n)) return 0 + return n +} + +const stringifyDetail = (value) => { + if (value == null) return '' + if (typeof value === 'string') return value.trim() + try { + return JSON.stringify(value) + } catch { + return String(value).trim() + } +} + +const currentOrigin = () => { + if (!process.client || typeof window === 'undefined') return '' + try { + return String(window.location?.origin || '').trim() + } catch { + return '' + } +} + +const normalizeBasePath = (apiBase) => { + const raw = String(apiBase || '').trim() + if (!raw) return '/api' + if (/^https?:\/\//i.test(raw)) { + try { + const u = new URL(raw) + return u.pathname.replace(/\/+$/, '') || '/' + } catch { + return '/api' + } + } + return raw.replace(/\/+$/, '') || '/' +} + +const normalizePathname = (value) => { + const raw = String(value || '').trim() + if (!raw) return '' + try { + return new URL(raw).pathname.replace(/\/+$/, '') + } catch { + return raw.split(/[?#]/, 1)[0].replace(/\/+$/, '') + } +} + +export const isServerErrorStatus = (status) => normalizeStatus(status) >= 500 + +export const resolveRequestUrl = (requestUrl, apiBase = '') => { + const raw = String(requestUrl || '').trim() + if (!raw) return '' + if (/^https?:\/\//i.test(raw)) return raw + + const origin = currentOrigin() + if (!origin) return raw + + if (raw.startsWith('/')) { + const prefix = normalizeBasePath(apiBase) + const combined = raw === prefix || raw.startsWith(`${prefix}/`) ? 
raw : `${prefix}${raw}` + if (/^https?:\/\//i.test(String(apiBase || '').trim())) { + try { + const baseUrl = new URL(String(apiBase).trim()) + return new URL(combined, `${baseUrl.origin}/`).toString() + } catch { + return new URL(combined, origin).toString() + } + } + return new URL(combined, origin).toString() + } + + if (/^https?:\/\//i.test(String(apiBase || '').trim())) { + try { + const base = String(apiBase).trim() + return new URL(raw, base.endsWith('/') ? base : `${base}/`).toString() + } catch { + return new URL(raw, origin).toString() + } + } + + return new URL(raw, origin).toString() +} + +const isFrontendServerLogUrl = (requestUrl) => { + const path = normalizePathname(requestUrl) + return path.endsWith('/api/admin/log-frontend-server-error') || path.endsWith('/admin/log-frontend-server-error') +} + +const extractBackendDetail = (data) => { + if (data == null) return '' + if (typeof data === 'string') return data.trim() + if (typeof data === 'object' && !Array.isArray(data) && Object.prototype.hasOwnProperty.call(data, 'detail')) { + return stringifyDetail(data.detail) + } + return stringifyDetail(data) +} + +const resolveApiBase = (apiBase) => { + const raw = String(apiBase || '').trim() + if (raw) return raw + if (!process.client) return '' + try { + return String(useApiBase() || '').trim() + } catch { + return '' + } +} + +export const extractServerErrorFromError = (error) => { + const response = error?.response + return { + status: normalizeStatus(error?.status ?? response?.status), + backendDetail: extractBackendDetail(response?._data ?? response?.data ?? 
error?.data), + message: String(error?.message || '').trim(), + requestUrl: String(response?.url || error?.request || '').trim(), + } +} + +export const extractServerErrorDetailFromResponse = async (response) => { + if (!response || typeof response.clone !== 'function') return '' + try { + const clone = response.clone() + const contentType = String(clone.headers?.get?.('content-type') || '').toLowerCase() + if (contentType.includes('json')) { + try { + const payload = await clone.json() + return extractBackendDetail(payload) + } catch {} + } + const text = String(await clone.text()).trim() + if (!text) return '' + if (contentType.includes('json')) { + try { + return extractBackendDetail(JSON.parse(text)) + } catch {} + } + return text + } catch { + return '' + } +} + +export const reportServerError = async (context = {}) => { + if (!process.client || typeof window === 'undefined') return false + + const status = normalizeStatus(context.status) + if (!isServerErrorStatus(status)) return false + + const apiBase = resolveApiBase(context.apiBase) + const requestUrl = resolveRequestUrl(context.requestUrl, apiBase) + if (!requestUrl || isFrontendServerLogUrl(requestUrl)) return false + + const endpointUrl = resolveRequestUrl(FRONTEND_SERVER_ERROR_ENDPOINT, apiBase) + if (!endpointUrl) return false + + const payload = { + status, + method: String(context.method || 'GET').trim().toUpperCase() || 'GET', + request_url: requestUrl, + message: String(context.message || '').trim(), + backend_detail: String(context.backendDetail || '').trim(), + source: String(context.source || '').trim(), + page_url: String(window.location?.href || '').trim(), + } + + try { + await fetch(endpointUrl, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(payload), + keepalive: true, + }) + return true + } catch { + return false + } +} + +export const reportServerErrorFromError = async (error, context = {}) => { + const info = 
extractServerErrorFromError(error) + return await reportServerError({ + ...context, + status: context.status ?? info.status, + requestUrl: context.requestUrl || info.requestUrl, + message: context.message || info.message, + backendDetail: context.backendDetail || info.backendDetail, + }) +} + +export const reportServerErrorFromResponse = async (response, context = {}) => { + const status = normalizeStatus(context.status ?? response?.status) + if (!isServerErrorStatus(status)) return false + const backendDetail = context.backendDetail || (await extractServerErrorDetailFromResponse(response)) + return await reportServerError({ + ...context, + status, + requestUrl: context.requestUrl || response?.url || '', + backendDetail, + }) +} diff --git a/frontend/utils/wechat-emojis.ts b/frontend/lib/wechat-emojis.ts similarity index 100% rename from frontend/utils/wechat-emojis.ts rename to frontend/lib/wechat-emojis.ts diff --git a/frontend/lib/wrapped/heatmap.js b/frontend/lib/wrapped/heatmap.js new file mode 100644 index 0000000..d7dedd0 --- /dev/null +++ b/frontend/lib/wrapped/heatmap.js @@ -0,0 +1,46 @@ +// Utilities for Wrapped heatmap rendering. + +export const clamp01 = (v) => { + const n = Number(v) + if (!Number.isFinite(n)) return 0 + if (n < 0) return 0 + if (n > 1) return 1 + return n +} + +export const maxInMatrix = (matrix) => { + if (!Array.isArray(matrix)) return 0 + let m = 0 + for (const row of matrix) { + if (!Array.isArray(row)) continue + for (const v of row) { + const n = Number(v) + if (Number.isFinite(n) && n > m) m = n + } + } + return m +} + +// Color inspired by WeChat green, with a slight "gold" shift on high intensity +// (EchoTrace-style accent) while keeping the overall WeChat vibe. +export const heatColor = (value, max) => { + const v = Number(value) || 0 + const m = Number(max) || 0 + if (!(v > 0) || !(m > 0)) return 'rgba(0,0,0,0.05)' + + // Use sqrt scaling to make low values still visible. 
+ const t = clamp01(Math.sqrt(v / m)) + + // Hue from green (~145) -> yellow-green (~95) + const hue = 145 - 50 * t + const sat = 70 + const light = 92 - 42 * t + return `hsl(${hue.toFixed(1)} ${sat}% ${light.toFixed(1)}%)` +} + +export const formatHourRange = (hour) => { + const h = Number(hour) + if (!Number.isFinite(h)) return '' + const hh = String(h).padStart(2, '0') + return `${hh}:00-${hh}:59` +} diff --git a/frontend/lib/wrapped/types.js b/frontend/lib/wrapped/types.js new file mode 100644 index 0000000..57083dd --- /dev/null +++ b/frontend/lib/wrapped/types.js @@ -0,0 +1,45 @@ +// JSDoc types for the Wrapped API (kept in JS to match the current codebase). + +/** + * @typedef {Object} WrappedCardBase + * @property {number} id + * @property {string} title + * @property {'global'} scope + * @property {'A'|'B'|'C'|'D'|'E'} category + * @property {'ok'|'error'|'idle'|'loading'} status + * @property {string} kind + * @property {string} narrative + * @property {Record<string, any>} data + */ + +/** + * @typedef {Object} WrappedCardManifest + * @property {number} id + * @property {string} title + * @property {'global'} scope + * @property {'A'|'B'|'C'|'D'|'E'} category + * @property {string} kind + */ + +/** + * @typedef {Object} WrappedAnnualMetaResponse + * @property {string} account + * @property {number} year + * @property {'global'} scope + * @property {number[]|undefined} availableYears + * @property {WrappedCardManifest[]} cards + */ + +/** + * @typedef {Object} WrappedAnnualResponse + * @property {string} account + * @property {number} year + * @property {'global'} scope + * @property {string|null} username + * @property {number} generated_at + * @property {boolean} cached + * @property {number[]|undefined} availableYears + * @property {WrappedCardBase[]} cards + */ + +export {} diff --git a/frontend/nuxt.config.ts b/frontend/nuxt.config.ts index 3e6f203..7fe1eae 100644 --- a/frontend/nuxt.config.ts +++ b/frontend/nuxt.config.ts @@ -1,23 +1,50 @@ // 
https://nuxt.com/docs/api/configuration/nuxt-config +const frontendHost = String(process.env.NUXT_HOST || '').trim() +const frontendPort = Number.parseInt(String(process.env.NUXT_PORT || process.env.PORT || '3000').trim(), 10) +const backendPort = String(process.env.WECHAT_TOOL_PORT || '10392').trim() || '10392' +const devProxyTarget = `http://127.0.0.1:${backendPort}/api` + export default defineNuxtConfig({ compatibilityDate: '2025-07-15', devtools: { enabled: false }, + experimental: { + // This app does not use Nuxt route rules on the client, so disabling + // the app manifest avoids an unnecessary `/_nuxt/builds/meta/dev.json` + // preload request and the related Chrome warning in dev mode. + appManifest: false, + }, + + runtimeConfig: { + public: { + // Full API base, including `/api` when needed. + // Example: `NUXT_PUBLIC_API_BASE=http://127.0.0.1:10392/api` + apiBase: process.env.NUXT_PUBLIC_API_BASE || '/api', + }, + }, // 配置前端开发服务器端口 devServer: { - port: 3000 + ...(frontendHost ? { host: frontendHost } : {}), + port: Number.isInteger(frontendPort) && frontendPort >= 1 && frontendPort <= 65535 ? frontendPort : 3000 }, // 配置API代理,解决跨域问题 nitro: { devProxy: { '/api': { - target: 'http://localhost:8000', + // `h3` strips the matched prefix (`/api`) before calling the middleware, + // so the proxy target must include `/api` to preserve backend routes. 
+ target: devProxyTarget, changeOrigin: true } } }, + // 应用配置 + css: [ + '~/assets/css/chat.css' + ], + // 应用配置 app: { head: { @@ -28,10 +55,10 @@ export default defineNuxtConfig({ { name: 'description', content: '微信4.x版本数据库解密工具' } ], link: [ - { rel: 'icon', type: 'image/png', href: '/logo.png' } + { rel: 'icon', type: 'image/png', href: '/logo.png' }, + { rel: 'stylesheet', href: 'https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.5.1/css/all.min.css' } ] - }, - pageTransition: { name: 'page', mode: 'out-in' } + } }, // 模块配置 diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 2139597..ee60af1 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -9,10 +9,16 @@ "dependencies": { "@nuxtjs/tailwindcss": "^6.14.0", "@pinia/nuxt": "^0.11.2", + "@vueuse/motion": "^3.0.3", "axios": "^1.11.0", + "gsap": "^3.14.2", "nuxt": "^4.0.1", + "ogl": "^1.0.11", "vue": "^3.5.17", "vue-router": "^4.5.1" + }, + "devDependencies": { + "tailwindcss": "3.4.17" } }, "node_modules/@alloc/quick-lru": { @@ -27,26 +33,13 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/@ampproject/remapping": { - "version": "2.3.0", - "resolved": "https://registry.npmmirror.com/@ampproject/remapping/-/remapping-2.3.0.tgz", - "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", - "license": "Apache-2.0", - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.24" - }, - "engines": { - "node": ">=6.0.0" - } - }, "node_modules/@babel/code-frame": { - "version": "7.27.1", - "resolved": "https://registry.npmmirror.com/@babel/code-frame/-/code-frame-7.27.1.tgz", - "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz", + "integrity": 
"sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==", "license": "MIT", "dependencies": { - "@babel/helper-validator-identifier": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5", "js-tokens": "^4.0.0", "picocolors": "^1.1.1" }, @@ -55,30 +48,30 @@ } }, "node_modules/@babel/compat-data": { - "version": "7.28.0", - "resolved": "https://registry.npmmirror.com/@babel/compat-data/-/compat-data-7.28.0.tgz", - "integrity": "sha512-60X7qkglvrap8mn1lh2ebxXdZYtUcpd7gsmy9kLaBJ4i/WdY8PqTSdxyA8qraikqKQK5C1KRBKXqznrVapyNaw==", + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.29.0.tgz", + "integrity": "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==", "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.28.0", - "resolved": "https://registry.npmmirror.com/@babel/core/-/core-7.28.0.tgz", - "integrity": "sha512-UlLAnTPrFdNGoFtbSXwcGFQBtQZJCNjaN6hQNP3UPvuNXT1i82N26KL3dZeIpNalWywr9IuQuncaAfUaS1g6sQ==", - "license": "MIT", - "dependencies": { - "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.0", - "@babel/helper-compilation-targets": "^7.27.2", - "@babel/helper-module-transforms": "^7.27.3", - "@babel/helpers": "^7.27.6", - "@babel/parser": "^7.28.0", - "@babel/template": "^7.27.2", - "@babel/traverse": "^7.28.0", - "@babel/types": "^7.28.0", + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.29.0.tgz", + "integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==", + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-compilation-targets": "^7.28.6", + "@babel/helper-module-transforms": "^7.28.6", + "@babel/helpers": "^7.28.6", + "@babel/parser": "^7.29.0", + "@babel/template": 
"^7.28.6", + "@babel/traverse": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/remapping": "^2.3.5", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -95,7 +88,7 @@ }, "node_modules/@babel/core/node_modules/semver": { "version": "6.3.1", - "resolved": "https://registry.npmmirror.com/semver/-/semver-6.3.1.tgz", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "license": "ISC", "bin": { @@ -103,13 +96,13 @@ } }, "node_modules/@babel/generator": { - "version": "7.28.0", - "resolved": "https://registry.npmmirror.com/@babel/generator/-/generator-7.28.0.tgz", - "integrity": "sha512-lJjzvrbEeWrhB4P3QBsH7tey117PjLZnDbLiQEKjQ/fNJTjuq4HSqgFA+UNSwZT8D7dxxbnuSBMsa1lrWzKlQg==", + "version": "7.29.1", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.29.1.tgz", + "integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==", "license": "MIT", "dependencies": { - "@babel/parser": "^7.28.0", - "@babel/types": "^7.28.0", + "@babel/parser": "^7.29.0", + "@babel/types": "^7.29.0", "@jridgewell/gen-mapping": "^0.3.12", "@jridgewell/trace-mapping": "^0.3.28", "jsesc": "^3.0.2" @@ -120,7 +113,7 @@ }, "node_modules/@babel/helper-annotate-as-pure": { "version": "7.27.3", - "resolved": "https://registry.npmmirror.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz", "integrity": "sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==", "license": "MIT", "dependencies": { @@ -131,12 +124,12 @@ } }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.27.2", - "resolved": 
"https://registry.npmmirror.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", - "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz", + "integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==", "license": "MIT", "dependencies": { - "@babel/compat-data": "^7.27.2", + "@babel/compat-data": "^7.28.6", "@babel/helper-validator-option": "^7.27.1", "browserslist": "^4.24.0", "lru-cache": "^5.1.1", @@ -148,7 +141,7 @@ }, "node_modules/@babel/helper-compilation-targets/node_modules/semver": { "version": "6.3.1", - "resolved": "https://registry.npmmirror.com/semver/-/semver-6.3.1.tgz", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "license": "ISC", "bin": { @@ -156,17 +149,17 @@ } }, "node_modules/@babel/helper-create-class-features-plugin": { - "version": "7.27.1", - "resolved": "https://registry.npmmirror.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.27.1.tgz", - "integrity": "sha512-QwGAmuvM17btKU5VqXfb+Giw4JcN0hjuufz3DYnpeVDvZLAObloM77bhMXiqry3Iio+Ai4phVRDwl6WU10+r5A==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.6.tgz", + "integrity": "sha512-dTOdvsjnG3xNT9Y0AUg1wAl38y+4Rl4sf9caSQZOXdNqVn+H+HbbJ4IyyHaIqNR6SW9oJpA/RuRjsjCw2IdIow==", "license": "MIT", "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.1", - "@babel/helper-member-expression-to-functions": "^7.27.1", + "@babel/helper-annotate-as-pure": "^7.27.3", + "@babel/helper-member-expression-to-functions": "^7.28.5", 
"@babel/helper-optimise-call-expression": "^7.27.1", - "@babel/helper-replace-supers": "^7.27.1", + "@babel/helper-replace-supers": "^7.28.6", "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", - "@babel/traverse": "^7.27.1", + "@babel/traverse": "^7.28.6", "semver": "^6.3.1" }, "engines": { @@ -178,7 +171,7 @@ }, "node_modules/@babel/helper-create-class-features-plugin/node_modules/semver": { "version": "6.3.1", - "resolved": "https://registry.npmmirror.com/semver/-/semver-6.3.1.tgz", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "license": "ISC", "bin": { @@ -187,7 +180,7 @@ }, "node_modules/@babel/helper-globals": { "version": "7.28.0", - "resolved": "https://registry.npmmirror.com/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", "license": "MIT", "engines": { @@ -195,40 +188,40 @@ } }, "node_modules/@babel/helper-member-expression-to-functions": { - "version": "7.27.1", - "resolved": "https://registry.npmmirror.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.27.1.tgz", - "integrity": "sha512-E5chM8eWjTp/aNoVpcbfM7mLxu9XGLWYise2eBKGQomAk/Mb4XoxyqXTZbuTohbsl8EKqdlMhnDI2CCLfcs9wA==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.28.5.tgz", + "integrity": "sha512-cwM7SBRZcPCLgl8a7cY0soT1SptSzAlMH39vwiRpOQkJlh53r5hdHwLSCZpQdVLT39sZt+CRpNwYG4Y2v77atg==", "license": "MIT", "dependencies": { - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" + "@babel/traverse": "^7.28.5", + "@babel/types": "^7.28.5" }, "engines": { "node": ">=6.9.0" } }, 
"node_modules/@babel/helper-module-imports": { - "version": "7.27.1", - "resolved": "https://registry.npmmirror.com/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", - "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz", + "integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==", "license": "MIT", "dependencies": { - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" + "@babel/traverse": "^7.28.6", + "@babel/types": "^7.28.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.27.3", - "resolved": "https://registry.npmmirror.com/@babel/helper-module-transforms/-/helper-module-transforms-7.27.3.tgz", - "integrity": "sha512-dSOvYwvyLsWBeIRyOeHXp5vPj5l1I011r52FM1+r1jCERv+aFXYk4whgQccYEGYxK2H3ZAIA8nuPkQ0HaUo3qg==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz", + "integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==", "license": "MIT", "dependencies": { - "@babel/helper-module-imports": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1", - "@babel/traverse": "^7.27.3" + "@babel/helper-module-imports": "^7.28.6", + "@babel/helper-validator-identifier": "^7.28.5", + "@babel/traverse": "^7.28.6" }, "engines": { "node": ">=6.9.0" @@ -239,7 +232,7 @@ }, "node_modules/@babel/helper-optimise-call-expression": { "version": "7.27.1", - "resolved": "https://registry.npmmirror.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.27.1.tgz", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.27.1.tgz", "integrity": 
"sha512-URMGH08NzYFhubNSGJrpUEphGKQwMQYBySzat5cAByY1/YgIRkULnIy3tAMeszlL/so2HbeilYloUmSpd7GdVw==", "license": "MIT", "dependencies": { @@ -250,23 +243,23 @@ } }, "node_modules/@babel/helper-plugin-utils": { - "version": "7.27.1", - "resolved": "https://registry.npmmirror.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", - "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz", + "integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==", "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-replace-supers": { - "version": "7.27.1", - "resolved": "https://registry.npmmirror.com/@babel/helper-replace-supers/-/helper-replace-supers-7.27.1.tgz", - "integrity": "sha512-7EHz6qDZc8RYS5ElPoShMheWvEgERonFCs7IAonWLLUTXW59DP14bCZt89/GKyreYn8g3S83m21FelHKbeDCKA==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.28.6.tgz", + "integrity": "sha512-mq8e+laIk94/yFec3DxSjCRD2Z0TAjhVbEJY3UQrlwVo15Lmt7C2wAUbK4bjnTs4APkwsYLTahXRraQXhb1WCg==", "license": "MIT", "dependencies": { - "@babel/helper-member-expression-to-functions": "^7.27.1", + "@babel/helper-member-expression-to-functions": "^7.28.5", "@babel/helper-optimise-call-expression": "^7.27.1", - "@babel/traverse": "^7.27.1" + "@babel/traverse": "^7.28.6" }, "engines": { "node": ">=6.9.0" @@ -277,7 +270,7 @@ }, "node_modules/@babel/helper-skip-transparent-expression-wrappers": { "version": "7.27.1", - "resolved": "https://registry.npmmirror.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.27.1.tgz", + "resolved": 
"https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.27.1.tgz", "integrity": "sha512-Tub4ZKEXqbPjXgWLl2+3JpQAYBJ8+ikpQ2Ocj/q/r0LwE3UhENh7EUabyHjz2kCEsrRY83ew2DQdHluuiDQFzg==", "license": "MIT", "dependencies": { @@ -290,7 +283,7 @@ }, "node_modules/@babel/helper-string-parser": { "version": "7.27.1", - "resolved": "https://registry.npmmirror.com/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", "license": "MIT", "engines": { @@ -298,9 +291,9 @@ } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.27.1", - "resolved": "https://registry.npmmirror.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", - "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", "license": "MIT", "engines": { "node": ">=6.9.0" @@ -308,7 +301,7 @@ }, "node_modules/@babel/helper-validator-option": { "version": "7.27.1", - "resolved": "https://registry.npmmirror.com/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", "license": "MIT", "engines": { @@ -316,25 +309,25 @@ } }, "node_modules/@babel/helpers": { - "version": "7.28.2", - "resolved": 
"https://registry.npmmirror.com/@babel/helpers/-/helpers-7.28.2.tgz", - "integrity": "sha512-/V9771t+EgXz62aCcyofnQhGM8DQACbRhvzKFsXKC9QM+5MadF8ZmIm0crDMaz3+o0h0zXfJnd4EhbYbxsrcFw==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.6.tgz", + "integrity": "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==", "license": "MIT", "dependencies": { - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.2" + "@babel/template": "^7.28.6", + "@babel/types": "^7.28.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/parser": { - "version": "7.28.0", - "resolved": "https://registry.npmmirror.com/@babel/parser/-/parser-7.28.0.tgz", - "integrity": "sha512-jVZGvOxOuNSsuQuLRTh13nU0AogFlw32w/MT+LV6D3sP5WdbW61E77RnkbaO2dUvmPAYrBDJXGn5gGS6tH4j8g==", + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.0.tgz", + "integrity": "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==", "license": "MIT", "dependencies": { - "@babel/types": "^7.28.0" + "@babel/types": "^7.29.0" }, "bin": { "parser": "bin/babel-parser.js" @@ -344,12 +337,12 @@ } }, "node_modules/@babel/plugin-syntax-jsx": { - "version": "7.27.1", - "resolved": "https://registry.npmmirror.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.27.1.tgz", - "integrity": "sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.28.6.tgz", + "integrity": "sha512-wgEmr06G6sIpqr8YDwA2dSRTE3bJ+V0IfpzfSY3Lfgd7YWOaAdlykvJi13ZKBt8cZHfgH1IXN+CL656W3uUa4w==", "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" + "@babel/helper-plugin-utils": "^7.28.6" }, "engines": { "node": ">=6.9.0" @@ -359,12 +352,12 @@ } }, "node_modules/@babel/plugin-syntax-typescript": { - "version": "7.27.1", 
- "resolved": "https://registry.npmmirror.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.27.1.tgz", - "integrity": "sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.28.6.tgz", + "integrity": "sha512-+nDNmQye7nlnuuHDboPbGm00Vqg3oO8niRRL27/4LYHUsHYh0zJ1xWOz0uRwNFmM1Avzk8wZbc6rdiYhomzv/A==", "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" + "@babel/helper-plugin-utils": "^7.28.6" }, "engines": { "node": ">=6.9.0" @@ -374,16 +367,16 @@ } }, "node_modules/@babel/plugin-transform-typescript": { - "version": "7.28.0", - "resolved": "https://registry.npmmirror.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.28.0.tgz", - "integrity": "sha512-4AEiDEBPIZvLQaWlc9liCavE0xRM0dNca41WtBeM3jgFptfUOSG9z0uteLhq6+3rq+WB6jIvUwKDTpXEHPJ2Vg==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.28.6.tgz", + "integrity": "sha512-0YWL2RFxOqEm9Efk5PvreamxPME8OyY0wM5wh5lHjF+VtVhdneCWGzZeSqzOfiobVqQaNCd2z0tQvnI9DaPWPw==", "license": "MIT", "dependencies": { "@babel/helper-annotate-as-pure": "^7.27.3", - "@babel/helper-create-class-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-create-class-features-plugin": "^7.28.6", + "@babel/helper-plugin-utils": "^7.28.6", "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", - "@babel/plugin-syntax-typescript": "^7.27.1" + "@babel/plugin-syntax-typescript": "^7.28.6" }, "engines": { "node": ">=6.9.0" @@ -393,31 +386,31 @@ } }, "node_modules/@babel/template": { - "version": "7.27.2", - "resolved": "https://registry.npmmirror.com/@babel/template/-/template-7.27.2.tgz", - "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", + 
"version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz", + "integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==", "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/parser": "^7.27.2", - "@babel/types": "^7.27.1" + "@babel/code-frame": "^7.28.6", + "@babel/parser": "^7.28.6", + "@babel/types": "^7.28.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.28.0", - "resolved": "https://registry.npmmirror.com/@babel/traverse/-/traverse-7.28.0.tgz", - "integrity": "sha512-mGe7UK5wWyh0bKRfupsUchrQGqvDbZDbKJw+kcRGSmdHVYrv+ltd0pnpDTVpiTqnaBru9iEvA8pz8W46v0Amwg==", + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz", + "integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==", "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.0", + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.0", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.0", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/types": "^7.29.0", "debug": "^4.3.1" }, "engines": { @@ -425,49 +418,71 @@ } }, "node_modules/@babel/types": { - "version": "7.28.2", - "resolved": "https://registry.npmmirror.com/@babel/types/-/types-7.28.2.tgz", - "integrity": "sha512-ruv7Ae4J5dUYULmeXw1gmb7rYRz57OWCPM57pHojnLq/3Z1CK2lNSLTCVjxVk1F/TZHwOZZrOWi0ur95BbLxNQ==", + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz", + "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==", "license": "MIT", "dependencies": { "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" + 
"@babel/helper-validator-identifier": "^7.28.5" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@cloudflare/kv-asset-handler": { - "version": "0.4.0", - "resolved": "https://registry.npmmirror.com/@cloudflare/kv-asset-handler/-/kv-asset-handler-0.4.0.tgz", - "integrity": "sha512-+tv3z+SPp+gqTIcImN9o0hqE9xyfQjI1XD9pL6NuKjua9B1y7mNYv0S9cP+QEbA4ppVgGZEmKOvHX5G5Ei1CVA==", - "license": "MIT OR Apache-2.0", - "dependencies": { - "mime": "^3.0.0" + "node_modules/@bomb.sh/tab": { + "version": "0.0.12", + "resolved": "https://registry.npmjs.org/@bomb.sh/tab/-/tab-0.0.12.tgz", + "integrity": "sha512-dYRwg4MqfHR5/BcTy285XOGRhjQFmNpaJBZ0tl2oU+RY595MQ5ApTF6j3OvauPAooHL6cfoOZMySQrOQztT8RQ==", + "license": "MIT", + "bin": { + "tab": "dist/bin/cli.js" }, - "engines": { - "node": ">=18.0.0" + "peerDependencies": { + "cac": "^6.7.14", + "citty": "^0.1.6", + "commander": "^13.1.0" + }, + "peerDependenciesMeta": { + "cac": { + "optional": true + }, + "citty": { + "optional": true + }, + "commander": { + "optional": true + } } }, - "node_modules/@cloudflare/kv-asset-handler/node_modules/mime": { - "version": "3.0.0", - "resolved": "https://registry.npmmirror.com/mime/-/mime-3.0.0.tgz", - "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==", + "node_modules/@clack/core": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@clack/core/-/core-1.0.1.tgz", + "integrity": "sha512-WKeyK3NOBwDOzagPR5H08rFk9D/WuN705yEbuZvKqlkmoLM2woKtXb10OO2k1NoSU4SFG947i2/SCYh+2u5e4g==", "license": "MIT", - "bin": { - "mime": "cli.js" - }, - "engines": { - "node": ">=10.0.0" + "dependencies": { + "picocolors": "^1.0.0", + "sisteransi": "^1.0.5" } }, - "node_modules/@colors/colors": { - "version": "1.6.0", - "resolved": "https://registry.npmmirror.com/@colors/colors/-/colors-1.6.0.tgz", - "integrity": "sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==", + "node_modules/@clack/prompts": { 
+ "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@clack/prompts/-/prompts-1.0.1.tgz", + "integrity": "sha512-/42G73JkuYdyWZ6m8d/CJtBrGl1Hegyc7Fy78m5Ob+jF85TOUmLR5XLce/U3LxYAw0kJ8CT5aI99RIvPHcGp/Q==", "license": "MIT", + "dependencies": { + "@clack/core": "1.0.1", + "picocolors": "^1.0.0", + "sisteransi": "^1.0.5" + } + }, + "node_modules/@cloudflare/kv-asset-handler": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/@cloudflare/kv-asset-handler/-/kv-asset-handler-0.4.2.tgz", + "integrity": "sha512-SIOD2DxrRRwQ+jgzlXCqoEFiKOFqaPjhnNTGKXSRLvp1HiOvapLaFG2kEr9dYQTYe8rKrd9uvDUzmAITeNyaHQ==", + "license": "MIT OR Apache-2.0", "engines": { - "node": ">=0.1.90" + "node": ">=18.0.0" } }, "node_modules/@csstools/selector-resolve-nested": { @@ -514,45 +529,40 @@ "postcss-selector-parser": "^7.0.0" } }, - "node_modules/@dabh/diagnostics": { - "version": "2.0.3", - "resolved": "https://registry.npmmirror.com/@dabh/diagnostics/-/diagnostics-2.0.3.tgz", - "integrity": "sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==", + "node_modules/@dxup/nuxt": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@dxup/nuxt/-/nuxt-0.3.2.tgz", + "integrity": "sha512-2f2usP4oLNsIGjPprvABe3f3GWuIhIDp0169pGLFxTDRI5A4d4sBbGpR+tD9bGZCT+1Btb6Q2GKlyv3LkDCW5g==", "license": "MIT", "dependencies": { - "colorspace": "1.1.x", - "enabled": "2.0.x", - "kuler": "^2.0.0" + "@dxup/unimport": "^0.1.2", + "@nuxt/kit": "^4.2.2", + "chokidar": "^5.0.0", + "pathe": "^2.0.3", + "tinyglobby": "^0.2.15" } }, - "node_modules/@dependents/detective-less": { - "version": "5.0.1", - "resolved": "https://registry.npmmirror.com/@dependents/detective-less/-/detective-less-5.0.1.tgz", - "integrity": "sha512-Y6+WUMsTFWE5jb20IFP4YGa5IrGY/+a/FbOSjDF/wz9gepU2hwCYSXRHP/vPwBvwcY3SVMASt4yXxbXNXigmZQ==", - "license": "MIT", - "dependencies": { - "gonzales-pe": "^4.3.0", - "node-source-walk": "^7.0.1" - }, - "engines": { - "node": ">=18" - } + 
"node_modules/@dxup/unimport": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/@dxup/unimport/-/unimport-0.1.2.tgz", + "integrity": "sha512-/B8YJGPzaYq1NbsQmwgP8EZqg40NpTw4ZB3suuI0TplbxKHeK94jeaawLmVhCv+YwUnOpiWEz9U6SeThku/8JQ==", + "license": "MIT" }, "node_modules/@emnapi/core": { - "version": "1.4.5", - "resolved": "https://registry.npmmirror.com/@emnapi/core/-/core-1.4.5.tgz", - "integrity": "sha512-XsLw1dEOpkSX/WucdqUhPWP7hDxSvZiY+fsUC14h+FtQ2Ifni4znbBt8punRX+Uj2JG/uDb8nEHVKvrVlvdZ5Q==", + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.8.1.tgz", + "integrity": "sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg==", "license": "MIT", "optional": true, "dependencies": { - "@emnapi/wasi-threads": "1.0.4", + "@emnapi/wasi-threads": "1.1.0", "tslib": "^2.4.0" } }, "node_modules/@emnapi/runtime": { - "version": "1.4.5", - "resolved": "https://registry.npmmirror.com/@emnapi/runtime/-/runtime-1.4.5.tgz", - "integrity": "sha512-++LApOtY0pEEz1zrd9vy1/zXVaVJJ/EbAF3u0fXIzPJEDtnITsBGbbK0EkM72amhl/R5b+5xx0Y/QhcVOpuulg==", + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.8.1.tgz", + "integrity": "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==", "license": "MIT", "optional": true, "dependencies": { @@ -560,9 +570,9 @@ } }, "node_modules/@emnapi/wasi-threads": { - "version": "1.0.4", - "resolved": "https://registry.npmmirror.com/@emnapi/wasi-threads/-/wasi-threads-1.0.4.tgz", - "integrity": "sha512-PJR+bOmMOPH8AtcTGAyYNiuJ3/Fcoj2XN/gBEWzDIKh254XO+mM9XoXHk5GNEhodxeMznbg7BlRojVbKN+gC6g==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.1.0.tgz", + "integrity": "sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==", "license": "MIT", "optional": true, "dependencies": { @@ -570,9 +580,9 @@ } }, 
"node_modules/@esbuild/aix-ppc64": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/aix-ppc64/-/aix-ppc64-0.25.8.tgz", - "integrity": "sha512-urAvrUedIqEiFR3FYSLTWQgLu5tb+m0qZw0NBEasUeo6wuqatkMDaRT+1uABiGXEu5vqgPd7FGE1BhsAIy9QVA==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.3.tgz", + "integrity": "sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==", "cpu": [ "ppc64" ], @@ -586,9 +596,9 @@ } }, "node_modules/@esbuild/android-arm": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/android-arm/-/android-arm-0.25.8.tgz", - "integrity": "sha512-RONsAvGCz5oWyePVnLdZY/HHwA++nxYWIX1atInlaW6SEkwq6XkP3+cb825EUcRs5Vss/lGh/2YxAb5xqc07Uw==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.3.tgz", + "integrity": "sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==", "cpu": [ "arm" ], @@ -602,9 +612,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/android-arm64/-/android-arm64-0.25.8.tgz", - "integrity": "sha512-OD3p7LYzWpLhZEyATcTSJ67qB5D+20vbtr6vHlHWSQYhKtzUYrETuWThmzFpZtFsBIxRvhO07+UgVA9m0i/O1w==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.3.tgz", + "integrity": "sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==", "cpu": [ "arm64" ], @@ -618,9 +628,9 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/android-x64/-/android-x64-0.25.8.tgz", - "integrity": "sha512-yJAVPklM5+4+9dTeKwHOaA+LQkmrKFX96BM0A/2zQrbS6ENCmxc4OVoBs5dPkCCak2roAD+jKCdnmOqKszPkjA==", + "version": "0.27.3", + "resolved": 
"https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.3.tgz", + "integrity": "sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==", "cpu": [ "x64" ], @@ -634,9 +644,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/darwin-arm64/-/darwin-arm64-0.25.8.tgz", - "integrity": "sha512-Jw0mxgIaYX6R8ODrdkLLPwBqHTtYHJSmzzd+QeytSugzQ0Vg4c5rDky5VgkoowbZQahCbsv1rT1KW72MPIkevw==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.3.tgz", + "integrity": "sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==", "cpu": [ "arm64" ], @@ -650,9 +660,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/darwin-x64/-/darwin-x64-0.25.8.tgz", - "integrity": "sha512-Vh2gLxxHnuoQ+GjPNvDSDRpoBCUzY4Pu0kBqMBDlK4fuWbKgGtmDIeEC081xi26PPjn+1tct+Bh8FjyLlw1Zlg==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.3.tgz", + "integrity": "sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==", "cpu": [ "x64" ], @@ -666,9 +676,9 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.8.tgz", - "integrity": "sha512-YPJ7hDQ9DnNe5vxOm6jaie9QsTwcKedPvizTVlqWG9GBSq+BuyWEDazlGaDTC5NGU4QJd666V0yqCBL2oWKPfA==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.3.tgz", + "integrity": "sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==", "cpu": [ "arm64" ], @@ -682,9 +692,9 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.25.8", - "resolved": 
"https://registry.npmmirror.com/@esbuild/freebsd-x64/-/freebsd-x64-0.25.8.tgz", - "integrity": "sha512-MmaEXxQRdXNFsRN/KcIimLnSJrk2r5H8v+WVafRWz5xdSVmWLoITZQXcgehI2ZE6gioE6HirAEToM/RvFBeuhw==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.3.tgz", + "integrity": "sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==", "cpu": [ "x64" ], @@ -698,9 +708,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/linux-arm/-/linux-arm-0.25.8.tgz", - "integrity": "sha512-FuzEP9BixzZohl1kLf76KEVOsxtIBFwCaLupVuk4eFVnOZfU+Wsn+x5Ryam7nILV2pkq2TqQM9EZPsOBuMC+kg==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.3.tgz", + "integrity": "sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==", "cpu": [ "arm" ], @@ -714,9 +724,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/linux-arm64/-/linux-arm64-0.25.8.tgz", - "integrity": "sha512-WIgg00ARWv/uYLU7lsuDK00d/hHSfES5BzdWAdAig1ioV5kaFNrtK8EqGcUBJhYqotlUByUKz5Qo6u8tt7iD/w==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.3.tgz", + "integrity": "sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==", "cpu": [ "arm64" ], @@ -730,9 +740,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/linux-ia32/-/linux-ia32-0.25.8.tgz", - "integrity": "sha512-A1D9YzRX1i+1AJZuFFUMP1E9fMaYY+GnSQil9Tlw05utlE86EKTUA7RjwHDkEitmLYiFsRd9HwKBPEftNdBfjg==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.3.tgz", + "integrity": 
"sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==", "cpu": [ "ia32" ], @@ -746,9 +756,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/linux-loong64/-/linux-loong64-0.25.8.tgz", - "integrity": "sha512-O7k1J/dwHkY1RMVvglFHl1HzutGEFFZ3kNiDMSOyUrB7WcoHGf96Sh+64nTRT26l3GMbCW01Ekh/ThKM5iI7hQ==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.3.tgz", + "integrity": "sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==", "cpu": [ "loong64" ], @@ -762,9 +772,9 @@ } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/linux-mips64el/-/linux-mips64el-0.25.8.tgz", - "integrity": "sha512-uv+dqfRazte3BzfMp8PAQXmdGHQt2oC/y2ovwpTteqrMx2lwaksiFZ/bdkXJC19ttTvNXBuWH53zy/aTj1FgGw==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.3.tgz", + "integrity": "sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==", "cpu": [ "mips64el" ], @@ -778,9 +788,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/linux-ppc64/-/linux-ppc64-0.25.8.tgz", - "integrity": "sha512-GyG0KcMi1GBavP5JgAkkstMGyMholMDybAf8wF5A70CALlDM2p/f7YFE7H92eDeH/VBtFJA5MT4nRPDGg4JuzQ==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.3.tgz", + "integrity": "sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==", "cpu": [ "ppc64" ], @@ -794,9 +804,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/linux-riscv64/-/linux-riscv64-0.25.8.tgz", - "integrity": 
"sha512-rAqDYFv3yzMrq7GIcen3XP7TUEG/4LK86LUPMIz6RT8A6pRIDn0sDcvjudVZBiiTcZCY9y2SgYX2lgK3AF+1eg==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.3.tgz", + "integrity": "sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==", "cpu": [ "riscv64" ], @@ -810,9 +820,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/linux-s390x/-/linux-s390x-0.25.8.tgz", - "integrity": "sha512-Xutvh6VjlbcHpsIIbwY8GVRbwoviWT19tFhgdA7DlenLGC/mbc3lBoVb7jxj9Z+eyGqvcnSyIltYUrkKzWqSvg==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.3.tgz", + "integrity": "sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==", "cpu": [ "s390x" ], @@ -826,9 +836,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/linux-x64/-/linux-x64-0.25.8.tgz", - "integrity": "sha512-ASFQhgY4ElXh3nDcOMTkQero4b1lgubskNlhIfJrsH5OKZXDpUAKBlNS0Kx81jwOBp+HCeZqmoJuihTv57/jvQ==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.3.tgz", + "integrity": "sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==", "cpu": [ "x64" ], @@ -842,9 +852,9 @@ } }, "node_modules/@esbuild/netbsd-arm64": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.8.tgz", - "integrity": "sha512-d1KfruIeohqAi6SA+gENMuObDbEjn22olAR7egqnkCD9DGBG0wsEARotkLgXDu6c4ncgWTZJtN5vcgxzWRMzcw==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.3.tgz", + "integrity": "sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==", "cpu": [ "arm64" ], @@ -858,9 +868,9 @@ } }, 
"node_modules/@esbuild/netbsd-x64": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/netbsd-x64/-/netbsd-x64-0.25.8.tgz", - "integrity": "sha512-nVDCkrvx2ua+XQNyfrujIG38+YGyuy2Ru9kKVNyh5jAys6n+l44tTtToqHjino2My8VAY6Lw9H7RI73XFi66Cg==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.3.tgz", + "integrity": "sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==", "cpu": [ "x64" ], @@ -874,9 +884,9 @@ } }, "node_modules/@esbuild/openbsd-arm64": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.8.tgz", - "integrity": "sha512-j8HgrDuSJFAujkivSMSfPQSAa5Fxbvk4rgNAS5i3K+r8s1X0p1uOO2Hl2xNsGFppOeHOLAVgYwDVlmxhq5h+SQ==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.3.tgz", + "integrity": "sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==", "cpu": [ "arm64" ], @@ -890,9 +900,9 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/openbsd-x64/-/openbsd-x64-0.25.8.tgz", - "integrity": "sha512-1h8MUAwa0VhNCDp6Af0HToI2TJFAn1uqT9Al6DJVzdIBAd21m/G0Yfc77KDM3uF3T/YaOgQq3qTJHPbTOInaIQ==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.3.tgz", + "integrity": "sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==", "cpu": [ "x64" ], @@ -906,9 +916,9 @@ } }, "node_modules/@esbuild/openharmony-arm64": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.8.tgz", - "integrity": "sha512-r2nVa5SIK9tSWd0kJd9HCffnDHKchTGikb//9c7HX+r+wHYCpQrSgxhlY6KWV1nFo1l4KFbsMlHk+L6fekLsUg==", + "version": "0.27.3", + "resolved": 
"https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.3.tgz", + "integrity": "sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==", "cpu": [ "arm64" ], @@ -922,9 +932,9 @@ } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/sunos-x64/-/sunos-x64-0.25.8.tgz", - "integrity": "sha512-zUlaP2S12YhQ2UzUfcCuMDHQFJyKABkAjvO5YSndMiIkMimPmxA+BYSBikWgsRpvyxuRnow4nS5NPnf9fpv41w==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.3.tgz", + "integrity": "sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==", "cpu": [ "x64" ], @@ -938,9 +948,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/win32-arm64/-/win32-arm64-0.25.8.tgz", - "integrity": "sha512-YEGFFWESlPva8hGL+zvj2z/SaK+pH0SwOM0Nc/d+rVnW7GSTFlLBGzZkuSU9kFIGIo8q9X3ucpZhu8PDN5A2sQ==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.3.tgz", + "integrity": "sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==", "cpu": [ "arm64" ], @@ -954,9 +964,9 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/@esbuild/win32-ia32/-/win32-ia32-0.25.8.tgz", - "integrity": "sha512-hiGgGC6KZ5LZz58OL/+qVVoZiuZlUYlYHNAmczOm7bs2oE1XriPFi5ZHHrS8ACpV5EjySrnoCKmcbQMN+ojnHg==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.3.tgz", + "integrity": "sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==", "cpu": [ "ia32" ], @@ -970,9 +980,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.25.8", - "resolved": 
"https://registry.npmmirror.com/@esbuild/win32-x64/-/win32-x64-0.25.8.tgz", - "integrity": "sha512-cn3Yr7+OaaZq1c+2pe+8yxC8E144SReCQjN6/2ynubzYjvyqZjTXfQJpAcQpsdJq3My7XADANiYGHoFC69pLQw==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.3.tgz", + "integrity": "sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==", "cpu": [ "x64" ], @@ -985,16 +995,10 @@ "node": ">=18" } }, - "node_modules/@fastify/busboy": { - "version": "3.1.1", - "resolved": "https://registry.npmmirror.com/@fastify/busboy/-/busboy-3.1.1.tgz", - "integrity": "sha512-5DGmA8FTdB2XbDeEwc/5ZXBl6UbBAyBOOLlPuBnZ/N1SwdH9Ii+cOX3tBROlDgcTXxjOYnLMVoKk9+FXAw0CJw==", - "license": "MIT" - }, "node_modules/@ioredis/commands": { - "version": "1.2.0", - "resolved": "https://registry.npmmirror.com/@ioredis/commands/-/commands-1.2.0.tgz", - "integrity": "sha512-Sx1pU8EM64o2BrqNpEO1CNLtKQwyhuXuqyfH7oGKCk+1a33d2r5saW8zNwm3j6BTExtjrv2BxTgzzkMwts6vGg==", + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/@ioredis/commands/-/commands-1.5.1.tgz", + "integrity": "sha512-JH8ZL/ywcJyR9MmJ5BNqZllXNZQqQbnVZOqpPQqE1vHiFgAw4NHbvE0FOduNU8IX9babitBT46571OnPTT0Zcw==", "license": "MIT" }, "node_modules/@isaacs/cliui": { @@ -1016,7 +1020,7 @@ }, "node_modules/@isaacs/fs-minipass": { "version": "4.0.1", - "resolved": "https://registry.npmmirror.com/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", + "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", "integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==", "license": "ISC", "dependencies": { @@ -1036,6 +1040,16 @@ "@jridgewell/trace-mapping": "^0.3.24" } }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmmirror.com/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": 
"sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, "node_modules/@jridgewell/resolve-uri": { "version": "3.1.2", "resolved": "https://registry.npmmirror.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", @@ -1046,9 +1060,9 @@ } }, "node_modules/@jridgewell/source-map": { - "version": "0.3.10", - "resolved": "https://registry.npmmirror.com/@jridgewell/source-map/-/source-map-0.3.10.tgz", - "integrity": "sha512-0pPkgz9dY+bijgistcTTJ5mR+ocqRXLuhXHYdzoMmmoJ2C9S46RCm2GMUbatPEUK9Yjy26IrAy8D/M00lLkv+Q==", + "version": "0.3.11", + "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.11.tgz", + "integrity": "sha512-ZMp1V8ZFcPG5dIWnQLr3NSI1MiCU7UETdS/A0G8V/XWHvJv3ZsFqutJn1Y5RPmAPX6F3BiE397OqveU/9NCuIA==", "license": "MIT", "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", @@ -1056,9 +1070,9 @@ } }, "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.4", - "resolved": "https://registry.npmmirror.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.4.tgz", - "integrity": "sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw==", + "version": "1.5.5", + "resolved": "https://registry.npmmirror.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", "license": "MIT" }, "node_modules/@jridgewell/trace-mapping": { @@ -1089,7 +1103,7 @@ }, "node_modules/@kwsites/file-exists": { "version": "1.1.1", - "resolved": "https://registry.npmmirror.com/@kwsites/file-exists/-/file-exists-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/@kwsites/file-exists/-/file-exists-1.1.1.tgz", "integrity": "sha512-m9/5YGR18lIwxSFDwfE3oA7bWuq9kdau6ugN4H2rJeyhFQZcG9AgSHkQtSD15a8WvTgfz9aikZMrKPHvbpqFiw==", "license": "MIT", 
"dependencies": { @@ -1098,14 +1112,14 @@ }, "node_modules/@kwsites/promise-deferred": { "version": "1.1.1", - "resolved": "https://registry.npmmirror.com/@kwsites/promise-deferred/-/promise-deferred-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/@kwsites/promise-deferred/-/promise-deferred-1.1.1.tgz", "integrity": "sha512-GaHYm+c0O9MjZRu0ongGBRbinu8gVAMd2UZjji6jVmqKtZluZnptXGWhz1E8j8D2HJ3f/yMxKAUC0b+57wncIw==", "license": "MIT" }, "node_modules/@mapbox/node-pre-gyp": { - "version": "2.0.0", - "resolved": "https://registry.npmmirror.com/@mapbox/node-pre-gyp/-/node-pre-gyp-2.0.0.tgz", - "integrity": "sha512-llMXd39jtP0HpQLVI37Bf1m2ADlEb35GYSh1SDSLsBhR+5iCxiNGlT31yqbNtVHygHAtMy6dWFERpU2JgufhPg==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-2.0.3.tgz", + "integrity": "sha512-uwPAhccfFJlsfCxMYTwOdVfOz3xqyj8xYL3zJj8f0pb30tLohnnFPhLuqp4/qoEz8sNxe4SESZedcBojRefIzg==", "license": "BSD-3-Clause", "dependencies": { "consola": "^3.2.3", @@ -1123,1015 +1137,471 @@ "node": ">=18" } }, - "node_modules/@mapbox/node-pre-gyp/node_modules/detect-libc": { - "version": "2.0.4", - "resolved": "https://registry.npmmirror.com/detect-libc/-/detect-libc-2.0.4.tgz", - "integrity": "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==", - "license": "Apache-2.0", - "engines": { - "node": ">=8" - } - }, "node_modules/@napi-rs/wasm-runtime": { - "version": "1.0.1", - "resolved": "https://registry.npmmirror.com/@napi-rs/wasm-runtime/-/wasm-runtime-1.0.1.tgz", - "integrity": "sha512-KVlQ/jgywZpixGCKMNwxStmmbYEMyokZpCf2YuIChhfJA2uqfAKNEM8INz7zzTo55iEXfBhIIs3VqYyqzDLj8g==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-1.1.1.tgz", + "integrity": "sha512-p64ah1M1ld8xjWv3qbvFwHiFVWrq1yFvV4f7w+mzaqiR4IlSgkqhcRdHwsGgomwzBH51sRY4NEowLxnaBjcW/A==", "license": "MIT", "optional": true, "dependencies": { - "@emnapi/core": "^1.4.5", - 
"@emnapi/runtime": "^1.4.5", - "@tybys/wasm-util": "^0.10.0" + "@emnapi/core": "^1.7.1", + "@emnapi/runtime": "^1.7.1", + "@tybys/wasm-util": "^0.10.1" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Brooooooklyn" } }, - "node_modules/@netlify/binary-info": { - "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/@netlify/binary-info/-/binary-info-1.0.0.tgz", - "integrity": "sha512-4wMPu9iN3/HL97QblBsBay3E1etIciR84izI3U+4iALY+JHCrI+a2jO0qbAZ/nxKoegypYEaiiqWXylm+/zfrw==", - "license": "Apache 2" - }, - "node_modules/@netlify/blobs": { - "version": "9.1.2", - "resolved": "https://registry.npmmirror.com/@netlify/blobs/-/blobs-9.1.2.tgz", - "integrity": "sha512-7dMjExSH4zj4ShvLem49mE3mf0K171Tx2pV4WDWhJbRUWW3SJIR2qntz0LvUGS97N5HO1SmnzrgWUhEXCsApiw==", + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmmirror.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", "license": "MIT", "dependencies": { - "@netlify/dev-utils": "2.2.0", - "@netlify/runtime-utils": "1.3.1" + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" }, "engines": { - "node": "^14.16.0 || >=16.0.0" + "node": ">= 8" } }, - "node_modules/@netlify/dev-utils": { - "version": "2.2.0", - "resolved": "https://registry.npmmirror.com/@netlify/dev-utils/-/dev-utils-2.2.0.tgz", - "integrity": "sha512-5XUvZuffe3KetyhbWwd4n2ktd7wraocCYw10tlM+/u/95iAz29GjNiuNxbCD1T6Bn1MyGc4QLVNKOWhzJkVFAw==", + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmmirror.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", "license": "MIT", - "dependencies": { - "@whatwg-node/server": "^0.9.60", - "chokidar": "^4.0.1", - "decache": "^4.6.2", - "dot-prop": "9.0.0", - "env-paths": 
"^3.0.0", - "find-up": "7.0.0", - "lodash.debounce": "^4.0.8", - "netlify": "^13.3.5", - "parse-gitignore": "^2.0.0", - "uuid": "^11.1.0", - "write-file-atomic": "^6.0.0" - }, "engines": { - "node": "^14.16.0 || >=16.0.0" + "node": ">= 8" } }, - "node_modules/@netlify/functions": { - "version": "3.1.10", - "resolved": "https://registry.npmmirror.com/@netlify/functions/-/functions-3.1.10.tgz", - "integrity": "sha512-sI93kcJ2cUoMgDRPnrEm0lZhuiDVDqM6ngS/UbHTApIH3+eg3yZM5p/0SDFQQq9Bad0/srFmgBmTdXushzY5kg==", + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmmirror.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", "license": "MIT", "dependencies": { - "@netlify/blobs": "9.1.2", - "@netlify/dev-utils": "2.2.0", - "@netlify/serverless-functions-api": "1.41.2", - "@netlify/zip-it-and-ship-it": "^12.1.0", - "cron-parser": "^4.9.0", - "decache": "^4.6.2", - "extract-zip": "^2.0.1", - "is-stream": "^4.0.1", - "jwt-decode": "^4.0.0", - "lambda-local": "^2.2.0", - "read-package-up": "^11.0.0", - "source-map-support": "^0.5.21" + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" }, "engines": { - "node": ">=14.0.0" + "node": ">= 8" } }, - "node_modules/@netlify/functions/node_modules/is-stream": { - "version": "4.0.1", - "resolved": "https://registry.npmmirror.com/is-stream/-/is-stream-4.0.1.tgz", - "integrity": "sha512-Dnz92NInDqYckGEUJv689RbRiTSEHCQ7wOVeALbkOz999YpqT46yMRIGtSNl2iCL1waAZSx40+h59NV/EwzV/A==", + "node_modules/@nuxt/cli": { + "version": "3.33.1", + "resolved": "https://registry.npmjs.org/@nuxt/cli/-/cli-3.33.1.tgz", + "integrity": "sha512-/sCrcI0WemING9zASaXPgPDY7PrQTPlRyCXlSgGx8VwRAkWbxGaPhIc3kZQikgLwVAwy+muWVV4Wks8OTtW5Tw==", "license": "MIT", + "dependencies": { + "@bomb.sh/tab": "^0.0.12", + "@clack/prompts": "^1.0.0", + "c12": "^3.3.3", + "citty": "^0.2.0", + "confbox": "^0.2.4", + "consola": "^3.4.2", + 
"copy-paste": "^2.2.0", + "debug": "^4.4.3", + "defu": "^6.1.4", + "exsolve": "^1.0.8", + "fuse.js": "^7.1.0", + "fzf": "^0.5.2", + "giget": "^3.1.2", + "jiti": "^2.6.1", + "listhen": "^1.9.0", + "nypm": "^0.6.5", + "ofetch": "^1.5.1", + "ohash": "^2.0.11", + "pathe": "^2.0.3", + "perfect-debounce": "^2.1.0", + "pkg-types": "^2.3.0", + "scule": "^1.3.0", + "semver": "^7.7.4", + "srvx": "^0.11.2", + "std-env": "^3.10.0", + "tinyexec": "^1.0.2", + "ufo": "^1.6.3", + "youch": "^4.1.0-beta.13" + }, + "bin": { + "nuxi": "bin/nuxi.mjs", + "nuxi-ng": "bin/nuxi.mjs", + "nuxt": "bin/nuxi.mjs", + "nuxt-cli": "bin/nuxi.mjs" + }, "engines": { - "node": ">=18" + "node": "^16.10.0 || >=18.0.0" }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "peerDependencies": { + "@nuxt/schema": "^4.3.0" + }, + "peerDependenciesMeta": { + "@nuxt/schema": { + "optional": true + } } }, - "node_modules/@netlify/open-api": { - "version": "2.37.0", - "resolved": "https://registry.npmmirror.com/@netlify/open-api/-/open-api-2.37.0.tgz", - "integrity": "sha512-zXnRFkxgNsalSgU8/vwTWnav3R+8KG8SsqHxqaoJdjjJtnZR7wo3f+qqu4z+WtZ/4V7fly91HFUwZ6Uz2OdW7w==", + "node_modules/@nuxt/cli/node_modules/citty": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/citty/-/citty-0.2.1.tgz", + "integrity": "sha512-kEV95lFBhQgtogAPlQfJJ0WGVSokvLr/UEoFPiKKOXF7pl98HfUVUD0ejsuTCld/9xH9vogSywZ5KqHzXrZpqg==", + "license": "MIT" + }, + "node_modules/@nuxt/cli/node_modules/giget": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/giget/-/giget-3.1.2.tgz", + "integrity": "sha512-T2qUpKBHeUTwHcIhydgnJzhL0Hj785ms+JkxaaWQH9SDM/llXeewnOkfJcFShAHjWI+26hOChwUfCoupaXLm8g==", "license": "MIT", - "engines": { - "node": ">=14.8.0" + "bin": { + "giget": "dist/cli.mjs" } }, - "node_modules/@netlify/runtime-utils": { - "version": "1.3.1", - "resolved": "https://registry.npmmirror.com/@netlify/runtime-utils/-/runtime-utils-1.3.1.tgz", - "integrity": 
"sha512-7/vIJlMYrPJPlEW84V2yeRuG3QBu66dmlv9neTmZ5nXzwylhBEOhy11ai+34A8mHCSZI4mKns25w3HM9kaDdJg==", + "node_modules/@nuxt/cli/node_modules/perfect-debounce": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-2.1.0.tgz", + "integrity": "sha512-LjgdTytVFXeUgtHZr9WYViYSM/g8MkcTPYDlPa3cDqMirHjKiSZPYd6DoL7pK8AJQr+uWkQvCjHNdiMqsrJs+g==", + "license": "MIT" + }, + "node_modules/@nuxt/devalue": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@nuxt/devalue/-/devalue-2.0.2.tgz", + "integrity": "sha512-GBzP8zOc7CGWyFQS6dv1lQz8VVpz5C2yRszbXufwG/9zhStTIH50EtD87NmWbTMwXDvZLNg8GIpb1UFdH93JCA==", + "license": "MIT" + }, + "node_modules/@nuxt/devtools": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@nuxt/devtools/-/devtools-3.2.2.tgz", + "integrity": "sha512-b6roSuKed5XMg09oWejXb4bRG+iYPDFRHEP2HpAfwpFWgAhpiQIAdrdjZNt4f/pzbfhDqb1R5TSa1KmztOuMKw==", "license": "MIT", - "engines": { - "node": ">=16.0.0" + "dependencies": { + "@nuxt/devtools-kit": "3.2.2", + "@nuxt/devtools-wizard": "3.2.2", + "@nuxt/kit": "^4.3.1", + "@vue/devtools-core": "^8.0.6", + "@vue/devtools-kit": "^8.0.6", + "birpc": "^4.0.0", + "consola": "^3.4.2", + "destr": "^2.0.5", + "error-stack-parser-es": "^1.0.5", + "execa": "^8.0.1", + "fast-npm-meta": "^1.2.1", + "get-port-please": "^3.2.0", + "hookable": "^6.0.1", + "image-meta": "^0.2.2", + "is-installed-globally": "^1.0.0", + "launch-editor": "^2.13.0", + "local-pkg": "^1.1.2", + "magicast": "^0.5.2", + "nypm": "^0.6.5", + "ohash": "^2.0.11", + "pathe": "^2.0.3", + "perfect-debounce": "^2.1.0", + "pkg-types": "^2.3.0", + "semver": "^7.7.4", + "simple-git": "^3.32.2", + "sirv": "^3.0.2", + "structured-clone-es": "^1.0.0", + "tinyglobby": "^0.2.15", + "vite-plugin-inspect": "^11.3.3", + "vite-plugin-vue-tracer": "^1.2.0", + "which": "^5.0.0", + "ws": "^8.19.0" + }, + "bin": { + "devtools": "cli.mjs" + }, + "peerDependencies": { + "@vitejs/devtools": "*", + "vite": ">=6.0" + }, 
+ "peerDependenciesMeta": { + "@vitejs/devtools": { + "optional": true + } } }, - "node_modules/@netlify/serverless-functions-api": { - "version": "1.41.2", - "resolved": "https://registry.npmmirror.com/@netlify/serverless-functions-api/-/serverless-functions-api-1.41.2.tgz", - "integrity": "sha512-pfCkH50JV06SGMNsNPjn8t17hOcId4fA881HeYQgMBOrewjsw4csaYgHEnCxCEu24Y5x75E2ULbFpqm9CvRCqw==", + "node_modules/@nuxt/devtools-kit": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@nuxt/devtools-kit/-/devtools-kit-3.2.2.tgz", + "integrity": "sha512-07E1phqoVPNlexlkrYuOMPhTzLIRjcl9iEqyc/vZLH2zWeH/T1X3v+RLTVW5Oio40f/XBp9yQuyihmX34ddjgQ==", "license": "MIT", - "engines": { - "node": ">=18.0.0" + "dependencies": { + "@nuxt/kit": "^4.3.1", + "execa": "^8.0.1" + }, + "peerDependencies": { + "vite": ">=6.0" } }, - "node_modules/@netlify/zip-it-and-ship-it": { - "version": "12.2.1", - "resolved": "https://registry.npmmirror.com/@netlify/zip-it-and-ship-it/-/zip-it-and-ship-it-12.2.1.tgz", - "integrity": "sha512-zAr+8Tg80y/sUbhdUkZsq4Uy1IMzkSB6H/sKRMrDQ2NJx4uPgf5X5jMdg9g2FljNcxzpfJwc1Gg4OXQrjD0Z4A==", + "node_modules/@nuxt/devtools-wizard": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@nuxt/devtools-wizard/-/devtools-wizard-3.2.2.tgz", + "integrity": "sha512-FaKV3xZF+Sj2ORxJNWTUalnEV8cpXW2rkg60KzQd7LryEHgUdFMuY/oTSVh9YmURqSzwVlfYd1Su56yi02pxlA==", "license": "MIT", "dependencies": { - "@babel/parser": "^7.22.5", - "@babel/types": "7.28.0", - "@netlify/binary-info": "^1.0.0", - "@netlify/serverless-functions-api": "^2.1.3", - "@vercel/nft": "0.29.4", - "archiver": "^7.0.0", - "common-path-prefix": "^3.0.0", - "copy-file": "^11.0.0", - "es-module-lexer": "^1.0.0", - "esbuild": "0.25.5", - "execa": "^8.0.0", - "fast-glob": "^3.3.3", - "filter-obj": "^6.0.0", - "find-up": "^7.0.0", - "is-builtin-module": "^3.1.0", - "is-path-inside": "^4.0.0", - "junk": "^4.0.0", - "locate-path": "^7.0.0", - "merge-options": "^3.0.4", - "minimatch": "^9.0.0", - 
"normalize-path": "^3.0.0", - "p-map": "^7.0.0", - "path-exists": "^5.0.0", - "precinct": "^12.0.0", - "require-package-name": "^2.0.1", - "resolve": "^2.0.0-next.1", - "semver": "^7.3.8", - "tmp-promise": "^3.0.2", - "toml": "^3.0.0", - "unixify": "^1.0.0", - "urlpattern-polyfill": "8.0.2", - "yargs": "^17.0.0", - "zod": "^3.23.8" + "@clack/prompts": "^1.0.1", + "consola": "^3.4.2", + "diff": "^8.0.3", + "execa": "^8.0.1", + "magicast": "^0.5.2", + "pathe": "^2.0.3", + "pkg-types": "^2.3.0", + "semver": "^7.7.4" }, "bin": { - "zip-it-and-ship-it": "bin.js" - }, - "engines": { - "node": ">=18.14.0" + "devtools-wizard": "cli.mjs" } }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@babel/types": { - "version": "7.28.0", - "resolved": "https://registry.npmmirror.com/@babel/types/-/types-7.28.0.tgz", - "integrity": "sha512-jYnje+JyZG5YThjHiF28oT4SIZLnYOcSBb6+SDaFIyzDVSkXQmQQYclJ2R+YxcdmK0AX6x1E5OQNtuh3jHDrUg==", + "node_modules/@nuxt/devtools/node_modules/@vue/devtools-kit": { + "version": "8.0.6", + "resolved": "https://registry.npmjs.org/@vue/devtools-kit/-/devtools-kit-8.0.6.tgz", + "integrity": "sha512-9zXZPTJW72OteDXeSa5RVML3zWDCRcO5t77aJqSs228mdopYj5AiTpihozbsfFJ0IodfNs7pSgOGO3qfCuxDtw==", "license": "MIT", "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" + "@vue/devtools-shared": "^8.0.6", + "birpc": "^2.6.1", + "hookable": "^5.5.3", + "mitt": "^3.0.1", + "perfect-debounce": "^2.0.0", + "speakingurl": "^14.0.1", + "superjson": "^2.2.2" } }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/aix-ppc64": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/aix-ppc64/-/aix-ppc64-0.25.5.tgz", - "integrity": "sha512-9o3TMmpmftaCMepOdA5k/yDw8SfInyzWWTjYTFCX3kPSDJMROQTb8jg+h9Cnwnmm1vOzvxN7gIfB5V2ewpjtGA==", - "cpu": [ - "ppc64" - ], + "node_modules/@nuxt/devtools/node_modules/@vue/devtools-kit/node_modules/birpc": { 
+ "version": "2.9.0", + "resolved": "https://registry.npmjs.org/birpc/-/birpc-2.9.0.tgz", + "integrity": "sha512-KrayHS5pBi69Xi9JmvoqrIgYGDkD6mcSe/i6YKi3w5kekCLzrX4+nawcXqrj2tIp50Kw/mT/s3p+GVK0A0sKxw==", "license": "MIT", - "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=18" + "funding": { + "url": "https://github.com/sponsors/antfu" } }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/android-arm": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/android-arm/-/android-arm-0.25.5.tgz", - "integrity": "sha512-AdJKSPeEHgi7/ZhuIPtcQKr5RQdo6OO2IL87JkianiMYMPbCtot9fxPbrMiBADOWWm3T2si9stAiVsGbTQFkbA==", - "cpu": [ - "arm" - ], - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/android-arm64": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/android-arm64/-/android-arm64-0.25.5.tgz", - "integrity": "sha512-VGzGhj4lJO+TVGV1v8ntCZWJktV7SGCs3Pn1GRWI1SBFtRALoomm8k5E9Pmwg3HOAal2VDc2F9+PM/rEY6oIDg==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/android-x64": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/android-x64/-/android-x64-0.25.5.tgz", - "integrity": "sha512-D2GyJT1kjvO//drbRT3Hib9XPwQeWd9vZoBJn+bu/lVsOZ13cqNdDeqIF/xQ5/VmWvMduP6AmXvylO/PIc2isw==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/darwin-arm64": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/darwin-arm64/-/darwin-arm64-0.25.5.tgz", - "integrity": 
"sha512-GtaBgammVvdF7aPIgH2jxMDdivezgFu6iKpmT+48+F8Hhg5J/sfnDieg0aeG/jfSvkYQU2/pceFPDKlqZzwnfQ==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/darwin-x64": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/darwin-x64/-/darwin-x64-0.25.5.tgz", - "integrity": "sha512-1iT4FVL0dJ76/q1wd7XDsXrSW+oLoquptvh4CLR4kITDtqi2e/xwXwdCVH8hVHU43wgJdsq7Gxuzcs6Iq/7bxQ==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/freebsd-arm64": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.5.tgz", - "integrity": "sha512-nk4tGP3JThz4La38Uy/gzyXtpkPW8zSAmoUhK9xKKXdBCzKODMc2adkB2+8om9BDYugz+uGV7sLmpTYzvmz6Sw==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/freebsd-x64": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/freebsd-x64/-/freebsd-x64-0.25.5.tgz", - "integrity": "sha512-PrikaNjiXdR2laW6OIjlbeuCPrPaAl0IwPIaRv+SMV8CiM8i2LqVUHFC1+8eORgWyY7yhQY+2U2fA55mBzReaw==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/linux-arm": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/linux-arm/-/linux-arm-0.25.5.tgz", - "integrity": "sha512-cPzojwW2okgh7ZlRpcBEtsX7WBuqbLrNXqLU89GxWbNt6uIg78ET82qifUy3W6OVww6ZWobWub5oqZOVtwolfw==", - "cpu": [ - "arm" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" 
- } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/linux-arm64": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/linux-arm64/-/linux-arm64-0.25.5.tgz", - "integrity": "sha512-Z9kfb1v6ZlGbWj8EJk9T6czVEjjq2ntSYLY2cw6pAZl4oKtfgQuS4HOq41M/BcoLPzrUbNd+R4BXFyH//nHxVg==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/linux-ia32": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/linux-ia32/-/linux-ia32-0.25.5.tgz", - "integrity": "sha512-sQ7l00M8bSv36GLV95BVAdhJ2QsIbCuCjh/uYrWiMQSUuV+LpXwIqhgJDcvMTj+VsQmqAHL2yYaasENvJ7CDKA==", - "cpu": [ - "ia32" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/linux-loong64": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/linux-loong64/-/linux-loong64-0.25.5.tgz", - "integrity": "sha512-0ur7ae16hDUC4OL5iEnDb0tZHDxYmuQyhKhsPBV8f99f6Z9KQM02g33f93rNH5A30agMS46u2HP6qTdEt6Q1kg==", - "cpu": [ - "loong64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/linux-mips64el": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/linux-mips64el/-/linux-mips64el-0.25.5.tgz", - "integrity": "sha512-kB/66P1OsHO5zLz0i6X0RxlQ+3cu0mkxS3TKFvkb5lin6uwZ/ttOkP3Z8lfR9mJOBk14ZwZ9182SIIWFGNmqmg==", - "cpu": [ - "mips64el" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/linux-ppc64": { - "version": "0.25.5", - "resolved": 
"https://registry.npmmirror.com/@esbuild/linux-ppc64/-/linux-ppc64-0.25.5.tgz", - "integrity": "sha512-UZCmJ7r9X2fe2D6jBmkLBMQetXPXIsZjQJCjgwpVDz+YMcS6oFR27alkgGv3Oqkv07bxdvw7fyB71/olceJhkQ==", - "cpu": [ - "ppc64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/linux-riscv64": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/linux-riscv64/-/linux-riscv64-0.25.5.tgz", - "integrity": "sha512-kTxwu4mLyeOlsVIFPfQo+fQJAV9mh24xL+y+Bm6ej067sYANjyEw1dNHmvoqxJUCMnkBdKpvOn0Ahql6+4VyeA==", - "cpu": [ - "riscv64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/linux-s390x": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/linux-s390x/-/linux-s390x-0.25.5.tgz", - "integrity": "sha512-K2dSKTKfmdh78uJ3NcWFiqyRrimfdinS5ErLSn3vluHNeHVnBAFWC8a4X5N+7FgVE1EjXS1QDZbpqZBjfrqMTQ==", - "cpu": [ - "s390x" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/linux-x64": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/linux-x64/-/linux-x64-0.25.5.tgz", - "integrity": "sha512-uhj8N2obKTE6pSZ+aMUbqq+1nXxNjZIIjCjGLfsWvVpy7gKCOL6rsY1MhRh9zLtUtAI7vpgLMK6DxjO8Qm9lJw==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/netbsd-arm64": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.5.tgz", - "integrity": "sha512-pwHtMP9viAy1oHPvgxtOv+OkduK5ugofNTVDilIzBLpoWAM16r7b/mxBvfpuQDpRQFMfuVr5aLcn4yveGvBZvw==", - "cpu": [ - "arm64" - 
], - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/netbsd-x64": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/netbsd-x64/-/netbsd-x64-0.25.5.tgz", - "integrity": "sha512-WOb5fKrvVTRMfWFNCroYWWklbnXH0Q5rZppjq0vQIdlsQKuw6mdSihwSo4RV/YdQ5UCKKvBy7/0ZZYLBZKIbwQ==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/openbsd-arm64": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.5.tgz", - "integrity": "sha512-7A208+uQKgTxHd0G0uqZO8UjK2R0DDb4fDmERtARjSHWxqMTye4Erz4zZafx7Di9Cv+lNHYuncAkiGFySoD+Mw==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/openbsd-x64": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/openbsd-x64/-/openbsd-x64-0.25.5.tgz", - "integrity": "sha512-G4hE405ErTWraiZ8UiSoesH8DaCsMm0Cay4fsFWOOUcz8b8rC6uCvnagr+gnioEjWn0wC+o1/TAHt+It+MpIMg==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/sunos-x64": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/sunos-x64/-/sunos-x64-0.25.5.tgz", - "integrity": "sha512-l+azKShMy7FxzY0Rj4RCt5VD/q8mG/e+mDivgspo+yL8zW7qEwctQ6YqKX34DTEleFAvCIUviCFX1SDZRSyMQA==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/win32-arm64": { - "version": "0.25.5", - 
"resolved": "https://registry.npmmirror.com/@esbuild/win32-arm64/-/win32-arm64-0.25.5.tgz", - "integrity": "sha512-O2S7SNZzdcFG7eFKgvwUEZ2VG9D/sn/eIiz8XRZ1Q/DO5a3s76Xv0mdBzVM5j5R639lXQmPmSo0iRpHqUUrsxw==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/win32-ia32": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/win32-ia32/-/win32-ia32-0.25.5.tgz", - "integrity": "sha512-onOJ02pqs9h1iMJ1PQphR+VZv8qBMQ77Klcsqv9CNW2w6yLqoURLcgERAIurY6QE63bbLuqgP9ATqajFLK5AMQ==", - "cpu": [ - "ia32" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@esbuild/win32-x64": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/@esbuild/win32-x64/-/win32-x64-0.25.5.tgz", - "integrity": "sha512-TXv6YnJ8ZMVdX+SXWVBo/0p8LTcrUYngpWjvm91TMjjBQii7Oz11Lw5lbDV5Y0TzuhSJHwiH4hEtC1I42mMS0g==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/@netlify/serverless-functions-api": { - "version": "2.1.3", - "resolved": "https://registry.npmmirror.com/@netlify/serverless-functions-api/-/serverless-functions-api-2.1.3.tgz", - "integrity": "sha512-bNlN/hpND8xFQzpjyKxm6vJayD+bPBlOvs4lWihE7WULrphuH1UuFsoVE5386bNNGH8Rs1IH01AFsl7ALQgOlQ==", - "license": "MIT", - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@netlify/zip-it-and-ship-it/node_modules/esbuild": { - "version": "0.25.5", - "resolved": "https://registry.npmmirror.com/esbuild/-/esbuild-0.25.5.tgz", - "integrity": "sha512-P8OtKZRv/5J5hhz0cUAdu/cLuPIKXpQl1R9pZtvmHWQvrAUVd0UNIPT4IB4W3rNOqVO0rlqHmCIbSwxh/c9yUQ==", - "hasInstallScript": true, - "license": "MIT", - "bin": { - "esbuild": "bin/esbuild" 
- }, - "engines": { - "node": ">=18" - }, - "optionalDependencies": { - "@esbuild/aix-ppc64": "0.25.5", - "@esbuild/android-arm": "0.25.5", - "@esbuild/android-arm64": "0.25.5", - "@esbuild/android-x64": "0.25.5", - "@esbuild/darwin-arm64": "0.25.5", - "@esbuild/darwin-x64": "0.25.5", - "@esbuild/freebsd-arm64": "0.25.5", - "@esbuild/freebsd-x64": "0.25.5", - "@esbuild/linux-arm": "0.25.5", - "@esbuild/linux-arm64": "0.25.5", - "@esbuild/linux-ia32": "0.25.5", - "@esbuild/linux-loong64": "0.25.5", - "@esbuild/linux-mips64el": "0.25.5", - "@esbuild/linux-ppc64": "0.25.5", - "@esbuild/linux-riscv64": "0.25.5", - "@esbuild/linux-s390x": "0.25.5", - "@esbuild/linux-x64": "0.25.5", - "@esbuild/netbsd-arm64": "0.25.5", - "@esbuild/netbsd-x64": "0.25.5", - "@esbuild/openbsd-arm64": "0.25.5", - "@esbuild/openbsd-x64": "0.25.5", - "@esbuild/sunos-x64": "0.25.5", - "@esbuild/win32-arm64": "0.25.5", - "@esbuild/win32-ia32": "0.25.5", - "@esbuild/win32-x64": "0.25.5" - } - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmmirror.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmmirror.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "license": "MIT", - "engines": { - "node": ">= 8" - } + "node_modules/@nuxt/devtools/node_modules/@vue/devtools-kit/node_modules/hookable": { + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/hookable/-/hookable-5.5.3.tgz", + "integrity": 
"sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ==", + "license": "MIT" }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmmirror.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "node_modules/@nuxt/devtools/node_modules/@vue/devtools-shared": { + "version": "8.0.6", + "resolved": "https://registry.npmjs.org/@vue/devtools-shared/-/devtools-shared-8.0.6.tgz", + "integrity": "sha512-Pp1JylTqlgMJvxW6MGyfTF8vGvlBSCAvMFaDCYa82Mgw7TT5eE5kkHgDvmOGHWeJE4zIDfCpCxHapsK2LtIAJg==", "license": "MIT", "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" + "rfdc": "^1.4.1" } }, - "node_modules/@nuxt/cli": { - "version": "3.26.4", - "resolved": "https://registry.npmmirror.com/@nuxt/cli/-/cli-3.26.4.tgz", - "integrity": "sha512-PeZcH7ghQxIcCaKyu+So3qGEjKG18TN1ic4jKvKFQouNgzPSVfvZAeBOHU4znEuDFp/wmoN5EliyHO4HaSs+rw==", + "node_modules/@nuxt/devtools/node_modules/birpc": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/birpc/-/birpc-4.0.0.tgz", + "integrity": "sha512-LShSxJP0KTmd101b6DRyGBj57LZxSDYWKitQNW/mi8GRMvZb078Uf9+pveax1DrVL89vm7mWe+TovdI/UDOuPw==", "license": "MIT", - "dependencies": { - "c12": "^3.1.0", - "citty": "^0.1.6", - "clipboardy": "^4.0.0", - "confbox": "^0.2.2", - "consola": "^3.4.2", - "defu": "^6.1.4", - "exsolve": "^1.0.7", - "fuse.js": "^7.1.0", - "get-port-please": "^3.2.0", - "giget": "^2.0.0", - "h3": "^1.15.3", - "httpxy": "^0.1.7", - "jiti": "^2.4.2", - "listhen": "^1.9.0", - "nypm": "^0.6.0", - "ofetch": "^1.4.1", - "ohash": "^2.0.11", - "pathe": "^2.0.3", - "perfect-debounce": "^1.0.0", - "pkg-types": "^2.2.0", - "scule": "^1.3.0", - "semver": "^7.7.2", - "std-env": "^3.9.0", - "tinyexec": "^1.0.1", - "ufo": "^1.6.1", - "youch": "^4.1.0-beta.10" - }, - "bin": { - "nuxi": 
"bin/nuxi.mjs", - "nuxi-ng": "bin/nuxi.mjs", - "nuxt": "bin/nuxi.mjs", - "nuxt-cli": "bin/nuxi.mjs" - }, - "engines": { - "node": "^16.10.0 || >=18.0.0" + "funding": { + "url": "https://github.com/sponsors/antfu" } }, - "node_modules/@nuxt/devalue": { - "version": "2.0.2", - "resolved": "https://registry.npmmirror.com/@nuxt/devalue/-/devalue-2.0.2.tgz", - "integrity": "sha512-GBzP8zOc7CGWyFQS6dv1lQz8VVpz5C2yRszbXufwG/9zhStTIH50EtD87NmWbTMwXDvZLNg8GIpb1UFdH93JCA==", + "node_modules/@nuxt/devtools/node_modules/hookable": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/hookable/-/hookable-6.0.1.tgz", + "integrity": "sha512-uKGyY8BuzN/a5gvzvA+3FVWo0+wUjgtfSdnmjtrOVwQCZPHpHDH2WRO3VZSOeluYrHoDCiXFffZXs8Dj1ULWtw==", "license": "MIT" }, - "node_modules/@nuxt/devtools": { - "version": "2.6.2", - "resolved": "https://registry.npmmirror.com/@nuxt/devtools/-/devtools-2.6.2.tgz", - "integrity": "sha512-pqcSDPv1I+8fxa6FvhAxVrfcN/sXYLOBe9scTLbRQOVLTO0pHzryayho678qNKiwWGgj/rcjEDr6IZCgwqOCfA==", - "license": "MIT", - "dependencies": { - "@nuxt/devtools-kit": "2.6.2", - "@nuxt/devtools-wizard": "2.6.2", - "@nuxt/kit": "^3.17.6", - "@vue/devtools-core": "^7.7.7", - "@vue/devtools-kit": "^7.7.7", - "birpc": "^2.4.0", - "consola": "^3.4.2", - "destr": "^2.0.5", - "error-stack-parser-es": "^1.0.5", - "execa": "^8.0.1", - "fast-npm-meta": "^0.4.4", - "get-port-please": "^3.1.2", - "hookable": "^5.5.3", - "image-meta": "^0.2.1", - "is-installed-globally": "^1.0.0", - "launch-editor": "^2.10.0", - "local-pkg": "^1.1.1", - "magicast": "^0.3.5", - "nypm": "^0.6.0", - "ohash": "^2.0.11", - "pathe": "^2.0.3", - "perfect-debounce": "^1.0.0", - "pkg-types": "^2.2.0", - "semver": "^7.7.2", - "simple-git": "^3.28.0", - "sirv": "^3.0.1", - "structured-clone-es": "^1.0.0", - "tinyglobby": "^0.2.14", - "vite-plugin-inspect": "^11.3.0", - "vite-plugin-vue-tracer": "^1.0.0", - "which": "^5.0.0", - "ws": "^8.18.3" - }, - "bin": { - "devtools": "cli.mjs" - }, - 
"peerDependencies": { - "vite": ">=6.0" - } - }, - "node_modules/@nuxt/devtools-kit": { - "version": "2.6.2", - "resolved": "https://registry.npmmirror.com/@nuxt/devtools-kit/-/devtools-kit-2.6.2.tgz", - "integrity": "sha512-esErdMQ0u3wXXogKQ3IE2m0fxv52w6CzPsfsXF4o5ZVrUQrQaH58ygupDAQTYdlGTgtqmEA6KkHTGG5cM6yxeg==", - "license": "MIT", - "dependencies": { - "@nuxt/kit": "^3.17.6", - "execa": "^8.0.1" - }, - "peerDependencies": { - "vite": ">=6.0" - } + "node_modules/@nuxt/devtools/node_modules/perfect-debounce": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-2.1.0.tgz", + "integrity": "sha512-LjgdTytVFXeUgtHZr9WYViYSM/g8MkcTPYDlPa3cDqMirHjKiSZPYd6DoL7pK8AJQr+uWkQvCjHNdiMqsrJs+g==", + "license": "MIT" }, - "node_modules/@nuxt/devtools-kit/node_modules/@nuxt/kit": { - "version": "3.17.7", - "resolved": "https://registry.npmmirror.com/@nuxt/kit/-/kit-3.17.7.tgz", - "integrity": "sha512-JLno3ur7Pix2o/StxIMlEHRkMawA6h7uzjZBDgxdeKXRWTYY8ID9YekSkN4PBlEFGXBfCBOcPd5+YqcyBUAMkw==", + "node_modules/@nuxt/kit": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/@nuxt/kit/-/kit-4.3.1.tgz", + "integrity": "sha512-UjBFt72dnpc+83BV3OIbCT0YHLevJtgJCHpxMX0YRKWLDhhbcDdUse87GtsQBrjvOzK7WUNUYLDS/hQLYev5rA==", "license": "MIT", "dependencies": { - "c12": "^3.0.4", + "c12": "^3.3.3", "consola": "^3.4.2", "defu": "^6.1.4", "destr": "^2.0.5", "errx": "^0.1.0", - "exsolve": "^1.0.7", + "exsolve": "^1.0.8", "ignore": "^7.0.5", - "jiti": "^2.4.2", + "jiti": "^2.6.1", "klona": "^2.0.6", - "knitwork": "^1.2.0", - "mlly": "^1.7.4", + "mlly": "^1.8.0", "ohash": "^2.0.11", "pathe": "^2.0.3", - "pkg-types": "^2.2.0", + "pkg-types": "^2.3.0", + "rc9": "^3.0.0", "scule": "^1.3.0", - "semver": "^7.7.2", - "std-env": "^3.9.0", - "tinyglobby": "^0.2.14", - "ufo": "^1.6.1", - "unctx": "^2.4.1", - "unimport": "^5.1.0", + "semver": "^7.7.4", + "tinyglobby": "^0.2.15", + "ufo": "^1.6.3", + "unctx": "^2.5.0", "untyped": "^2.0.0" }, 
"engines": { "node": ">=18.12.0" } }, - "node_modules/@nuxt/devtools-wizard": { - "version": "2.6.2", - "resolved": "https://registry.npmmirror.com/@nuxt/devtools-wizard/-/devtools-wizard-2.6.2.tgz", - "integrity": "sha512-s1eYYKi2eZu2ZUPQrf22C0SceWs5/C3c3uow/DVunD304Um/Tj062xM9E4p1B9L8yjaq8t0Gtyu/YvZdo/reyg==", - "license": "MIT", - "dependencies": { - "consola": "^3.4.2", - "diff": "^8.0.2", - "execa": "^8.0.1", - "magicast": "^0.3.5", - "pathe": "^2.0.3", - "pkg-types": "^2.2.0", - "prompts": "^2.4.2", - "semver": "^7.7.2" - }, - "bin": { - "devtools-wizard": "cli.mjs" - } - }, - "node_modules/@nuxt/devtools/node_modules/@nuxt/kit": { - "version": "3.17.7", - "resolved": "https://registry.npmmirror.com/@nuxt/kit/-/kit-3.17.7.tgz", - "integrity": "sha512-JLno3ur7Pix2o/StxIMlEHRkMawA6h7uzjZBDgxdeKXRWTYY8ID9YekSkN4PBlEFGXBfCBOcPd5+YqcyBUAMkw==", + "node_modules/@nuxt/kit/node_modules/rc9": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/rc9/-/rc9-3.0.0.tgz", + "integrity": "sha512-MGOue0VqscKWQ104udASX/3GYDcKyPI4j4F8gu/jHHzglpmy9a/anZK3PNe8ug6aZFl+9GxLtdhe3kVZuMaQbA==", "license": "MIT", "dependencies": { - "c12": "^3.0.4", - "consola": "^3.4.2", "defu": "^6.1.4", - "destr": "^2.0.5", - "errx": "^0.1.0", - "exsolve": "^1.0.7", - "ignore": "^7.0.5", - "jiti": "^2.4.2", - "klona": "^2.0.6", - "knitwork": "^1.2.0", - "mlly": "^1.7.4", - "ohash": "^2.0.11", - "pathe": "^2.0.3", - "pkg-types": "^2.2.0", - "scule": "^1.3.0", - "semver": "^7.7.2", - "std-env": "^3.9.0", - "tinyglobby": "^0.2.14", - "ufo": "^1.6.1", - "unctx": "^2.4.1", - "unimport": "^5.1.0", - "untyped": "^2.0.0" - }, - "engines": { - "node": ">=18.12.0" + "destr": "^2.0.5" } }, - "node_modules/@nuxt/kit": { - "version": "4.0.1", - "resolved": "https://registry.npmmirror.com/@nuxt/kit/-/kit-4.0.1.tgz", - "integrity": "sha512-9vYpbuK3xcVhuDq+NyoLhbAolV/bEESaozFOMutl0jhrODcNWFrJ8wQSZIt9yxcFXUgXgUa2ms31qaUEpXrykw==", + "node_modules/@nuxt/nitro-server": { + "version": "4.3.1", + 
"resolved": "https://registry.npmjs.org/@nuxt/nitro-server/-/nitro-server-4.3.1.tgz", + "integrity": "sha512-4aNiM69Re02gI1ywnDND0m6QdVKXhWzDdtvl/16veytdHZj3FSq57ZCwOClNJ7HQkEMqXgS+bi6S2HmJX+et+g==", "license": "MIT", "dependencies": { - "c12": "^3.1.0", + "@nuxt/devalue": "^2.0.2", + "@nuxt/kit": "4.3.1", + "@unhead/vue": "^2.1.3", + "@vue/shared": "^3.5.27", "consola": "^3.4.2", "defu": "^6.1.4", - "destr": "^2.0.5", - "errx": "^0.1.0", - "exsolve": "^1.0.7", - "ignore": "^7.0.5", - "jiti": "^2.4.2", - "klona": "^2.0.6", - "mlly": "^1.7.4", - "ohash": "^2.0.11", - "pathe": "^2.0.3", - "pkg-types": "^2.2.0", - "scule": "^1.3.0", - "semver": "^7.7.2", - "std-env": "^3.9.0", - "tinyglobby": "^0.2.14", - "ufo": "^1.6.1", - "unctx": "^2.4.1", - "unimport": "^5.1.0", - "untyped": "^2.0.0" + "destr": "^2.0.5", + "devalue": "^5.6.2", + "errx": "^0.1.0", + "escape-string-regexp": "^5.0.0", + "exsolve": "^1.0.8", + "h3": "^1.15.5", + "impound": "^1.0.0", + "klona": "^2.0.6", + "mocked-exports": "^0.1.1", + "nitropack": "^2.13.1", + "ohash": "^2.0.11", + "pathe": "^2.0.3", + "pkg-types": "^2.3.0", + "rou3": "^0.7.12", + "std-env": "^3.10.0", + "ufo": "^1.6.3", + "unctx": "^2.5.0", + "unstorage": "^1.17.4", + "vue": "^3.5.27", + "vue-bundle-renderer": "^2.2.0", + "vue-devtools-stub": "^0.1.0" }, "engines": { - "node": ">=18.12.0" + "node": "^20.19.0 || >=22.12.0" + }, + "peerDependencies": { + "nuxt": "^4.3.1" } }, "node_modules/@nuxt/schema": { - "version": "4.0.1", - "resolved": "https://registry.npmmirror.com/@nuxt/schema/-/schema-4.0.1.tgz", - "integrity": "sha512-/e/avVyJ/pLydTQL9iGlpvyGiJ0Y6+TKLXlUFR0zPTJv6asHzCqHKbiL84+wSAQmTw6Hl+z0yZv8uEx21+JoHw==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/@nuxt/schema/-/schema-4.3.1.tgz", + "integrity": "sha512-S+wHJdYDuyk9I43Ej27y5BeWMZgi7R/UVql3b3qtT35d0fbpXW7fUenzhLRCCDC6O10sjguc6fcMcR9sMKvV8g==", "license": "MIT", "dependencies": { - "@vue/shared": "^3.5.17", - "consola": "^3.4.2", + "@vue/shared": 
"^3.5.27", "defu": "^6.1.4", "pathe": "^2.0.3", - "std-env": "^3.9.0" + "pkg-types": "^2.3.0", + "std-env": "^3.10.0" }, "engines": { "node": "^14.18.0 || >=16.10.0" } }, "node_modules/@nuxt/telemetry": { - "version": "2.6.6", - "resolved": "https://registry.npmmirror.com/@nuxt/telemetry/-/telemetry-2.6.6.tgz", - "integrity": "sha512-Zh4HJLjzvm3Cq9w6sfzIFyH9ozK5ePYVfCUzzUQNiZojFsI2k1QkSBrVI9BGc6ArKXj/O6rkI6w7qQ+ouL8Cag==", + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/@nuxt/telemetry/-/telemetry-2.7.0.tgz", + "integrity": "sha512-mrKC3NjAlBOooLLVTYcIUie1meipoYq5vkoESoVTEWTB34T3a0QJzOfOPch+HYlUR+5Lqy1zLMv6epHFgYAKLA==", "license": "MIT", "dependencies": { - "@nuxt/kit": "^3.15.4", - "citty": "^0.1.6", + "citty": "^0.2.0", "consola": "^3.4.2", - "destr": "^2.0.3", - "dotenv": "^16.4.7", - "git-url-parse": "^16.0.1", - "is-docker": "^3.0.0", - "ofetch": "^1.4.1", - "package-manager-detector": "^1.1.0", - "pathe": "^2.0.3", - "rc9": "^2.1.2", - "std-env": "^3.8.1" + "ofetch": "^2.0.0-alpha.3", + "rc9": "^3.0.0", + "std-env": "^3.10.0" }, "bin": { "nuxt-telemetry": "bin/nuxt-telemetry.mjs" }, "engines": { "node": ">=18.12.0" + }, + "peerDependencies": { + "@nuxt/kit": ">=3.0.0" } }, - "node_modules/@nuxt/telemetry/node_modules/@nuxt/kit": { - "version": "3.17.7", - "resolved": "https://registry.npmmirror.com/@nuxt/kit/-/kit-3.17.7.tgz", - "integrity": "sha512-JLno3ur7Pix2o/StxIMlEHRkMawA6h7uzjZBDgxdeKXRWTYY8ID9YekSkN4PBlEFGXBfCBOcPd5+YqcyBUAMkw==", + "node_modules/@nuxt/telemetry/node_modules/citty": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/citty/-/citty-0.2.1.tgz", + "integrity": "sha512-kEV95lFBhQgtogAPlQfJJ0WGVSokvLr/UEoFPiKKOXF7pl98HfUVUD0ejsuTCld/9xH9vogSywZ5KqHzXrZpqg==", + "license": "MIT" + }, + "node_modules/@nuxt/telemetry/node_modules/ofetch": { + "version": "2.0.0-alpha.3", + "resolved": "https://registry.npmjs.org/ofetch/-/ofetch-2.0.0-alpha.3.tgz", + "integrity": 
"sha512-zpYTCs2byOuft65vI3z43Dd6iSdFbOZZLb9/d21aCpx2rGastVU9dOCv0lu4ykc1Ur1anAYjDi3SUvR0vq50JA==", + "license": "MIT" + }, + "node_modules/@nuxt/telemetry/node_modules/rc9": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/rc9/-/rc9-3.0.0.tgz", + "integrity": "sha512-MGOue0VqscKWQ104udASX/3GYDcKyPI4j4F8gu/jHHzglpmy9a/anZK3PNe8ug6aZFl+9GxLtdhe3kVZuMaQbA==", "license": "MIT", "dependencies": { - "c12": "^3.0.4", - "consola": "^3.4.2", "defu": "^6.1.4", - "destr": "^2.0.5", - "errx": "^0.1.0", - "exsolve": "^1.0.7", - "ignore": "^7.0.5", - "jiti": "^2.4.2", - "klona": "^2.0.6", - "knitwork": "^1.2.0", - "mlly": "^1.7.4", - "ohash": "^2.0.11", - "pathe": "^2.0.3", - "pkg-types": "^2.2.0", - "scule": "^1.3.0", - "semver": "^7.7.2", - "std-env": "^3.9.0", - "tinyglobby": "^0.2.14", - "ufo": "^1.6.1", - "unctx": "^2.4.1", - "unimport": "^5.1.0", - "untyped": "^2.0.0" - }, - "engines": { - "node": ">=18.12.0" + "destr": "^2.0.5" } }, "node_modules/@nuxt/vite-builder": { - "version": "4.0.1", - "resolved": "https://registry.npmmirror.com/@nuxt/vite-builder/-/vite-builder-4.0.1.tgz", - "integrity": "sha512-+ScfRxpCCHkJgkBYRXkvQHLsF/vxyFkwQzTBDL6+8sg9+BcTYkxOjVHDZ1l0qzeVORXzoq4G+oPfKsob64vODA==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/@nuxt/vite-builder/-/vite-builder-4.3.1.tgz", + "integrity": "sha512-LndnxPJzDUDbWAB8q5gZZN1mSOLHEyMOoj4T3pTdPydGf31QZdMR0V1fQ1fdRgtgNtWB3WLP0d1ZfaAOITsUpw==", "license": "MIT", "dependencies": { - "@nuxt/kit": "4.0.1", - "@rollup/plugin-replace": "^6.0.2", - "@vitejs/plugin-vue": "^6.0.0", - "@vitejs/plugin-vue-jsx": "^5.0.1", - "autoprefixer": "^10.4.21", + "@nuxt/kit": "4.3.1", + "@rollup/plugin-replace": "^6.0.3", + "@vitejs/plugin-vue": "^6.0.4", + "@vitejs/plugin-vue-jsx": "^5.1.4", + "autoprefixer": "^10.4.24", "consola": "^3.4.2", - "cssnano": "^7.1.0", + "cssnano": "^7.1.2", "defu": "^6.1.4", - "esbuild": "^0.25.8", + "esbuild": "^0.27.3", "escape-string-regexp": "^5.0.0", - "exsolve": "^1.0.7", 
+ "exsolve": "^1.0.8", "get-port-please": "^3.2.0", - "h3": "^1.15.3", - "jiti": "^2.4.2", - "knitwork": "^1.2.0", - "magic-string": "^0.30.17", - "mlly": "^1.7.4", + "jiti": "^2.6.1", + "knitwork": "^1.3.0", + "magic-string": "^0.30.21", + "mlly": "^1.8.0", "mocked-exports": "^0.1.1", "pathe": "^2.0.3", - "pkg-types": "^2.2.0", + "pkg-types": "^2.3.0", "postcss": "^8.5.6", - "rollup-plugin-visualizer": "^6.0.3", - "std-env": "^3.9.0", - "ufo": "^1.6.1", - "unenv": "^2.0.0-rc.18", - "vite": "^7.0.5", - "vite-node": "^3.2.4", - "vite-plugin-checker": "^0.10.0", - "vue-bundle-renderer": "^2.1.1" + "rollup-plugin-visualizer": "^6.0.5", + "seroval": "^1.5.0", + "std-env": "^3.10.0", + "ufo": "^1.6.3", + "unenv": "^2.0.0-rc.24", + "vite": "^7.3.1", + "vite-node": "^5.3.0", + "vite-plugin-checker": "^0.12.0", + "vue-bundle-renderer": "^2.2.0" }, "engines": { "node": "^20.19.0 || >=22.12.0" }, "peerDependencies": { + "nuxt": "4.3.1", + "rolldown": "^1.0.0-beta.38", "vue": "^3.3.4" + }, + "peerDependenciesMeta": { + "rolldown": { + "optional": true + } } }, "node_modules/@nuxtjs/tailwindcss": { @@ -2191,10 +1661,26 @@ "node": ">=18.12.0" } }, + "node_modules/@oxc-minify/binding-android-arm-eabi": { + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-minify/binding-android-arm-eabi/-/binding-android-arm-eabi-0.112.0.tgz", + "integrity": "sha512-m7TGBR2hjsBJIN9UJ909KBoKsuogo6CuLsHKvUIBXdjI0JVHP8g4ZHeB+BJpGn5LJdeSGDfz9MWiuXrZDRzunw==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, "node_modules/@oxc-minify/binding-android-arm64": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-minify/binding-android-arm64/-/binding-android-arm64-0.77.3.tgz", - "integrity": "sha512-9bGiDHSkPr6eaP4+/2DQerG+V69Ut4mezL1JtBTk54Iyc6tNsoHa9s+3wJSUHesXEgiHd/IxwuSXRtD9yC3VhQ==", + "version": "0.112.0", + "resolved": 
"https://registry.npmjs.org/@oxc-minify/binding-android-arm64/-/binding-android-arm64-0.112.0.tgz", + "integrity": "sha512-RvxOOkzvP5NeeoraBtgNJSBqO+XzlS7DooxST/drAXCfO52GsmxVB1N7QmifrsTYtH8GC2z3DTFjZQ1w/AJOWg==", "cpu": [ "arm64" ], @@ -2204,13 +1690,13 @@ "android" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-minify/binding-darwin-arm64": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-minify/binding-darwin-arm64/-/binding-darwin-arm64-0.77.3.tgz", - "integrity": "sha512-DcRuFK/W3VqIlS8Wvb9bwd5yX+QTlr2ds2f5HW52OPx4odFwyF3+dD6nj3kyxvxITtf6U3jjqyaZEkq+LSQ5RQ==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-minify/binding-darwin-arm64/-/binding-darwin-arm64-0.112.0.tgz", + "integrity": "sha512-hDslO3uVHza3kB9zkcsi25JzN65Gj5ZYty0OvylS11Mhg9ydCYxAzfQ/tISHW/YmV1NRUJX8+GGqM1cKmrHaTA==", "cpu": [ "arm64" ], @@ -2220,13 +1706,13 @@ "darwin" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-minify/binding-darwin-x64": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-minify/binding-darwin-x64/-/binding-darwin-x64-0.77.3.tgz", - "integrity": "sha512-ZOKwC0nRNKpDKZq+sbFTbzJbrGR+drhIx3jhaTzSFpTWyzs3m5PW0yB+bKhhrqnk1Y26jtNixykBNiyhuPhCxQ==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-minify/binding-darwin-x64/-/binding-darwin-x64-0.112.0.tgz", + "integrity": "sha512-mWA2Y5bUyNoGM+gSGGHesgtQ3LDWgpRe4zDGkBDovxNIiDLBXqu/7QcuS+G918w8oG9VYm1q1iinILer/2pD1Q==", "cpu": [ "x64" ], @@ -2236,13 +1722,13 @@ "darwin" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-minify/binding-freebsd-x64": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-minify/binding-freebsd-x64/-/binding-freebsd-x64-0.77.3.tgz", - "integrity": "sha512-z2LgrCT0YjxNIZRTOBFY5/FnqGX9S5QvkC/yoYqfDDuest8T6feTN68xXWg6D8+vFJPukvKEGY1xGikybc33xA==", + 
"version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-minify/binding-freebsd-x64/-/binding-freebsd-x64-0.112.0.tgz", + "integrity": "sha512-T7fsegxcy82xS0jWPXkz/BMhrkb3D7YOCiV0R9pDksjaov+iIFoNEWAoBsaC5NtpdzkX+bmffwDpu336EIfEeg==", "cpu": [ "x64" ], @@ -2252,13 +1738,13 @@ "freebsd" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-minify/binding-linux-arm-gnueabihf": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-minify/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-0.77.3.tgz", - "integrity": "sha512-VdpPQk9Xuu6C+p2DprWAEhIyELBrZLAzipMxoRnmox/HlFigs+FIeEfklCMls3yMSLCu6wKTdMdWeRu+dLXEHg==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-minify/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-0.112.0.tgz", + "integrity": "sha512-yePavbIilAcpVYc8vRsDCn3xJxHMXDZIiamyH9fuLosAHNELcLib4/JR4fhDk4NmHVagQH3kRhsnm5Q9cm3pAw==", "cpu": [ "arm" ], @@ -2268,13 +1754,13 @@ "linux" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-minify/binding-linux-arm-musleabihf": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-minify/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-0.77.3.tgz", - "integrity": "sha512-bhiPBIQKIxKtOSHgxYQiVeJ7CrfHWDZxaNFMf6ktDBmYBeD9lE9A356wDfgBPFkVOGV+juSPrnpu7qg2si/Q7Q==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-minify/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-0.112.0.tgz", + "integrity": "sha512-lmPWLXtW6FspERhy97iP0hwbmLtL66xI29QQ9GpHmTiE4k+zv/FaefuV/Qw+LuHnmFSYzUNrLcxh4ulOZTIP2g==", "cpu": [ "arm" ], @@ -2284,13 +1770,13 @@ "linux" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-minify/binding-linux-arm64-gnu": { - "version": "0.77.3", - "resolved": 
"https://registry.npmmirror.com/@oxc-minify/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-0.77.3.tgz", - "integrity": "sha512-PYjFgTLCMxoa4yIgxVTNOltGk9nuPWTYZpDGEZu0he+0HC4iD86ZJIEl0mW0CaNaMU2np/7gAr+Izu3W71q+FQ==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-minify/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-0.112.0.tgz", + "integrity": "sha512-gySS5XqU5MKs/oCjsTlVm8zb8lqcNKHEANsaRmhW2qvGKJoeGwFb6Fbq6TLCZMRuk143mLbncbverBCa1c3dog==", "cpu": [ "arm64" ], @@ -2300,13 +1786,13 @@ "linux" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-minify/binding-linux-arm64-musl": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-minify/binding-linux-arm64-musl/-/binding-linux-arm64-musl-0.77.3.tgz", - "integrity": "sha512-GWa6MyEIwrDfEruj9SmIi/eG0XyEPSSupbltCL2k/cYgb+aUl1lT3sJLbOlKZqBbTzpuouAd+CkDqz+8UH/0qA==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-minify/binding-linux-arm64-musl/-/binding-linux-arm64-musl-0.112.0.tgz", + "integrity": "sha512-IRFMZX589lr3rjG0jc8N261/7wqFq2Vl0OMrJWeFls5BF8HiB+fRYuf0Zy2CyRH6NCY2vbdDdp+QCAavQGVsGw==", "cpu": [ "arm64" ], @@ -2316,13 +1802,29 @@ "linux" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-minify/binding-linux-ppc64-gnu": { + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-minify/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-0.112.0.tgz", + "integrity": "sha512-V/69XqIW9hCUceDpcZh79oDg+F4ptEgIfKRENzYs41LRbSoJ7sNjjcW4zifqyviTvzcnXLgK4uoTyoymmNZBMQ==", + "cpu": [ + "ppc64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-minify/binding-linux-riscv64-gnu": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-minify/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-0.77.3.tgz", - "integrity": 
"sha512-Wj1h95rGfMMVu0NMBNzo56WaB+z/mBVFRF4ij4Dbf2oBy4o3qDe2Q5Doa5U5c1k/uJbsM1X/mV7vqqgkHdORBA==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-minify/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-0.112.0.tgz", + "integrity": "sha512-zghvexySyGXGNW+MutjZN7UGTyOQl56RWMlPe1gb+knBm/+0hf9qjk7Q6ofm2tSte+vQolPfQttifGl0dP9uvQ==", "cpu": [ "riscv64" ], @@ -2332,13 +1834,29 @@ "linux" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-minify/binding-linux-riscv64-musl": { + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-minify/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-0.112.0.tgz", + "integrity": "sha512-E4a8VUFDJPb2mPcc7J4NQQPi1ssHKF7/g4r6KD2+SBVERIaEEd3cGNqR7SG3g82/BLGV2UDoQe/WvZCkt5M/bQ==", + "cpu": [ + "riscv64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-minify/binding-linux-s390x-gnu": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-minify/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-0.77.3.tgz", - "integrity": "sha512-xTIGeZZoOfa7c4FU+1OcZTk73W/0YD2m3Zwg4p0Wtch+0Z6VRyu/7CENjBXpCRkWF4C8sgvl6d8ZKOzF5wU+Dw==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-minify/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-0.112.0.tgz", + "integrity": "sha512-2Hx87sK3y6jBV364Mvv0zyxiITIuy26Ixenv6pK7e+4an3HgNdhAj8nk3aLoLTTSvLik5/MaGhcZGEu9tYV1aA==", "cpu": [ "s390x" ], @@ -2348,13 +1866,13 @@ "linux" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-minify/binding-linux-x64-gnu": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-minify/binding-linux-x64-gnu/-/binding-linux-x64-gnu-0.77.3.tgz", - "integrity": "sha512-YkgfAVmdtPMqJO/elfYBstnwGjD2P0SJwAs02c84/1JKRemrjSKqSewg3ETFIpo43c6b0g9OtoWj47Wwpnka7A==", + "version": "0.112.0", + 
"resolved": "https://registry.npmjs.org/@oxc-minify/binding-linux-x64-gnu/-/binding-linux-x64-gnu-0.112.0.tgz", + "integrity": "sha512-2MSCnEPLk9ddSouMhJo78Xy2/JbYC80OYzWdR4yWTGSULsgH3d1VXg73DSwFL8vU7Ad9oK10DioBY2ww7sQTEg==", "cpu": [ "x64" ], @@ -2364,13 +1882,13 @@ "linux" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-minify/binding-linux-x64-musl": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-minify/binding-linux-x64-musl/-/binding-linux-x64-musl-0.77.3.tgz", - "integrity": "sha512-//A5mBFmxvV+JzqI2/94SFpEF+nev0I/urXwhYPe8qzCYTlnzwxodH0Yb6js+BgebqiPdYs6YEp5Q2C/6OgsbA==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-minify/binding-linux-x64-musl/-/binding-linux-x64-musl-0.112.0.tgz", + "integrity": "sha512-HAPfmQKlkVi97/zRonVE9t/kKUG3ni+mOuU1Euw+3s37KwUuOJjmcwXdclVgXKBlTkCGO0FajPwW5dAJeIXCCw==", "cpu": [ "x64" ], @@ -2380,29 +1898,45 @@ "linux" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-minify/binding-openharmony-arm64": { + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-minify/binding-openharmony-arm64/-/binding-openharmony-arm64-0.112.0.tgz", + "integrity": "sha512-bLnMojcPadYzMNpB6IAqMiTOag4etc0zbs8On73JsotO1W5c5/j/ncplpSokpEpNasKRUpHVRXpmq0KRXprNhw==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-minify/binding-wasm32-wasi": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-minify/binding-wasm32-wasi/-/binding-wasm32-wasi-0.77.3.tgz", - "integrity": "sha512-4RCG1ZZyEyKIaZE2vXyFnVocDF1jIbfE/f5qbb1l0Wql2s4K5m1QDkKqPAVPuCmYiJ6+X2HyWus5QGqgnUKrXA==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-minify/binding-wasm32-wasi/-/binding-wasm32-wasi-0.112.0.tgz", + "integrity": 
"sha512-tv7PmHYq/8QBlqMaDjsy51GF5KQkG17Yc/PsgB5OVndU34kwbQuebBIic7UfK9ygzidI8moYq3ztnu3za/rqHw==", "cpu": [ "wasm32" ], "license": "MIT", "optional": true, "dependencies": { - "@napi-rs/wasm-runtime": "^1.0.0" + "@napi-rs/wasm-runtime": "^1.1.1" }, "engines": { "node": ">=14.0.0" } }, "node_modules/@oxc-minify/binding-win32-arm64-msvc": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-minify/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-0.77.3.tgz", - "integrity": "sha512-ppyKF8Y3iASeIBnPDL0mwDxnlq/nnKFEZpZ9dy2hDma/JDD9qmOheP3CGYZyUnkS9r0LvEtrtR5/FjKXF2VQOw==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-minify/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-0.112.0.tgz", + "integrity": "sha512-d+jes2jwRkcBSpcaZC6cL8GBi56Br6uAorn9dfquhWLczWL+hHSvvVrRgT1i5/6dkf5UWx2zdoEsAMiJ11w78A==", "cpu": [ "arm64" ], @@ -2412,13 +1946,29 @@ "win32" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-minify/binding-win32-ia32-msvc": { + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-minify/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-0.112.0.tgz", + "integrity": "sha512-TV1C3qDwj7//jNIi5tnNRhReSUgtaRQKi5KobDE6zVAc5gjeuBA8G2qizS9ziXlf/I0dlelrGmGMMDJmH9ekWg==", + "cpu": [ + "ia32" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-minify/binding-win32-x64-msvc": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-minify/binding-win32-x64-msvc/-/binding-win32-x64-msvc-0.77.3.tgz", - "integrity": "sha512-8IY7xgdZjBDFyQCF0s7EB7YzVB+C4+p8AKDbPfKLYhSlntIfIqTYvSXc3dZQb83OH6kDLAs1GpdWgb8ByDu4kg==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-minify/binding-win32-x64-msvc/-/binding-win32-x64-msvc-0.112.0.tgz", + "integrity": 
"sha512-LML2Gld6VY8/+7a3VH4k1qngsBXvTkXgbmYgSYwaElqtiQiYaAcXfi0XKOUGe3k3GbBK4juAGixC31CrdFHAQw==", "cpu": [ "x64" ], @@ -2428,13 +1978,29 @@ "win32" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-android-arm-eabi": { + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-android-arm-eabi/-/binding-android-arm-eabi-0.112.0.tgz", + "integrity": "sha512-retxBzJ39Da7Lh/eZTn9+HJgTeDUxZIpuI0urOsmcFsBKXAth3lc1jIvwseQ9qbAI/VrsoFOXiGIzgclARbAHg==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-parser/binding-android-arm64": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-parser/binding-android-arm64/-/binding-android-arm64-0.77.3.tgz", - "integrity": "sha512-Tr9pldnu+Csd5dQm2/fLKJyBloxiBC/Xl3c3Ki1ZGQewndsFyfFOklFpigZCCqlt75o+HtwtoLiCx3y4i8cdjg==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-android-arm64/-/binding-android-arm64-0.112.0.tgz", + "integrity": "sha512-pRkbBRbuIIsufUWpOJ+JHWfJFNupkidy4sbjfcm37e6xwYrn9LSKMLubPHvNaL1Zf92ZRhGiwaYkEcmaFg2VcA==", "cpu": [ "arm64" ], @@ -2444,13 +2010,13 @@ "android" ], "engines": { - "node": ">=20.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-parser/binding-darwin-arm64": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-parser/binding-darwin-arm64/-/binding-darwin-arm64-0.77.3.tgz", - "integrity": "sha512-KL91O6OpfVUTOhTW8cQWQ44z4VhyqBAsRfTm7DQCczBZkArygp2Sg+uaYLXNLWlGPWs4CoyZPCvu4FC6p1Q+nA==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-darwin-arm64/-/binding-darwin-arm64-0.112.0.tgz", + "integrity": "sha512-fh6/KQL/cbH5DukT3VkdCqnULLuvVnszVKySD5IgSE0WZb32YZo/cPsPdEv052kk6w3N4agu+NTiMnZjcvhUIg==", "cpu": [ "arm64" ], @@ -2460,13 +2026,13 @@ "darwin" ], "engines": { 
- "node": ">=20.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-parser/binding-darwin-x64": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-parser/binding-darwin-x64/-/binding-darwin-x64-0.77.3.tgz", - "integrity": "sha512-BTWnX9ymZFdkJONuL20Y63ODjDo1hpRHcqa0Z9pqcLANFgS+sDltcu0DXkJpNuJoZQJ+/44FVSWFmbYGG+862g==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-darwin-x64/-/binding-darwin-x64-0.112.0.tgz", + "integrity": "sha512-vUBOOY1E30vlu/DoTGDoT1UbLlwu5Yv9tqeBabAwRzwNDz8Skho16VKhsBDUiyqddtpsR3//v6vNk38w4c+6IA==", "cpu": [ "x64" ], @@ -2476,13 +2042,13 @@ "darwin" ], "engines": { - "node": ">=20.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-parser/binding-freebsd-x64": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-parser/binding-freebsd-x64/-/binding-freebsd-x64-0.77.3.tgz", - "integrity": "sha512-YGp4lA0deJXrqrQC1PZwfQSPuY+TPZJOr5pqB+GLekRVZDlq2++Wr3lZfsESp1inVZHGFZS0x55/MadABG23rg==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-freebsd-x64/-/binding-freebsd-x64-0.112.0.tgz", + "integrity": "sha512-hnEtO/9AVnYWzrgnp6L+oPs/6UqlFeteUL6n7magkd2tttgmx1C01hyNNh6nTpZfLzEVJSNJ0S+4NTsK2q2CxA==", "cpu": [ "x64" ], @@ -2492,13 +2058,13 @@ "freebsd" ], "engines": { - "node": ">=20.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-parser/binding-linux-arm-gnueabihf": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-parser/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-0.77.3.tgz", - "integrity": "sha512-C05V3gAtSM1j2gsybF4Z+vlA5wsuNJ+Ciklc0K9y1SNbObz2JDv/Q7PTYMUz9EFk7Y00aCzjy5sXEdCI191Htw==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-0.112.0.tgz", + "integrity": "sha512-WxJrUz3pcIc2hp4lvJbvt/sTL33oX9NPvkD3vDDybE6tc0V++rS+hNOJxwXdD2FDIFPkHs/IEn5asEZFVH+VKw==", 
"cpu": [ "arm" ], @@ -2508,13 +2074,13 @@ "linux" ], "engines": { - "node": ">=20.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-parser/binding-linux-arm-musleabihf": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-parser/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-0.77.3.tgz", - "integrity": "sha512-g4bbjZ/fDm1rQbfEhqXCtK4eLmmm6U+W37zsl5Lpy7c24RJYhR25keI+RWfwH5f31Sn5ytuwfxwgXeCby6AiVA==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-0.112.0.tgz", + "integrity": "sha512-jj8A8WWySaJQqM9XKAIG8U2Q3qxhFQKrXPWv98d1oC35at+L1h+C+V4M3l8BAKhpHKCu3dYlloaAbHd5q1Hw6A==", "cpu": [ "arm" ], @@ -2524,13 +2090,13 @@ "linux" ], "engines": { - "node": ">=20.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-parser/binding-linux-arm64-gnu": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-parser/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-0.77.3.tgz", - "integrity": "sha512-c2Ak73vOeGnSQhsaZpqVyGYXtmQ8TR4L3uX34LNavXTnzrXm20bk6i80Nxnksz3B+5ohYRiYhb+UVk1zk1Gl1A==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-0.112.0.tgz", + "integrity": "sha512-G2F8H6FcAExVK5vvhpSh61tqWx5QoaXXUnSsj5FyuDiFT/K7AMMVSQVqnZREDc+YxhrjB0vnKjCcuobXK63kIw==", "cpu": [ "arm64" ], @@ -2540,13 +2106,13 @@ "linux" ], "engines": { - "node": ">=20.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-parser/binding-linux-arm64-musl": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-parser/binding-linux-arm64-musl/-/binding-linux-arm64-musl-0.77.3.tgz", - "integrity": "sha512-1DNLBoJ6fsEdymD8Q4bo5zXkK0gw3ZMkEZ+F5w+7OrJOiQqzp8JzCQ6HRmSsJgjvaXzBvy95nCH2RegoeSN9JQ==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-arm64-musl/-/binding-linux-arm64-musl-0.112.0.tgz", 
+ "integrity": "sha512-3R0iqjM3xYOZCnwgcxOQXH7hrz64/USDIuLbNTM1kZqQzRqaR4w7SwoWKU934zABo8d0op2oSwOp+CV3hZnM7A==", "cpu": [ "arm64" ], @@ -2556,13 +2122,29 @@ "linux" ], "engines": { - "node": ">=20.0.0" + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-linux-ppc64-gnu": { + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-0.112.0.tgz", + "integrity": "sha512-lAQf8PQxfgy7h0bmcfSVE3hg3qMueshPYULFsCrHM+8KefGZ9W+ZMvRyU33gLrB4w1O3Fz1orR0hmKMCRxXNrQ==", + "cpu": [ + "ppc64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-parser/binding-linux-riscv64-gnu": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-parser/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-0.77.3.tgz", - "integrity": "sha512-Lv0RQCHRKezkDzNPXoPuB7KTnK7ktw3OgyuZmNJKFGmZRFjlm8w+sEhAiE8XaCGqoOA6ivNIRSheUYeFNpAANA==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-0.112.0.tgz", + "integrity": "sha512-2QlvQBUhHuAE3ezD4X3CAEKMXdfgInggQ5Bj/7gb5NcYP3GyfLTj7c+mMu+BRwfC9B3AXBNyqHWbqEuuUvZyRQ==", "cpu": [ "riscv64" ], @@ -2572,13 +2154,29 @@ "linux" ], "engines": { - "node": ">=20.0.0" + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-linux-riscv64-musl": { + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-0.112.0.tgz", + "integrity": "sha512-v06iu0osHszgqJ1dLQRb6leWFU1sjG/UQk4MoVBtE6ZPewgfTkby6G9II1SpEAf2onnAuQceVYxQH9iuU3NJqw==", + "cpu": [ + "riscv64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-parser/binding-linux-s390x-gnu": { - "version": "0.77.3", - "resolved": 
"https://registry.npmmirror.com/@oxc-parser/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-0.77.3.tgz", - "integrity": "sha512-Q0sOdRzyhhUaATgtSR7lG23SvalRI9/7oVAWArU/8fEXCU9NsfKnpeuXsgT/N5lG4mgcbhUrnGzKaOzYcaatdQ==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-0.112.0.tgz", + "integrity": "sha512-+5HhNHtxsdcd7+ljXFnn9FOoCNXJX3UPgIfIE6vdwS1HqdGNH6eAcVobuqGOp54l8pvcxDQA6F4cPswCgLrQfQ==", "cpu": [ "s390x" ], @@ -2588,13 +2186,13 @@ "linux" ], "engines": { - "node": ">=20.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-parser/binding-linux-x64-gnu": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-parser/binding-linux-x64-gnu/-/binding-linux-x64-gnu-0.77.3.tgz", - "integrity": "sha512-ckcntxRTyPE+4nnCDnc9t4kiO1CSs5jOR7Qe7KLStkU9SPQkUZyjNP2aSaHre+iQha5xXABag9pamqb0dOY/PQ==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-x64-gnu/-/binding-linux-x64-gnu-0.112.0.tgz", + "integrity": "sha512-jKwO7ZLNkjxwg7FoCLw+fJszooL9yXRZsDN0AQ1AQUTWq1l8GH/2e44k68N3fcP19jl8O8jGpqLAZcQTYk6skA==", "cpu": [ "x64" ], @@ -2604,13 +2202,13 @@ "linux" ], "engines": { - "node": ">=20.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-parser/binding-linux-x64-musl": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-parser/binding-linux-x64-musl/-/binding-linux-x64-musl-0.77.3.tgz", - "integrity": "sha512-jrKtGQrjcocnWpUIxJ3qzb0WpLGcDZoQTen/CZ5QtuwFA5EudM5rAJMt+SQpYYL4UPK0CPm8G5ZWJXqernLe1A==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-x64-musl/-/binding-linux-x64-musl-0.112.0.tgz", + "integrity": "sha512-TYqnuKV/p3eOc+N61E0961nA7DC+gaCeJ3+V2LcjJdTwFMdikqWL6uVk1jlrpUCBrozHDATVUKDZYH7r4FQYjQ==", "cpu": [ "x64" ], @@ -2620,29 +2218,45 @@ "linux" ], "engines": { - "node": ">=20.0.0" + "node": "^20.19.0 || >=22.12.0" + } + }, + 
"node_modules/@oxc-parser/binding-openharmony-arm64": { + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-openharmony-arm64/-/binding-openharmony-arm64-0.112.0.tgz", + "integrity": "sha512-ZhrVmWFifVEFQX4XPwLoVFDHw9tAWH9p9vHsHFH+5uCKdfVR+jje4WxVo6YrokWCboGckoOzHq5KKMOcPZfkRg==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-parser/binding-wasm32-wasi": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-parser/binding-wasm32-wasi/-/binding-wasm32-wasi-0.77.3.tgz", - "integrity": "sha512-76f53rr4Dz7A/FdUaM1NegHsQqT2w8CDBnRCptzapVA8humKA/tlJ24XfLvvr76JeT/OSKXorPyJ5xyGCa+yQg==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-wasm32-wasi/-/binding-wasm32-wasi-0.112.0.tgz", + "integrity": "sha512-Gr8X2PUU3hX1g3F5oLWIZB8DhzDmjr5TfOrmn5tlBOo9l8ojPGdKjnIBfObM7X15928vza8QRKW25RTR7jfivg==", "cpu": [ "wasm32" ], "license": "MIT", "optional": true, "dependencies": { - "@napi-rs/wasm-runtime": "^1.0.0" + "@napi-rs/wasm-runtime": "^1.1.1" }, "engines": { "node": ">=14.0.0" } }, "node_modules/@oxc-parser/binding-win32-arm64-msvc": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-parser/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-0.77.3.tgz", - "integrity": "sha512-YiUlN4yS5U7ntU1eVsaSiKD5PzW3zaW1tSB6RIp/eaDg10xORAPXEpoCXYlo35tAOV3IklOrX8ClhSJxF99AEQ==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-0.112.0.tgz", + "integrity": "sha512-t5CDLbU70Ea88bGRhvU/dLJTc/Wcrtf2Jp534E8P3cgjAvHDjdKsfDDqBZrhybJ8Jv9v9vW5ngE40EK51BluDA==", "cpu": [ "arm64" ], @@ -2652,13 +2266,29 @@ "win32" ], "engines": { - "node": ">=20.0.0" + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-win32-ia32-msvc": { + "version": "0.112.0", + 
"resolved": "https://registry.npmjs.org/@oxc-parser/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-0.112.0.tgz", + "integrity": "sha512-rZH0JynCCwnhe2HfRoyNOl/Kfd9pudoWxgpC5OZhj7j77pMK0UOAa35hYDfrtSOUk2HLzrikV5dPUOY2DpSBSA==", + "cpu": [ + "ia32" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-parser/binding-win32-x64-msvc": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-parser/binding-win32-x64-msvc/-/binding-win32-x64-msvc-0.77.3.tgz", - "integrity": "sha512-d4JRqTtkpyB7QrGQk65xhiSOIwK2WZiTW5aBjyoQ+SicrvnHtviAY1U1Mnl2AyldUZ6MkUvaR6k8tCm9FMhawg==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-win32-x64-msvc/-/binding-win32-x64-msvc-0.112.0.tgz", + "integrity": "sha512-oGHluohzmVFAuQrkEnl1OXAxMz2aYmimxUqIgKXpBgbr7PvFv0doELB273sX+5V3fKeggohKg1A2Qq21W9Z9cQ==", "cpu": [ "x64" ], @@ -2668,22 +2298,38 @@ "win32" ], "engines": { - "node": ">=20.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-project/types": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-project/types/-/types-0.77.3.tgz", - "integrity": "sha512-5Vh+neJhhxuF0lYCjZXbxjqm2EO6YJ1jG+KuHntrd6VY67OMpYhWq2cZhUhy+xL9qLJVJRaeII7Xj9fciA6v7A==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-project/types/-/types-0.112.0.tgz", + "integrity": "sha512-m6RebKHIRsax2iCwVpYW2ErQwa4ywHJrE4sCK3/8JK8ZZAWOKXaRJFl/uP51gaVyyXlaS4+chU1nSCdzYf6QqQ==", "license": "MIT", "funding": { "url": "https://github.com/sponsors/Boshen" } }, + "node_modules/@oxc-transform/binding-android-arm-eabi": { + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-android-arm-eabi/-/binding-android-arm-eabi-0.112.0.tgz", + "integrity": "sha512-r4LuBaPnOAi0eUOBNi880Fm2tO2omH7N1FRrL6+nyz/AjQ+QPPLtoyZJva0O+sKi1buyN/7IzM5p9m+5ANSDbg==", + "cpu": [ + "arm" + ], + "license": 
"MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, "node_modules/@oxc-transform/binding-android-arm64": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-transform/binding-android-arm64/-/binding-android-arm64-0.77.3.tgz", - "integrity": "sha512-HZdfhSsaqBCwl/HtsRVNh7binRz0N3IdwlTc5emEqYWMMZ94RkhPheNnbhRCzdvnzRKrpGirf3Rsk1X2oqSlxg==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-android-arm64/-/binding-android-arm64-0.112.0.tgz", + "integrity": "sha512-ve46vQcQrY8eGe8990VSlS9gkD+AogJqbtfOkeua+5sQGQTDgeIRRxOm7ktCo19uZc2bEBwXRJITgosd+NRVmQ==", "cpu": [ "arm64" ], @@ -2693,13 +2339,13 @@ "android" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-transform/binding-darwin-arm64": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-transform/binding-darwin-arm64/-/binding-darwin-arm64-0.77.3.tgz", - "integrity": "sha512-5sMgT6Ie7S5UqqZCdssAGBVU5PouZKIIfUf10SM4dY7J/1M0Sb4E1E7O+p2VUkECJ2j2RFRykK5rdKz71na8hg==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-darwin-arm64/-/binding-darwin-arm64-0.112.0.tgz", + "integrity": "sha512-ddbmLU3Tr+i7MOynfwAXxUXud3SjJKlv7XNjaq08qiI8Av/QvhXVGc2bMhXkWQSMSBUeTDoiughKjK+Zsb6y/A==", "cpu": [ "arm64" ], @@ -2709,13 +2355,13 @@ "darwin" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-transform/binding-darwin-x64": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-transform/binding-darwin-x64/-/binding-darwin-x64-0.77.3.tgz", - "integrity": "sha512-k99EStA6V4jOoFwN0pblhWuOFTKnaMasTpJIq30227U/Cg1J+rttK8loONSvgrw6FUKLJSymUA2Ydwpdvn5+sg==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-darwin-x64/-/binding-darwin-x64-0.112.0.tgz", + "integrity": 
"sha512-TKvmNw96jQZPqYb4pRrzLFDailNB3YS14KNn+x2hwRbqc6CqY96S9PYwyOpVpYdxfoRjYO9WgX9SoS+62a1DPA==", "cpu": [ "x64" ], @@ -2725,13 +2371,13 @@ "darwin" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-transform/binding-freebsd-x64": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-transform/binding-freebsd-x64/-/binding-freebsd-x64-0.77.3.tgz", - "integrity": "sha512-pxtPtFdJcI0xkUKWMaHV/fXy9MY5ugocA/gLoXIjTDKZC1OMVjr6Srrtk0CoUIU7l7DMePbcJIAtwrpHwRiwpQ==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-freebsd-x64/-/binding-freebsd-x64-0.112.0.tgz", + "integrity": "sha512-YPMkSCDaelO8HHYRMYjm+Q+IfkfIbdtQzwPuasItYkq8UUkNeHNPheNh2JkvQa3c+io3E9ePOgHQ2yihpk7o/Q==", "cpu": [ "x64" ], @@ -2741,13 +2387,13 @@ "freebsd" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-transform/binding-linux-arm-gnueabihf": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-transform/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-0.77.3.tgz", - "integrity": "sha512-zXsbUE/5tU7OJwyhhKUfl559W9w7QJp8USKA3WyW7BzHrBe0V0U6Lw+tM18tgyEvvwvXn5Wg0Jj/RWZwhO9BAA==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-0.112.0.tgz", + "integrity": "sha512-nA7kzQGNEpuTRknst/IJ3l8hqmDmEda3aun6jkXgp7gKxESjuHeaNH04mKISxvJ7fIacvP2g/wtTSnm4u5jL8Q==", "cpu": [ "arm" ], @@ -2757,13 +2403,13 @@ "linux" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-transform/binding-linux-arm-musleabihf": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-transform/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-0.77.3.tgz", - "integrity": "sha512-D3o/POM0GUno8x0zKgFKmlO5shpB/j0FdNiOXhv8nilNGQgUXwkEHC/SDjmYJNGZy1HTcXyB7P+yRX9dTUUaAg==", + "version": "0.112.0", + 
"resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-0.112.0.tgz", + "integrity": "sha512-w8GuLmckKlGc3YujaZKhtbFxziCcosvM2l9GnQjCb/yENWLGDiyQOy0BTAgPGdJwpYTiOeJblEXSuXYvlE1Ong==", "cpu": [ "arm" ], @@ -2773,13 +2419,13 @@ "linux" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-transform/binding-linux-arm64-gnu": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-transform/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-0.77.3.tgz", - "integrity": "sha512-LgY4sT+bnt01l3Dxq3Zv19gMAsJ5kI7sdVvL3CNCtAj47h/Zdfxg7WlD+L+FJZ3sfTQ5n2SJ0WDiZm380isBxg==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-0.112.0.tgz", + "integrity": "sha512-9LwwGnJ8+WT0rXcrI8M0RJtDNt91eMqcDPPEvJxhRFHIMcHTy5D5xT+fOl3Us0yMqKo3HUWkbfUYqAp4GoZ3Jw==", "cpu": [ "arm64" ], @@ -2789,13 +2435,13 @@ "linux" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-transform/binding-linux-arm64-musl": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-transform/binding-linux-arm64-musl/-/binding-linux-arm64-musl-0.77.3.tgz", - "integrity": "sha512-Fq72ARLt8iriotueGp7zaWjFpfYBpRS5WElmAtpZLIy/p1dNwBEDhVUIjAl+sU14y0odp+yaTRHM7ULnMYGZhQ==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-arm64-musl/-/binding-linux-arm64-musl-0.112.0.tgz", + "integrity": "sha512-Lg6VOuSd3oXv7J0eGywgqh/086h+qQzIBOD+47pYKMTTJcbDe+f3h/RgGoMKJE5HhiwT5sH1aGEJfIfaYUiVSw==", "cpu": [ "arm64" ], @@ -2805,13 +2451,29 @@ "linux" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-linux-ppc64-gnu": { + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-0.112.0.tgz", + 
"integrity": "sha512-PXzmj82o1moA4IGphYImTRgc2youTi4VRfyFX3CHwLjxPcQ5JtcsgbDt4QUdOzXZ+zC07s5jf2ZzhRapEOlj2w==", + "cpu": [ + "ppc64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-transform/binding-linux-riscv64-gnu": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-transform/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-0.77.3.tgz", - "integrity": "sha512-jtq6JREdyZ6xdTFJGM5Gm068WCkoMwh3Fkm08rZ2TAu4qjISdkJvTQ1wiEDDz2F8sqAdmASDqxnE/2DJ6Z6Clg==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-0.112.0.tgz", + "integrity": "sha512-vhJsMsVH/6xwa3bt1LGts33FXUkGjaEGDwsRyp4lIfOjSfQVWMtCmWMFNaA0dW9FVWdD2Gt2fSFBSZ+azDxlpg==", "cpu": [ "riscv64" ], @@ -2821,13 +2483,29 @@ "linux" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-linux-riscv64-musl": { + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-0.112.0.tgz", + "integrity": "sha512-cXWFb7z+2IjFUEcXtRwluq9oEG5qnyFCjiu3SWrgYNcWwPdHusv3I/7K5/CTbbi4StoZ5txbi7/iSfDHNyWuRw==", + "cpu": [ + "riscv64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-transform/binding-linux-s390x-gnu": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-transform/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-0.77.3.tgz", - "integrity": "sha512-HQz++ZmT9xWU9KS24DE+8oVTeUPd/JQkbjL2uvr0+SWY3loPnLG3kFAOLE/xXgYG/0D24mZylbZUwhzYND4snw==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-0.112.0.tgz", + "integrity": 
"sha512-eEFu4SRqJTJ20/88KRWmp+jpHKAw0Y1DsnSgpEeXyBIIcsOaLIUMU/TfYWUmqRbvbMV9rmOmI3kp5xWYUq6kSQ==", "cpu": [ "s390x" ], @@ -2837,13 +2515,13 @@ "linux" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-transform/binding-linux-x64-gnu": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-transform/binding-linux-x64-gnu/-/binding-linux-x64-gnu-0.77.3.tgz", - "integrity": "sha512-GcuFDJf/pxrfd2hq+gBytlnr/hiPn36JxuPXP0nToNG4SNa1gHT8K0bDxZuN2UjmZlWmIC8ELDdpVcNeZON+lQ==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-x64-gnu/-/binding-linux-x64-gnu-0.112.0.tgz", + "integrity": "sha512-ST1MDT+TlOyZ1c5btrGinRSUW2Jf4Pa+0gdKwsyjDSOC3dxy2ZNkN3mosTf4ywc3J+mxfYKqtjs7zSwHz03ILA==", "cpu": [ "x64" ], @@ -2853,13 +2531,13 @@ "linux" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-transform/binding-linux-x64-musl": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-transform/binding-linux-x64-musl/-/binding-linux-x64-musl-0.77.3.tgz", - "integrity": "sha512-unhkqVg/jb/kghmiMCto8AGKm3uBwH2P5/GwR8jZkBjSFX7ekNu6/8P5IuIs5KDiZXzcjww84vCzQVBlql6WkA==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-linux-x64-musl/-/binding-linux-x64-musl-0.112.0.tgz", + "integrity": "sha512-ISQoA3pD4cyTGpf9sXXeerH6pL2L6EIpdy6oAy2ttkswyVFDyQNVOVIGIdLZDgbpmqGljxZnWqt/J/N68pQaig==", "cpu": [ "x64" ], @@ -2869,29 +2547,45 @@ "linux" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-openharmony-arm64": { + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-openharmony-arm64/-/binding-openharmony-arm64-0.112.0.tgz", + "integrity": "sha512-UOGVrGIv7yLJovyEXEyUTADuLq98vd/cbMHFLJweRXD+11I8Tn4jASi4WzdsN8C3BVYGRHrXH2NlSBmhz33a4g==", + "cpu": [ + "arm64" + ], + "license": "MIT", + 
"optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-transform/binding-wasm32-wasi": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-transform/binding-wasm32-wasi/-/binding-wasm32-wasi-0.77.3.tgz", - "integrity": "sha512-FOGQzHLYpf1Yx0KpaqRz9cuXwvlTu8RprjL1NLpuUKT/D7O3SThm+qhFX3El9RFj67jrSCcHhlElYCJB2p794g==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-wasm32-wasi/-/binding-wasm32-wasi-0.112.0.tgz", + "integrity": "sha512-XIX7Gpq9koAvzBVHDlVFHM79r5uOVK6kTEsdsN4qaajpjkgtv4tdsAOKIYK6l7fUbsbE6xS+6w1+yRFrDeC1kg==", "cpu": [ "wasm32" ], "license": "MIT", "optional": true, "dependencies": { - "@napi-rs/wasm-runtime": "^1.0.0" + "@napi-rs/wasm-runtime": "^1.1.1" }, "engines": { "node": ">=14.0.0" } }, "node_modules/@oxc-transform/binding-win32-arm64-msvc": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-transform/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-0.77.3.tgz", - "integrity": "sha512-o4EmaPBrdYv/mb4uU/ZzAZ6KGczcPnDwA3lZbVEuFMDPwczqL581gpJHFFlfXUwxToCosiHot8y4ELV+mKkZjw==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-0.112.0.tgz", + "integrity": "sha512-EgXef9kOne9BNsbYBbuRqxk2hteT0xsAGcx/VbtCBMJYNj8fANFhT271DUSOgfa4DAgrQQmsyt/Kr1aV9mpU9w==", "cpu": [ "arm64" ], @@ -2901,13 +2595,29 @@ "win32" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-transform/binding-win32-ia32-msvc": { + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-0.112.0.tgz", + "integrity": "sha512-6QaB0qjNaou2YR+blncHdw7j0e26IOwOIjLbhVGDeuf9+4rjJeiqRXJ2hOtCcS4zblnao/MjdgQuZ3fM0nl+Kw==", + "cpu": [ + "ia32" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": 
"^20.19.0 || >=22.12.0" } }, "node_modules/@oxc-transform/binding-win32-x64-msvc": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/@oxc-transform/binding-win32-x64-msvc/-/binding-win32-x64-msvc-0.77.3.tgz", - "integrity": "sha512-l/J/T6jAL6QnsvdjzS7EcxwwToaGx9GPqXNGPU2sqbo8o/4ATB9Ky1/8oG/Mb+mPHgiULPBtFpJtDiDSI9fBIA==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/@oxc-transform/binding-win32-x64-msvc/-/binding-win32-x64-msvc-0.112.0.tgz", + "integrity": "sha512-FRKYlY959QeqRPx9kXs0HjU2xuXPT1cdF+vvA200D9uAX/KLcC34MwRqUKTYml4kCc2Vf/P2pBR9cQuBm3zECQ==", "cpu": [ "x64" ], @@ -2917,20 +2627,20 @@ "win32" ], "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" } }, "node_modules/@parcel/watcher": { - "version": "2.5.1", - "resolved": "https://registry.npmmirror.com/@parcel/watcher/-/watcher-2.5.1.tgz", - "integrity": "sha512-dfUnCxiN9H4ap84DvD2ubjw+3vUNpstxa0TneY/Paat8a3R4uQZDLSvWjmznAY/DoahqTHl9V46HF/Zs3F29pg==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher/-/watcher-2.5.6.tgz", + "integrity": "sha512-tmmZ3lQxAe/k/+rNnXQRawJ4NjxO2hqiOLTHvWchtGZULp4RyFeh6aU4XdOYBFe2KE1oShQTv4AblOs2iOrNnQ==", "hasInstallScript": true, "license": "MIT", "dependencies": { - "detect-libc": "^1.0.3", + "detect-libc": "^2.0.3", "is-glob": "^4.0.3", - "micromatch": "^4.0.5", - "node-addon-api": "^7.0.0" + "node-addon-api": "^7.0.0", + "picomatch": "^4.0.3" }, "engines": { "node": ">= 10.0.0" @@ -2940,25 +2650,25 @@ "url": "https://opencollective.com/parcel" }, "optionalDependencies": { - "@parcel/watcher-android-arm64": "2.5.1", - "@parcel/watcher-darwin-arm64": "2.5.1", - "@parcel/watcher-darwin-x64": "2.5.1", - "@parcel/watcher-freebsd-x64": "2.5.1", - "@parcel/watcher-linux-arm-glibc": "2.5.1", - "@parcel/watcher-linux-arm-musl": "2.5.1", - "@parcel/watcher-linux-arm64-glibc": "2.5.1", - "@parcel/watcher-linux-arm64-musl": "2.5.1", - "@parcel/watcher-linux-x64-glibc": "2.5.1", - 
"@parcel/watcher-linux-x64-musl": "2.5.1", - "@parcel/watcher-win32-arm64": "2.5.1", - "@parcel/watcher-win32-ia32": "2.5.1", - "@parcel/watcher-win32-x64": "2.5.1" + "@parcel/watcher-android-arm64": "2.5.6", + "@parcel/watcher-darwin-arm64": "2.5.6", + "@parcel/watcher-darwin-x64": "2.5.6", + "@parcel/watcher-freebsd-x64": "2.5.6", + "@parcel/watcher-linux-arm-glibc": "2.5.6", + "@parcel/watcher-linux-arm-musl": "2.5.6", + "@parcel/watcher-linux-arm64-glibc": "2.5.6", + "@parcel/watcher-linux-arm64-musl": "2.5.6", + "@parcel/watcher-linux-x64-glibc": "2.5.6", + "@parcel/watcher-linux-x64-musl": "2.5.6", + "@parcel/watcher-win32-arm64": "2.5.6", + "@parcel/watcher-win32-ia32": "2.5.6", + "@parcel/watcher-win32-x64": "2.5.6" } }, "node_modules/@parcel/watcher-android-arm64": { - "version": "2.5.1", - "resolved": "https://registry.npmmirror.com/@parcel/watcher-android-arm64/-/watcher-android-arm64-2.5.1.tgz", - "integrity": "sha512-KF8+j9nNbUN8vzOFDpRMsaKBHZ/mcjEjMToVMJOhTozkDonQFFrRcfdLWn6yWKCmJKmdVxSgHiYvTCef4/qcBA==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-android-arm64/-/watcher-android-arm64-2.5.6.tgz", + "integrity": "sha512-YQxSS34tPF/6ZG7r/Ih9xy+kP/WwediEUsqmtf0cuCV5TPPKw/PQHRhueUo6JdeFJaqV3pyjm0GdYjZotbRt/A==", "cpu": [ "arm64" ], @@ -2976,9 +2686,9 @@ } }, "node_modules/@parcel/watcher-darwin-arm64": { - "version": "2.5.1", - "resolved": "https://registry.npmmirror.com/@parcel/watcher-darwin-arm64/-/watcher-darwin-arm64-2.5.1.tgz", - "integrity": "sha512-eAzPv5osDmZyBhou8PoF4i6RQXAfeKL9tjb3QzYuccXFMQU0ruIc/POh30ePnaOyD1UXdlKguHBmsTs53tVoPw==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-arm64/-/watcher-darwin-arm64-2.5.6.tgz", + "integrity": "sha512-Z2ZdrnwyXvvvdtRHLmM4knydIdU9adO3D4n/0cVipF3rRiwP+3/sfzpAwA/qKFL6i1ModaabkU7IbpeMBgiVEA==", "cpu": [ "arm64" ], @@ -2996,9 +2706,9 @@ } }, "node_modules/@parcel/watcher-darwin-x64": { - "version": "2.5.1", - "resolved": 
"https://registry.npmmirror.com/@parcel/watcher-darwin-x64/-/watcher-darwin-x64-2.5.1.tgz", - "integrity": "sha512-1ZXDthrnNmwv10A0/3AJNZ9JGlzrF82i3gNQcWOzd7nJ8aj+ILyW1MTxVk35Db0u91oD5Nlk9MBiujMlwmeXZg==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-x64/-/watcher-darwin-x64-2.5.6.tgz", + "integrity": "sha512-HgvOf3W9dhithcwOWX9uDZyn1lW9R+7tPZ4sug+NGrGIo4Rk1hAXLEbcH1TQSqxts0NYXXlOWqVpvS1SFS4fRg==", "cpu": [ "x64" ], @@ -3016,9 +2726,9 @@ } }, "node_modules/@parcel/watcher-freebsd-x64": { - "version": "2.5.1", - "resolved": "https://registry.npmmirror.com/@parcel/watcher-freebsd-x64/-/watcher-freebsd-x64-2.5.1.tgz", - "integrity": "sha512-SI4eljM7Flp9yPuKi8W0ird8TI/JK6CSxju3NojVI6BjHsTyK7zxA9urjVjEKJ5MBYC+bLmMcbAWlZ+rFkLpJQ==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-freebsd-x64/-/watcher-freebsd-x64-2.5.6.tgz", + "integrity": "sha512-vJVi8yd/qzJxEKHkeemh7w3YAn6RJCtYlE4HPMoVnCpIXEzSrxErBW5SJBgKLbXU3WdIpkjBTeUNtyBVn8TRng==", "cpu": [ "x64" ], @@ -3036,9 +2746,9 @@ } }, "node_modules/@parcel/watcher-linux-arm-glibc": { - "version": "2.5.1", - "resolved": "https://registry.npmmirror.com/@parcel/watcher-linux-arm-glibc/-/watcher-linux-arm-glibc-2.5.1.tgz", - "integrity": "sha512-RCdZlEyTs8geyBkkcnPWvtXLY44BCeZKmGYRtSgtwwnHR4dxfHRG3gR99XdMEdQ7KeiDdasJwwvNSF5jKtDwdA==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm-glibc/-/watcher-linux-arm-glibc-2.5.6.tgz", + "integrity": "sha512-9JiYfB6h6BgV50CCfasfLf/uvOcJskMSwcdH1PHH9rvS1IrNy8zad6IUVPVUfmXr+u+Km9IxcfMLzgdOudz9EQ==", "cpu": [ "arm" ], @@ -3056,9 +2766,9 @@ } }, "node_modules/@parcel/watcher-linux-arm-musl": { - "version": "2.5.1", - "resolved": "https://registry.npmmirror.com/@parcel/watcher-linux-arm-musl/-/watcher-linux-arm-musl-2.5.1.tgz", - "integrity": "sha512-6E+m/Mm1t1yhB8X412stiKFG3XykmgdIOqhjWj+VL8oHkKABfu/gjFj8DvLrYVHSBNC+/u5PeNrujiSQ1zwd1Q==", + "version": "2.5.6", + "resolved": 
"https://registry.npmjs.org/@parcel/watcher-linux-arm-musl/-/watcher-linux-arm-musl-2.5.6.tgz", + "integrity": "sha512-Ve3gUCG57nuUUSyjBq/MAM0CzArtuIOxsBdQ+ftz6ho8n7s1i9E1Nmk/xmP323r2YL0SONs1EuwqBp2u1k5fxg==", "cpu": [ "arm" ], @@ -3076,9 +2786,9 @@ } }, "node_modules/@parcel/watcher-linux-arm64-glibc": { - "version": "2.5.1", - "resolved": "https://registry.npmmirror.com/@parcel/watcher-linux-arm64-glibc/-/watcher-linux-arm64-glibc-2.5.1.tgz", - "integrity": "sha512-LrGp+f02yU3BN9A+DGuY3v3bmnFUggAITBGriZHUREfNEzZh/GO06FF5u2kx8x+GBEUYfyTGamol4j3m9ANe8w==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-glibc/-/watcher-linux-arm64-glibc-2.5.6.tgz", + "integrity": "sha512-f2g/DT3NhGPdBmMWYoxixqYr3v/UXcmLOYy16Bx0TM20Tchduwr4EaCbmxh1321TABqPGDpS8D/ggOTaljijOA==", "cpu": [ "arm64" ], @@ -3096,9 +2806,9 @@ } }, "node_modules/@parcel/watcher-linux-arm64-musl": { - "version": "2.5.1", - "resolved": "https://registry.npmmirror.com/@parcel/watcher-linux-arm64-musl/-/watcher-linux-arm64-musl-2.5.1.tgz", - "integrity": "sha512-cFOjABi92pMYRXS7AcQv9/M1YuKRw8SZniCDw0ssQb/noPkRzA+HBDkwmyOJYp5wXcsTrhxO0zq1U11cK9jsFg==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-musl/-/watcher-linux-arm64-musl-2.5.6.tgz", + "integrity": "sha512-qb6naMDGlbCwdhLj6hgoVKJl2odL34z2sqkC7Z6kzir8b5W65WYDpLB6R06KabvZdgoHI/zxke4b3zR0wAbDTA==", "cpu": [ "arm64" ], @@ -3116,9 +2826,9 @@ } }, "node_modules/@parcel/watcher-linux-x64-glibc": { - "version": "2.5.1", - "resolved": "https://registry.npmmirror.com/@parcel/watcher-linux-x64-glibc/-/watcher-linux-x64-glibc-2.5.1.tgz", - "integrity": "sha512-GcESn8NZySmfwlTsIur+49yDqSny2IhPeZfXunQi48DMugKeZ7uy1FX83pO0X22sHntJ4Ub+9k34XQCX+oHt2A==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-glibc/-/watcher-linux-x64-glibc-2.5.6.tgz", + "integrity": 
"sha512-kbT5wvNQlx7NaGjzPFu8nVIW1rWqV780O7ZtkjuWaPUgpv2NMFpjYERVi0UYj1msZNyCzGlaCWEtzc+exjMGbQ==", "cpu": [ "x64" ], @@ -3136,9 +2846,9 @@ } }, "node_modules/@parcel/watcher-linux-x64-musl": { - "version": "2.5.1", - "resolved": "https://registry.npmmirror.com/@parcel/watcher-linux-x64-musl/-/watcher-linux-x64-musl-2.5.1.tgz", - "integrity": "sha512-n0E2EQbatQ3bXhcH2D1XIAANAcTZkQICBPVaxMeaCVBtOpBZpWJuf7LwyWPSBDITb7In8mqQgJ7gH8CILCURXg==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-musl/-/watcher-linux-x64-musl-2.5.6.tgz", + "integrity": "sha512-1JRFeC+h7RdXwldHzTsmdtYR/Ku8SylLgTU/reMuqdVD7CtLwf0VR1FqeprZ0eHQkO0vqsbvFLXUmYm/uNKJBg==", "cpu": [ "x64" ], @@ -3156,17 +2866,17 @@ } }, "node_modules/@parcel/watcher-wasm": { - "version": "2.5.1", - "resolved": "https://registry.npmmirror.com/@parcel/watcher-wasm/-/watcher-wasm-2.5.1.tgz", - "integrity": "sha512-RJxlQQLkaMMIuWRozy+z2vEqbaQlCuaCgVZIUCzQLYggY22LZbP5Y1+ia+FD724Ids9e+XIyOLXLrLgQSHIthw==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-wasm/-/watcher-wasm-2.5.6.tgz", + "integrity": "sha512-byAiBZ1t3tXQvc8dMD/eoyE7lTXYorhn+6uVW5AC+JGI1KtJC/LvDche5cfUE+qiefH+Ybq0bUCJU0aB1cSHUA==", "bundleDependencies": [ "napi-wasm" ], "license": "MIT", "dependencies": { "is-glob": "^4.0.3", - "micromatch": "^4.0.5", - "napi-wasm": "^1.1.0" + "napi-wasm": "^1.1.0", + "picomatch": "^4.0.3" }, "engines": { "node": ">= 10.0.0" @@ -3182,9 +2892,9 @@ "license": "MIT" }, "node_modules/@parcel/watcher-win32-arm64": { - "version": "2.5.1", - "resolved": "https://registry.npmmirror.com/@parcel/watcher-win32-arm64/-/watcher-win32-arm64-2.5.1.tgz", - "integrity": "sha512-RFzklRvmc3PkjKjry3hLF9wD7ppR4AKcWNzH7kXR7GUe0Igb3Nz8fyPwtZCSquGrhU5HhUNDr/mKBqj7tqA2Vw==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-arm64/-/watcher-win32-arm64-2.5.6.tgz", + "integrity": 
"sha512-3ukyebjc6eGlw9yRt678DxVF7rjXatWiHvTXqphZLvo7aC5NdEgFufVwjFfY51ijYEWpXbqF5jtrK275z52D4Q==", "cpu": [ "arm64" ], @@ -3202,9 +2912,9 @@ } }, "node_modules/@parcel/watcher-win32-ia32": { - "version": "2.5.1", - "resolved": "https://registry.npmmirror.com/@parcel/watcher-win32-ia32/-/watcher-win32-ia32-2.5.1.tgz", - "integrity": "sha512-c2KkcVN+NJmuA7CGlaGD1qJh1cLfDnQsHjE89E60vUEMlqduHGCdCLJCID5geFVM0dOtA3ZiIO8BoEQmzQVfpQ==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-ia32/-/watcher-win32-ia32-2.5.6.tgz", + "integrity": "sha512-k35yLp1ZMwwee3Ez/pxBi5cf4AoBKYXj00CZ80jUz5h8prpiaQsiRPKQMxoLstNuqe2vR4RNPEAEcjEFzhEz/g==", "cpu": [ "ia32" ], @@ -3222,9 +2932,9 @@ } }, "node_modules/@parcel/watcher-win32-x64": { - "version": "2.5.1", - "resolved": "https://registry.npmmirror.com/@parcel/watcher-win32-x64/-/watcher-win32-x64-2.5.1.tgz", - "integrity": "sha512-9lHBdJITeNR++EvSQVUcaZoWupyHfXe1jZvGZ06O/5MflPcuPLtEphScIBL+AiCWBO46tDSHzWyD0uDmmZqsgA==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-x64/-/watcher-win32-x64-2.5.6.tgz", + "integrity": "sha512-hbQlYcCq5dlAX9Qx+kFb0FHue6vbjlf0FrNzSKdYK2APUf7tGfGxQCk2ihEREmbR6ZMc0MVAD5RIX/41gpUzTw==", "cpu": [ "x64" ], @@ -3301,32 +3011,23 @@ }, "node_modules/@polka/url": { "version": "1.0.0-next.29", - "resolved": "https://registry.npmmirror.com/@polka/url/-/url-1.0.0-next.29.tgz", + "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.29.tgz", "integrity": "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==", "license": "MIT" }, "node_modules/@poppinss/colors": { - "version": "4.1.5", - "resolved": "https://registry.npmmirror.com/@poppinss/colors/-/colors-4.1.5.tgz", - "integrity": "sha512-FvdDqtcRCtz6hThExcFOgW0cWX+xwSMWcRuQe5ZEb2m7cVQOAVZOIMt+/v9RxGiD9/OY16qJBXK4CVKWAPalBw==", + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/@poppinss/colors/-/colors-4.1.6.tgz", + 
"integrity": "sha512-H9xkIdFswbS8n1d6vmRd8+c10t2Qe+rZITbbDHHkQixH5+2x1FDGmi/0K+WgWiqQFKPSlIYB7jlH6Kpfn6Fleg==", "license": "MIT", "dependencies": { "kleur": "^4.1.5" } }, - "node_modules/@poppinss/colors/node_modules/kleur": { - "version": "4.1.5", - "resolved": "https://registry.npmmirror.com/kleur/-/kleur-4.1.5.tgz", - "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/@poppinss/dumper": { - "version": "0.6.4", - "resolved": "https://registry.npmmirror.com/@poppinss/dumper/-/dumper-0.6.4.tgz", - "integrity": "sha512-iG0TIdqv8xJ3Lt9O8DrPRxw1MRLjNpoqiSGU03P/wNLP/s0ra0udPJ1J2Tx5M0J3H/cVyEgpbn8xUKRY9j59kQ==", + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/@poppinss/dumper/-/dumper-0.7.0.tgz", + "integrity": "sha512-0UTYalzk2t6S4rA2uHOz5bSSW2CHdv4vggJI6Alg90yvl0UgXs6XSXpH96OH+bRkX4J/06djv29pqXJ0lq5Kag==", "license": "MIT", "dependencies": { "@poppinss/colors": "^4.1.5", @@ -3335,27 +3036,27 @@ } }, "node_modules/@poppinss/exception": { - "version": "1.2.2", - "resolved": "https://registry.npmmirror.com/@poppinss/exception/-/exception-1.2.2.tgz", - "integrity": "sha512-m7bpKCD4QMlFCjA/nKTs23fuvoVFoA83brRKmObCUNmi/9tVu8Ve3w4YQAnJu4q3Tjf5fr685HYIC/IA2zHRSg==", + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@poppinss/exception/-/exception-1.2.3.tgz", + "integrity": "sha512-dCED+QRChTVatE9ibtoaxc+WkdzOSjYTKi/+uacHWIsfodVfpsueo3+DKpgU5Px8qXjgmXkSvhXvSCz3fnP9lw==", "license": "MIT" }, "node_modules/@rolldown/pluginutils": { - "version": "1.0.0-beta.19", - "resolved": "https://registry.npmmirror.com/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.19.tgz", - "integrity": "sha512-3FL3mnMbPu0muGOCaKAhhFEYmqv9eTfPSJRJmANrCwtgK8VuxpsZDGK+m0LYAGoyO8+0j5uRe4PeyPDK1yA/hA==", + "version": "1.0.0-rc.2", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-rc.2.tgz", + "integrity": 
"sha512-izyXV/v+cHiRfozX62W9htOAvwMo4/bXKDrQ+vom1L1qRuexPock/7VZDAhnpHCLNejd3NJ6hiab+tO0D44Rgw==", "license": "MIT" }, "node_modules/@rollup/plugin-alias": { - "version": "5.1.1", - "resolved": "https://registry.npmmirror.com/@rollup/plugin-alias/-/plugin-alias-5.1.1.tgz", - "integrity": "sha512-PR9zDb+rOzkRb2VD+EuKB7UC41vU5DIwZ5qqCpk0KJudcWAyi8rvYOhS7+L5aZCspw1stTViLgN5v6FF1p5cgQ==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@rollup/plugin-alias/-/plugin-alias-6.0.0.tgz", + "integrity": "sha512-tPCzJOtS7uuVZd+xPhoy5W4vThe6KWXNmsFCNktaAh5RTqcLiSfT4huPQIXkgJ6YCOjJHvecOAzQxLFhPxKr+g==", "license": "MIT", "engines": { - "node": ">=14.0.0" + "node": ">=20.19.0" }, "peerDependencies": { - "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + "rollup": ">=4.0.0" }, "peerDependenciesMeta": { "rollup": { @@ -3364,9 +3065,9 @@ } }, "node_modules/@rollup/plugin-commonjs": { - "version": "28.0.6", - "resolved": "https://registry.npmmirror.com/@rollup/plugin-commonjs/-/plugin-commonjs-28.0.6.tgz", - "integrity": "sha512-XSQB1K7FUU5QP+3lOQmVCE3I0FcbbNvmNT4VJSj93iUjayaARrTQeoRdiYQoftAJBLrR9t2agwAd3ekaTgHNlw==", + "version": "29.0.0", + "resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-29.0.0.tgz", + "integrity": "sha512-U2YHaxR2cU/yAiwKJtJRhnyLk7cifnQw0zUpISsocBDoHDJn+HTV74ABqnwr5bEgWUwFZC9oFL6wLe21lHu5eQ==", "license": "MIT", "dependencies": { "@rollup/pluginutils": "^5.0.1", @@ -3391,13 +3092,13 @@ }, "node_modules/@rollup/plugin-commonjs/node_modules/estree-walker": { "version": "2.0.2", - "resolved": "https://registry.npmmirror.com/estree-walker/-/estree-walker-2.0.2.tgz", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", "license": "MIT" }, "node_modules/@rollup/plugin-inject": { "version": "5.0.5", - "resolved": 
"https://registry.npmmirror.com/@rollup/plugin-inject/-/plugin-inject-5.0.5.tgz", + "resolved": "https://registry.npmjs.org/@rollup/plugin-inject/-/plugin-inject-5.0.5.tgz", "integrity": "sha512-2+DEJbNBoPROPkgTDNe8/1YXWcqxbN5DTjASVIOx8HS+pITXushyNiBV56RB08zuptzz8gT3YfkqriTBVycepg==", "license": "MIT", "dependencies": { @@ -3419,13 +3120,13 @@ }, "node_modules/@rollup/plugin-inject/node_modules/estree-walker": { "version": "2.0.2", - "resolved": "https://registry.npmmirror.com/estree-walker/-/estree-walker-2.0.2.tgz", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", "license": "MIT" }, "node_modules/@rollup/plugin-json": { "version": "6.1.0", - "resolved": "https://registry.npmmirror.com/@rollup/plugin-json/-/plugin-json-6.1.0.tgz", + "resolved": "https://registry.npmjs.org/@rollup/plugin-json/-/plugin-json-6.1.0.tgz", "integrity": "sha512-EGI2te5ENk1coGeADSIwZ7G2Q8CJS2sF120T7jLw4xFw9n7wIOXHo+kIYRAoVpJAN+kmqZSoO3Fp4JtoNF4ReA==", "license": "MIT", "dependencies": { @@ -3444,9 +3145,9 @@ } }, "node_modules/@rollup/plugin-node-resolve": { - "version": "16.0.1", - "resolved": "https://registry.npmmirror.com/@rollup/plugin-node-resolve/-/plugin-node-resolve-16.0.1.tgz", - "integrity": "sha512-tk5YCxJWIG81umIvNkSod2qK5KyQW19qcBF/B78n1bjtOON6gzKoVeSzAE8yHCZEDmqkHKkxplExA8KzdJLJpA==", + "version": "16.0.3", + "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-16.0.3.tgz", + "integrity": "sha512-lUYM3UBGuM93CnMPG1YocWu7X802BrNF3jW2zny5gQyLQgRFJhV1Sq0Zi74+dh/6NBx1DxFC4b4GXg9wUCG5Qg==", "license": "MIT", "dependencies": { "@rollup/pluginutils": "^5.0.1", @@ -3467,30 +3168,10 @@ } } }, - "node_modules/@rollup/plugin-node-resolve/node_modules/resolve": { - "version": "1.22.10", - "resolved": "https://registry.npmmirror.com/resolve/-/resolve-1.22.10.tgz", - "integrity": 
"sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", - "license": "MIT", - "dependencies": { - "is-core-module": "^2.16.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/@rollup/plugin-replace": { - "version": "6.0.2", - "resolved": "https://registry.npmmirror.com/@rollup/plugin-replace/-/plugin-replace-6.0.2.tgz", - "integrity": "sha512-7QaYCf8bqF04dOy7w/eHmJeNExxTYwvKAmlSAH/EaWWUzbT0h5sbF6bktFoX/0F/0qwng5/dWFMyf3gzaM8DsQ==", + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/@rollup/plugin-replace/-/plugin-replace-6.0.3.tgz", + "integrity": "sha512-J4RZarRvQAm5IF0/LwUUg+obsm+xZhYnbMXmXROyoSE1ATJe3oXSb9L5MMppdxP2ylNSjv6zFBwKYjcKMucVfA==", "license": "MIT", "dependencies": { "@rollup/pluginutils": "^5.0.1", @@ -3510,7 +3191,7 @@ }, "node_modules/@rollup/plugin-terser": { "version": "0.4.4", - "resolved": "https://registry.npmmirror.com/@rollup/plugin-terser/-/plugin-terser-0.4.4.tgz", + "resolved": "https://registry.npmjs.org/@rollup/plugin-terser/-/plugin-terser-0.4.4.tgz", "integrity": "sha512-XHeJC5Bgvs8LfukDwWZp7yeqin6ns8RTl2B9avbejt6tZqsqvVoWI7ZTQrcNsfKEDWBTnTxM8nMDkO2IFFbd0A==", "license": "MIT", "dependencies": { @@ -3531,9 +3212,9 @@ } }, "node_modules/@rollup/pluginutils": { - "version": "5.2.0", - "resolved": "https://registry.npmmirror.com/@rollup/pluginutils/-/pluginutils-5.2.0.tgz", - "integrity": "sha512-qWJ2ZTbmumwiLFomfzTyt5Kng4hwPi9rwCYN4SHb6eaRU1KNO4ccxINHr/VhH4GgPlt1XfSTLX2LBTme8ne4Zw==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.3.0.tgz", + "integrity": "sha512-5EdhGZtnu3V88ces7s53hhfK5KSASnJZv8Lulpc04cWO3REESroJXg73DFsOmgbU2BhwV0E20bu2IDZb3VKW4Q==", "license": "MIT", "dependencies": { "@types/estree": "^1.0.0", @@ -3554,14 
+3235,14 @@ }, "node_modules/@rollup/pluginutils/node_modules/estree-walker": { "version": "2.0.2", - "resolved": "https://registry.npmmirror.com/estree-walker/-/estree-walker-2.0.2.tgz", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", "license": "MIT" }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.45.1", - "resolved": "https://registry.npmmirror.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.45.1.tgz", - "integrity": "sha512-NEySIFvMY0ZQO+utJkgoMiCAjMrGvnbDLHvcmlA33UXJpYBCvlBEbMMtV837uCkS+plG2umfhn0T5mMAxGrlRA==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.59.0.tgz", + "integrity": "sha512-upnNBkA6ZH2VKGcBj9Fyl9IGNPULcjXRlg0LLeaioQWueH30p6IXtJEbKAgvyv+mJaMxSm1l6xwDXYjpEMiLMg==", "cpu": [ "arm" ], @@ -3572,9 +3253,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.45.1", - "resolved": "https://registry.npmmirror.com/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.45.1.tgz", - "integrity": "sha512-ujQ+sMXJkg4LRJaYreaVx7Z/VMgBBd89wGS4qMrdtfUFZ+TSY5Rs9asgjitLwzeIbhwdEhyj29zhst3L1lKsRQ==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.59.0.tgz", + "integrity": "sha512-hZ+Zxj3SySm4A/DylsDKZAeVg0mvi++0PYVceVyX7hemkw7OreKdCvW2oQ3T1FMZvCaQXqOTHb8qmBShoqk69Q==", "cpu": [ "arm64" ], @@ -3585,9 +3266,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.45.1", - "resolved": "https://registry.npmmirror.com/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.45.1.tgz", - "integrity": "sha512-FSncqHvqTm3lC6Y13xncsdOYfxGSLnP+73k815EfNmpewPs+EyM49haPS105Rh4aF5mJKywk9X0ogzLXZzN9lA==", + "version": "4.59.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.59.0.tgz", + "integrity": "sha512-W2Psnbh1J8ZJw0xKAd8zdNgF9HRLkdWwwdWqubSVk0pUuQkoHnv7rx4GiF9rT4t5DIZGAsConRE3AxCdJ4m8rg==", "cpu": [ "arm64" ], @@ -3598,9 +3279,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.45.1", - "resolved": "https://registry.npmmirror.com/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.45.1.tgz", - "integrity": "sha512-2/vVn/husP5XI7Fsf/RlhDaQJ7x9zjvC81anIVbr4b/f0xtSmXQTFcGIQ/B1cXIYM6h2nAhJkdMHTnD7OtQ9Og==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.59.0.tgz", + "integrity": "sha512-ZW2KkwlS4lwTv7ZVsYDiARfFCnSGhzYPdiOU4IM2fDbL+QGlyAbjgSFuqNRbSthybLbIJ915UtZBtmuLrQAT/w==", "cpu": [ "x64" ], @@ -3611,9 +3292,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.45.1", - "resolved": "https://registry.npmmirror.com/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.45.1.tgz", - "integrity": "sha512-4g1kaDxQItZsrkVTdYQ0bxu4ZIQ32cotoQbmsAnW1jAE4XCMbcBPDirX5fyUzdhVCKgPcrwWuucI8yrVRBw2+g==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.59.0.tgz", + "integrity": "sha512-EsKaJ5ytAu9jI3lonzn3BgG8iRBjV4LxZexygcQbpiU0wU0ATxhNVEpXKfUa0pS05gTcSDMKpn3Sx+QB9RlTTA==", "cpu": [ "arm64" ], @@ -3624,9 +3305,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.45.1", - "resolved": "https://registry.npmmirror.com/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.45.1.tgz", - "integrity": "sha512-L/6JsfiL74i3uK1Ti2ZFSNsp5NMiM4/kbbGEcOCps99aZx3g8SJMO1/9Y0n/qKlWZfn6sScf98lEOUe2mBvW9A==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.59.0.tgz", + "integrity": "sha512-d3DuZi2KzTMjImrxoHIAODUZYoUUMsuUiY4SRRcJy6NJoZ6iIqWnJu9IScV9jXysyGMVuW+KNzZvBLOcpdl3Vg==", "cpu": [ "x64" ], @@ -3637,9 +3318,9 @@ ] }, 
"node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.45.1", - "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.45.1.tgz", - "integrity": "sha512-RkdOTu2jK7brlu+ZwjMIZfdV2sSYHK2qR08FUWcIoqJC2eywHbXr0L8T/pONFwkGukQqERDheaGTeedG+rra6Q==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.59.0.tgz", + "integrity": "sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==", "cpu": [ "arm" ], @@ -3650,9 +3331,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.45.1", - "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.45.1.tgz", - "integrity": "sha512-3kJ8pgfBt6CIIr1o+HQA7OZ9mp/zDk3ctekGl9qn/pRBgrRgfwiffaUmqioUGN9hv0OHv2gxmvdKOkARCtRb8Q==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.59.0.tgz", + "integrity": "sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==", "cpu": [ "arm" ], @@ -3663,9 +3344,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.45.1", - "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.45.1.tgz", - "integrity": "sha512-k3dOKCfIVixWjG7OXTCOmDfJj3vbdhN0QYEqB+OuGArOChek22hn7Uy5A/gTDNAcCy5v2YcXRJ/Qcnm4/ma1xw==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.59.0.tgz", + "integrity": "sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==", "cpu": [ "arm64" ], @@ -3676,9 +3357,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.45.1", - "resolved": 
"https://registry.npmmirror.com/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.45.1.tgz", - "integrity": "sha512-PmI1vxQetnM58ZmDFl9/Uk2lpBBby6B6rF4muJc65uZbxCs0EA7hhKCk2PKlmZKuyVSHAyIw3+/SiuMLxKxWog==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.59.0.tgz", + "integrity": "sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==", "cpu": [ "arm64" ], @@ -3688,10 +3369,10 @@ "linux" ] }, - "node_modules/@rollup/rollup-linux-loongarch64-gnu": { - "version": "4.45.1", - "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.45.1.tgz", - "integrity": "sha512-9UmI0VzGmNJ28ibHW2GpE2nF0PBQqsyiS4kcJ5vK+wuwGnV5RlqdczVocDSUfGX/Na7/XINRVoUgJyFIgipoRg==", + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.59.0.tgz", + "integrity": "sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==", "cpu": [ "loong64" ], @@ -3701,10 +3382,36 @@ "linux" ] }, - "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.45.1", - "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.45.1.tgz", - "integrity": "sha512-7nR2KY8oEOUTD3pBAxIBBbZr0U7U+R9HDTPNy+5nVVHDXI4ikYniH1oxQz9VoB5PbBU1CZuDGHkLJkd3zLMWsg==", + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.59.0.tgz", + "integrity": "sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==", + "cpu": [ + "loong64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": 
"4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.59.0.tgz", + "integrity": "sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==", + "cpu": [ + "ppc64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.59.0.tgz", + "integrity": "sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==", "cpu": [ "ppc64" ], @@ -3715,9 +3422,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.45.1", - "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.45.1.tgz", - "integrity": "sha512-nlcl3jgUultKROfZijKjRQLUu9Ma0PeNv/VFHkZiKbXTBQXhpytS8CIj5/NfBeECZtY2FJQubm6ltIxm/ftxpw==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.59.0.tgz", + "integrity": "sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==", "cpu": [ "riscv64" ], @@ -3728,9 +3435,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-musl": { - "version": "4.45.1", - "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.45.1.tgz", - "integrity": "sha512-HJV65KLS51rW0VY6rvZkiieiBnurSzpzore1bMKAhunQiECPuxsROvyeaot/tcK3A3aGnI+qTHqisrpSgQrpgA==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.59.0.tgz", + "integrity": "sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==", "cpu": [ "riscv64" ], @@ -3741,9 +3448,9 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.45.1", - "resolved": 
"https://registry.npmmirror.com/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.45.1.tgz", - "integrity": "sha512-NITBOCv3Qqc6hhwFt7jLV78VEO/il4YcBzoMGGNxznLgRQf43VQDae0aAzKiBeEPIxnDrACiMgbqjuihx08OOw==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.59.0.tgz", + "integrity": "sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==", "cpu": [ "s390x" ], @@ -3754,9 +3461,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.45.1", - "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.45.1.tgz", - "integrity": "sha512-+E/lYl6qu1zqgPEnTrs4WysQtvc/Sh4fC2nByfFExqgYrqkKWp1tWIbe+ELhixnenSpBbLXNi6vbEEJ8M7fiHw==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.59.0.tgz", + "integrity": "sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==", "cpu": [ "x64" ], @@ -3767,9 +3474,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.45.1", - "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.45.1.tgz", - "integrity": "sha512-a6WIAp89p3kpNoYStITT9RbTbTnqarU7D8N8F2CV+4Cl9fwCOZraLVuVFvlpsW0SbIiYtEnhCZBPLoNdRkjQFw==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.59.0.tgz", + "integrity": "sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==", "cpu": [ "x64" ], @@ -3779,10 +3486,36 @@ "linux" ] }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.59.0.tgz", + "integrity": "sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==", + "cpu": [ + "x64" 
+ ], + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.59.0.tgz", + "integrity": "sha512-tt9KBJqaqp5i5HUZzoafHZX8b5Q2Fe7UjYERADll83O4fGqJ49O1FsL6LpdzVFQcpwvnyd0i+K/VSwu/o/nWlA==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.45.1", - "resolved": "https://registry.npmmirror.com/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.45.1.tgz", - "integrity": "sha512-T5Bi/NS3fQiJeYdGvRpTAP5P02kqSOpqiopwhj0uaXB6nzs5JVi2XMJb18JUSKhCOX8+UE1UKQufyD6Or48dJg==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.59.0.tgz", + "integrity": "sha512-V5B6mG7OrGTwnxaNUzZTDTjDS7F75PO1ae6MJYdiMu60sq0CqN5CVeVsbhPxalupvTX8gXVSU9gq+Rx1/hvu6A==", "cpu": [ "arm64" ], @@ -3793,9 +3526,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.45.1", - "resolved": "https://registry.npmmirror.com/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.45.1.tgz", - "integrity": "sha512-lxV2Pako3ujjuUe9jiU3/s7KSrDfH6IgTSQOnDWr9aJ92YsFd7EurmClK0ly/t8dzMkDtd04g60WX6yl0sGfdw==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.59.0.tgz", + "integrity": "sha512-UKFMHPuM9R0iBegwzKF4y0C4J9u8C6MEJgFuXTBerMk7EJ92GFVFYBfOZaSGLu6COf7FxpQNqhNS4c4icUPqxA==", "cpu": [ "ia32" ], @@ -3805,10 +3538,23 @@ "win32" ] }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.59.0.tgz", + "integrity": "sha512-laBkYlSS1n2L8fSo1thDNGrCTQMmxjYY5G0WFWjFFYZkKPjsMBsgJfGf4TLxXrF6RyhI60L8TMOjBMvXiTcxeA==", + "cpu": [ + "x64" 
+ ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.45.1", - "resolved": "https://registry.npmmirror.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.45.1.tgz", - "integrity": "sha512-M/fKi4sasCdM8i0aWJjCSFm2qEnYRR8AMLG2kxp6wD13+tMGA4Z1tVAuHkNRjud5SW2EM3naLuK35w9twvf6aA==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.59.0.tgz", + "integrity": "sha512-2HRCml6OztYXyJXAvdDXPKcawukWY2GpR5/nxKp4iBgiO3wcoEGkAaqctIbZcNB6KlUQBIqt8VYkNSj2397EfA==", "cpu": [ "x64" ], @@ -3819,9 +3565,9 @@ ] }, "node_modules/@sindresorhus/is": { - "version": "7.0.2", - "resolved": "https://registry.npmmirror.com/@sindresorhus/is/-/is-7.0.2.tgz", - "integrity": "sha512-d9xRovfKNz1SKieM0qJdO+PQonjnnIfSNWfHYnBSJ9hkjm0ZPw6HlxscDXYstp3z+7V2GOFHc+J0CYrYTjqCJw==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-7.2.0.tgz", + "integrity": "sha512-P1Cz1dWaFfR4IR+U13mqqiGsLFf1KbayybWwdd2vfctdV6hDpUkgCY0nKOLLTMSoRd/jJNjtbqzf13K8DCCXQw==", "license": "MIT", "engines": { "node": ">=18" @@ -3831,9 +3577,9 @@ } }, "node_modules/@sindresorhus/merge-streams": { - "version": "2.3.0", - "resolved": "https://registry.npmmirror.com/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz", - "integrity": "sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-4.0.0.tgz", + "integrity": "sha512-tlqY9xq5ukxTUZBmoOp+m61cqwQD5pHJtFY3Mn8CA8ps6yghLH/Hw8UPdqg4OLmFW3IFlcXnQNmo/dh8HzXYIQ==", "license": "MIT", "engines": { "node": ">=18" @@ -3843,15 +3589,15 @@ } }, "node_modules/@speed-highlight/core": { - "version": "1.2.7", - "resolved": "https://registry.npmmirror.com/@speed-highlight/core/-/core-1.2.7.tgz", - "integrity": 
"sha512-0dxmVj4gxg3Jg879kvFS/msl4s9F3T9UXC1InxgOf7t5NvcPD97u/WTA5vL/IxWHMn7qSxBozqrnnE2wvl1m8g==", + "version": "1.2.14", + "resolved": "https://registry.npmjs.org/@speed-highlight/core/-/core-1.2.14.tgz", + "integrity": "sha512-G4ewlBNhUtlLvrJTb88d2mdy2KRijzs4UhnlrOSRT4bmjh/IqNElZa3zkrZ+TC47TwtlDWzVLFADljF1Ijp5hA==", "license": "CC0-1.0" }, "node_modules/@tybys/wasm-util": { - "version": "0.10.0", - "resolved": "https://registry.npmmirror.com/@tybys/wasm-util/-/wasm-util-0.10.0.tgz", - "integrity": "sha512-VyyPYFlOMNylG45GoAe0xDoLwWuowvf92F9kySqzYh8vmYm7D2u4iUJKa1tOUpS70Ku13ASrOkS4ScXFsTaCNQ==", + "version": "0.10.1", + "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.1.tgz", + "integrity": "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==", "license": "MIT", "optional": true, "dependencies": { @@ -3864,248 +3610,215 @@ "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", "license": "MIT" }, - "node_modules/@types/node": { - "version": "24.1.0", - "resolved": "https://registry.npmmirror.com/@types/node/-/node-24.1.0.tgz", - "integrity": "sha512-ut5FthK5moxFKH2T1CUOC6ctR67rQRvvHdFLCD2Ql6KXmMuCrjsSsRI9UsLCm9M18BMwClv4pn327UvB7eeO1w==", - "license": "MIT", - "optional": true, - "dependencies": { - "undici-types": "~7.8.0" - } - }, - "node_modules/@types/normalize-package-data": { - "version": "2.4.4", - "resolved": "https://registry.npmmirror.com/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz", - "integrity": "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==", - "license": "MIT" - }, - "node_modules/@types/parse-path": { - "version": "7.0.3", - "resolved": "https://registry.npmmirror.com/@types/parse-path/-/parse-path-7.0.3.tgz", - "integrity": "sha512-LriObC2+KYZD3FzCrgWGv/qufdUy4eXrxcLgQMfYXgPbLIecKIsVBaQgUPmxSSLcjmYbDTQbMgr6qr6l/eb7Bg==", - "license": "MIT" - }, 
"node_modules/@types/resolve": { "version": "1.20.2", - "resolved": "https://registry.npmmirror.com/@types/resolve/-/resolve-1.20.2.tgz", + "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-1.20.2.tgz", "integrity": "sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==", "license": "MIT" }, - "node_modules/@types/triple-beam": { - "version": "1.3.5", - "resolved": "https://registry.npmmirror.com/@types/triple-beam/-/triple-beam-1.3.5.tgz", - "integrity": "sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw==", + "node_modules/@types/web-bluetooth": { + "version": "0.0.21", + "resolved": "https://registry.npmmirror.com/@types/web-bluetooth/-/web-bluetooth-0.0.21.tgz", + "integrity": "sha512-oIQLCGWtcFZy2JW77j9k8nHzAOpqMHLQejDA48XXMWH6tjCQHz5RCFz1bzsmROyL6PUm+LLnUiI4BCn221inxA==", "license": "MIT" }, - "node_modules/@types/yauzl": { - "version": "2.10.3", - "resolved": "https://registry.npmmirror.com/@types/yauzl/-/yauzl-2.10.3.tgz", - "integrity": "sha512-oJoftv0LSuaDZE3Le4DbKX+KS9G36NzOeSap90UIK0yMA/NhKJhqlSGtNDORNRaIbQfzjXDrQa0ytJ6mNRGz/Q==", - "license": "MIT", - "optional": true, - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@typescript-eslint/project-service": { - "version": "8.38.0", - "resolved": "https://registry.npmmirror.com/@typescript-eslint/project-service/-/project-service-8.38.0.tgz", - "integrity": "sha512-dbK7Jvqcb8c9QfH01YB6pORpqX1mn5gDZc9n63Ak/+jD67oWXn3Gs0M6vddAN+eDXBCS5EmNWzbSxsn9SzFWWg==", + "node_modules/@unhead/vue": { + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@unhead/vue/-/vue-2.1.9.tgz", + "integrity": "sha512-7SqqDEn5zFID1PnEdjLCLa/kOhoAlzol0JdYfVr2Ejek+H4ON4s8iyExv2QQ8bReMosbXQ/Bw41j2CF1NUuGSA==", "license": "MIT", "dependencies": { - "@typescript-eslint/tsconfig-utils": "^8.38.0", - "@typescript-eslint/types": "^8.38.0", - "debug": "^4.3.4" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || 
>=21.1.0" + "hookable": "^6.0.1", + "unhead": "2.1.9" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" + "url": "https://github.com/sponsors/harlan-zw" }, "peerDependencies": { - "typescript": ">=4.8.4 <5.9.0" + "vue": ">=3.5.18" } }, - "node_modules/@typescript-eslint/tsconfig-utils": { - "version": "8.38.0", - "resolved": "https://registry.npmmirror.com/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.38.0.tgz", - "integrity": "sha512-Lum9RtSE3EroKk/bYns+sPOodqb2Fv50XOl/gMviMKNvanETUuUcC9ObRbzrJ4VSd2JalPqgSAavwrPiPvnAiQ==", + "node_modules/@unhead/vue/node_modules/hookable": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/hookable/-/hookable-6.0.1.tgz", + "integrity": "sha512-uKGyY8BuzN/a5gvzvA+3FVWo0+wUjgtfSdnmjtrOVwQCZPHpHDH2WRO3VZSOeluYrHoDCiXFffZXs8Dj1ULWtw==", + "license": "MIT" + }, + "node_modules/@vercel/nft": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@vercel/nft/-/nft-1.3.2.tgz", + "integrity": "sha512-HC8venRc4Ya7vNeBsJneKHHMDDWpQie7VaKhAIOst3MKO+DES+Y/SbzSp8mFkD7OzwAE2HhHkeSuSmwS20mz3A==", "license": "MIT", - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + "dependencies": { + "@mapbox/node-pre-gyp": "^2.0.0", + "@rollup/pluginutils": "^5.1.3", + "acorn": "^8.6.0", + "acorn-import-attributes": "^1.9.5", + "async-sema": "^3.1.1", + "bindings": "^1.4.0", + "estree-walker": "2.0.2", + "glob": "^13.0.0", + "graceful-fs": "^4.2.9", + "node-gyp-build": "^4.2.2", + "picomatch": "^4.0.2", + "resolve-from": "^5.0.0" }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" + "bin": { + "nft": "out/cli.js" }, - "peerDependencies": { - "typescript": ">=4.8.4 <5.9.0" - } - }, - "node_modules/@typescript-eslint/types": { - "version": "8.38.0", - "resolved": "https://registry.npmmirror.com/@typescript-eslint/types/-/types-8.38.0.tgz", - "integrity": 
"sha512-wzkUfX3plUqij4YwWaJyqhiPE5UCRVlFpKn1oCRn2O1bJ592XxWJj8ROQ3JD5MYXLORW84063z3tZTb/cs4Tyw==", - "license": "MIT", "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" + "node": ">=20" } }, - "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.38.0", - "resolved": "https://registry.npmmirror.com/@typescript-eslint/typescript-estree/-/typescript-estree-8.38.0.tgz", - "integrity": "sha512-fooELKcAKzxux6fA6pxOflpNS0jc+nOQEEOipXFNjSlBS6fqrJOVY/whSn70SScHrcJ2LDsxWrneFoWYSVfqhQ==", + "node_modules/@vercel/nft/node_modules/balanced-match": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz", + "integrity": "sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==", "license": "MIT", - "dependencies": { - "@typescript-eslint/project-service": "8.38.0", - "@typescript-eslint/tsconfig-utils": "8.38.0", - "@typescript-eslint/types": "8.38.0", - "@typescript-eslint/visitor-keys": "8.38.0", - "debug": "^4.3.4", - "fast-glob": "^3.3.2", - "is-glob": "^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", - "ts-api-utils": "^2.1.0" - }, "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "typescript": ">=4.8.4 <5.9.0" + "node": "18 || 20 || >=22" } }, - "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.38.0", - "resolved": "https://registry.npmmirror.com/@typescript-eslint/visitor-keys/-/visitor-keys-8.38.0.tgz", - "integrity": "sha512-pWrTcoFNWuwHlA9CvlfSsGWs14JxfN1TH25zM5L7o0pRLhsoZkDnTsXfQRJBEWJoV5DL0jf+Z+sxiud+K0mq1g==", + "node_modules/@vercel/nft/node_modules/brace-expansion": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.4.tgz", + 
"integrity": "sha512-h+DEnpVvxmfVefa4jFbCf5HdH5YMDXRsmKflpf1pILZWRFlTbJpxeU55nJl4Smt5HQaGzg1o6RHFPJaOqnmBDg==", "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.38.0", - "eslint-visitor-keys": "^4.2.1" + "balanced-match": "^4.0.2" }, "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" + "node": "18 || 20 || >=22" } }, - "node_modules/@unhead/vue": { - "version": "2.0.12", - "resolved": "https://registry.npmmirror.com/@unhead/vue/-/vue-2.0.12.tgz", - "integrity": "sha512-WFaiCVbBd39FK6Bx3GQskhgT9s45Vjx6dRQegYheVwU1AnF+FAfJVgWbrl21p6fRJcLAFp0xDz6wE18JYBM0eQ==", - "license": "MIT", - "dependencies": { - "hookable": "^5.5.3", - "unhead": "2.0.12" - }, - "funding": { - "url": "https://github.com/sponsors/harlan-zw" - }, - "peerDependencies": { - "vue": ">=3.5.13" - } + "node_modules/@vercel/nft/node_modules/estree-walker": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", + "license": "MIT" }, - "node_modules/@vercel/nft": { - "version": "0.29.4", - "resolved": "https://registry.npmmirror.com/@vercel/nft/-/nft-0.29.4.tgz", - "integrity": "sha512-6lLqMNX3TuycBPABycx7A9F1bHQR7kiQln6abjFbPrf5C/05qHM9M5E4PeTE59c7z8g6vHnx1Ioihb2AQl7BTA==", - "license": "MIT", + "node_modules/@vercel/nft/node_modules/glob": { + "version": "13.0.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-13.0.6.tgz", + "integrity": "sha512-Wjlyrolmm8uDpm/ogGyXZXb1Z+Ca2B8NbJwqBVg0axK9GbBeoS7yGV6vjXnYdGm6X53iehEuxxbyiKp8QmN4Vw==", + "license": "BlueOak-1.0.0", "dependencies": { - "@mapbox/node-pre-gyp": "^2.0.0", - "@rollup/pluginutils": "^5.1.3", - "acorn": "^8.6.0", - "acorn-import-attributes": "^1.9.5", - "async-sema": "^3.1.1", - "bindings": "^1.4.0", - "estree-walker": "2.0.2", - 
"glob": "^10.4.5", - "graceful-fs": "^4.2.9", - "node-gyp-build": "^4.2.2", - "picomatch": "^4.0.2", - "resolve-from": "^5.0.0" + "minimatch": "^10.2.2", + "minipass": "^7.1.3", + "path-scurry": "^2.0.2" + }, + "engines": { + "node": "18 || 20 || >=22" }, - "bin": { - "nft": "out/cli.js" + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@vercel/nft/node_modules/lru-cache": { + "version": "11.2.6", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.6.tgz", + "integrity": "sha512-ESL2CrkS/2wTPfuend7Zhkzo2u0daGJ/A2VucJOgQ/C48S/zB8MMeMHSGKYpXhIjbPxfuezITkaBH1wqv00DDQ==", + "license": "BlueOak-1.0.0", + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/@vercel/nft/node_modules/minimatch": { + "version": "10.2.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz", + "integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==", + "license": "BlueOak-1.0.0", + "dependencies": { + "brace-expansion": "^5.0.2" }, "engines": { - "node": ">=18" + "node": "18 || 20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/@vercel/nft/node_modules/estree-walker": { + "node_modules/@vercel/nft/node_modules/path-scurry": { "version": "2.0.2", - "resolved": "https://registry.npmmirror.com/estree-walker/-/estree-walker-2.0.2.tgz", - "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", - "license": "MIT" + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.2.tgz", + "integrity": "sha512-3O/iVVsJAPsOnpwWIeD+d6z/7PmqApyQePUtCndjatj/9I5LylHvt5qluFaBT3I5h3r1ejfR056c+FCv+NnNXg==", + "license": "BlueOak-1.0.0", + "dependencies": { + "lru-cache": "^11.0.0", + "minipass": "^7.1.2" + }, + "engines": { + "node": "18 || 20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } 
}, "node_modules/@vitejs/plugin-vue": { - "version": "6.0.0", - "resolved": "https://registry.npmmirror.com/@vitejs/plugin-vue/-/plugin-vue-6.0.0.tgz", - "integrity": "sha512-iAliE72WsdhjzTOp2DtvKThq1VBC4REhwRcaA+zPAAph6I+OQhUXv+Xu2KS7ElxYtb7Zc/3R30Hwv1DxEo7NXQ==", + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-vue/-/plugin-vue-6.0.4.tgz", + "integrity": "sha512-uM5iXipgYIn13UUQCZNdWkYk+sysBeA97d5mHsAoAt1u/wpN3+zxOmsVJWosuzX+IMGRzeYUNytztrYznboIkQ==", "license": "MIT", "dependencies": { - "@rolldown/pluginutils": "1.0.0-beta.19" + "@rolldown/pluginutils": "1.0.0-rc.2" }, "engines": { "node": "^20.19.0 || >=22.12.0" }, "peerDependencies": { - "vite": "^5.0.0 || ^6.0.0 || ^7.0.0", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0", "vue": "^3.2.25" } }, "node_modules/@vitejs/plugin-vue-jsx": { - "version": "5.0.1", - "resolved": "https://registry.npmmirror.com/@vitejs/plugin-vue-jsx/-/plugin-vue-jsx-5.0.1.tgz", - "integrity": "sha512-X7qmQMXbdDh+sfHUttXokPD0cjPkMFoae7SgbkF9vi3idGUKmxLcnU2Ug49FHwiKXebfzQRIm5yK3sfCJzNBbg==", + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-vue-jsx/-/plugin-vue-jsx-5.1.4.tgz", + "integrity": "sha512-70LmoVk9riR7qc4W2CpjsbNMWTPnuZb9dpFKX1emru0yP57nsc9k8nhLA6U93ngQapv5VDIUq2JatNfLbBIkrA==", "license": "MIT", "dependencies": { - "@babel/core": "^7.27.7", - "@babel/plugin-transform-typescript": "^7.27.1", - "@rolldown/pluginutils": "^1.0.0-beta.21", - "@vue/babel-plugin-jsx": "^1.4.0" + "@babel/core": "^7.29.0", + "@babel/plugin-syntax-typescript": "^7.28.6", + "@babel/plugin-transform-typescript": "^7.28.6", + "@rolldown/pluginutils": "^1.0.0-rc.2", + "@vue/babel-plugin-jsx": "^2.0.1" }, "engines": { "node": "^20.19.0 || >=22.12.0" }, "peerDependencies": { - "vite": "^5.0.0 || ^6.0.0 || ^7.0.0", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0", "vue": "^3.0.0" } }, - "node_modules/@vitejs/plugin-vue-jsx/node_modules/@rolldown/pluginutils": { - "version": "1.0.0-beta.29", - 
"resolved": "https://registry.npmmirror.com/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.29.tgz", - "integrity": "sha512-NIJgOsMjbxAXvoGq/X0gD7VPMQ8j9g0BiDaNjVNVjvl+iKXxL3Jre0v31RmBYeLEmkbj2s02v8vFTbUXi5XS2Q==", + "node_modules/@volar/language-core": { + "version": "2.4.28", + "resolved": "https://registry.npmjs.org/@volar/language-core/-/language-core-2.4.28.tgz", + "integrity": "sha512-w4qhIJ8ZSitgLAkVay6AbcnC7gP3glYM3fYwKV3srj8m494E3xtrCv6E+bWviiK/8hs6e6t1ij1s2Endql7vzQ==", + "license": "MIT", + "dependencies": { + "@volar/source-map": "2.4.28" + } + }, + "node_modules/@volar/source-map": { + "version": "2.4.28", + "resolved": "https://registry.npmjs.org/@volar/source-map/-/source-map-2.4.28.tgz", + "integrity": "sha512-yX2BDBqJkRXfKw8my8VarTyjv48QwxdJtvRgUpNE5erCsgEUdI2DsLbpa+rOQVAJYshY99szEcRDmyHbF10ggQ==", "license": "MIT" }, "node_modules/@vue-macros/common": { - "version": "3.0.0-beta.15", - "resolved": "https://registry.npmmirror.com/@vue-macros/common/-/common-3.0.0-beta.15.tgz", - "integrity": "sha512-DMgq/rIh1H20WYNWU7krIbEfJRYDDhy7ix64GlT4AVUJZZWCZ5pxiYVJR3A3GmWQPkn7Pg7i3oIiGqu4JGC65w==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@vue-macros/common/-/common-3.1.2.tgz", + "integrity": "sha512-h9t4ArDdniO9ekYHAD95t9AZcAbb19lEGK+26iAjUODOIJKmObDNBSe4+6ELQAA3vtYiFPPBtHh7+cQCKi3Dng==", "license": "MIT", "dependencies": { - "@vue/compiler-sfc": "^3.5.17", - "ast-kit": "^2.1.0", - "local-pkg": "^1.1.1", - "magic-string-ast": "^1.0.0", - "unplugin-utils": "^0.2.4" + "@vue/compiler-sfc": "^3.5.22", + "ast-kit": "^2.1.2", + "local-pkg": "^1.1.2", + "magic-string-ast": "^1.0.2", + "unplugin-utils": "^0.3.0" }, "engines": { - "node": ">=20.18.0" + "node": ">=20.19.0" }, "funding": { "url": "https://github.com/sponsors/vue-macros" @@ -4119,27 +3832,43 @@ } } }, + "node_modules/@vue-macros/common/node_modules/unplugin-utils": { + "version": "0.3.1", + "resolved": 
"https://registry.npmjs.org/unplugin-utils/-/unplugin-utils-0.3.1.tgz", + "integrity": "sha512-5lWVjgi6vuHhJ526bI4nlCOmkCIF3nnfXkCMDeMJrtdvxTs6ZFCM8oNufGTsDbKv/tJ/xj8RpvXjRuPBZJuJog==", + "license": "MIT", + "dependencies": { + "pathe": "^2.0.3", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + } + }, "node_modules/@vue/babel-helper-vue-transform-on": { - "version": "1.4.0", - "resolved": "https://registry.npmmirror.com/@vue/babel-helper-vue-transform-on/-/babel-helper-vue-transform-on-1.4.0.tgz", - "integrity": "sha512-mCokbouEQ/ocRce/FpKCRItGo+013tHg7tixg3DUNS+6bmIchPt66012kBMm476vyEIJPafrvOf4E5OYj3shSw==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@vue/babel-helper-vue-transform-on/-/babel-helper-vue-transform-on-2.0.1.tgz", + "integrity": "sha512-uZ66EaFbnnZSYqYEyplWvn46GhZ1KuYSThdT68p+am7MgBNbQ3hphTL9L+xSIsWkdktwhPYLwPgVWqo96jDdRA==", "license": "MIT" }, "node_modules/@vue/babel-plugin-jsx": { - "version": "1.4.0", - "resolved": "https://registry.npmmirror.com/@vue/babel-plugin-jsx/-/babel-plugin-jsx-1.4.0.tgz", - "integrity": "sha512-9zAHmwgMWlaN6qRKdrg1uKsBKHvnUU+Py+MOCTuYZBoZsopa90Di10QRjB+YPnVss0BZbG/H5XFwJY1fTxJWhA==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@vue/babel-plugin-jsx/-/babel-plugin-jsx-2.0.1.tgz", + "integrity": "sha512-a8CaLQjD/s4PVdhrLD/zT574ZNPnZBOY+IhdtKWRB4HRZ0I2tXBi5ne7d9eCfaYwp5gU5+4KIyFTV1W1YL9xZA==", "license": "MIT", "dependencies": { - "@babel/helper-module-imports": "^7.25.9", - "@babel/helper-plugin-utils": "^7.26.5", - "@babel/plugin-syntax-jsx": "^7.25.9", - "@babel/template": "^7.26.9", - "@babel/traverse": "^7.26.9", - "@babel/types": "^7.26.9", - "@vue/babel-helper-vue-transform-on": "1.4.0", - "@vue/babel-plugin-resolve-type": "1.4.0", - "@vue/shared": "^3.5.13" + "@babel/helper-module-imports": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/plugin-syntax-jsx": 
"^7.27.1", + "@babel/template": "^7.27.2", + "@babel/traverse": "^7.28.4", + "@babel/types": "^7.28.4", + "@vue/babel-helper-vue-transform-on": "2.0.1", + "@vue/babel-plugin-resolve-type": "2.0.1", + "@vue/shared": "^3.5.22" }, "peerDependencies": { "@babel/core": "^7.0.0-0" @@ -4151,16 +3880,16 @@ } }, "node_modules/@vue/babel-plugin-resolve-type": { - "version": "1.4.0", - "resolved": "https://registry.npmmirror.com/@vue/babel-plugin-resolve-type/-/babel-plugin-resolve-type-1.4.0.tgz", - "integrity": "sha512-4xqDRRbQQEWHQyjlYSgZsWj44KfiF6D+ktCuXyZ8EnVDYV3pztmXJDf1HveAjUAXxAnR8daCQT51RneWWxtTyQ==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@vue/babel-plugin-resolve-type/-/babel-plugin-resolve-type-2.0.1.tgz", + "integrity": "sha512-ybwgIuRGRRBhOU37GImDoWQoz+TlSqap65qVI6iwg/J7FfLTLmMf97TS7xQH9I7Qtr/gp161kYVdhr1ZMraSYQ==", "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.26.2", - "@babel/helper-module-imports": "^7.25.9", - "@babel/helper-plugin-utils": "^7.26.5", - "@babel/parser": "^7.26.9", - "@vue/compiler-sfc": "^3.5.13" + "@babel/code-frame": "^7.27.1", + "@babel/helper-module-imports": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/parser": "^7.28.4", + "@vue/compiler-sfc": "^3.5.22" }, "funding": { "url": "https://github.com/sponsors/sxzz" @@ -4170,47 +3899,47 @@ } }, "node_modules/@vue/compiler-core": { - "version": "3.5.18", - "resolved": "https://registry.npmmirror.com/@vue/compiler-core/-/compiler-core-3.5.18.tgz", - "integrity": "sha512-3slwjQrrV1TO8MoXgy3aynDQ7lslj5UqDxuHnrzHtpON5CBinhWjJETciPngpin/T3OuW3tXUf86tEurusnztw==", + "version": "3.5.29", + "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.29.tgz", + "integrity": "sha512-cuzPhD8fwRHk8IGfmYaR4eEe4cAyJEL66Ove/WZL7yWNL134nqLddSLwNRIsFlnnW1kK+p8Ck3viFnC0chXCXw==", "license": "MIT", "dependencies": { - "@babel/parser": "^7.28.0", - "@vue/shared": "3.5.18", - "entities": "^4.5.0", + "@babel/parser": 
"^7.29.0", + "@vue/shared": "3.5.29", + "entities": "^7.0.1", "estree-walker": "^2.0.2", "source-map-js": "^1.2.1" } }, "node_modules/@vue/compiler-core/node_modules/estree-walker": { "version": "2.0.2", - "resolved": "https://registry.npmmirror.com/estree-walker/-/estree-walker-2.0.2.tgz", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", "license": "MIT" }, "node_modules/@vue/compiler-dom": { - "version": "3.5.18", - "resolved": "https://registry.npmmirror.com/@vue/compiler-dom/-/compiler-dom-3.5.18.tgz", - "integrity": "sha512-RMbU6NTU70++B1JyVJbNbeFkK+A+Q7y9XKE2EM4NLGm2WFR8x9MbAtWxPPLdm0wUkuZv9trpwfSlL6tjdIa1+A==", + "version": "3.5.29", + "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.5.29.tgz", + "integrity": "sha512-n0G5o7R3uBVmVxjTIYcz7ovr8sy7QObFG8OQJ3xGCDNhbG60biP/P5KnyY8NLd81OuT1WJflG7N4KWYHaeeaIg==", "license": "MIT", "dependencies": { - "@vue/compiler-core": "3.5.18", - "@vue/shared": "3.5.18" + "@vue/compiler-core": "3.5.29", + "@vue/shared": "3.5.29" } }, "node_modules/@vue/compiler-sfc": { - "version": "3.5.18", - "resolved": "https://registry.npmmirror.com/@vue/compiler-sfc/-/compiler-sfc-3.5.18.tgz", - "integrity": "sha512-5aBjvGqsWs+MoxswZPoTB9nSDb3dhd1x30xrrltKujlCxo48j8HGDNj3QPhF4VIS0VQDUrA1xUfp2hEa+FNyXA==", + "version": "3.5.29", + "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.5.29.tgz", + "integrity": "sha512-oJZhN5XJs35Gzr50E82jg2cYdZQ78wEwvRO6Y63TvLVTc+6xICzJHP1UIecdSPPYIbkautNBanDiWYa64QSFIA==", "license": "MIT", "dependencies": { - "@babel/parser": "^7.28.0", - "@vue/compiler-core": "3.5.18", - "@vue/compiler-dom": "3.5.18", - "@vue/compiler-ssr": "3.5.18", - "@vue/shared": "3.5.18", + "@babel/parser": "^7.29.0", + "@vue/compiler-core": "3.5.29", + "@vue/compiler-dom": "3.5.29", + "@vue/compiler-ssr": "3.5.29", + "@vue/shared": "3.5.29", 
"estree-walker": "^2.0.2", - "magic-string": "^0.30.17", + "magic-string": "^0.30.21", "postcss": "^8.5.6", "source-map-js": "^1.2.1" } @@ -4222,13 +3951,13 @@ "license": "MIT" }, "node_modules/@vue/compiler-ssr": { - "version": "3.5.18", - "resolved": "https://registry.npmmirror.com/@vue/compiler-ssr/-/compiler-ssr-3.5.18.tgz", - "integrity": "sha512-xM16Ak7rSWHkM3m22NlmcdIM+K4BMyFARAfV9hYFl+SFuRzrZ3uGMNW05kA5pmeMa0X9X963Kgou7ufdbpOP9g==", + "version": "3.5.29", + "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.29.tgz", + "integrity": "sha512-Y/ARJZE6fpjzL5GH/phJmsFwx3g6t2KmHKHx5q+MLl2kencADKIrhH5MLF6HHpRMmlRAYBRSvv347Mepf1zVNw==", "license": "MIT", "dependencies": { - "@vue/compiler-dom": "3.5.18", - "@vue/shared": "3.5.18" + "@vue/compiler-dom": "3.5.29", + "@vue/shared": "3.5.29" } }, "node_modules/@vue/devtools-api": { @@ -4238,27 +3967,58 @@ "license": "MIT" }, "node_modules/@vue/devtools-core": { - "version": "7.7.7", - "resolved": "https://registry.npmmirror.com/@vue/devtools-core/-/devtools-core-7.7.7.tgz", - "integrity": "sha512-9z9TLbfC+AjAi1PQyWX+OErjIaJmdFlbDHcD+cAMYKY6Bh5VlsAtCeGyRMrXwIlMEQPukvnWt3gZBLwTAIMKzQ==", + "version": "8.0.6", + "resolved": "https://registry.npmjs.org/@vue/devtools-core/-/devtools-core-8.0.6.tgz", + "integrity": "sha512-fN7iVtpSQQdtMORWwVZ1JiIAKriinhD+lCHqPw9Rr252ae2TczILEmW0zcAZifPW8HfYcbFkn+h7Wv6kQQCayw==", "license": "MIT", "dependencies": { - "@vue/devtools-kit": "^7.7.7", - "@vue/devtools-shared": "^7.7.7", + "@vue/devtools-kit": "^8.0.6", + "@vue/devtools-shared": "^8.0.6", "mitt": "^3.0.1", - "nanoid": "^5.1.0", + "nanoid": "^5.1.5", "pathe": "^2.0.3", - "vite-hot-client": "^2.0.4" + "vite-hot-client": "^2.1.0" }, "peerDependencies": { "vue": "^3.0.0" } }, + "node_modules/@vue/devtools-core/node_modules/@vue/devtools-kit": { + "version": "8.0.6", + "resolved": "https://registry.npmjs.org/@vue/devtools-kit/-/devtools-kit-8.0.6.tgz", + "integrity": 
"sha512-9zXZPTJW72OteDXeSa5RVML3zWDCRcO5t77aJqSs228mdopYj5AiTpihozbsfFJ0IodfNs7pSgOGO3qfCuxDtw==", + "license": "MIT", + "dependencies": { + "@vue/devtools-shared": "^8.0.6", + "birpc": "^2.6.1", + "hookable": "^5.5.3", + "mitt": "^3.0.1", + "perfect-debounce": "^2.0.0", + "speakingurl": "^14.0.1", + "superjson": "^2.2.2" + } + }, + "node_modules/@vue/devtools-core/node_modules/@vue/devtools-shared": { + "version": "8.0.6", + "resolved": "https://registry.npmjs.org/@vue/devtools-shared/-/devtools-shared-8.0.6.tgz", + "integrity": "sha512-Pp1JylTqlgMJvxW6MGyfTF8vGvlBSCAvMFaDCYa82Mgw7TT5eE5kkHgDvmOGHWeJE4zIDfCpCxHapsK2LtIAJg==", + "license": "MIT", + "dependencies": { + "rfdc": "^1.4.1" + } + }, + "node_modules/@vue/devtools-core/node_modules/perfect-debounce": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-2.1.0.tgz", + "integrity": "sha512-LjgdTytVFXeUgtHZr9WYViYSM/g8MkcTPYDlPa3cDqMirHjKiSZPYd6DoL7pK8AJQr+uWkQvCjHNdiMqsrJs+g==", + "license": "MIT" + }, "node_modules/@vue/devtools-kit": { "version": "7.7.7", "resolved": "https://registry.npmmirror.com/@vue/devtools-kit/-/devtools-kit-7.7.7.tgz", "integrity": "sha512-wgoZtxcTta65cnZ1Q6MbAfePVFxfM+gq0saaeytoph7nEa7yMXoi6sCPy4ufO111B9msnw0VOWjPEFCXuAKRHA==", "license": "MIT", + "peer": true, "dependencies": { "@vue/devtools-shared": "^7.7.7", "birpc": "^2.3.0", @@ -4274,137 +4034,170 @@ "resolved": "https://registry.npmmirror.com/@vue/devtools-shared/-/devtools-shared-7.7.7.tgz", "integrity": "sha512-+udSj47aRl5aKb0memBvcUG9koarqnxNM5yjuREvqwK6T3ap4mn3Zqqc17QrBFTqSMjr3HK1cvStEZpMDpfdyw==", "license": "MIT", + "peer": true, "dependencies": { "rfdc": "^1.4.1" } }, + "node_modules/@vue/language-core": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/@vue/language-core/-/language-core-3.2.5.tgz", + "integrity": "sha512-d3OIxN/+KRedeM5wQ6H6NIpwS3P5gC9nmyaHgBk+rO6dIsjY+tOh4UlPpiZbAh3YtLdCGEX4M16RmsBqPmJV+g==", + "license": "MIT", + "dependencies": { + 
"@volar/language-core": "2.4.28", + "@vue/compiler-dom": "^3.5.0", + "@vue/shared": "^3.5.0", + "alien-signals": "^3.0.0", + "muggle-string": "^0.4.1", + "path-browserify": "^1.0.1", + "picomatch": "^4.0.2" + } + }, "node_modules/@vue/reactivity": { - "version": "3.5.18", - "resolved": "https://registry.npmmirror.com/@vue/reactivity/-/reactivity-3.5.18.tgz", - "integrity": "sha512-x0vPO5Imw+3sChLM5Y+B6G1zPjwdOri9e8V21NnTnlEvkxatHEH5B5KEAJcjuzQ7BsjGrKtfzuQ5eQwXh8HXBg==", + "version": "3.5.29", + "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.29.tgz", + "integrity": "sha512-zcrANcrRdcLtmGZETBxWqIkoQei8HaFpZWx/GHKxx79JZsiZ8j1du0VUJtu4eJjgFvU/iKL5lRXFXksVmI+5DA==", "license": "MIT", "dependencies": { - "@vue/shared": "3.5.18" + "@vue/shared": "3.5.29" } }, "node_modules/@vue/runtime-core": { - "version": "3.5.18", - "resolved": "https://registry.npmmirror.com/@vue/runtime-core/-/runtime-core-3.5.18.tgz", - "integrity": "sha512-DUpHa1HpeOQEt6+3nheUfqVXRog2kivkXHUhoqJiKR33SO4x+a5uNOMkV487WPerQkL0vUuRvq/7JhRgLW3S+w==", + "version": "3.5.29", + "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.29.tgz", + "integrity": "sha512-8DpW2QfdwIWOLqtsNcds4s+QgwSaHSJY/SUe04LptianUQ/0xi6KVsu/pYVh+HO3NTVvVJjIPL2t6GdeKbS4Lg==", "license": "MIT", "dependencies": { - "@vue/reactivity": "3.5.18", - "@vue/shared": "3.5.18" + "@vue/reactivity": "3.5.29", + "@vue/shared": "3.5.29" } }, "node_modules/@vue/runtime-dom": { - "version": "3.5.18", - "resolved": "https://registry.npmmirror.com/@vue/runtime-dom/-/runtime-dom-3.5.18.tgz", - "integrity": "sha512-YwDj71iV05j4RnzZnZtGaXwPoUWeRsqinblgVJwR8XTXYZ9D5PbahHQgsbmzUvCWNF6x7siQ89HgnX5eWkr3mw==", + "version": "3.5.29", + "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.29.tgz", + "integrity": "sha512-AHvvJEtcY9tw/uk+s/YRLSlxxQnqnAkjqvK25ZiM4CllCZWzElRAoQnCM42m9AHRLNJ6oe2kC5DCgD4AUdlvXg==", "license": "MIT", "dependencies": { - "@vue/reactivity": "3.5.18", - 
"@vue/runtime-core": "3.5.18", - "@vue/shared": "3.5.18", - "csstype": "^3.1.3" + "@vue/reactivity": "3.5.29", + "@vue/runtime-core": "3.5.29", + "@vue/shared": "3.5.29", + "csstype": "^3.2.3" } }, "node_modules/@vue/server-renderer": { - "version": "3.5.18", - "resolved": "https://registry.npmmirror.com/@vue/server-renderer/-/server-renderer-3.5.18.tgz", - "integrity": "sha512-PvIHLUoWgSbDG7zLHqSqaCoZvHi6NNmfVFOqO+OnwvqMz/tqQr3FuGWS8ufluNddk7ZLBJYMrjcw1c6XzR12mA==", + "version": "3.5.29", + "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.5.29.tgz", + "integrity": "sha512-G/1k6WK5MusLlbxSE2YTcqAAezS+VuwHhOvLx2KnQU7G2zCH6KIb+5Wyt6UjMq7a3qPzNEjJXs1hvAxDclQH+g==", "license": "MIT", "dependencies": { - "@vue/compiler-ssr": "3.5.18", - "@vue/shared": "3.5.18" + "@vue/compiler-ssr": "3.5.29", + "@vue/shared": "3.5.29" }, "peerDependencies": { - "vue": "3.5.18" + "vue": "3.5.29" } }, "node_modules/@vue/shared": { - "version": "3.5.18", - "resolved": "https://registry.npmmirror.com/@vue/shared/-/shared-3.5.18.tgz", - "integrity": "sha512-cZy8Dq+uuIXbxCZpuLd2GJdeSO/lIzIspC2WtkqIpje5QyFbvLaI5wZtdUjLHjGZrlVX6GilejatWwVYYRc8tA==", + "version": "3.5.29", + "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.29.tgz", + "integrity": "sha512-w7SR0A5zyRByL9XUkCfdLs7t9XOHUyJ67qPGQjOou3p6GvBeBW+AVjUUmlxtZ4PIYaRvE+1LmK44O4uajlZwcg==", "license": "MIT" }, - "node_modules/@whatwg-node/disposablestack": { - "version": "0.0.6", - "resolved": "https://registry.npmmirror.com/@whatwg-node/disposablestack/-/disposablestack-0.0.6.tgz", - "integrity": "sha512-LOtTn+JgJvX8WfBVJtF08TGrdjuFzGJc4mkP8EdDI8ADbvO7kiexYep1o8dwnt0okb0jYclCDXF13xU7Ge4zSw==", + "node_modules/@vueuse/core": { + "version": "13.9.0", + "resolved": "https://registry.npmmirror.com/@vueuse/core/-/core-13.9.0.tgz", + "integrity": "sha512-ts3regBQyURfCE2BcytLqzm8+MmLlo5Ln/KLoxDVcsZ2gzIwVNnQpQOL/UKV8alUqjSZOlpFZcRNsLRqj+OzyA==", "license": "MIT", "dependencies": { - 
"@whatwg-node/promise-helpers": "^1.0.0", - "tslib": "^2.6.3" + "@types/web-bluetooth": "^0.0.21", + "@vueuse/metadata": "13.9.0", + "@vueuse/shared": "13.9.0" }, - "engines": { - "node": ">=18.0.0" + "funding": { + "url": "https://github.com/sponsors/antfu" + }, + "peerDependencies": { + "vue": "^3.5.0" } }, - "node_modules/@whatwg-node/fetch": { - "version": "0.10.9", - "resolved": "https://registry.npmmirror.com/@whatwg-node/fetch/-/fetch-0.10.9.tgz", - "integrity": "sha512-2TaXKmjy53cybNtaAtzbPOzwIPkjXbzvZcimnaJxQwYXKSC8iYnWoZOyT4+CFt8w0KDieg5J5dIMNzUrW/UZ5g==", + "node_modules/@vueuse/metadata": { + "version": "13.9.0", + "resolved": "https://registry.npmmirror.com/@vueuse/metadata/-/metadata-13.9.0.tgz", + "integrity": "sha512-1AFRvuiGphfF7yWixZa0KwjYH8ulyjDCC0aFgrGRz8+P4kvDFSdXLVfTk5xAN9wEuD1J6z4/myMoYbnHoX07zg==", "license": "MIT", - "dependencies": { - "@whatwg-node/node-fetch": "^0.7.22", - "urlpattern-polyfill": "^10.0.0" - }, - "engines": { - "node": ">=18.0.0" + "funding": { + "url": "https://github.com/sponsors/antfu" } }, - "node_modules/@whatwg-node/fetch/node_modules/urlpattern-polyfill": { - "version": "10.1.0", - "resolved": "https://registry.npmmirror.com/urlpattern-polyfill/-/urlpattern-polyfill-10.1.0.tgz", - "integrity": "sha512-IGjKp/o0NL3Bso1PymYURCJxMPNAf/ILOpendP9f5B6e1rTJgdgiOvgfoT8VxCAdY+Wisb9uhGaJJf3yZ2V9nw==", - "license": "MIT" - }, - "node_modules/@whatwg-node/node-fetch": { - "version": "0.7.22", - "resolved": "https://registry.npmmirror.com/@whatwg-node/node-fetch/-/node-fetch-0.7.22.tgz", - "integrity": "sha512-h4GGjGF2vH3kGJ/fEOeg9Xfu4ncoyRwFcjGIxr/5dTBgZNVwq888byIsZ+XXRDJnNnRlzVVVQDcqrZpY2yctGA==", + "node_modules/@vueuse/motion": { + "version": "3.0.3", + "resolved": "https://registry.npmmirror.com/@vueuse/motion/-/motion-3.0.3.tgz", + "integrity": "sha512-4B+ITsxCI9cojikvrpaJcLXyq0spj3sdlzXjzesWdMRd99hhtFI6OJ/1JsqwtF73YooLe0hUn/xDR6qCtmn5GQ==", "license": "MIT", "dependencies": { - 
"@fastify/busboy": "^3.1.1", - "@whatwg-node/disposablestack": "^0.0.6", - "@whatwg-node/promise-helpers": "^1.3.2", - "tslib": "^2.6.3" + "@vueuse/core": "^13.0.0", + "@vueuse/shared": "^13.0.0", + "defu": "^6.1.4", + "framesync": "^6.1.2", + "popmotion": "^11.0.5", + "style-value-types": "^5.1.2" }, - "engines": { - "node": ">=18.0.0" + "optionalDependencies": { + "@nuxt/kit": "^3.13.0" + }, + "peerDependencies": { + "vue": ">=3.0.0" } }, - "node_modules/@whatwg-node/promise-helpers": { - "version": "1.3.2", - "resolved": "https://registry.npmmirror.com/@whatwg-node/promise-helpers/-/promise-helpers-1.3.2.tgz", - "integrity": "sha512-Nst5JdK47VIl9UcGwtv2Rcgyn5lWtZ0/mhRQ4G8NN2isxpq2TO30iqHzmwoJycjWuyUfg3GFXqP/gFHXeV57IA==", + "node_modules/@vueuse/motion/node_modules/@nuxt/kit": { + "version": "3.21.0", + "resolved": "https://registry.npmmirror.com/@nuxt/kit/-/kit-3.21.0.tgz", + "integrity": "sha512-KMTLK/dsGaQioZzkYUvgfN9le4grNW54aNcA1jqzgVZLcFVy4jJfrJr5WZio9NT2EMfajdoZ+V28aD7BRr4Zfw==", "license": "MIT", + "optional": true, "dependencies": { - "tslib": "^2.6.3" + "c12": "^3.3.3", + "consola": "^3.4.2", + "defu": "^6.1.4", + "destr": "^2.0.5", + "errx": "^0.1.0", + "exsolve": "^1.0.8", + "ignore": "^7.0.5", + "jiti": "^2.6.1", + "klona": "^2.0.6", + "knitwork": "^1.3.0", + "mlly": "^1.8.0", + "ohash": "^2.0.11", + "pathe": "^2.0.3", + "pkg-types": "^2.3.0", + "rc9": "^2.1.2", + "scule": "^1.3.0", + "semver": "^7.7.3", + "tinyglobby": "^0.2.15", + "ufo": "^1.6.3", + "unctx": "^2.5.0", + "untyped": "^2.0.0" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.12.0" } }, - "node_modules/@whatwg-node/server": { - "version": "0.9.71", - "resolved": "https://registry.npmmirror.com/@whatwg-node/server/-/server-0.9.71.tgz", - "integrity": "sha512-ueFCcIPaMgtuYDS9u0qlUoEvj6GiSsKrwnOLPp9SshqjtcRaR1IEHRjoReq3sXNydsF5i0ZnmuYgXq9dV53t0g==", + "node_modules/@vueuse/shared": { + "version": "13.9.0", + "resolved": 
"https://registry.npmmirror.com/@vueuse/shared/-/shared-13.9.0.tgz", + "integrity": "sha512-e89uuTLMh0U5cZ9iDpEI2senqPGfbPRTHM/0AaQkcxnpqjkZqDYP8rpfm7edOz8s+pOCOROEy1PIveSW8+fL5g==", "license": "MIT", - "dependencies": { - "@whatwg-node/disposablestack": "^0.0.6", - "@whatwg-node/fetch": "^0.10.5", - "@whatwg-node/promise-helpers": "^1.2.2", - "tslib": "^2.6.3" + "funding": { + "url": "https://github.com/sponsors/antfu" }, - "engines": { - "node": ">=18.0.0" + "peerDependencies": { + "vue": "^3.5.0" } }, "node_modules/abbrev": { "version": "3.0.1", - "resolved": "https://registry.npmmirror.com/abbrev/-/abbrev-3.0.1.tgz", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-3.0.1.tgz", "integrity": "sha512-AO2ac6pjRB3SJmGJo+v5/aK6Omggp6fsLrs6wN9bd35ulu4cCwaAU9+7ZhXjeqHVkaHThLuzH0nZr0YpCDhygg==", "license": "ISC", "engines": { @@ -4413,7 +4206,7 @@ }, "node_modules/abort-controller": { "version": "3.0.0", - "resolved": "https://registry.npmmirror.com/abort-controller/-/abort-controller-3.0.0.tgz", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", "license": "MIT", "dependencies": { @@ -4436,31 +4229,10 @@ "node": ">= 0.6" } }, - "node_modules/accepts/node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmmirror.com/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/accepts/node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmmirror.com/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "license": "MIT", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - 
"node": ">= 0.6" - } - }, "node_modules/acorn": { - "version": "8.15.0", - "resolved": "https://registry.npmmirror.com/acorn/-/acorn-8.15.0.tgz", - "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "version": "8.16.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.16.0.tgz", + "integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==", "license": "MIT", "bin": { "acorn": "bin/acorn" @@ -4471,7 +4243,7 @@ }, "node_modules/acorn-import-attributes": { "version": "1.9.5", - "resolved": "https://registry.npmmirror.com/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz", + "resolved": "https://registry.npmjs.org/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz", "integrity": "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==", "license": "MIT", "peerDependencies": { @@ -4480,13 +4252,19 @@ }, "node_modules/agent-base": { "version": "7.1.4", - "resolved": "https://registry.npmmirror.com/agent-base/-/agent-base-7.1.4.tgz", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz", "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==", "license": "MIT", "engines": { "node": ">= 14" } }, + "node_modules/alien-signals": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/alien-signals/-/alien-signals-3.1.2.tgz", + "integrity": "sha512-d9dYqZTS90WLiU0I5c6DHj/HcKkF8ZyGN3G5x8wSbslulz70KOxaqCT0hQCo9KOyhVqzqGojvNdJXoTumZOtcw==", + "license": "MIT" + }, "node_modules/ansi-regex": { "version": "6.1.0", "resolved": "https://registry.npmmirror.com/ansi-regex/-/ansi-regex-6.1.0.tgz", @@ -4512,9 +4290,9 @@ } }, "node_modules/ansis": { - "version": "4.1.0", - "resolved": "https://registry.npmmirror.com/ansis/-/ansis-4.1.0.tgz", - "integrity": 
"sha512-BGcItUBWSMRgOCe+SVZJ+S7yTRG0eGt9cXAHev72yuGcY23hnLA7Bky5L/xLyPINoSN95geovfBkqoTlNZYa7w==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/ansis/-/ansis-4.2.0.tgz", + "integrity": "sha512-HqZ5rWlFjGiV0tDm3UxxgNRqsOTniqoKZu0pIAfh7TZQMGuZK+hH0drySty0si0QXj1ieop4+SkSfPZBPPkHig==", "license": "ISC", "engines": { "node": ">=14" @@ -4553,7 +4331,7 @@ }, "node_modules/archiver": { "version": "7.0.1", - "resolved": "https://registry.npmmirror.com/archiver/-/archiver-7.0.1.tgz", + "resolved": "https://registry.npmjs.org/archiver/-/archiver-7.0.1.tgz", "integrity": "sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ==", "license": "MIT", "dependencies": { @@ -4571,7 +4349,7 @@ }, "node_modules/archiver-utils": { "version": "5.0.2", - "resolved": "https://registry.npmmirror.com/archiver-utils/-/archiver-utils-5.0.2.tgz", + "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-5.0.2.tgz", "integrity": "sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA==", "license": "MIT", "dependencies": { @@ -4589,7 +4367,7 @@ }, "node_modules/archiver-utils/node_modules/is-stream": { "version": "2.0.1", - "resolved": "https://registry.npmmirror.com/is-stream/-/is-stream-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", "license": "MIT", "engines": { @@ -4606,41 +4384,32 @@ "license": "MIT" }, "node_modules/ast-kit": { - "version": "2.1.1", - "resolved": "https://registry.npmmirror.com/ast-kit/-/ast-kit-2.1.1.tgz", - "integrity": "sha512-mfh6a7gKXE8pDlxTvqIc/syH/P3RkzbOF6LeHdcKztLEzYe6IMsRCL7N8vI7hqTGWNxpkCuuRTpT21xNWqhRtQ==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ast-kit/-/ast-kit-2.2.0.tgz", + "integrity": 
"sha512-m1Q/RaVOnTp9JxPX+F+Zn7IcLYMzM8kZofDImfsKZd8MbR+ikdOzTeztStWqfrqIxZnYWryyI9ePm3NGjnZgGw==", "license": "MIT", "dependencies": { - "@babel/parser": "^7.27.7", + "@babel/parser": "^7.28.5", "pathe": "^2.0.3" }, "engines": { - "node": ">=20.18.0" + "node": ">=20.19.0" }, "funding": { "url": "https://github.com/sponsors/sxzz" } }, - "node_modules/ast-module-types": { - "version": "6.0.1", - "resolved": "https://registry.npmmirror.com/ast-module-types/-/ast-module-types-6.0.1.tgz", - "integrity": "sha512-WHw67kLXYbZuHTmcdbIrVArCq5wxo6NEuj3hiYAWr8mwJeC+C2mMCIBIWCiDoCye/OF/xelc+teJ1ERoWmnEIA==", - "license": "MIT", - "engines": { - "node": ">=18" - } - }, "node_modules/ast-walker-scope": { - "version": "0.8.1", - "resolved": "https://registry.npmmirror.com/ast-walker-scope/-/ast-walker-scope-0.8.1.tgz", - "integrity": "sha512-72XOdbzQCMKERvFrxAykatn2pu7osPNq/sNUzwcHdWzwPvOsNpPqkawfDXVvQbA2RT+ivtsMNjYdojTUZitt1A==", + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/ast-walker-scope/-/ast-walker-scope-0.8.3.tgz", + "integrity": "sha512-cbdCP0PGOBq0ASG+sjnKIoYkWMKhhz+F/h9pRexUdX2Hd38+WOlBkRKlqkGOSm0YQpcFMQBJeK4WspUAkwsEdg==", "license": "MIT", "dependencies": { - "@babel/parser": "^7.27.2", - "ast-kit": "^2.0.0" + "@babel/parser": "^7.28.4", + "ast-kit": "^2.1.3" }, "engines": { - "node": ">=20.18.0" + "node": ">=20.19.0" }, "funding": { "url": "https://github.com/sponsors/sxzz" @@ -4654,13 +4423,13 @@ }, "node_modules/async-sema": { "version": "3.1.1", - "resolved": "https://registry.npmmirror.com/async-sema/-/async-sema-3.1.1.tgz", + "resolved": "https://registry.npmjs.org/async-sema/-/async-sema-3.1.1.tgz", "integrity": "sha512-tLRNUXati5MFePdAk8dw7Qt7DpxPB60ofAgn8WRhW6a2rcimZnYBP9oxHiv0OHy+Wz7kPMG+t4LGdt31+4EmGg==", "license": "MIT" }, "node_modules/asynckit": { "version": "0.4.0", - "resolved": "https://registry.npmmirror.com/asynckit/-/asynckit-0.4.0.tgz", + "resolved": 
"https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", "license": "MIT" }, @@ -4674,9 +4443,9 @@ } }, "node_modules/autoprefixer": { - "version": "10.4.21", - "resolved": "https://registry.npmmirror.com/autoprefixer/-/autoprefixer-10.4.21.tgz", - "integrity": "sha512-O+A6LWV5LDHSJD3LjHYoNi4VLsj/Whi7k6zG12xTYaU4cQ8oxQGckXNX8cRHK5yOZ/ppVHe0ZBXGzSV9jXdVbQ==", + "version": "10.4.27", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.27.tgz", + "integrity": "sha512-NP9APE+tO+LuJGn7/9+cohklunJsXWiaWEfV3si4Gi/XHDwVNgkwr1J3RQYFIvPy76GmJ9/bW8vyoU1LcxwKHA==", "funding": [ { "type": "opencollective", @@ -4693,10 +4462,9 @@ ], "license": "MIT", "dependencies": { - "browserslist": "^4.24.4", - "caniuse-lite": "^1.0.30001702", - "fraction.js": "^4.3.7", - "normalize-range": "^0.1.2", + "browserslist": "^4.28.1", + "caniuse-lite": "^1.0.30001774", + "fraction.js": "^5.3.4", "picocolors": "^1.1.1", "postcss-value-parser": "^4.2.0" }, @@ -4711,21 +4479,29 @@ } }, "node_modules/axios": { - "version": "1.11.0", - "resolved": "https://registry.npmmirror.com/axios/-/axios-1.11.0.tgz", - "integrity": "sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==", + "version": "1.13.6", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.6.tgz", + "integrity": "sha512-ChTCHMouEe2kn713WHbQGcuYrr6fXTBiu460OTwWrWob16g1bXn4vtz07Ope7ewMozJAnEquLk5lWQWtBig9DQ==", "license": "MIT", "dependencies": { - "follow-redirects": "^1.15.6", - "form-data": "^4.0.4", + "follow-redirects": "^1.15.11", + "form-data": "^4.0.5", "proxy-from-env": "^1.1.0" } }, "node_modules/b4a": { - "version": "1.6.7", - "resolved": "https://registry.npmmirror.com/b4a/-/b4a-1.6.7.tgz", - "integrity": "sha512-OnAYlL5b7LEkALw87fUVafQw5rVR9RjwGd4KUwNQ6DrrNmaVaUCgLipfVlzrPQ4tWOR9P0IXGNOx50jYCCdSJg==", - "license": "Apache-2.0" + "version": 
"1.8.0", + "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.8.0.tgz", + "integrity": "sha512-qRuSmNSkGQaHwNbM7J78Wwy+ghLEYF1zNrSeMxj4Kgw6y33O3mXcQ6Ie9fRvfU/YnxWkOchPXbaLb73TkIsfdg==", + "license": "Apache-2.0", + "peerDependencies": { + "react-native-b4a": "*" + }, + "peerDependenciesMeta": { + "react-native-b4a": { + "optional": true + } + } }, "node_modules/balanced-match": { "version": "1.0.2", @@ -4734,15 +4510,22 @@ "license": "MIT" }, "node_modules/bare-events": { - "version": "2.6.0", - "resolved": "https://registry.npmmirror.com/bare-events/-/bare-events-2.6.0.tgz", - "integrity": "sha512-EKZ5BTXYExaNqi3I3f9RtEsaI/xBSGjE0XZCZilPzFAV/goswFHuPd9jEZlPIZ/iNZJwDSao9qRiScySz7MbQg==", + "version": "2.8.2", + "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.8.2.tgz", + "integrity": "sha512-riJjyv1/mHLIPX4RwiK+oW9/4c3TEUeORHKefKAKnZ5kyslbN+HXowtbaVEqt4IMUB7OXlfixcs6gsFeo/jhiQ==", "license": "Apache-2.0", - "optional": true + "peerDependencies": { + "bare-abort-controller": "*" + }, + "peerDependenciesMeta": { + "bare-abort-controller": { + "optional": true + } + } }, "node_modules/base64-js": { "version": "1.5.1", - "resolved": "https://registry.npmmirror.com/base64-js/-/base64-js-1.5.1.tgz", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", "funding": [ { @@ -4760,6 +4543,18 @@ ], "license": "MIT" }, + "node_modules/baseline-browser-mapping": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.10.0.tgz", + "integrity": "sha512-lIyg0szRfYbiy67j9KN8IyeD7q7hcmqnJ1ddWmNt19ItGpNN64mnllmxUNFIOdOm6by97jlL6wfpTTJrmnjWAA==", + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.cjs" + }, + "engines": { + "node": ">=6.0.0" + } + }, "node_modules/binary-extensions": { "version": "2.3.0", "resolved": 
"https://registry.npmmirror.com/binary-extensions/-/binary-extensions-2.3.0.tgz", @@ -4774,7 +4569,7 @@ }, "node_modules/bindings": { "version": "1.5.0", - "resolved": "https://registry.npmmirror.com/bindings/-/bindings-1.5.0.tgz", + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", "license": "MIT", "dependencies": { @@ -4782,9 +4577,9 @@ } }, "node_modules/birpc": { - "version": "2.5.0", - "resolved": "https://registry.npmmirror.com/birpc/-/birpc-2.5.0.tgz", - "integrity": "sha512-VSWO/W6nNQdyP520F1mhf+Lc2f8pjGQOtoHHm7Ze8Go1kX7akpVIrtTa0fn+HB0QJEDVacl6aO08YE0PgXfdnQ==", + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/birpc/-/birpc-2.9.0.tgz", + "integrity": "sha512-KrayHS5pBi69Xi9JmvoqrIgYGDkD6mcSe/i6YKi3w5kekCLzrX4+nawcXqrj2tIp50Kw/mT/s3p+GVK0A0sKxw==", "license": "MIT", "funding": { "url": "https://github.com/sponsors/antfu" @@ -4792,7 +4587,7 @@ }, "node_modules/boolbase": { "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/boolbase/-/boolbase-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==", "license": "ISC" }, @@ -4818,9 +4613,9 @@ } }, "node_modules/browserslist": { - "version": "4.25.1", - "resolved": "https://registry.npmmirror.com/browserslist/-/browserslist-4.25.1.tgz", - "integrity": "sha512-KGj0KoOMXLpSNkkEI6Z6mShmQy0bc1I+T7K9N81k4WWMrfz+6fQ6es80B/YLAeRoKvjYE1YSHHOW1qe9xIVzHw==", + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", + "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", "funding": [ { "type": "opencollective", @@ -4837,10 +4632,11 @@ ], "license": "MIT", "dependencies": { - "caniuse-lite": 
"^1.0.30001726", - "electron-to-chromium": "^1.5.173", - "node-releases": "^2.0.19", - "update-browserslist-db": "^1.1.3" + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + "electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" }, "bin": { "browserslist": "cli.js" @@ -4851,7 +4647,7 @@ }, "node_modules/buffer": { "version": "6.0.3", - "resolved": "https://registry.npmmirror.com/buffer/-/buffer-6.0.3.tgz", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", "funding": [ { @@ -4875,7 +4671,7 @@ }, "node_modules/buffer-crc32": { "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/buffer-crc32/-/buffer-crc32-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-1.0.0.tgz", "integrity": "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==", "license": "MIT", "engines": { @@ -4884,25 +4680,13 @@ }, "node_modules/buffer-from": { "version": "1.1.2", - "resolved": "https://registry.npmmirror.com/buffer-from/-/buffer-from-1.1.2.tgz", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", "license": "MIT" }, - "node_modules/builtin-modules": { - "version": "3.3.0", - "resolved": "https://registry.npmmirror.com/builtin-modules/-/builtin-modules-3.3.0.tgz", - "integrity": "sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==", - "license": "MIT", - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/bundle-name": { "version": "4.1.0", - "resolved": "https://registry.npmmirror.com/bundle-name/-/bundle-name-4.1.0.tgz", + "resolved": 
"https://registry.npmjs.org/bundle-name/-/bundle-name-4.1.0.tgz", "integrity": "sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q==", "license": "MIT", "dependencies": { @@ -4916,26 +4700,26 @@ } }, "node_modules/c12": { - "version": "3.1.0", - "resolved": "https://registry.npmmirror.com/c12/-/c12-3.1.0.tgz", - "integrity": "sha512-uWoS8OU1MEIsOv8p/5a82c3H31LsWVR5qiyXVfBNOzfffjUWtPnhAb4BYI2uG2HfGmZmFjCtui5XNWaps+iFuw==", + "version": "3.3.3", + "resolved": "https://registry.npmmirror.com/c12/-/c12-3.3.3.tgz", + "integrity": "sha512-750hTRvgBy5kcMNPdh95Qo+XUBeGo8C7nsKSmedDmaQI+E0r82DwHeM6vBewDe4rGFbnxoa4V9pw+sPh5+Iz8Q==", "license": "MIT", "dependencies": { - "chokidar": "^4.0.3", + "chokidar": "^5.0.0", "confbox": "^0.2.2", "defu": "^6.1.4", - "dotenv": "^16.6.1", - "exsolve": "^1.0.7", + "dotenv": "^17.2.3", + "exsolve": "^1.0.8", "giget": "^2.0.0", - "jiti": "^2.4.2", + "jiti": "^2.6.1", "ohash": "^2.0.11", "pathe": "^2.0.3", - "perfect-debounce": "^1.0.0", - "pkg-types": "^2.2.0", + "perfect-debounce": "^2.0.0", + "pkg-types": "^2.3.0", "rc9": "^2.1.2" }, "peerDependencies": { - "magicast": "^0.3.5" + "magicast": "*" }, "peerDependenciesMeta": { "magicast": { @@ -4943,9 +4727,27 @@ } } }, + "node_modules/c12/node_modules/dotenv": { + "version": "17.2.3", + "resolved": "https://registry.npmmirror.com/dotenv/-/dotenv-17.2.3.tgz", + "integrity": "sha512-JVUnt+DUIzu87TABbhPmNfVdBDt18BLOWjMUFJMSi/Qqg7NTYtabbvSNJGOJ7afbRuv9D/lngizHtP7QyLQ+9w==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, + "node_modules/c12/node_modules/perfect-debounce": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/perfect-debounce/-/perfect-debounce-2.1.0.tgz", + "integrity": "sha512-LjgdTytVFXeUgtHZr9WYViYSM/g8MkcTPYDlPa3cDqMirHjKiSZPYd6DoL7pK8AJQr+uWkQvCjHNdiMqsrJs+g==", + "license": "MIT" + }, "node_modules/cac": { "version": "6.7.14", - "resolved": 
"https://registry.npmmirror.com/cac/-/cac-6.7.14.tgz", + "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", "license": "MIT", "engines": { @@ -4965,27 +4767,6 @@ "node": ">= 6.0.0" } }, - "node_modules/cache-content-type/node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmmirror.com/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/cache-content-type/node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmmirror.com/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "license": "MIT", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, "node_modules/call-bind-apply-helpers": { "version": "1.0.2", "resolved": "https://registry.npmmirror.com/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", @@ -5015,14 +4796,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/callsite": { - "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/callsite/-/callsite-1.0.0.tgz", - "integrity": "sha512-0vdNRFXn5q+dtOqjfFtmtlI9N2eVZ7LMyEV2iKC5mEEFvSg/69Ml6b/WU2qF8W1nLRa0wiSrDT3Y5jOHZCwKPQ==", - "engines": { - "node": "*" - } - }, "node_modules/camelcase-css": { "version": "2.0.1", "resolved": "https://registry.npmmirror.com/camelcase-css/-/camelcase-css-2.0.1.tgz", @@ -5034,7 +4807,7 @@ }, "node_modules/caniuse-api": { "version": "3.0.0", - "resolved": "https://registry.npmmirror.com/caniuse-api/-/caniuse-api-3.0.0.tgz", + "resolved": "https://registry.npmjs.org/caniuse-api/-/caniuse-api-3.0.0.tgz", "integrity": 
"sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==", "license": "MIT", "dependencies": { @@ -5045,9 +4818,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001727", - "resolved": "https://registry.npmmirror.com/caniuse-lite/-/caniuse-lite-1.0.30001727.tgz", - "integrity": "sha512-pB68nIHmbN6L/4C6MH1DokyR3bYqFwjaSs/sWDHGj4CTcFtQUQMuJftVwWkXq7mNWOybD3KhUv3oWHoGxgP14Q==", + "version": "1.0.30001774", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001774.tgz", + "integrity": "sha512-DDdwPGz99nmIEv216hKSgLD+D4ikHQHjBC/seF98N9CPqRX4M5mSxT9eTV6oyisnJcuzxtZy4n17yKKQYmYQOA==", "funding": [ { "type": "opencollective", @@ -5120,15 +4893,15 @@ } }, "node_modules/chokidar": { - "version": "4.0.3", - "resolved": "https://registry.npmmirror.com/chokidar/-/chokidar-4.0.3.tgz", - "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-5.0.0.tgz", + "integrity": "sha512-TQMmc3w+5AxjpL8iIiwebF73dRDF4fBIieAqGn9RGCWaEVwQ6Fb2cGe31Yns0RRIzii5goJ1Y7xbMwo1TxMplw==", "license": "MIT", "dependencies": { - "readdirp": "^4.0.1" + "readdirp": "^5.0.0" }, "engines": { - "node": ">= 14.16.0" + "node": ">= 20.19.0" }, "funding": { "url": "https://paulmillr.com/funding/" @@ -5136,7 +4909,7 @@ }, "node_modules/chownr": { "version": "3.0.0", - "resolved": "https://registry.npmmirror.com/chownr/-/chownr-3.0.0.tgz", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", "license": "BlueOak-1.0.0", "engines": { @@ -5154,7 +4927,7 @@ }, "node_modules/clipboardy": { "version": "4.0.0", - "resolved": "https://registry.npmmirror.com/clipboardy/-/clipboardy-4.0.0.tgz", + "resolved": "https://registry.npmjs.org/clipboardy/-/clipboardy-4.0.0.tgz", "integrity": 
"sha512-5mOlNS0mhX0707P2I0aZ2V/cmHUEO/fL7VFLqszkhUsxt7RwnmrInf/eEQKlf5GzvYeHIjT+Ov1HRfNmymlG0w==", "license": "MIT", "dependencies": { @@ -5270,7 +5043,7 @@ }, "node_modules/cluster-key-slot": { "version": "1.1.2", - "resolved": "https://registry.npmmirror.com/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz", + "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz", "integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==", "license": "Apache-2.0", "engines": { @@ -5287,66 +5060,21 @@ "node": ">= 0.12.0" } }, - "node_modules/color": { - "version": "3.2.1", - "resolved": "https://registry.npmmirror.com/color/-/color-3.2.1.tgz", - "integrity": "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==", - "license": "MIT", - "dependencies": { - "color-convert": "^1.9.3", - "color-string": "^1.6.0" - } - }, - "node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmmirror.com/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "license": "MIT", - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/color-convert/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmmirror.com/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "license": "MIT" - }, "node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmmirror.com/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "license": "MIT" }, - "node_modules/color-string": { - "version": "1.9.1", - "resolved": "https://registry.npmmirror.com/color-string/-/color-string-1.9.1.tgz", - "integrity": 
"sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==", - "license": "MIT", - "dependencies": { - "color-name": "^1.0.0", - "simple-swizzle": "^0.2.2" - } - }, "node_modules/colord": { "version": "2.9.3", - "resolved": "https://registry.npmmirror.com/colord/-/colord-2.9.3.tgz", + "resolved": "https://registry.npmjs.org/colord/-/colord-2.9.3.tgz", "integrity": "sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw==", "license": "MIT" }, - "node_modules/colorspace": { - "version": "1.1.4", - "resolved": "https://registry.npmmirror.com/colorspace/-/colorspace-1.1.4.tgz", - "integrity": "sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w==", - "license": "MIT", - "dependencies": { - "color": "^3.1.3", - "text-hex": "1.0.x" - } - }, "node_modules/combined-stream": { "version": "1.0.8", - "resolved": "https://registry.npmmirror.com/combined-stream/-/combined-stream-1.0.8.tgz", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", "license": "MIT", "dependencies": { @@ -5357,35 +5085,31 @@ } }, "node_modules/commander": { - "version": "10.0.1", - "resolved": "https://registry.npmmirror.com/commander/-/commander-10.0.1.tgz", - "integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==", + "version": "13.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-13.1.0.tgz", + "integrity": "sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw==", "license": "MIT", + "optional": true, + "peer": true, "engines": { - "node": ">=14" + "node": ">=18" } }, - "node_modules/common-path-prefix": { - "version": "3.0.0", - "resolved": 
"https://registry.npmmirror.com/common-path-prefix/-/common-path-prefix-3.0.0.tgz", - "integrity": "sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w==", - "license": "ISC" - }, "node_modules/commondir": { "version": "1.0.1", - "resolved": "https://registry.npmmirror.com/commondir/-/commondir-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", "integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==", "license": "MIT" }, "node_modules/compatx": { "version": "0.2.0", - "resolved": "https://registry.npmmirror.com/compatx/-/compatx-0.2.0.tgz", + "resolved": "https://registry.npmjs.org/compatx/-/compatx-0.2.0.tgz", "integrity": "sha512-6gLRNt4ygsi5NyMVhceOCFv14CIdDFN7fQjX1U4+47qVE/+kjPoXMK65KWK+dWxmFzMTuKazoQ9sch6pM0p5oA==", "license": "MIT" }, "node_modules/compress-commons": { "version": "6.0.2", - "resolved": "https://registry.npmmirror.com/compress-commons/-/compress-commons-6.0.2.tgz", + "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-6.0.2.tgz", "integrity": "sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==", "license": "MIT", "dependencies": { @@ -5401,7 +5125,7 @@ }, "node_modules/compress-commons/node_modules/is-stream": { "version": "2.0.1", - "resolved": "https://registry.npmmirror.com/is-stream/-/is-stream-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", "license": "MIT", "engines": { @@ -5418,9 +5142,9 @@ "license": "MIT" }, "node_modules/confbox": { - "version": "0.2.2", - "resolved": "https://registry.npmmirror.com/confbox/-/confbox-0.2.2.tgz", - "integrity": "sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ==", + "version": "0.2.4", + "resolved": 
"https://registry.npmjs.org/confbox/-/confbox-0.2.4.tgz", + "integrity": "sha512-ysOGlgTFbN2/Y6Cg3Iye8YKulHw+R2fNXHrgSmXISQdMnomY6eNDprVdW9R5xBguEqI954+S6709UyiO7B+6OQ==", "license": "MIT" }, "node_modules/consola": { @@ -5455,22 +5179,13 @@ }, "node_modules/convert-source-map": { "version": "2.0.0", - "resolved": "https://registry.npmmirror.com/convert-source-map/-/convert-source-map-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", "license": "MIT" }, - "node_modules/cookie": { - "version": "1.0.2", - "resolved": "https://registry.npmmirror.com/cookie/-/cookie-1.0.2.tgz", - "integrity": "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA==", - "license": "MIT", - "engines": { - "node": ">=18" - } - }, "node_modules/cookie-es": { "version": "2.0.0", - "resolved": "https://registry.npmmirror.com/cookie-es/-/cookie-es-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/cookie-es/-/cookie-es-2.0.0.tgz", "integrity": "sha512-RAj4E421UYRgqokKUmotqAwuplYw15qtdXfY+hGzgCJ/MBjCVZcSoHK/kH9kocfjRjcDME7IiDWR/1WX1TM2Pg==", "license": "MIT" }, @@ -5502,31 +5217,23 @@ "url": "https://github.com/sponsors/mesqueeb" } }, - "node_modules/copy-file": { - "version": "11.0.0", - "resolved": "https://registry.npmmirror.com/copy-file/-/copy-file-11.0.0.tgz", - "integrity": "sha512-mFsNh/DIANLqFt5VHZoGirdg7bK5+oTWlhnGu6tgRhzBlnEKWaPX2xrFaLltii/6rmhqFMJqffUgknuRdpYlHw==", - "license": "MIT", + "node_modules/copy-paste": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/copy-paste/-/copy-paste-2.2.0.tgz", + "integrity": "sha512-jqSL4r9DSeiIvJZStLzY/sMLt9ToTM7RsK237lYOTG+KcbQJHGala3R1TUpa8h1p9adswVgIdV4qGbseVhL4lg==", "dependencies": { - "graceful-fs": "^4.2.11", - "p-event": "^6.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": 
"https://github.com/sponsors/sindresorhus" + "iconv-lite": "^0.4.8" } }, "node_modules/core-util-is": { "version": "1.0.3", - "resolved": "https://registry.npmmirror.com/core-util-is/-/core-util-is-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", "license": "MIT" }, "node_modules/crc-32": { "version": "1.2.2", - "resolved": "https://registry.npmmirror.com/crc-32/-/crc-32-1.2.2.tgz", + "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==", "license": "Apache-2.0", "bin": { @@ -5538,7 +5245,7 @@ }, "node_modules/crc32-stream": { "version": "6.0.0", - "resolved": "https://registry.npmmirror.com/crc32-stream/-/crc32-stream-6.0.0.tgz", + "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-6.0.0.tgz", "integrity": "sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g==", "license": "MIT", "dependencies": { @@ -5549,21 +5256,9 @@ "node": ">= 14" } }, - "node_modules/cron-parser": { - "version": "4.9.0", - "resolved": "https://registry.npmmirror.com/cron-parser/-/cron-parser-4.9.0.tgz", - "integrity": "sha512-p0SaNjrHOnQeR8/VnfGbmg9te2kfyYSQ7Sc/j/6DtPL3JQvKxmjO9TSjNFpujqV3vEYYBvNNvXSxzyksBWAx1Q==", - "license": "MIT", - "dependencies": { - "luxon": "^3.2.1" - }, - "engines": { - "node": ">=12.0.0" - } - }, "node_modules/croner": { "version": "9.1.0", - "resolved": "https://registry.npmmirror.com/croner/-/croner-9.1.0.tgz", + "resolved": "https://registry.npmjs.org/croner/-/croner-9.1.0.tgz", "integrity": "sha512-p9nwwR4qyT5W996vBZhdvBCnMhicY5ytZkR4D1Xj0wuTDEiMnjwR57Q3RXYY/s0EpX6Ay3vgIcfaR+ewGHsi+g==", "license": "MIT", "engines": { @@ -5615,9 +5310,9 @@ } }, "node_modules/css-declaration-sorter": { - "version": "7.2.0", - 
"resolved": "https://registry.npmmirror.com/css-declaration-sorter/-/css-declaration-sorter-7.2.0.tgz", - "integrity": "sha512-h70rUM+3PNFuaBDTLe8wF/cdWu+dOZmb7pJt8Z2sedYbAcQVQV/tEchueg3GWxwqS0cxtbxmaHEdkNACqcvsow==", + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-7.3.1.tgz", + "integrity": "sha512-gz6x+KkgNCjxq3Var03pRYLhyNfwhkKF1g/yoLgDNtFvVu0/fOLV9C8fFEZRjACp/XQLumjAYo7JVjzH3wLbxA==", "license": "ISC", "engines": { "node": "^14 || ^16 || >=18" @@ -5628,7 +5323,7 @@ }, "node_modules/css-select": { "version": "5.2.2", - "resolved": "https://registry.npmmirror.com/css-select/-/css-select-5.2.2.tgz", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.2.2.tgz", "integrity": "sha512-TizTzUddG/xYLA3NXodFM0fSbNizXjOKhqiQQwvhlspadZokn1KDy0NZFS0wuEubIYAV5/c1/lAr0TaaFXEXzw==", "license": "BSD-2-Clause", "dependencies": { @@ -5644,7 +5339,7 @@ }, "node_modules/css-tree": { "version": "3.1.0", - "resolved": "https://registry.npmmirror.com/css-tree/-/css-tree-3.1.0.tgz", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-3.1.0.tgz", "integrity": "sha512-0eW44TGN5SQXU1mWSkKwFstI/22X2bG1nYzZTYMAWjylYURhse752YgbE4Cx46AC+bAvI+/dYTPRk1LqSUnu6w==", "license": "MIT", "dependencies": { @@ -5657,7 +5352,7 @@ }, "node_modules/css-what": { "version": "6.2.2", - "resolved": "https://registry.npmmirror.com/css-what/-/css-what-6.2.2.tgz", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.2.2.tgz", "integrity": "sha512-u/O3vwbptzhMs3L1fQE82ZSLHQQfto5gyZzwteVIEyeaY5Fc7R4dapF/BvRoSYFeqfBk4m0V1Vafq5Pjv25wvA==", "license": "BSD-2-Clause", "engines": { @@ -5680,12 +5375,12 @@ } }, "node_modules/cssnano": { - "version": "7.1.0", - "resolved": "https://registry.npmmirror.com/cssnano/-/cssnano-7.1.0.tgz", - "integrity": "sha512-Pu3rlKkd0ZtlCUzBrKL1Z4YmhKppjC1H9jo7u1o4qaKqyhvixFgu5qLyNIAOjSTg9DjVPtUqdROq2EfpVMEe+w==", + "version": "7.1.2", + "resolved": 
"https://registry.npmjs.org/cssnano/-/cssnano-7.1.2.tgz", + "integrity": "sha512-HYOPBsNvoiFeR1eghKD5C3ASm64v9YVyJB4Ivnl2gqKoQYvjjN/G0rztvKQq8OxocUtC6sjqY8jwYngIB4AByA==", "license": "MIT", "dependencies": { - "cssnano-preset-default": "^7.0.8", + "cssnano-preset-default": "^7.0.10", "lilconfig": "^3.1.3" }, "engines": { @@ -5700,26 +5395,26 @@ } }, "node_modules/cssnano-preset-default": { - "version": "7.0.8", - "resolved": "https://registry.npmmirror.com/cssnano-preset-default/-/cssnano-preset-default-7.0.8.tgz", - "integrity": "sha512-d+3R2qwrUV3g4LEMOjnndognKirBZISylDZAF/TPeCWVjEwlXS2e4eN4ICkoobRe7pD3H6lltinKVyS1AJhdjQ==", + "version": "7.0.10", + "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-7.0.10.tgz", + "integrity": "sha512-6ZBjW0Lf1K1Z+0OKUAUpEN62tSXmYChXWi2NAA0afxEVsj9a+MbcB1l5qel6BHJHmULai2fCGRthCeKSFbScpA==", "license": "MIT", "dependencies": { - "browserslist": "^4.25.1", + "browserslist": "^4.27.0", "css-declaration-sorter": "^7.2.0", "cssnano-utils": "^5.0.1", "postcss-calc": "^10.1.1", - "postcss-colormin": "^7.0.4", - "postcss-convert-values": "^7.0.6", - "postcss-discard-comments": "^7.0.4", + "postcss-colormin": "^7.0.5", + "postcss-convert-values": "^7.0.8", + "postcss-discard-comments": "^7.0.5", "postcss-discard-duplicates": "^7.0.2", "postcss-discard-empty": "^7.0.1", "postcss-discard-overridden": "^7.0.1", "postcss-merge-longhand": "^7.0.5", - "postcss-merge-rules": "^7.0.6", + "postcss-merge-rules": "^7.0.7", "postcss-minify-font-values": "^7.0.1", "postcss-minify-gradients": "^7.0.1", - "postcss-minify-params": "^7.0.4", + "postcss-minify-params": "^7.0.5", "postcss-minify-selectors": "^7.0.5", "postcss-normalize-charset": "^7.0.1", "postcss-normalize-display-values": "^7.0.1", @@ -5727,11 +5422,11 @@ "postcss-normalize-repeat-style": "^7.0.1", "postcss-normalize-string": "^7.0.1", "postcss-normalize-timing-functions": "^7.0.1", - "postcss-normalize-unicode": "^7.0.4", + 
"postcss-normalize-unicode": "^7.0.5", "postcss-normalize-url": "^7.0.1", "postcss-normalize-whitespace": "^7.0.1", "postcss-ordered-values": "^7.0.2", - "postcss-reduce-initial": "^7.0.4", + "postcss-reduce-initial": "^7.0.5", "postcss-reduce-transforms": "^7.0.1", "postcss-svgo": "^7.1.0", "postcss-unique-selectors": "^7.0.4" @@ -5745,7 +5440,7 @@ }, "node_modules/cssnano-utils": { "version": "5.0.1", - "resolved": "https://registry.npmmirror.com/cssnano-utils/-/cssnano-utils-5.0.1.tgz", + "resolved": "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-5.0.1.tgz", "integrity": "sha512-ZIP71eQgG9JwjVZsTPSqhc6GHgEr53uJ7tK5///VfyWj6Xp2DBmixWHqJgPno+PqATzn48pL42ww9x5SSGmhZg==", "license": "MIT", "engines": { @@ -5757,7 +5452,7 @@ }, "node_modules/csso": { "version": "5.0.5", - "resolved": "https://registry.npmmirror.com/csso/-/csso-5.0.5.tgz", + "resolved": "https://registry.npmjs.org/csso/-/csso-5.0.5.tgz", "integrity": "sha512-0LrrStPOdJj+SPCCrGhzryycLjwcgUSHBtxNA8aIDxf0GLsRh1cKYhB00Gd1lDOS4yGH69+SNn13+TWbVHETFQ==", "license": "MIT", "dependencies": { @@ -5770,7 +5465,7 @@ }, "node_modules/csso/node_modules/css-tree": { "version": "2.2.1", - "resolved": "https://registry.npmmirror.com/css-tree/-/css-tree-2.2.1.tgz", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.2.1.tgz", "integrity": "sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA==", "license": "MIT", "dependencies": { @@ -5784,29 +5479,20 @@ }, "node_modules/csso/node_modules/mdn-data": { "version": "2.0.28", - "resolved": "https://registry.npmmirror.com/mdn-data/-/mdn-data-2.0.28.tgz", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.28.tgz", "integrity": "sha512-aylIc7Z9y4yzHYAJNuESG3hfhC+0Ibp/MAMiaOZgNv4pmEdFyfZhhhny4MNiAfWdBQ1RQ2mfDWmM1x8SvGyp8g==", "license": "CC0-1.0" }, "node_modules/csstype": { - "version": "3.1.3", - "resolved": "https://registry.npmmirror.com/csstype/-/csstype-3.1.3.tgz", - "integrity": 
"sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", "license": "MIT" }, - "node_modules/data-uri-to-buffer": { - "version": "4.0.1", - "resolved": "https://registry.npmmirror.com/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", - "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", - "license": "MIT", - "engines": { - "node": ">= 12" - } - }, "node_modules/db0": { - "version": "0.3.2", - "resolved": "https://registry.npmmirror.com/db0/-/db0-0.3.2.tgz", - "integrity": "sha512-xzWNQ6jk/+NtdfLyXEipbX55dmDSeteLFt/ayF+wZUU5bzKgmrDOxmInUTbyVRp46YwnJdkDA1KhB7WIXFofJw==", + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/db0/-/db0-0.3.4.tgz", + "integrity": "sha512-RiXXi4WaNzPTHEOu8UPQKMooIbqOEyqA1t7Z6MsdxSCeb8iUC9ko3LcmsLmeUt2SM5bctfArZKkRQggKZz7JNw==", "license": "MIT", "peerDependencies": { "@electric-sql/pglite": "*", @@ -5838,9 +5524,9 @@ } }, "node_modules/debug": { - "version": "4.4.1", - "resolved": "https://registry.npmmirror.com/debug/-/debug-4.4.1.tgz", - "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", "license": "MIT", "dependencies": { "ms": "^2.1.3" @@ -5854,15 +5540,6 @@ } } }, - "node_modules/decache": { - "version": "4.6.2", - "resolved": "https://registry.npmmirror.com/decache/-/decache-4.6.2.tgz", - "integrity": "sha512-2LPqkLeu8XWHU8qNCS3kcF6sCcb5zIzvWaAHYSvPfwhdd7mHuah29NssMzrTYyHN4F5oFy2ko9OBYxegtU0FEw==", - "license": "MIT", - "dependencies": { - "callsite": 
"^1.0.0" - } - }, "node_modules/deep-equal": { "version": "1.0.1", "resolved": "https://registry.npmmirror.com/deep-equal/-/deep-equal-1.0.1.tgz", @@ -5871,7 +5548,7 @@ }, "node_modules/deepmerge": { "version": "4.3.1", - "resolved": "https://registry.npmmirror.com/deepmerge/-/deepmerge-4.3.1.tgz", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", "license": "MIT", "engines": { @@ -5879,9 +5556,9 @@ } }, "node_modules/default-browser": { - "version": "5.2.1", - "resolved": "https://registry.npmmirror.com/default-browser/-/default-browser-5.2.1.tgz", - "integrity": "sha512-WY/3TUME0x3KPYdRRxEJJvXRHV4PyPoUsxtZa78lwItwRQRHhd2U9xOscaT/YTf8uCXIAjeJOFBVEh/7FtD8Xg==", + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/default-browser/-/default-browser-5.5.0.tgz", + "integrity": "sha512-H9LMLr5zwIbSxrmvikGuI/5KGhZ8E2zH3stkMgM5LpOWDutGM2JZaj460Udnf1a+946zc7YBgrqEWwbk7zHvGw==", "license": "MIT", "dependencies": { "bundle-name": "^4.1.0", @@ -5895,9 +5572,9 @@ } }, "node_modules/default-browser-id": { - "version": "5.0.0", - "resolved": "https://registry.npmmirror.com/default-browser-id/-/default-browser-id-5.0.0.tgz", - "integrity": "sha512-A6p/pu/6fyBcA1TRz/GqWYPViplrftcW2gZC9q79ngNCKAeR/X3gcEdXQHl4KNXV+3wgIJ1CPkJQ3IHM6lcsyA==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/default-browser-id/-/default-browser-id-5.0.1.tgz", + "integrity": "sha512-x1VCxdX4t+8wVfd1so/9w+vQ4vx7lKd2Qp5tDRutErwmR85OgmfX7RlLRMWafRMY7hbEiXIbudNrjOAPa/hL8Q==", "license": "MIT", "engines": { "node": ">=18" @@ -5908,7 +5585,7 @@ }, "node_modules/define-lazy-prop": { "version": "2.0.0", - "resolved": "https://registry.npmmirror.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", "integrity": 
"sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", "license": "MIT", "engines": { @@ -5923,7 +5600,7 @@ }, "node_modules/delayed-stream": { "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/delayed-stream/-/delayed-stream-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", "license": "MIT", "engines": { @@ -5938,7 +5615,7 @@ }, "node_modules/denque": { "version": "2.1.0", - "resolved": "https://registry.npmmirror.com/denque/-/denque-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz", "integrity": "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==", "license": "Apache-2.0", "engines": { @@ -5971,153 +5648,18 @@ } }, "node_modules/detect-libc": { - "version": "1.0.3", - "resolved": "https://registry.npmmirror.com/detect-libc/-/detect-libc-1.0.3.tgz", - "integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==", + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", "license": "Apache-2.0", - "bin": { - "detect-libc": "bin/detect-libc.js" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/detective-amd": { - "version": "6.0.1", - "resolved": "https://registry.npmmirror.com/detective-amd/-/detective-amd-6.0.1.tgz", - "integrity": "sha512-TtyZ3OhwUoEEIhTFoc1C9IyJIud3y+xYkSRjmvCt65+ycQuc3VcBrPRTMWoO/AnuCyOB8T5gky+xf7Igxtjd3g==", - "license": "MIT", - "dependencies": { - "ast-module-types": "^6.0.1", - "escodegen": "^2.1.0", - "get-amd-module-type": "^6.0.1", - "node-source-walk": "^7.0.1" - }, - "bin": { - "detective-amd": "bin/cli.js" - }, - "engines": { - 
"node": ">=18" - } - }, - "node_modules/detective-cjs": { - "version": "6.0.1", - "resolved": "https://registry.npmmirror.com/detective-cjs/-/detective-cjs-6.0.1.tgz", - "integrity": "sha512-tLTQsWvd2WMcmn/60T2inEJNhJoi7a//PQ7DwRKEj1yEeiQs4mrONgsUtEJKnZmrGWBBmE0kJ1vqOG/NAxwaJw==", - "license": "MIT", - "dependencies": { - "ast-module-types": "^6.0.1", - "node-source-walk": "^7.0.1" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/detective-es6": { - "version": "5.0.1", - "resolved": "https://registry.npmmirror.com/detective-es6/-/detective-es6-5.0.1.tgz", - "integrity": "sha512-XusTPuewnSUdoxRSx8OOI6xIA/uld/wMQwYsouvFN2LAg7HgP06NF1lHRV3x6BZxyL2Kkoih4ewcq8hcbGtwew==", - "license": "MIT", - "dependencies": { - "node-source-walk": "^7.0.1" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/detective-postcss": { - "version": "7.0.1", - "resolved": "https://registry.npmmirror.com/detective-postcss/-/detective-postcss-7.0.1.tgz", - "integrity": "sha512-bEOVpHU9picRZux5XnwGsmCN4+8oZo7vSW0O0/Enq/TO5R2pIAP2279NsszpJR7ocnQt4WXU0+nnh/0JuK4KHQ==", - "license": "MIT", - "dependencies": { - "is-url": "^1.2.4", - "postcss-values-parser": "^6.0.2" - }, - "engines": { - "node": "^14.0.0 || >=16.0.0" - }, - "peerDependencies": { - "postcss": "^8.4.47" - } - }, - "node_modules/detective-sass": { - "version": "6.0.1", - "resolved": "https://registry.npmmirror.com/detective-sass/-/detective-sass-6.0.1.tgz", - "integrity": "sha512-jSGPO8QDy7K7pztUmGC6aiHkexBQT4GIH+mBAL9ZyBmnUIOFbkfZnO8wPRRJFP/QP83irObgsZHCoDHZ173tRw==", - "license": "MIT", - "dependencies": { - "gonzales-pe": "^4.3.0", - "node-source-walk": "^7.0.1" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/detective-scss": { - "version": "5.0.1", - "resolved": "https://registry.npmmirror.com/detective-scss/-/detective-scss-5.0.1.tgz", - "integrity": "sha512-MAyPYRgS6DCiS6n6AoSBJXLGVOydsr9huwXORUlJ37K3YLyiN0vYHpzs3AdJOgHobBfispokoqrEon9rbmKacg==", - "license": "MIT", - "dependencies": { - 
"gonzales-pe": "^4.3.0", - "node-source-walk": "^7.0.1" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/detective-stylus": { - "version": "5.0.1", - "resolved": "https://registry.npmmirror.com/detective-stylus/-/detective-stylus-5.0.1.tgz", - "integrity": "sha512-Dgn0bUqdGbE3oZJ+WCKf8Dmu7VWLcmRJGc6RCzBgG31DLIyai9WAoEhYRgIHpt/BCRMrnXLbGWGPQuBUrnF0TA==", - "license": "MIT", - "engines": { - "node": ">=18" - } - }, - "node_modules/detective-typescript": { - "version": "14.0.0", - "resolved": "https://registry.npmmirror.com/detective-typescript/-/detective-typescript-14.0.0.tgz", - "integrity": "sha512-pgN43/80MmWVSEi5LUuiVvO/0a9ss5V7fwVfrJ4QzAQRd3cwqU1SfWGXJFcNKUqoD5cS+uIovhw5t/0rSeC5Mw==", - "license": "MIT", - "dependencies": { - "@typescript-eslint/typescript-estree": "^8.23.0", - "ast-module-types": "^6.0.1", - "node-source-walk": "^7.0.1" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "typescript": "^5.4.4" - } - }, - "node_modules/detective-vue2": { - "version": "2.2.0", - "resolved": "https://registry.npmmirror.com/detective-vue2/-/detective-vue2-2.2.0.tgz", - "integrity": "sha512-sVg/t6O2z1zna8a/UIV6xL5KUa2cMTQbdTIIvqNM0NIPswp52fe43Nwmbahzj3ww4D844u/vC2PYfiGLvD3zFA==", - "license": "MIT", - "dependencies": { - "@dependents/detective-less": "^5.0.1", - "@vue/compiler-sfc": "^3.5.13", - "detective-es6": "^5.0.1", - "detective-sass": "^6.0.1", - "detective-scss": "^5.0.1", - "detective-stylus": "^5.0.1", - "detective-typescript": "^14.0.0" - }, "engines": { - "node": ">=18" - }, - "peerDependencies": { - "typescript": "^5.4.4" + "node": ">=8" } }, "node_modules/devalue": { - "version": "5.1.1", - "resolved": "https://registry.npmmirror.com/devalue/-/devalue-5.1.1.tgz", - "integrity": "sha512-maua5KUiapvEwiEAe+XnlZ3Rh0GD+qI1J/nb9vrJc3muPXvcF/8gXYTWF76+5DAqHyDUtOIImEuo0YKE9mshVw==", + "version": "5.6.3", + "resolved": "https://registry.npmjs.org/devalue/-/devalue-5.6.3.tgz", + "integrity": 
"sha512-nc7XjUU/2Lb+SvEFVGcWLiKkzfw8+qHI7zn8WYXKkLMgfGSHbgCEaR6bJpev8Cm6Rmrb19Gfd/tZvGqx9is3wg==", "license": "MIT" }, "node_modules/didyoumean": { @@ -6127,9 +5669,9 @@ "license": "Apache-2.0" }, "node_modules/diff": { - "version": "8.0.2", - "resolved": "https://registry.npmmirror.com/diff/-/diff-8.0.2.tgz", - "integrity": "sha512-sSuxWU5j5SR9QQji/o2qMvqRNYRDOcBTgsJ/DeCf4iSN4gW+gNMXM7wFIP+fdXZxoNiAnHUTGjCr+TSWXdRDKg==", + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/diff/-/diff-8.0.3.tgz", + "integrity": "sha512-qejHi7bcSD4hQAZE0tNAawRK1ZtafHDmMTMkrrIGgSLl7hTnQHmKCeB45xAcbfTqK2zowkM3j3bHt/4b/ARbYQ==", "license": "BSD-3-Clause", "engines": { "node": ">=0.3.1" @@ -6143,7 +5685,7 @@ }, "node_modules/dom-serializer": { "version": "2.0.0", - "resolved": "https://registry.npmmirror.com/dom-serializer/-/dom-serializer-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", "license": "MIT", "dependencies": { @@ -6155,9 +5697,21 @@ "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" } }, + "node_modules/dom-serializer/node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, "node_modules/domelementtype": { "version": "2.3.0", - "resolved": "https://registry.npmmirror.com/domelementtype/-/domelementtype-2.3.0.tgz", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", "funding": [ { @@ -6169,7 +5723,7 @@ 
}, "node_modules/domhandler": { "version": "5.0.3", - "resolved": "https://registry.npmmirror.com/domhandler/-/domhandler-5.0.3.tgz", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", "license": "BSD-2-Clause", "dependencies": { @@ -6184,7 +5738,7 @@ }, "node_modules/domutils": { "version": "3.2.2", - "resolved": "https://registry.npmmirror.com/domutils/-/domutils-3.2.2.tgz", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.2.2.tgz", "integrity": "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==", "license": "BSD-2-Clause", "dependencies": { @@ -6197,32 +5751,20 @@ } }, "node_modules/dot-prop": { - "version": "9.0.0", - "resolved": "https://registry.npmmirror.com/dot-prop/-/dot-prop-9.0.0.tgz", - "integrity": "sha512-1gxPBJpI/pcjQhKgIU91II6Wkay+dLcN3M6rf2uwP8hRur3HtQXjVrdAK3sjC0piaEuxzMwjXChcETiJl47lAQ==", + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-10.1.0.tgz", + "integrity": "sha512-MVUtAugQMOff5RnBy2d9N31iG0lNwg1qAoAOn7pOK5wf94WIaE3My2p3uwTQuvS2AcqchkcR3bHByjaM0mmi7Q==", "license": "MIT", "dependencies": { - "type-fest": "^4.18.2" + "type-fest": "^5.0.0" }, "engines": { - "node": ">=18" + "node": ">=20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/dotenv": { - "version": "16.6.1", - "resolved": "https://registry.npmmirror.com/dotenv/-/dotenv-16.6.1.tgz", - "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", - "license": "BSD-2-Clause", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://dotenvx.com" - } - }, "node_modules/dunder-proto": { "version": "1.0.1", "resolved": "https://registry.npmmirror.com/dunder-proto/-/dunder-proto-1.0.1.tgz", @@ -6239,7 +5781,7 @@ }, "node_modules/duplexer": { 
"version": "0.1.2", - "resolved": "https://registry.npmmirror.com/duplexer/-/duplexer-0.1.2.tgz", + "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz", "integrity": "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==", "license": "MIT" }, @@ -6256,9 +5798,9 @@ "license": "MIT" }, "node_modules/electron-to-chromium": { - "version": "1.5.191", - "resolved": "https://registry.npmmirror.com/electron-to-chromium/-/electron-to-chromium-1.5.191.tgz", - "integrity": "sha512-xcwe9ELcuxYLUFqZZxL19Z6HVKcvNkIwhbHUz7L3us6u12yR+7uY89dSl570f/IqNthx8dAw3tojG7i4Ni4tDA==", + "version": "1.5.302", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.302.tgz", + "integrity": "sha512-sM6HAN2LyK82IyPBpznDRqlTQAtuSaO+ShzFiWTvoMJLHyZ+Y39r8VMfHzwbU8MVBzQ4Wdn85+wlZl2TLGIlwg==", "license": "ISC" }, "node_modules/emoji-regex": { @@ -6267,34 +5809,19 @@ "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", "license": "MIT" }, - "node_modules/enabled": { - "version": "2.0.0", - "resolved": "https://registry.npmmirror.com/enabled/-/enabled-2.0.0.tgz", - "integrity": "sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ==", - "license": "MIT" - }, "node_modules/encodeurl": { "version": "2.0.0", - "resolved": "https://registry.npmmirror.com/encodeurl/-/encodeurl-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", "license": "MIT", "engines": { "node": ">= 0.8" } }, - "node_modules/end-of-stream": { - "version": "1.4.5", - "resolved": "https://registry.npmmirror.com/end-of-stream/-/end-of-stream-1.4.5.tgz", - "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", - "license": "MIT", - "dependencies": 
{ - "once": "^1.4.0" - } - }, "node_modules/entities": { - "version": "4.5.0", - "resolved": "https://registry.npmmirror.com/entities/-/entities-4.5.0.tgz", - "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/entities/-/entities-7.0.1.tgz", + "integrity": "sha512-TWrgLOFUQTH994YUyl1yT4uyavY5nNB5muff+RtWaqNVCAK408b5ZnnbNAUEWLTCpum9w6arT70i1XdQ4UeOPA==", "license": "BSD-2-Clause", "engines": { "node": ">=0.12" @@ -6303,21 +5830,9 @@ "url": "https://github.com/fb55/entities?sponsor=1" } }, - "node_modules/env-paths": { - "version": "3.0.0", - "resolved": "https://registry.npmmirror.com/env-paths/-/env-paths-3.0.0.tgz", - "integrity": "sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A==", - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/error-stack-parser-es": { "version": "1.0.5", - "resolved": "https://registry.npmmirror.com/error-stack-parser-es/-/error-stack-parser-es-1.0.5.tgz", + "resolved": "https://registry.npmjs.org/error-stack-parser-es/-/error-stack-parser-es-1.0.5.tgz", "integrity": "sha512-5qucVt2XcuGMcEGgWI7i+yZpmpByQ8J1lHhcL7PwqCwu9FPP3VUXzT4ltHe5i2z9dePwEHcDVOAfSnHsOlCXRA==", "license": "MIT", "funding": { @@ -6349,9 +5864,9 @@ } }, "node_modules/es-module-lexer": { - "version": "1.7.0", - "resolved": "https://registry.npmmirror.com/es-module-lexer/-/es-module-lexer-1.7.0.tgz", - "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-2.0.0.tgz", + "integrity": "sha512-5POEcUuZybH7IdmGsD8wlf0AI55wMecM9rVBTI/qEAy2c1kTOm3DjFYjrBdI2K3BaJjJYfYFeRtM0t9ssnRuxw==", "license": "MIT" }, 
"node_modules/es-object-atoms": { @@ -6368,7 +5883,7 @@ }, "node_modules/es-set-tostringtag": { "version": "2.1.0", - "resolved": "https://registry.npmmirror.com/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", "license": "MIT", "dependencies": { @@ -6382,9 +5897,9 @@ } }, "node_modules/esbuild": { - "version": "0.25.8", - "resolved": "https://registry.npmmirror.com/esbuild/-/esbuild-0.25.8.tgz", - "integrity": "sha512-vVC0USHGtMi8+R4Kz8rt6JhEWLxsv9Rnu/lGYbPR8u47B+DCBksq9JarW0zOO7bs37hyOK1l2/oqtbciutL5+Q==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.3.tgz", + "integrity": "sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==", "hasInstallScript": true, "license": "MIT", "bin": { @@ -6394,32 +5909,32 @@ "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.25.8", - "@esbuild/android-arm": "0.25.8", - "@esbuild/android-arm64": "0.25.8", - "@esbuild/android-x64": "0.25.8", - "@esbuild/darwin-arm64": "0.25.8", - "@esbuild/darwin-x64": "0.25.8", - "@esbuild/freebsd-arm64": "0.25.8", - "@esbuild/freebsd-x64": "0.25.8", - "@esbuild/linux-arm": "0.25.8", - "@esbuild/linux-arm64": "0.25.8", - "@esbuild/linux-ia32": "0.25.8", - "@esbuild/linux-loong64": "0.25.8", - "@esbuild/linux-mips64el": "0.25.8", - "@esbuild/linux-ppc64": "0.25.8", - "@esbuild/linux-riscv64": "0.25.8", - "@esbuild/linux-s390x": "0.25.8", - "@esbuild/linux-x64": "0.25.8", - "@esbuild/netbsd-arm64": "0.25.8", - "@esbuild/netbsd-x64": "0.25.8", - "@esbuild/openbsd-arm64": "0.25.8", - "@esbuild/openbsd-x64": "0.25.8", - "@esbuild/openharmony-arm64": "0.25.8", - "@esbuild/sunos-x64": "0.25.8", - "@esbuild/win32-arm64": "0.25.8", - "@esbuild/win32-ia32": "0.25.8", - "@esbuild/win32-x64": "0.25.8" + 
"@esbuild/aix-ppc64": "0.27.3", + "@esbuild/android-arm": "0.27.3", + "@esbuild/android-arm64": "0.27.3", + "@esbuild/android-x64": "0.27.3", + "@esbuild/darwin-arm64": "0.27.3", + "@esbuild/darwin-x64": "0.27.3", + "@esbuild/freebsd-arm64": "0.27.3", + "@esbuild/freebsd-x64": "0.27.3", + "@esbuild/linux-arm": "0.27.3", + "@esbuild/linux-arm64": "0.27.3", + "@esbuild/linux-ia32": "0.27.3", + "@esbuild/linux-loong64": "0.27.3", + "@esbuild/linux-mips64el": "0.27.3", + "@esbuild/linux-ppc64": "0.27.3", + "@esbuild/linux-riscv64": "0.27.3", + "@esbuild/linux-s390x": "0.27.3", + "@esbuild/linux-x64": "0.27.3", + "@esbuild/netbsd-arm64": "0.27.3", + "@esbuild/netbsd-x64": "0.27.3", + "@esbuild/openbsd-arm64": "0.27.3", + "@esbuild/openbsd-x64": "0.27.3", + "@esbuild/openharmony-arm64": "0.27.3", + "@esbuild/sunos-x64": "0.27.3", + "@esbuild/win32-arm64": "0.27.3", + "@esbuild/win32-ia32": "0.27.3", + "@esbuild/win32-x64": "0.27.3" } }, "node_modules/escalade": { @@ -6449,71 +5964,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/escodegen": { - "version": "2.1.0", - "resolved": "https://registry.npmmirror.com/escodegen/-/escodegen-2.1.0.tgz", - "integrity": "sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==", - "license": "BSD-2-Clause", - "dependencies": { - "esprima": "^4.0.1", - "estraverse": "^5.2.0", - "esutils": "^2.0.2" - }, - "bin": { - "escodegen": "bin/escodegen.js", - "esgenerate": "bin/esgenerate.js" - }, - "engines": { - "node": ">=6.0" - }, - "optionalDependencies": { - "source-map": "~0.6.1" - } - }, - "node_modules/escodegen/node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmmirror.com/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "license": "BSD-3-Clause", - "optional": true, - "engines": { - "node": ">=0.10.0" - } - }, - 
"node_modules/eslint-visitor-keys": { - "version": "4.2.1", - "resolved": "https://registry.npmmirror.com/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", - "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", - "license": "Apache-2.0", - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmmirror.com/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "license": "BSD-2-Clause", - "bin": { - "esparse": "bin/esparse.js", - "esvalidate": "bin/esvalidate.js" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmmirror.com/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "license": "BSD-2-Clause", - "engines": { - "node": ">=4.0" - } - }, "node_modules/estree-walker": { "version": "3.0.3", "resolved": "https://registry.npmmirror.com/estree-walker/-/estree-walker-3.0.3.tgz", @@ -6523,18 +5973,9 @@ "@types/estree": "^1.0.0" } }, - "node_modules/esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmmirror.com/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/etag": { "version": "1.8.1", - "resolved": "https://registry.npmmirror.com/etag/-/etag-1.8.1.tgz", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", "license": "MIT", "engines": { @@ -6543,7 +5984,7 @@ }, 
"node_modules/event-target-shim": { "version": "5.0.1", - "resolved": "https://registry.npmmirror.com/event-target-shim/-/event-target-shim-5.0.1.tgz", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", "license": "MIT", "engines": { @@ -6552,16 +5993,25 @@ }, "node_modules/events": { "version": "3.3.0", - "resolved": "https://registry.npmmirror.com/events/-/events-3.3.0.tgz", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", "license": "MIT", "engines": { "node": ">=0.8.x" } }, + "node_modules/events-universal": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/events-universal/-/events-universal-1.0.1.tgz", + "integrity": "sha512-LUd5euvbMLpwOF8m6ivPCbhQeSiYVNb8Vs0fQ8QjXo0JTkEHpz8pxdQf0gStltaPpw0Cca8b39KxvK9cfKRiAw==", + "license": "Apache-2.0", + "dependencies": { + "bare-events": "^2.7.0" + } + }, "node_modules/execa": { "version": "8.0.1", - "resolved": "https://registry.npmmirror.com/execa/-/execa-8.0.1.tgz", + "resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", "integrity": "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==", "license": "MIT", "dependencies": { @@ -6582,50 +6032,15 @@ "url": "https://github.com/sindresorhus/execa?sponsor=1" } }, - "node_modules/exsolve": { - "version": "1.0.7", - "resolved": "https://registry.npmmirror.com/exsolve/-/exsolve-1.0.7.tgz", - "integrity": "sha512-VO5fQUzZtI6C+vx4w/4BWJpg3s/5l+6pRQEHzFRM8WFi4XffSP1Z+4qi7GbjWbvRQEbdIco5mIMq+zX4rPuLrw==", - "license": "MIT" - }, - "node_modules/extract-zip": { - "version": "2.0.1", - "resolved": "https://registry.npmmirror.com/extract-zip/-/extract-zip-2.0.1.tgz", - "integrity": 
"sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==", - "license": "BSD-2-Clause", - "dependencies": { - "debug": "^4.1.1", - "get-stream": "^5.1.0", - "yauzl": "^2.10.0" - }, - "bin": { - "extract-zip": "cli.js" - }, - "engines": { - "node": ">= 10.17.0" - }, - "optionalDependencies": { - "@types/yauzl": "^2.9.1" - } - }, - "node_modules/extract-zip/node_modules/get-stream": { - "version": "5.2.0", - "resolved": "https://registry.npmmirror.com/get-stream/-/get-stream-5.2.0.tgz", - "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", - "license": "MIT", - "dependencies": { - "pump": "^3.0.0" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, + "node_modules/exsolve": { + "version": "1.0.8", + "resolved": "https://registry.npmmirror.com/exsolve/-/exsolve-1.0.8.tgz", + "integrity": "sha512-LmDxfWXwcTArk8fUEnOfSZpHOJ6zOMUJKOtFLFqJLoKJetuQG874Uc7/Kki7zFLzYybmZhp1M7+98pfMqeX8yA==", + "license": "MIT" + }, "node_modules/fast-fifo": { "version": "1.3.2", - "resolved": "https://registry.npmmirror.com/fast-fifo/-/fast-fifo-1.3.2.tgz", + "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz", "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==", "license": "MIT" }, @@ -6646,10 +6061,13 @@ } }, "node_modules/fast-npm-meta": { - "version": "0.4.4", - "resolved": "https://registry.npmmirror.com/fast-npm-meta/-/fast-npm-meta-0.4.4.tgz", - "integrity": "sha512-cq8EVW3jpX1U3dO1AYanz2BJ6n9ITQgCwE1xjNwI5jO2a9erE369OZNO8Wt/Wbw8YHhCD/dimH9BxRsY+6DinA==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/fast-npm-meta/-/fast-npm-meta-1.3.0.tgz", + "integrity": "sha512-Yz48hvMPiD+J5vPQj767Gdd3i6TOzqwBuvc0ylkzyxh2+VEJmtWBBy1OT1/CoeStcKhS6lBK8opUf13BNXBBYw==", "license": "MIT", + "bin": { + "fast-npm-meta": 
"dist/cli.mjs" + }, "funding": { "url": "https://github.com/sponsors/antfu" } @@ -6663,20 +6081,14 @@ "reusify": "^1.0.4" } }, - "node_modules/fd-slicer": { - "version": "1.1.0", - "resolved": "https://registry.npmmirror.com/fd-slicer/-/fd-slicer-1.1.0.tgz", - "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", - "license": "MIT", - "dependencies": { - "pend": "~1.2.0" - } - }, "node_modules/fdir": { - "version": "6.4.6", - "resolved": "https://registry.npmmirror.com/fdir/-/fdir-6.4.6.tgz", - "integrity": "sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w==", + "version": "6.5.0", + "resolved": "https://registry.npmmirror.com/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, "peerDependencies": { "picomatch": "^3 || ^4" }, @@ -6686,38 +6098,9 @@ } } }, - "node_modules/fecha": { - "version": "4.2.3", - "resolved": "https://registry.npmmirror.com/fecha/-/fecha-4.2.3.tgz", - "integrity": "sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw==", - "license": "MIT" - }, - "node_modules/fetch-blob": { - "version": "3.2.0", - "resolved": "https://registry.npmmirror.com/fetch-blob/-/fetch-blob-3.2.0.tgz", - "integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/jimmywarting" - }, - { - "type": "paypal", - "url": "https://paypal.me/jimmywarting" - } - ], - "license": "MIT", - "dependencies": { - "node-domexception": "^1.0.0", - "web-streams-polyfill": "^3.0.3" - }, - "engines": { - "node": "^12.20 || >= 14.13" - } - }, "node_modules/file-uri-to-path": { "version": "1.0.0", - "resolved": 
"https://registry.npmmirror.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", "license": "MIT" }, @@ -6733,57 +6116,10 @@ "node": ">=8" } }, - "node_modules/filter-obj": { - "version": "6.1.0", - "resolved": "https://registry.npmmirror.com/filter-obj/-/filter-obj-6.1.0.tgz", - "integrity": "sha512-xdMtCAODmPloU9qtmPcdBV9Kd27NtMse+4ayThxqIHUES5Z2S6bGpap5PpdmNM56ub7y3i1eyr+vJJIIgWGKmA==", - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/find-up": { - "version": "7.0.0", - "resolved": "https://registry.npmmirror.com/find-up/-/find-up-7.0.0.tgz", - "integrity": "sha512-YyZM99iHrqLKjmt4LJDj58KI+fYyufRLBSYcqycxf//KpBk9FoewoGX0450m9nB44qrZnovzC2oeP5hUibxc/g==", - "license": "MIT", - "dependencies": { - "locate-path": "^7.2.0", - "path-exists": "^5.0.0", - "unicorn-magic": "^0.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/find-up-simple": { - "version": "1.0.1", - "resolved": "https://registry.npmmirror.com/find-up-simple/-/find-up-simple-1.0.1.tgz", - "integrity": "sha512-afd4O7zpqHeRyg4PfDQsXmlDe2PfdHtJt6Akt8jOWaApLOZk5JXs6VMR29lz03pRe9mpykrRCYIYxaJYcfpncQ==", - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/fn.name": { - "version": "1.1.0", - "resolved": "https://registry.npmmirror.com/fn.name/-/fn.name-1.1.0.tgz", - "integrity": "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==", - "license": "MIT" - }, "node_modules/follow-redirects": { - "version": "1.15.9", - "resolved": 
"https://registry.npmmirror.com/follow-redirects/-/follow-redirects-1.15.9.tgz", - "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz", + "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", "funding": [ { "type": "individual", @@ -6817,9 +6153,9 @@ } }, "node_modules/form-data": { - "version": "4.0.4", - "resolved": "https://registry.npmmirror.com/form-data/-/form-data-4.0.4.tgz", - "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz", + "integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==", "license": "MIT", "dependencies": { "asynckit": "^0.4.0", @@ -6832,55 +6168,37 @@ "node": ">= 6" } }, - "node_modules/form-data/node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmmirror.com/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "node_modules/fraction.js": { + "version": "5.3.4", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-5.3.4.tgz", + "integrity": "sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==", "license": "MIT", "engines": { - "node": ">= 0.6" - } - }, - "node_modules/form-data/node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmmirror.com/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "license": "MIT", - "dependencies": { - "mime-db": "1.52.0" + "node": "*" }, - "engines": { - "node": 
">= 0.6" + "funding": { + "type": "github", + "url": "https://github.com/sponsors/rawify" } }, - "node_modules/formdata-polyfill": { - "version": "4.0.10", - "resolved": "https://registry.npmmirror.com/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", - "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", + "node_modules/framesync": { + "version": "6.1.2", + "resolved": "https://registry.npmmirror.com/framesync/-/framesync-6.1.2.tgz", + "integrity": "sha512-jBTqhX6KaQVDyus8muwZbBeGGP0XgujBRbQ7gM7BRdS3CadCZIHiawyzYLnafYcvZIh5j8WE7cxZKFn7dXhu9g==", "license": "MIT", "dependencies": { - "fetch-blob": "^3.1.2" - }, - "engines": { - "node": ">=12.20.0" + "tslib": "2.4.0" } }, - "node_modules/fraction.js": { - "version": "4.3.7", - "resolved": "https://registry.npmmirror.com/fraction.js/-/fraction.js-4.3.7.tgz", - "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==", - "license": "MIT", - "engines": { - "node": "*" - }, - "funding": { - "type": "patreon", - "url": "https://github.com/sponsors/rawify" - } + "node_modules/framesync/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmmirror.com/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==", + "license": "0BSD" }, "node_modules/fresh": { "version": "2.0.0", - "resolved": "https://registry.npmmirror.com/fresh/-/fresh-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", "license": "MIT", "engines": { @@ -6933,35 +6251,28 @@ }, "node_modules/fuse.js": { "version": "7.1.0", - "resolved": "https://registry.npmmirror.com/fuse.js/-/fuse.js-7.1.0.tgz", + "resolved": "https://registry.npmjs.org/fuse.js/-/fuse.js-7.1.0.tgz", 
"integrity": "sha512-trLf4SzuuUxfusZADLINj+dE8clK1frKdmqiJNb1Es75fmI5oY6X2mxLVUciLLjxqw/xr72Dhy+lER6dGd02FQ==", "license": "Apache-2.0", "engines": { "node": ">=10" } }, + "node_modules/fzf": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fzf/-/fzf-0.5.2.tgz", + "integrity": "sha512-Tt4kuxLXFKHy8KT40zwsUPUkg1CrsgY25FxA2U/j/0WgEDCk3ddc/zLTCCcbSHX9FcKtLuVaDGtGE/STWC+j3Q==", + "license": "BSD-3-Clause" + }, "node_modules/gensync": { "version": "1.0.0-beta.2", - "resolved": "https://registry.npmmirror.com/gensync/-/gensync-1.0.0-beta.2.tgz", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", "license": "MIT", "engines": { "node": ">=6.9.0" } }, - "node_modules/get-amd-module-type": { - "version": "6.0.1", - "resolved": "https://registry.npmmirror.com/get-amd-module-type/-/get-amd-module-type-6.0.1.tgz", - "integrity": "sha512-MtjsmYiCXcYDDrGqtNbeIYdAl85n+5mSv2r3FbzER/YV3ZILw4HNNIw34HuV5pyl0jzs6GFYU1VHVEefhgcNHQ==", - "license": "MIT", - "dependencies": { - "ast-module-types": "^6.0.1", - "node-source-walk": "^7.0.1" - }, - "engines": { - "node": ">=18" - } - }, "node_modules/get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmmirror.com/get-caller-file/-/get-caller-file-2.0.5.tgz", @@ -6997,7 +6308,7 @@ }, "node_modules/get-port-please": { "version": "3.2.0", - "resolved": "https://registry.npmmirror.com/get-port-please/-/get-port-please-3.2.0.tgz", + "resolved": "https://registry.npmjs.org/get-port-please/-/get-port-please-3.2.0.tgz", "integrity": "sha512-I9QVvBw5U/hw3RmWpYKRumUeaDgxTPd401x364rLmWBJcOQ753eov1eTgzDqRG9bqFIfDc7gfzcQEWrUri3o1A==", "license": "MIT" }, @@ -7016,7 +6327,7 @@ }, "node_modules/get-stream": { "version": "8.0.1", - "resolved": "https://registry.npmmirror.com/get-stream/-/get-stream-8.0.1.tgz", + "resolved": 
"https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", "license": "MIT", "engines": { @@ -7043,29 +6354,11 @@ "giget": "dist/cli.mjs" } }, - "node_modules/git-up": { - "version": "8.1.1", - "resolved": "https://registry.npmmirror.com/git-up/-/git-up-8.1.1.tgz", - "integrity": "sha512-FDenSF3fVqBYSaJoYy1KSc2wosx0gCvKP+c+PRBht7cAaiCeQlBtfBDX9vgnNOHmdePlSFITVcn4pFfcgNvx3g==", - "license": "MIT", - "dependencies": { - "is-ssh": "^1.4.0", - "parse-url": "^9.2.0" - } - }, - "node_modules/git-url-parse": { - "version": "16.1.0", - "resolved": "https://registry.npmmirror.com/git-url-parse/-/git-url-parse-16.1.0.tgz", - "integrity": "sha512-cPLz4HuK86wClEW7iDdeAKcCVlWXmrLpb2L+G9goW0Z1dtpNS6BXXSOckUTlJT/LDQViE1QZKstNORzHsLnobw==", - "license": "MIT", - "dependencies": { - "git-up": "^8.1.0" - } - }, "node_modules/glob": { - "version": "10.4.5", - "resolved": "https://registry.npmmirror.com/glob/-/glob-10.4.5.tgz", - "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "version": "10.5.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz", + "integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==", + "deprecated": "Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. 
Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me", "license": "ISC", "dependencies": { "foreground-child": "^3.1.0", @@ -7096,7 +6389,7 @@ }, "node_modules/global-directory": { "version": "4.0.1", - "resolved": "https://registry.npmmirror.com/global-directory/-/global-directory-4.0.1.tgz", + "resolved": "https://registry.npmjs.org/global-directory/-/global-directory-4.0.1.tgz", "integrity": "sha512-wHTUcDUoZ1H5/0iVqEudYW4/kAlN5cZ3j/bXn0Dpbizl9iaUVeWSHqiOjsgk6OW2bkLclbBjzewBz6weQ1zA2Q==", "license": "MIT", "dependencies": { @@ -7110,52 +6403,25 @@ } }, "node_modules/globby": { - "version": "14.1.0", - "resolved": "https://registry.npmmirror.com/globby/-/globby-14.1.0.tgz", - "integrity": "sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==", + "version": "16.1.1", + "resolved": "https://registry.npmjs.org/globby/-/globby-16.1.1.tgz", + "integrity": "sha512-dW7vl+yiAJSp6aCekaVnVJxurRv7DCOLyXqEG3RYMYUg7AuJ2jCqPkZTA8ooqC2vtnkaMcV5WfFBMuEnTu1OQg==", "license": "MIT", "dependencies": { - "@sindresorhus/merge-streams": "^2.1.0", + "@sindresorhus/merge-streams": "^4.0.0", "fast-glob": "^3.3.3", - "ignore": "^7.0.3", - "path-type": "^6.0.0", + "ignore": "^7.0.5", + "is-path-inside": "^4.0.0", "slash": "^5.1.0", - "unicorn-magic": "^0.3.0" - }, - "engines": { - "node": ">=18" + "unicorn-magic": "^0.4.0" }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/globby/node_modules/unicorn-magic": { - "version": "0.3.0", - "resolved": "https://registry.npmmirror.com/unicorn-magic/-/unicorn-magic-0.3.0.tgz", - "integrity": "sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==", - "license": "MIT", "engines": { - "node": ">=18" + "node": ">=20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/gonzales-pe": { - "version": "4.3.0", - "resolved": 
"https://registry.npmmirror.com/gonzales-pe/-/gonzales-pe-4.3.0.tgz", - "integrity": "sha512-otgSPpUmdWJ43VXyiNgEYE4luzHCL2pz4wQ0OnDluC6Eg4Ko3Vexy/SrSynglw/eR+OhkzmqFCZa/OFa/RgAOQ==", - "license": "MIT", - "dependencies": { - "minimist": "^1.2.5" - }, - "bin": { - "gonzales": "bin/gonzales.js" - }, - "engines": { - "node": ">=0.6.0" - } - }, "node_modules/gopd": { "version": "1.2.0", "resolved": "https://registry.npmmirror.com/gopd/-/gopd-1.2.0.tgz", @@ -7174,9 +6440,15 @@ "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", "license": "ISC" }, + "node_modules/gsap": { + "version": "3.14.2", + "resolved": "https://registry.npmmirror.com/gsap/-/gsap-3.14.2.tgz", + "integrity": "sha512-P8/mMxVLU7o4+55+1TCnQrPmgjPKnwkzkXOK1asnR9Jg2lna4tEY5qBJjMmAaOBDDZWtlRjBXjLa0w53G/uBLA==", + "license": "Standard 'no charge' license: https://gsap.com/standard-license." + }, "node_modules/gzip-size": { "version": "7.0.0", - "resolved": "https://registry.npmmirror.com/gzip-size/-/gzip-size-7.0.0.tgz", + "resolved": "https://registry.npmjs.org/gzip-size/-/gzip-size-7.0.0.tgz", "integrity": "sha512-O1Ld7Dr+nqPnmGpdhzLmMTQ4vAsD+rHwMm1NLUmoUFFymBOMKxCCrtDxqdBRYXdeEPEi3SyoR4TizJLQrnKBNA==", "license": "MIT", "dependencies": { @@ -7190,19 +6462,19 @@ } }, "node_modules/h3": { - "version": "1.15.3", - "resolved": "https://registry.npmmirror.com/h3/-/h3-1.15.3.tgz", - "integrity": "sha512-z6GknHqyX0h9aQaTx22VZDf6QyZn+0Nh+Ym8O/u0SGSkyF5cuTJYKlc8MkzW3Nzf9LE1ivcpmYC3FUGpywhuUQ==", + "version": "1.15.5", + "resolved": "https://registry.npmjs.org/h3/-/h3-1.15.5.tgz", + "integrity": "sha512-xEyq3rSl+dhGX2Lm0+eFQIAzlDN6Fs0EcC4f7BNUmzaRX/PTzeuM+Tr2lHB8FoXggsQIeXLj8EDVgs5ywxyxmg==", "license": "MIT", "dependencies": { "cookie-es": "^1.2.2", - "crossws": "^0.3.4", + "crossws": "^0.3.5", "defu": "^6.1.4", "destr": "^2.0.5", "iron-webcrypto": "^1.2.1", - "node-mock-http": "^1.0.0", + "node-mock-http": "^1.0.4", "radix3": "^1.1.2", - "ufo": 
"^1.6.1", + "ufo": "^1.6.3", "uncrypto": "^0.1.3" } }, @@ -7260,30 +6532,18 @@ "node": ">= 0.4" } }, + "node_modules/hey-listen": { + "version": "1.0.8", + "resolved": "https://registry.npmmirror.com/hey-listen/-/hey-listen-1.0.8.tgz", + "integrity": "sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q==", + "license": "MIT" + }, "node_modules/hookable": { "version": "5.5.3", "resolved": "https://registry.npmmirror.com/hookable/-/hookable-5.5.3.tgz", "integrity": "sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ==", "license": "MIT" }, - "node_modules/hosted-git-info": { - "version": "7.0.2", - "resolved": "https://registry.npmmirror.com/hosted-git-info/-/hosted-git-info-7.0.2.tgz", - "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==", - "license": "ISC", - "dependencies": { - "lru-cache": "^10.0.1" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/hosted-git-info/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmmirror.com/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "license": "ISC" - }, "node_modules/http-assert": { "version": "1.5.0", "resolved": "https://registry.npmmirror.com/http-assert/-/http-assert-1.5.0.tgz", @@ -7332,33 +6592,28 @@ } }, "node_modules/http-errors": { - "version": "2.0.0", - "resolved": "https://registry.npmmirror.com/http-errors/-/http-errors-2.0.0.tgz", - "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz", + "integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==", "license": "MIT", "dependencies": { - "depd": 
"2.0.0", - "inherits": "2.0.4", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "toidentifier": "1.0.1" + "depd": "~2.0.0", + "inherits": "~2.0.4", + "setprototypeof": "~1.2.0", + "statuses": "~2.0.2", + "toidentifier": "~1.0.1" }, "engines": { "node": ">= 0.8" - } - }, - "node_modules/http-errors/node_modules/statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmmirror.com/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", - "license": "MIT", - "engines": { - "node": ">= 0.8" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/http-shutdown": { "version": "1.2.2", - "resolved": "https://registry.npmmirror.com/http-shutdown/-/http-shutdown-1.2.2.tgz", + "resolved": "https://registry.npmjs.org/http-shutdown/-/http-shutdown-1.2.2.tgz", "integrity": "sha512-S9wWkJ/VSY9/k4qcjG318bqJNruzE4HySUhFYknwmu6LBP97KLLfwNf+n4V1BHurvFNkSKLFnK/RsuUnRTf9Vw==", "license": "MIT", "engines": { @@ -7368,7 +6623,7 @@ }, "node_modules/https-proxy-agent": { "version": "7.0.6", - "resolved": "https://registry.npmmirror.com/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", "license": "MIT", "dependencies": { @@ -7381,22 +6636,34 @@ }, "node_modules/httpxy": { "version": "0.1.7", - "resolved": "https://registry.npmmirror.com/httpxy/-/httpxy-0.1.7.tgz", + "resolved": "https://registry.npmjs.org/httpxy/-/httpxy-0.1.7.tgz", "integrity": "sha512-pXNx8gnANKAndgga5ahefxc++tJvNL87CXoRwxn1cJE2ZkWEojF3tNfQIEhZX/vfpt+wzeAzpUI4qkediX1MLQ==", "license": "MIT" }, "node_modules/human-signals": { "version": "5.0.0", - "resolved": "https://registry.npmmirror.com/human-signals/-/human-signals-5.0.0.tgz", + "resolved": 
"https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", "integrity": "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==", "license": "Apache-2.0", "engines": { "node": ">=16.17.0" } }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/ieee754": { "version": "1.2.1", - "resolved": "https://registry.npmmirror.com/ieee754/-/ieee754-1.2.1.tgz", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", "funding": [ { @@ -7424,9 +6691,9 @@ } }, "node_modules/image-meta": { - "version": "0.2.1", - "resolved": "https://registry.npmmirror.com/image-meta/-/image-meta-0.2.1.tgz", - "integrity": "sha512-K6acvFaelNxx8wc2VjbIzXKDVB0Khs0QT35U6NkGfTdCmjLNcO2945m7RFNR9/RPVFm48hq7QPzK8uGH18HCGw==", + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/image-meta/-/image-meta-0.2.2.tgz", + "integrity": "sha512-3MOLanc3sb3LNGWQl1RlQlNWURE5g32aUphrDyFeCsxBTk08iE3VNe4CwsUZ0Qs1X+EfX0+r29Sxdpza4B+yRA==", "license": "MIT" }, "node_modules/impound": { @@ -7442,27 +6709,6 @@ "unplugin-utils": "^0.2.4" } }, - "node_modules/imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmmirror.com/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", - "license": "MIT", - "engines": { - "node": ">=0.8.19" - } - }, - "node_modules/index-to-position": { - "version": "1.1.0", - "resolved": "https://registry.npmmirror.com/index-to-position/-/index-to-position-1.1.0.tgz", - 
"integrity": "sha512-XPdx9Dq4t9Qk1mTMbWONJqU7boCoumEH7fRET37HX5+khDUl3J2W6PdALxhILYlIYx2amlwYcRPp28p0tSiojg==", - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/inflight": { "version": "1.0.6", "resolved": "https://registry.npmmirror.com/inflight/-/inflight-1.0.6.tgz", @@ -7482,7 +6728,7 @@ }, "node_modules/ini": { "version": "4.1.1", - "resolved": "https://registry.npmmirror.com/ini/-/ini-4.1.1.tgz", + "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.1.tgz", "integrity": "sha512-QQnnxNyfvmHFIsj7gkPcYymR8Jdw/o7mp5ZFihxn6h8Ci6fh3Dx4E1gPjpQEpIuPo9XVNY/ZUwh4BPMjGyL01g==", "license": "ISC", "engines": { @@ -7490,12 +6736,12 @@ } }, "node_modules/ioredis": { - "version": "5.6.1", - "resolved": "https://registry.npmmirror.com/ioredis/-/ioredis-5.6.1.tgz", - "integrity": "sha512-UxC0Yv1Y4WRJiGQxQkP0hfdL0/5/6YvdfOOClRgJ0qppSarkhneSa6UvkMkms0AkdGimSH3Ikqm+6mkMmX7vGA==", + "version": "5.10.0", + "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.10.0.tgz", + "integrity": "sha512-HVBe9OFuqs+Z6n64q09PQvP1/R4Bm+30PAyyD4wIEqssh3v9L21QjCVk4kRLucMBcDokJTcLjsGeVRlq/nH6DA==", "license": "MIT", "dependencies": { - "@ioredis/commands": "^1.1.1", + "@ioredis/commands": "1.5.1", "cluster-key-slot": "^1.1.0", "debug": "^4.3.4", "denque": "^2.1.0", @@ -7522,12 +6768,6 @@ "url": "https://github.com/sponsors/brc-dd" } }, - "node_modules/is-arrayish": { - "version": "0.3.2", - "resolved": "https://registry.npmmirror.com/is-arrayish/-/is-arrayish-0.3.2.tgz", - "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==", - "license": "MIT" - }, "node_modules/is-binary-path": { "version": "2.1.0", "resolved": "https://registry.npmmirror.com/is-binary-path/-/is-binary-path-2.1.0.tgz", @@ -7540,21 +6780,6 @@ "node": ">=8" } }, - "node_modules/is-builtin-module": { - "version": "3.2.1", - "resolved": 
"https://registry.npmmirror.com/is-builtin-module/-/is-builtin-module-3.2.1.tgz", - "integrity": "sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A==", - "license": "MIT", - "dependencies": { - "builtin-modules": "^3.3.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/is-core-module": { "version": "2.16.1", "resolved": "https://registry.npmmirror.com/is-core-module/-/is-core-module-2.16.1.tgz", @@ -7572,7 +6797,7 @@ }, "node_modules/is-docker": { "version": "3.0.0", - "resolved": "https://registry.npmmirror.com/is-docker/-/is-docker-3.0.0.tgz", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-3.0.0.tgz", "integrity": "sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==", "license": "MIT", "bin": { @@ -7635,7 +6860,7 @@ }, "node_modules/is-inside-container": { "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/is-inside-container/-/is-inside-container-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/is-inside-container/-/is-inside-container-1.0.0.tgz", "integrity": "sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==", "license": "MIT", "dependencies": { @@ -7653,7 +6878,7 @@ }, "node_modules/is-installed-globally": { "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/is-installed-globally/-/is-installed-globally-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-1.0.0.tgz", "integrity": "sha512-K55T22lfpQ63N4KEN57jZUAaAYqYHEe8veb/TycJRk9DdSCLLcovXz/mL6mOnhQaZsQGwPhuFopdQIlqGSEjiQ==", "license": "MIT", "dependencies": { @@ -7669,7 +6894,7 @@ }, "node_modules/is-module": { "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/is-module/-/is-module-1.0.0.tgz", + "resolved": 
"https://registry.npmjs.org/is-module/-/is-module-1.0.0.tgz", "integrity": "sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g==", "license": "MIT" }, @@ -7684,7 +6909,7 @@ }, "node_modules/is-path-inside": { "version": "4.0.0", - "resolved": "https://registry.npmmirror.com/is-path-inside/-/is-path-inside-4.0.0.tgz", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-4.0.0.tgz", "integrity": "sha512-lJJV/5dYS+RcL8uQdBDW9c9uWFLLBNRyFhnAKXw5tVqLlKZ4RMGZKv+YQ/IA3OhD+RpbJa1LLFM1FQPGyIXvOA==", "license": "MIT", "engines": { @@ -7694,18 +6919,9 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/is-plain-obj": { - "version": "2.1.0", - "resolved": "https://registry.npmmirror.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz", - "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/is-reference": { "version": "1.2.1", - "resolved": "https://registry.npmmirror.com/is-reference/-/is-reference-1.2.1.tgz", + "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-1.2.1.tgz", "integrity": "sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ==", "license": "MIT", "dependencies": { @@ -7730,18 +6946,9 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-ssh": { - "version": "1.4.1", - "resolved": "https://registry.npmmirror.com/is-ssh/-/is-ssh-1.4.1.tgz", - "integrity": "sha512-JNeu1wQsHjyHgn9NcWTaXq6zWSR6hqE0++zhfZlkFBbScNkyvxCdeV8sRkSBaeLKxmbpR21brail63ACNxJ0Tg==", - "license": "MIT", - "dependencies": { - "protocols": "^2.0.1" - } - }, "node_modules/is-stream": { "version": "3.0.0", - "resolved": "https://registry.npmmirror.com/is-stream/-/is-stream-3.0.0.tgz", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", "integrity": 
"sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", "license": "MIT", "engines": { @@ -7751,24 +6958,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/is-url": { - "version": "1.2.4", - "resolved": "https://registry.npmmirror.com/is-url/-/is-url-1.2.4.tgz", - "integrity": "sha512-ITvGim8FhRiYe4IQ5uHSkj7pVaPDrCTkNd3yq3cV7iZAcJdHTUMPMEHcqSOy9xZ9qFenQCvi+2wjH9a1nXqHww==", - "license": "MIT" - }, - "node_modules/is-url-superb": { - "version": "4.0.0", - "resolved": "https://registry.npmmirror.com/is-url-superb/-/is-url-superb-4.0.0.tgz", - "integrity": "sha512-GI+WjezhPPcbM+tqE9LnmsY5qqjwHzTvjJ36wxYX5ujNXefSUJ/T17r5bqDV8yLhcgB59KTPNOc9O9cmHTPWsA==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/is-what": { "version": "4.1.16", "resolved": "https://registry.npmmirror.com/is-what/-/is-what-4.1.16.tgz", @@ -7782,9 +6971,9 @@ } }, "node_modules/is-wsl": { - "version": "3.1.0", - "resolved": "https://registry.npmmirror.com/is-wsl/-/is-wsl-3.1.0.tgz", - "integrity": "sha512-UcVfVfaK4Sc4m7X3dUSoHoozQGBEFeDC+zVo06t98xe8CzHSZZBekNXH+tu0NalHolcJ/QAGqS46Hef7QXBIMw==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-3.1.1.tgz", + "integrity": "sha512-e6rvdUCiQCAuumZslxRJWR/Doq4VpPR82kqclvcS0efgt430SlGIk05vdCN58+VrzgtIcfNODjozVielycD4Sw==", "license": "MIT", "dependencies": { "is-inside-container": "^1.0.0" @@ -7798,7 +6987,7 @@ }, "node_modules/is64bit": { "version": "2.0.0", - "resolved": "https://registry.npmmirror.com/is64bit/-/is64bit-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/is64bit/-/is64bit-2.0.0.tgz", "integrity": "sha512-jv+8jaWCl0g2lSBkNSVXdzfBA0npK1HGC2KtWM9FumFRoGS94g3NbCCLVnCYHLjp4GrW2KZeeSTMo5ddtznmGw==", "license": "MIT", "dependencies": { @@ -7813,17 +7002,17 @@ }, "node_modules/isarray": { "version": "1.0.0", - "resolved": 
"https://registry.npmmirror.com/isarray/-/isarray-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", "license": "MIT" }, "node_modules/isexe": { - "version": "3.1.1", - "resolved": "https://registry.npmmirror.com/isexe/-/isexe-3.1.1.tgz", - "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", - "license": "ISC", + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.5.tgz", + "integrity": "sha512-6B3tLtFqtQS4ekarvLVMZ+X+VlvQekbe4taUkf/rhVO3d/h0M2rfARm/pXLcPEsjjMsFgrFgSrhQIxcSVrBz8w==", + "license": "BlueOak-1.0.0", "engines": { - "node": ">=16" + "node": ">=18" } }, "node_modules/jackspeak": { @@ -7842,9 +7031,9 @@ } }, "node_modules/jiti": { - "version": "2.5.1", - "resolved": "https://registry.npmmirror.com/jiti/-/jiti-2.5.1.tgz", - "integrity": "sha512-twQoecYPiVA5K/h6SxtORw/Bs3ar+mLUtoPSc7iMXzQzK8d7eJ/R09wmTwAjiamETn1cXYPGfNnu7DMoHgu12w==", + "version": "2.6.1", + "resolved": "https://registry.npmmirror.com/jiti/-/jiti-2.6.1.tgz", + "integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", "license": "MIT", "bin": { "jiti": "lib/jiti-cli.mjs" @@ -7852,13 +7041,13 @@ }, "node_modules/js-tokens": { "version": "4.0.0", - "resolved": "https://registry.npmmirror.com/js-tokens/-/js-tokens-4.0.0.tgz", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", "license": "MIT" }, "node_modules/jsesc": { "version": "3.1.0", - "resolved": "https://registry.npmmirror.com/jsesc/-/jsesc-3.1.0.tgz", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", "integrity": 
"sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", "license": "MIT", "bin": { @@ -7870,7 +7059,7 @@ }, "node_modules/json5": { "version": "2.2.3", - "resolved": "https://registry.npmmirror.com/json5/-/json5-2.2.3.tgz", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", "license": "MIT", "bin": { @@ -7880,37 +7069,16 @@ "node": ">=6" } }, - "node_modules/jsonfile": { - "version": "6.1.0", - "resolved": "https://registry.npmmirror.com/jsonfile/-/jsonfile-6.1.0.tgz", - "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", - "license": "MIT", - "dependencies": { - "universalify": "^2.0.0" - }, - "optionalDependencies": { - "graceful-fs": "^4.1.6" - } - }, - "node_modules/junk": { - "version": "4.0.1", - "resolved": "https://registry.npmmirror.com/junk/-/junk-4.0.1.tgz", - "integrity": "sha512-Qush0uP+G8ZScpGMZvHUiRfI0YBWuB3gVBYlI0v0vvOJt5FLicco+IkP0a50LqTTQhmts/m6tP5SWE+USyIvcQ==", - "license": "MIT", - "engines": { - "node": ">=12.20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/jwt-decode": { - "version": "4.0.0", - "resolved": "https://registry.npmmirror.com/jwt-decode/-/jwt-decode-4.0.0.tgz", - "integrity": "sha512-+KJGIyHgkGuIq3IEBNftfhW/LfWhXUIY6OmyVWjliu5KH1y0fw7VQ8YndE2O4qZdMSd9SqbnC8GOcZEy0Om7sA==", + "node_modules/jsonfile": { + "version": "6.1.0", + "resolved": "https://registry.npmmirror.com/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", "license": "MIT", - "engines": { - "node": ">=18" + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" } }, "node_modules/keygrip": { @@ -7926,9 +7094,9 @@ } }, 
"node_modules/kleur": { - "version": "3.0.3", - "resolved": "https://registry.npmmirror.com/kleur/-/kleur-3.0.3.tgz", - "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", + "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", "license": "MIT", "engines": { "node": ">=6" @@ -7944,15 +7112,15 @@ } }, "node_modules/knitwork": { - "version": "1.2.0", - "resolved": "https://registry.npmmirror.com/knitwork/-/knitwork-1.2.0.tgz", - "integrity": "sha512-xYSH7AvuQ6nXkq42x0v5S8/Iry+cfulBz/DJQzhIyESdLD7425jXsPy4vn5cCXU+HhRN2kVw51Vd1K6/By4BQg==", + "version": "1.3.0", + "resolved": "https://registry.npmmirror.com/knitwork/-/knitwork-1.3.0.tgz", + "integrity": "sha512-4LqMNoONzR43B1W0ek0fhXMsDNW/zxa1NdFAVMY+k28pgZLovR4G3PB5MrpTxCy1QaZCqNoiaKPr5w5qZHfSNw==", "license": "MIT" }, "node_modules/koa": { - "version": "2.16.1", - "resolved": "https://registry.npmmirror.com/koa/-/koa-2.16.1.tgz", - "integrity": "sha512-umfX9d3iuSxTQP4pnzLOz0HKnPg0FaUUIKcye2lOiz3KPu1Y3M3xlz76dISdFPQs37P9eJz1wUpcTS6KDPn9fA==", + "version": "2.16.4", + "resolved": "https://registry.npmjs.org/koa/-/koa-2.16.4.tgz", + "integrity": "sha512-3An0GCLDSR34tsCO4H8Tef8Pp2ngtaZDAZnsWJYelqXUK5wyiHvGItgK/xcSkmHLSTn1Jcho1mRQs2ehRzvKKw==", "license": "MIT", "dependencies": { "accepts": "^1.3.5", @@ -8124,42 +7292,19 @@ "node": ">= 0.6" } }, - "node_modules/kuler": { - "version": "2.0.0", - "resolved": "https://registry.npmmirror.com/kuler/-/kuler-2.0.0.tgz", - "integrity": "sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==", - "license": "MIT" - }, - "node_modules/lambda-local": { - "version": "2.2.0", - "resolved": "https://registry.npmmirror.com/lambda-local/-/lambda-local-2.2.0.tgz", - "integrity": 
"sha512-bPcgpIXbHnVGfI/omZIlgucDqlf4LrsunwoKue5JdZeGybt8L6KyJz2Zu19ffuZwIwLj2NAI2ZyaqNT6/cetcg==", - "license": "MIT", - "dependencies": { - "commander": "^10.0.1", - "dotenv": "^16.3.1", - "winston": "^3.10.0" - }, - "bin": { - "lambda-local": "build/cli.js" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/launch-editor": { - "version": "2.10.0", - "resolved": "https://registry.npmmirror.com/launch-editor/-/launch-editor-2.10.0.tgz", - "integrity": "sha512-D7dBRJo/qcGX9xlvt/6wUYzQxjh5G1RvZPgPv8vi4KRU99DVQL/oW7tnVOCCTm2HGeo3C5HvGE5Yrh6UBoZ0vA==", + "version": "2.13.1", + "resolved": "https://registry.npmjs.org/launch-editor/-/launch-editor-2.13.1.tgz", + "integrity": "sha512-lPSddlAAluRKJ7/cjRFoXUFzaX7q/YKI7yPHuEvSJVqoXvFnJov1/Ud87Aa4zULIbA9Nja4mSPK8l0z/7eV2wA==", "license": "MIT", "dependencies": { - "picocolors": "^1.0.0", - "shell-quote": "^1.8.1" + "picocolors": "^1.1.1", + "shell-quote": "^1.8.3" } }, "node_modules/lazystream": { "version": "1.0.1", - "resolved": "https://registry.npmmirror.com/lazystream/-/lazystream-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.1.tgz", "integrity": "sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==", "license": "MIT", "dependencies": { @@ -8171,7 +7316,7 @@ }, "node_modules/lazystream/node_modules/readable-stream": { "version": "2.3.8", - "resolved": "https://registry.npmmirror.com/readable-stream/-/readable-stream-2.3.8.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", "license": "MIT", "dependencies": { @@ -8186,13 +7331,13 @@ }, "node_modules/lazystream/node_modules/safe-buffer": { "version": "5.1.2", - "resolved": "https://registry.npmmirror.com/safe-buffer/-/safe-buffer-5.1.2.tgz", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": 
"sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", "license": "MIT" }, "node_modules/lazystream/node_modules/string_decoder": { "version": "1.1.1", - "resolved": "https://registry.npmmirror.com/string_decoder/-/string_decoder-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "license": "MIT", "dependencies": { @@ -8219,7 +7364,7 @@ }, "node_modules/listhen": { "version": "1.9.0", - "resolved": "https://registry.npmmirror.com/listhen/-/listhen-1.9.0.tgz", + "resolved": "https://registry.npmjs.org/listhen/-/listhen-1.9.0.tgz", "integrity": "sha512-I8oW2+QL5KJo8zXNWX046M134WchxsXC7SawLPvRQpogCbkyQIaFxPE89A2HiwR7vAK2Dm2ERBAmyjTYGYEpBg==", "license": "MIT", "dependencies": { @@ -8249,19 +7394,19 @@ }, "node_modules/listhen/node_modules/pathe": { "version": "1.1.2", - "resolved": "https://registry.npmmirror.com/pathe/-/pathe-1.1.2.tgz", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-1.1.2.tgz", "integrity": "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==", "license": "MIT" }, "node_modules/local-pkg": { - "version": "1.1.1", - "resolved": "https://registry.npmmirror.com/local-pkg/-/local-pkg-1.1.1.tgz", - "integrity": "sha512-WunYko2W1NcdfAFpuLUoucsgULmgDBRkdxHxWQ7mK0cQqwPiy8E1enjuRBrhLtZkB5iScJ1XIPdhVEFK8aOLSg==", + "version": "1.1.2", + "resolved": "https://registry.npmmirror.com/local-pkg/-/local-pkg-1.1.2.tgz", + "integrity": "sha512-arhlxbFRmoQHl33a0Zkle/YWlmNwoyt6QNZEIJcqNbdrsix5Lvc4HyyI3EnwxTYlZYc32EbYrQ8SzEZ7dqgg9A==", "license": "MIT", "dependencies": { "mlly": "^1.7.4", - "pkg-types": "^2.0.1", - "quansync": "^0.2.8" + "pkg-types": "^2.3.0", + "quansync": "^0.2.11" }, "engines": { "node": ">=14" @@ -8270,101 +7415,48 @@ "url": "https://github.com/sponsors/antfu" } }, - 
"node_modules/locate-path": { - "version": "7.2.0", - "resolved": "https://registry.npmmirror.com/locate-path/-/locate-path-7.2.0.tgz", - "integrity": "sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==", - "license": "MIT", - "dependencies": { - "p-locate": "^6.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmmirror.com/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "license": "MIT" - }, - "node_modules/lodash-es": { - "version": "4.17.21", - "resolved": "https://registry.npmmirror.com/lodash-es/-/lodash-es-4.17.21.tgz", - "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", - "license": "MIT" - }, - "node_modules/lodash.debounce": { - "version": "4.0.8", - "resolved": "https://registry.npmmirror.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz", - "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==", + "version": "4.17.23", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz", + "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", "license": "MIT" }, "node_modules/lodash.defaults": { "version": "4.2.0", - "resolved": "https://registry.npmmirror.com/lodash.defaults/-/lodash.defaults-4.2.0.tgz", + "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", "integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==", "license": "MIT" }, "node_modules/lodash.isarguments": { "version": "3.1.0", - "resolved": 
"https://registry.npmmirror.com/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz", + "resolved": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz", "integrity": "sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==", "license": "MIT" }, "node_modules/lodash.memoize": { "version": "4.1.2", - "resolved": "https://registry.npmmirror.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", "license": "MIT" }, "node_modules/lodash.uniq": { "version": "4.5.0", - "resolved": "https://registry.npmmirror.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz", + "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", "integrity": "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==", "license": "MIT" }, - "node_modules/logform": { - "version": "2.7.0", - "resolved": "https://registry.npmmirror.com/logform/-/logform-2.7.0.tgz", - "integrity": "sha512-TFYA4jnP7PVbmlBIfhlSe+WKxs9dklXMTEGcBCIvLhE/Tn3H6Gk1norupVW7m5Cnd4bLcr08AytbyV/xj7f/kQ==", - "license": "MIT", - "dependencies": { - "@colors/colors": "1.6.0", - "@types/triple-beam": "^1.3.2", - "fecha": "^4.2.0", - "ms": "^2.1.1", - "safe-stable-stringify": "^2.3.1", - "triple-beam": "^1.3.0" - }, - "engines": { - "node": ">= 12.0.0" - } - }, "node_modules/lru-cache": { "version": "5.1.1", - "resolved": "https://registry.npmmirror.com/lru-cache/-/lru-cache-5.1.1.tgz", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "license": "ISC", "dependencies": { "yallist": "^3.0.2" } }, - "node_modules/luxon": { - "version": "3.7.1", - "resolved": 
"https://registry.npmmirror.com/luxon/-/luxon-3.7.1.tgz", - "integrity": "sha512-RkRWjA926cTvz5rAb1BqyWkKbbjzCGchDUIKMCUvNi17j6f6j8uHGDV82Aqcqtzd+icoYpELmG3ksgGiFNNcNg==", - "license": "MIT", - "engines": { - "node": ">=12" - } - }, "node_modules/magic-regexp": { "version": "0.10.0", - "resolved": "https://registry.npmmirror.com/magic-regexp/-/magic-regexp-0.10.0.tgz", + "resolved": "https://registry.npmjs.org/magic-regexp/-/magic-regexp-0.10.0.tgz", "integrity": "sha512-Uly1Bu4lO1hwHUW0CQeSWuRtzCMNO00CmXtS8N6fyvB3B979GOEEeAkiTUDsmbYLAbvpUS/Kt5c4ibosAzVyVg==", "license": "MIT", "dependencies": { @@ -8378,38 +7470,38 @@ } }, "node_modules/magic-string": { - "version": "0.30.17", - "resolved": "https://registry.npmmirror.com/magic-string/-/magic-string-0.30.17.tgz", - "integrity": "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==", + "version": "0.30.21", + "resolved": "https://registry.npmmirror.com/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", "license": "MIT", "dependencies": { - "@jridgewell/sourcemap-codec": "^1.5.0" + "@jridgewell/sourcemap-codec": "^1.5.5" } }, "node_modules/magic-string-ast": { - "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/magic-string-ast/-/magic-string-ast-1.0.0.tgz", - "integrity": "sha512-8rbuNizut2gW94kv7pqgt0dvk+AHLPVIm0iJtpSgQJ9dx21eWx5SBel8z3jp1xtC0j6/iyK3AWGhAR1H61s7LA==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/magic-string-ast/-/magic-string-ast-1.0.3.tgz", + "integrity": "sha512-CvkkH1i81zl7mmb94DsRiFeG9V2fR2JeuK8yDgS8oiZSFa++wWLEgZ5ufEOyLHbvSbD1gTRKv9NdX69Rnvr9JA==", "license": "MIT", "dependencies": { - "magic-string": "^0.30.17" + "magic-string": "^0.30.19" }, "engines": { - "node": ">=20.18.0" + "node": ">=20.19.0" }, "funding": { "url": "https://github.com/sponsors/sxzz" } }, "node_modules/magicast": { - 
"version": "0.3.5", - "resolved": "https://registry.npmmirror.com/magicast/-/magicast-0.3.5.tgz", - "integrity": "sha512-L0WhttDl+2BOsybvEOLK7fW3UA0OQ0IQ2d6Zl2x/a6vVRs3bAY0ECOSHHeL5jD+SbOpOCUEi0y1DgHEn9Qn1AQ==", + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/magicast/-/magicast-0.5.2.tgz", + "integrity": "sha512-E3ZJh4J3S9KfwdjZhe2afj6R9lGIN5Pher1pF39UGrXRqq/VDaGVIGN13BjHd2u8B61hArAGOnso7nBOouW3TQ==", "license": "MIT", "dependencies": { - "@babel/parser": "^7.25.4", - "@babel/types": "^7.25.4", - "source-map-js": "^1.2.0" + "@babel/parser": "^7.29.0", + "@babel/types": "^7.29.0", + "source-map-js": "^1.2.1" } }, "node_modules/math-intrinsics": { @@ -8423,7 +7515,7 @@ }, "node_modules/mdn-data": { "version": "2.12.2", - "resolved": "https://registry.npmmirror.com/mdn-data/-/mdn-data-2.12.2.tgz", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.12.2.tgz", "integrity": "sha512-IEn+pegP1aManZuckezWCO+XZQDplx1366JoVhTpMpBB1sPey/SbveZQUosKiKiGYjg1wH4pMlNgXbCiYgihQA==", "license": "CC0-1.0" }, @@ -8436,21 +7528,9 @@ "node": ">= 0.6" } }, - "node_modules/merge-options": { - "version": "3.0.4", - "resolved": "https://registry.npmmirror.com/merge-options/-/merge-options-3.0.4.tgz", - "integrity": "sha512-2Sug1+knBjkaMsMgf1ctR1Ujx+Ayku4EdJN4Z+C2+JzoeF7A3OZ9KM2GY0CpQS51NR61LTurMJrRKPhSs3ZRTQ==", - "license": "MIT", - "dependencies": { - "is-plain-obj": "^2.1.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/merge-stream": { "version": "2.0.0", - "resolved": "https://registry.npmmirror.com/merge-stream/-/merge-stream-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", "license": "MIT" }, @@ -8472,12 +7552,6 @@ "node": ">= 0.6" } }, - "node_modules/micro-api-client": { - "version": "3.3.0", - "resolved": "https://registry.npmmirror.com/micro-api-client/-/micro-api-client-3.3.0.tgz", 
- "integrity": "sha512-y0y6CUB9RLVsy3kfgayU28746QrNMpSm9O/AYGNsBgOkJr/X/Jk0VLGoO8Ude7Bpa8adywzF+MzXNZRFRsNPhg==", - "license": "ISC" - }, "node_modules/micromatch": { "version": "4.0.8", "resolved": "https://registry.npmmirror.com/micromatch/-/micromatch-4.0.8.tgz", @@ -8504,9 +7578,9 @@ } }, "node_modules/mime": { - "version": "4.0.7", - "resolved": "https://registry.npmmirror.com/mime/-/mime-4.0.7.tgz", - "integrity": "sha512-2OfDPL+e03E0LrXaGYOtTFIYhiuzep94NSsuhrNULq+stylcJedcHdzHtz0atMUuGwJfFYs0YL5xeC/Ca2x0eQ==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-4.1.0.tgz", + "integrity": "sha512-X5ju04+cAzsojXKes0B/S4tcYtFAJ6tTMuSPBEn9CPGlrWr8Fiw7qYeLT0XyH80HSoAoqWCaz+MWKh22P7G1cw==", "funding": [ "https://github.com/sponsors/broofa" ], @@ -8519,21 +7593,21 @@ } }, "node_modules/mime-db": { - "version": "1.54.0", - "resolved": "https://registry.npmmirror.com/mime-db/-/mime-db-1.54.0.tgz", - "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", "license": "MIT", "engines": { "node": ">= 0.6" } }, "node_modules/mime-types": { - "version": "3.0.1", - "resolved": "https://registry.npmmirror.com/mime-types/-/mime-types-3.0.1.tgz", - "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", "license": "MIT", "dependencies": { - "mime-db": "^1.54.0" + "mime-db": "1.52.0" }, "engines": { "node": ">= 0.6" @@ -8541,7 +7615,7 @@ }, "node_modules/mimic-fn": { "version": "4.0.0", - "resolved": 
"https://registry.npmmirror.com/mimic-fn/-/mimic-fn-4.0.0.tgz", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", "license": "MIT", "engines": { @@ -8552,12 +7626,12 @@ } }, "node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "version": "9.0.9", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.9.tgz", + "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==", "license": "ISC", "dependencies": { - "brace-expansion": "^2.0.1" + "brace-expansion": "^2.0.2" }, "engines": { "node": ">=16 || 14 >=14.17" @@ -8566,28 +7640,19 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/minimist": { - "version": "1.2.8", - "resolved": "https://registry.npmmirror.com/minimist/-/minimist-1.2.8.tgz", - "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmmirror.com/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", - "license": "ISC", + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.3.tgz", + "integrity": "sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==", + "license": "BlueOak-1.0.0", "engines": { "node": ">=16 || 14 >=14.17" } }, "node_modules/minizlib": { - "version": "3.0.2", - "resolved": 
"https://registry.npmmirror.com/minizlib/-/minizlib-3.0.2.tgz", - "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz", + "integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==", "license": "MIT", "dependencies": { "minipass": "^7.1.2" @@ -8602,31 +7667,16 @@ "integrity": "sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==", "license": "MIT" }, - "node_modules/mkdirp": { - "version": "3.0.1", - "resolved": "https://registry.npmmirror.com/mkdirp/-/mkdirp-3.0.1.tgz", - "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", - "license": "MIT", - "bin": { - "mkdirp": "dist/cjs/src/bin.js" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/mlly": { - "version": "1.7.4", - "resolved": "https://registry.npmmirror.com/mlly/-/mlly-1.7.4.tgz", - "integrity": "sha512-qmdSIPC4bDJXgZTCR7XosJiNKySV7O215tsPtDN9iEO/7q/76b/ijtgRu/+epFXSJhijtTCCGp3DWS549P3xKw==", + "version": "1.8.0", + "resolved": "https://registry.npmmirror.com/mlly/-/mlly-1.8.0.tgz", + "integrity": "sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==", "license": "MIT", "dependencies": { - "acorn": "^8.14.0", - "pathe": "^2.0.1", - "pkg-types": "^1.3.0", - "ufo": "^1.5.4" + "acorn": "^8.15.0", + "pathe": "^2.0.3", + "pkg-types": "^1.3.1", + "ufo": "^1.6.1" } }, "node_modules/mlly/node_modules/confbox": { @@ -8652,25 +7702,9 @@ "integrity": "sha512-aF7yRQr/Q0O2/4pIXm6PZ5G+jAd7QS4Yu8m+WEeEHGnbo+7mE36CbLSDQiXYV8bVL3NfmdeqPJct0tUlnjVSnA==", "license": "MIT" }, - "node_modules/module-definition": { - "version": "6.0.1", - "resolved": 
"https://registry.npmmirror.com/module-definition/-/module-definition-6.0.1.tgz", - "integrity": "sha512-FeVc50FTfVVQnolk/WQT8MX+2WVcDnTGiq6Wo+/+lJ2ET1bRVi3HG3YlJUfqagNMc/kUlFSoR96AJkxGpKz13g==", - "license": "MIT", - "dependencies": { - "ast-module-types": "^6.0.1", - "node-source-walk": "^7.0.1" - }, - "bin": { - "module-definition": "bin/cli.js" - }, - "engines": { - "node": ">=18" - } - }, "node_modules/mrmime": { "version": "2.0.1", - "resolved": "https://registry.npmmirror.com/mrmime/-/mrmime-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.1.tgz", "integrity": "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==", "license": "MIT", "engines": { @@ -8683,6 +7717,12 @@ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "license": "MIT" }, + "node_modules/muggle-string": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/muggle-string/-/muggle-string-0.4.1.tgz", + "integrity": "sha512-VNTrAak/KhO2i8dqqnqnAHOa3cYBwXEZe9h+D5h/1ZqFSTEFHdM65lR7RoIqq3tBBYavsOXV84NoHXZ0AkPyqQ==", + "license": "MIT" + }, "node_modules/mz": { "version": "2.7.0", "resolved": "https://registry.npmmirror.com/mz/-/mz-2.7.0.tgz", @@ -8695,9 +7735,9 @@ } }, "node_modules/nanoid": { - "version": "5.1.5", - "resolved": "https://registry.npmmirror.com/nanoid/-/nanoid-5.1.5.tgz", - "integrity": "sha512-Ir/+ZpE9fDsNH0hQ3C68uyThDXzYcim2EqcZ8zn8Chtt1iylPT9xXJB0kPCnqzgcEGikO9RxSrh63MsmVCU7Fw==", + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.6.tgz", + "integrity": "sha512-c7+7RQ+dMB5dPwwCp4ee1/iV/q2P6aK1mTZcfr1BTuVlyW9hJYiMPybJCcnBlQtuSmTIWNeazm/zqNoZSSElBg==", "funding": [ { "type": "github", @@ -8713,9 +7753,9 @@ } }, "node_modules/nanotar": { - "version": "0.2.0", - "resolved": "https://registry.npmmirror.com/nanotar/-/nanotar-0.2.0.tgz", - "integrity": 
"sha512-9ca1h0Xjvo9bEkE4UOxgAzLV0jHKe6LMaxo37ND2DAhhAtd0j8pR1Wxz+/goMrZO8AEZTWCmyaOsFI/W5AdpCQ==", + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/nanotar/-/nanotar-0.2.1.tgz", + "integrity": "sha512-MUrzzDUcIOPbv7ubhDV/L4CIfVTATd9XhDE2ixFeCrM5yp9AlzUpn91JrnN0HD6hksdxvz9IW9aKANz0Bta0GA==", "license": "MIT" }, "node_modules/negotiator": { @@ -8727,60 +7767,24 @@ "node": ">= 0.6" } }, - "node_modules/netlify": { - "version": "13.3.5", - "resolved": "https://registry.npmmirror.com/netlify/-/netlify-13.3.5.tgz", - "integrity": "sha512-Nc3loyVASW59W+8fLDZT1lncpG7llffyZ2o0UQLx/Fr20i7P8oP+lE7+TEcFvXj9IUWU6LjB9P3BH+iFGyp+mg==", - "license": "MIT", - "dependencies": { - "@netlify/open-api": "^2.37.0", - "lodash-es": "^4.17.21", - "micro-api-client": "^3.3.0", - "node-fetch": "^3.0.0", - "p-wait-for": "^5.0.0", - "qs": "^6.9.6" - }, - "engines": { - "node": "^14.16.0 || >=16.0.0" - } - }, - "node_modules/netlify/node_modules/node-fetch": { - "version": "3.3.2", - "resolved": "https://registry.npmmirror.com/node-fetch/-/node-fetch-3.3.2.tgz", - "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", - "license": "MIT", - "dependencies": { - "data-uri-to-buffer": "^4.0.0", - "fetch-blob": "^3.1.4", - "formdata-polyfill": "^4.0.10" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/node-fetch" - } - }, "node_modules/nitropack": { - "version": "2.12.4", - "resolved": "https://registry.npmmirror.com/nitropack/-/nitropack-2.12.4.tgz", - "integrity": "sha512-MPmPRJWTeH03f/NmpN4q3iI3Woik4uaaWIoX34W3gMJiW06Vm1te/lPzuu5EXpXOK7Q2m3FymGMPXcExqih96Q==", + "version": "2.13.1", + "resolved": "https://registry.npmjs.org/nitropack/-/nitropack-2.13.1.tgz", + "integrity": "sha512-2dDj89C4wC2uzG7guF3CnyG+zwkZosPEp7FFBGHB3AJo11AywOolWhyQJFHDzve8COvGxJaqscye9wW2IrUsNw==", "license": "MIT", "dependencies": { - 
"@cloudflare/kv-asset-handler": "^0.4.0", - "@netlify/functions": "^3.1.10", - "@rollup/plugin-alias": "^5.1.1", - "@rollup/plugin-commonjs": "^28.0.6", + "@cloudflare/kv-asset-handler": "^0.4.2", + "@rollup/plugin-alias": "^6.0.0", + "@rollup/plugin-commonjs": "^29.0.0", "@rollup/plugin-inject": "^5.0.5", "@rollup/plugin-json": "^6.1.0", - "@rollup/plugin-node-resolve": "^16.0.1", - "@rollup/plugin-replace": "^6.0.2", + "@rollup/plugin-node-resolve": "^16.0.3", + "@rollup/plugin-replace": "^6.0.3", "@rollup/plugin-terser": "^0.4.4", - "@vercel/nft": "^0.29.4", + "@vercel/nft": "^1.2.0", "archiver": "^7.0.1", - "c12": "^3.1.0", - "chokidar": "^4.0.3", + "c12": "^3.3.3", + "chokidar": "^5.0.0", "citty": "^0.1.6", "compatx": "^0.2.0", "confbox": "^0.2.2", @@ -8788,56 +7792,56 @@ "cookie-es": "^2.0.0", "croner": "^9.1.0", "crossws": "^0.3.5", - "db0": "^0.3.2", + "db0": "^0.3.4", "defu": "^6.1.4", "destr": "^2.0.5", - "dot-prop": "^9.0.0", - "esbuild": "^0.25.6", + "dot-prop": "^10.1.0", + "esbuild": "^0.27.2", "escape-string-regexp": "^5.0.0", "etag": "^1.8.1", - "exsolve": "^1.0.7", - "globby": "^14.1.0", + "exsolve": "^1.0.8", + "globby": "^16.1.0", "gzip-size": "^7.0.0", - "h3": "^1.15.3", + "h3": "^1.15.5", "hookable": "^5.5.3", "httpxy": "^0.1.7", - "ioredis": "^5.6.1", - "jiti": "^2.4.2", + "ioredis": "^5.9.1", + "jiti": "^2.6.1", "klona": "^2.0.6", - "knitwork": "^1.2.0", + "knitwork": "^1.3.0", "listhen": "^1.9.0", - "magic-string": "^0.30.17", - "magicast": "^0.3.5", - "mime": "^4.0.7", - "mlly": "^1.7.4", - "node-fetch-native": "^1.6.6", - "node-mock-http": "^1.0.1", - "ofetch": "^1.4.1", + "magic-string": "^0.30.21", + "magicast": "^0.5.1", + "mime": "^4.1.0", + "mlly": "^1.8.0", + "node-fetch-native": "^1.6.7", + "node-mock-http": "^1.0.4", + "ofetch": "^1.5.1", "ohash": "^2.0.11", "pathe": "^2.0.3", - "perfect-debounce": "^1.0.0", - "pkg-types": "^2.2.0", - "pretty-bytes": "^6.1.1", + "perfect-debounce": "^2.0.0", + "pkg-types": "^2.3.0", + 
"pretty-bytes": "^7.1.0", "radix3": "^1.1.2", - "rollup": "^4.45.0", - "rollup-plugin-visualizer": "^6.0.3", + "rollup": "^4.55.1", + "rollup-plugin-visualizer": "^6.0.5", "scule": "^1.3.0", - "semver": "^7.7.2", + "semver": "^7.7.3", "serve-placeholder": "^2.0.2", - "serve-static": "^2.2.0", - "source-map": "^0.7.4", - "std-env": "^3.9.0", - "ufo": "^1.6.1", + "serve-static": "^2.2.1", + "source-map": "^0.7.6", + "std-env": "^3.10.0", + "ufo": "^1.6.3", "ultrahtml": "^1.6.0", "uncrypto": "^0.1.3", - "unctx": "^2.4.1", - "unenv": "^2.0.0-rc.18", - "unimport": "^5.1.0", - "unplugin-utils": "^0.2.4", - "unstorage": "^1.16.1", + "unctx": "^2.5.0", + "unenv": "^2.0.0-rc.24", + "unimport": "^5.6.0", + "unplugin-utils": "^0.3.1", + "unstorage": "^1.17.4", "untyped": "^2.0.0", - "unwasm": "^0.3.9", - "youch": "4.1.0-beta.8", + "unwasm": "^0.5.3", + "youch": "^4.1.0-beta.13", "youch-core": "^0.3.3" }, "bin": { @@ -8845,7 +7849,7 @@ "nitropack": "dist/cli/index.mjs" }, "engines": { - "node": "^16.11.0 || >=17.0.0" + "node": "^20.19.0 || >=22.12.0" }, "peerDependencies": { "xml2js": "^0.6.2" @@ -8856,51 +7860,37 @@ } } }, - "node_modules/nitropack/node_modules/youch": { - "version": "4.1.0-beta.8", - "resolved": "https://registry.npmmirror.com/youch/-/youch-4.1.0-beta.8.tgz", - "integrity": "sha512-rY2A2lSF7zC+l7HH9Mq+83D1dLlsPnEvy8jTouzaptDZM6geqZ3aJe/b7ULCwRURPtWV3vbDjA2DDMdoBol0HQ==", + "node_modules/nitropack/node_modules/perfect-debounce": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-2.1.0.tgz", + "integrity": "sha512-LjgdTytVFXeUgtHZr9WYViYSM/g8MkcTPYDlPa3cDqMirHjKiSZPYd6DoL7pK8AJQr+uWkQvCjHNdiMqsrJs+g==", + "license": "MIT" + }, + "node_modules/nitropack/node_modules/unplugin-utils": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/unplugin-utils/-/unplugin-utils-0.3.1.tgz", + "integrity": "sha512-5lWVjgi6vuHhJ526bI4nlCOmkCIF3nnfXkCMDeMJrtdvxTs6ZFCM8oNufGTsDbKv/tJ/xj8RpvXjRuPBZJuJog==", 
"license": "MIT", "dependencies": { - "@poppinss/colors": "^4.1.4", - "@poppinss/dumper": "^0.6.3", - "@speed-highlight/core": "^1.2.7", - "cookie": "^1.0.2", - "youch-core": "^0.3.1" + "pathe": "^2.0.3", + "picomatch": "^4.0.3" }, "engines": { - "node": ">=18" + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" } }, "node_modules/node-addon-api": { "version": "7.1.1", - "resolved": "https://registry.npmmirror.com/node-addon-api/-/node-addon-api-7.1.1.tgz", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.1.tgz", "integrity": "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==", "license": "MIT" }, - "node_modules/node-domexception": { - "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/node-domexception/-/node-domexception-1.0.0.tgz", - "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", - "deprecated": "Use your platform's native DOMException instead", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/jimmywarting" - }, - { - "type": "github", - "url": "https://paypal.me/jimmywarting" - } - ], - "license": "MIT", - "engines": { - "node": ">=10.5.0" - } - }, "node_modules/node-fetch": { "version": "2.7.0", - "resolved": "https://registry.npmmirror.com/node-fetch/-/node-fetch-2.7.0.tgz", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", "license": "MIT", "dependencies": { @@ -8919,15 +7909,15 @@ } }, "node_modules/node-fetch-native": { - "version": "1.6.6", - "resolved": "https://registry.npmmirror.com/node-fetch-native/-/node-fetch-native-1.6.6.tgz", - "integrity": "sha512-8Mc2HhqPdlIfedsuZoc3yioPuzp6b+L5jRCRY1QzuWZh2EGJVQrGppC6V6cF0bLdbW0+O2YpqCA25aF/1lvipQ==", + "version": "1.6.7", + 
"resolved": "https://registry.npmjs.org/node-fetch-native/-/node-fetch-native-1.6.7.tgz", + "integrity": "sha512-g9yhqoedzIUm0nTnTqAQvueMPVOuIY16bqgAJJC8XOOubYFNwz6IER9qs0Gq2Xd0+CecCKFjtdDTMA4u4xG06Q==", "license": "MIT" }, "node_modules/node-forge": { - "version": "1.3.1", - "resolved": "https://registry.npmmirror.com/node-forge/-/node-forge-1.3.1.tgz", - "integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==", + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.3.tgz", + "integrity": "sha512-rLvcdSyRCyouf6jcOIPe/BgwG/d7hKjzMKOas33/pHEr6gbq18IK9zV7DiPvzsz0oBJPme6qr6H6kGZuI9/DZg==", "license": "(BSD-3-Clause OR GPL-2.0)", "engines": { "node": ">= 6.13.0" @@ -8935,7 +7925,7 @@ }, "node_modules/node-gyp-build": { "version": "4.8.4", - "resolved": "https://registry.npmmirror.com/node-gyp-build/-/node-gyp-build-4.8.4.tgz", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.8.4.tgz", "integrity": "sha512-LA4ZjwlnUblHVgq0oBF3Jl/6h/Nvs5fzBLwdEF4nuxnFdsfajde4WfxtJr3CaiH+F6ewcIB/q4jQ4UzPyid+CQ==", "license": "MIT", "bin": { @@ -8945,32 +7935,20 @@ } }, "node_modules/node-mock-http": { - "version": "1.0.1", - "resolved": "https://registry.npmmirror.com/node-mock-http/-/node-mock-http-1.0.1.tgz", - "integrity": "sha512-0gJJgENizp4ghds/Ywu2FCmcRsgBTmRQzYPZm61wy+Em2sBarSka0OhQS5huLBg6od1zkNpnWMCZloQDFVvOMQ==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/node-mock-http/-/node-mock-http-1.0.4.tgz", + "integrity": "sha512-8DY+kFsDkNXy1sJglUfuODx1/opAGJGyrTuFqEoN90oRc2Vk0ZbD4K2qmKXBBEhZQzdKHIVfEJpDU8Ak2NJEvQ==", "license": "MIT" }, "node_modules/node-releases": { - "version": "2.0.19", - "resolved": "https://registry.npmmirror.com/node-releases/-/node-releases-2.0.19.tgz", - "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==", + "version": "2.0.27", + "resolved": 
"https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", + "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", "license": "MIT" }, - "node_modules/node-source-walk": { - "version": "7.0.1", - "resolved": "https://registry.npmmirror.com/node-source-walk/-/node-source-walk-7.0.1.tgz", - "integrity": "sha512-3VW/8JpPqPvnJvseXowjZcirPisssnBuDikk6JIZ8jQzF7KJQX52iPFX4RYYxLycYH7IbMRSPUOga/esVjy5Yg==", - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.26.7" - }, - "engines": { - "node": ">=18" - } - }, "node_modules/nopt": { "version": "8.1.0", - "resolved": "https://registry.npmmirror.com/nopt/-/nopt-8.1.0.tgz", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-8.1.0.tgz", "integrity": "sha512-ieGu42u/Qsa4TFktmaKEwM6MQH0pOWnaB3htzh0JRtx84+Mebc0cbZYN5bC+6WTZ4+77xrL9Pn5m7CV6VIkV7A==", "license": "ISC", "dependencies": { @@ -8983,20 +7961,6 @@ "node": "^18.17.0 || >=20.5.0" } }, - "node_modules/normalize-package-data": { - "version": "6.0.2", - "resolved": "https://registry.npmmirror.com/normalize-package-data/-/normalize-package-data-6.0.2.tgz", - "integrity": "sha512-V6gygoYb/5EmNI+MEGrWkC+e6+Rr7mTmfHrxDbLzxQogBkgzo76rkok0Am6thgSF7Mv2nLOajAJj5vDJZEFn7g==", - "license": "BSD-2-Clause", - "dependencies": { - "hosted-git-info": "^7.0.0", - "semver": "^7.3.5", - "validate-npm-package-license": "^3.0.4" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, "node_modules/normalize-path": { "version": "3.0.0", "resolved": "https://registry.npmmirror.com/normalize-path/-/normalize-path-3.0.0.tgz", @@ -9006,18 +7970,9 @@ "node": ">=0.10.0" } }, - "node_modules/normalize-range": { - "version": "0.1.2", - "resolved": "https://registry.npmmirror.com/normalize-range/-/normalize-range-0.1.2.tgz", - "integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, 
"node_modules/npm-run-path": { "version": "5.3.0", - "resolved": "https://registry.npmmirror.com/npm-run-path/-/npm-run-path-5.3.0.tgz", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", "license": "MIT", "dependencies": { @@ -9032,7 +7987,7 @@ }, "node_modules/npm-run-path/node_modules/path-key": { "version": "4.0.0", - "resolved": "https://registry.npmmirror.com/path-key/-/path-key-4.0.0.tgz", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", "license": "MIT", "engines": { @@ -9044,7 +7999,7 @@ }, "node_modules/nth-check": { "version": "2.1.1", - "resolved": "https://registry.npmmirror.com/nth-check/-/nth-check-2.1.1.tgz", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", "license": "BSD-2-Clause", "dependencies": { @@ -9055,75 +8010,68 @@ } }, "node_modules/nuxt": { - "version": "4.0.1", - "resolved": "https://registry.npmmirror.com/nuxt/-/nuxt-4.0.1.tgz", - "integrity": "sha512-1WbtiX127640PXUJ2Mb32ck0A0/hzBk6+oPQ0YvJnS/HZK3A/oJEW7sYCRPYyEBwUyIQk12QRCBHxmr6LLeXZQ==", - "license": "MIT", - "dependencies": { - "@nuxt/cli": "^3.26.4", - "@nuxt/devalue": "^2.0.2", - "@nuxt/devtools": "^2.6.2", - "@nuxt/kit": "4.0.1", - "@nuxt/schema": "4.0.1", - "@nuxt/telemetry": "^2.6.6", - "@nuxt/vite-builder": "4.0.1", - "@unhead/vue": "^2.0.12", - "@vue/shared": "^3.5.17", - "c12": "^3.1.0", - "chokidar": "^4.0.3", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/nuxt/-/nuxt-4.3.1.tgz", + "integrity": "sha512-bl+0rFcT5Ax16aiWFBFPyWcsTob19NTZaDL5P6t0MQdK63AtgS6fN6fwvwdbXtnTk6/YdCzlmuLzXhSM22h0OA==", + "license": "MIT", + "dependencies": { + 
"@dxup/nuxt": "^0.3.2", + "@nuxt/cli": "^3.33.0", + "@nuxt/devtools": "^3.1.1", + "@nuxt/kit": "4.3.1", + "@nuxt/nitro-server": "4.3.1", + "@nuxt/schema": "4.3.1", + "@nuxt/telemetry": "^2.7.0", + "@nuxt/vite-builder": "4.3.1", + "@unhead/vue": "^2.1.3", + "@vue/shared": "^3.5.27", + "c12": "^3.3.3", + "chokidar": "^5.0.0", "compatx": "^0.2.0", "consola": "^3.4.2", "cookie-es": "^2.0.0", "defu": "^6.1.4", "destr": "^2.0.5", - "devalue": "^5.1.1", + "devalue": "^5.6.2", "errx": "^0.1.0", - "esbuild": "^0.25.8", "escape-string-regexp": "^5.0.0", - "estree-walker": "^3.0.3", - "exsolve": "^1.0.7", - "h3": "^1.15.3", + "exsolve": "^1.0.8", + "h3": "^1.15.5", "hookable": "^5.5.3", "ignore": "^7.0.5", "impound": "^1.0.0", - "jiti": "^2.4.2", + "jiti": "^2.6.1", "klona": "^2.0.6", - "knitwork": "^1.2.0", - "magic-string": "^0.30.17", - "mlly": "^1.7.4", - "mocked-exports": "^0.1.1", + "knitwork": "^1.3.0", + "magic-string": "^0.30.21", + "mlly": "^1.8.0", "nanotar": "^0.2.0", - "nitropack": "^2.12.3", - "nypm": "^0.6.0", - "ofetch": "^1.4.1", + "nypm": "^0.6.5", + "ofetch": "^1.5.1", "ohash": "^2.0.11", - "on-change": "^5.0.1", - "oxc-minify": "^0.77.3", - "oxc-parser": "^0.77.3", - "oxc-transform": "^0.77.3", - "oxc-walker": "^0.4.0", + "on-change": "^6.0.2", + "oxc-minify": "^0.112.0", + "oxc-parser": "^0.112.0", + "oxc-transform": "^0.112.0", + "oxc-walker": "^0.7.0", "pathe": "^2.0.3", - "perfect-debounce": "^1.0.0", - "pkg-types": "^2.2.0", - "radix3": "^1.1.2", + "perfect-debounce": "^2.1.0", + "pkg-types": "^2.3.0", + "rou3": "^0.7.12", "scule": "^1.3.0", - "semver": "^7.7.2", - "std-env": "^3.9.0", - "strip-literal": "^3.0.0", - "tinyglobby": "0.2.14", - "ufo": "^1.6.1", + "semver": "^7.7.4", + "std-env": "^3.10.0", + "tinyglobby": "^0.2.15", + "ufo": "^1.6.3", "ultrahtml": "^1.6.0", "uncrypto": "^0.1.3", - "unctx": "^2.4.1", - "unimport": "^5.1.0", - "unplugin": "^2.3.5", - "unplugin-vue-router": "^0.14.0", - "unstorage": "^1.16.1", + "unctx": "^2.5.0", + 
"unimport": "^5.6.0", + "unplugin": "^3.0.0", + "unplugin-vue-router": "^0.19.2", "untyped": "^2.0.0", - "vue": "^3.5.17", - "vue-bundle-renderer": "^2.1.1", - "vue-devtools-stub": "^0.1.0", - "vue-router": "^4.5.1" + "vue": "^3.5.27", + "vue-router": "^4.6.4" }, "bin": { "nuxi": "bin/nuxt.mjs", @@ -9145,29 +8093,47 @@ } } }, + "node_modules/nuxt/node_modules/perfect-debounce": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-2.1.0.tgz", + "integrity": "sha512-LjgdTytVFXeUgtHZr9WYViYSM/g8MkcTPYDlPa3cDqMirHjKiSZPYd6DoL7pK8AJQr+uWkQvCjHNdiMqsrJs+g==", + "license": "MIT" + }, + "node_modules/nuxt/node_modules/unplugin": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/unplugin/-/unplugin-3.0.0.tgz", + "integrity": "sha512-0Mqk3AT2TZCXWKdcoaufeXNukv2mTrEZExeXlHIOZXdqYoHHr4n51pymnwV8x2BOVxwXbK2HLlI7usrqMpycdg==", + "license": "MIT", + "dependencies": { + "@jridgewell/remapping": "^2.3.5", + "picomatch": "^4.0.3", + "webpack-virtual-modules": "^0.6.2" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, "node_modules/nypm": { - "version": "0.6.0", - "resolved": "https://registry.npmmirror.com/nypm/-/nypm-0.6.0.tgz", - "integrity": "sha512-mn8wBFV9G9+UFHIrq+pZ2r2zL4aPau/by3kJb3cM7+5tQHMt6HGQB8FDIeKFYp8o0D2pnH6nVsO88N4AmUxIWg==", + "version": "0.6.5", + "resolved": "https://registry.npmjs.org/nypm/-/nypm-0.6.5.tgz", + "integrity": "sha512-K6AJy1GMVyfyMXRVB88700BJqNUkByijGJM8kEHpLdcAt+vSQAVfkWWHYzuRXHSY6xA2sNc5RjTj0p9rE2izVQ==", "license": "MIT", "dependencies": { - "citty": "^0.1.6", - "consola": "^3.4.0", + "citty": "^0.2.0", "pathe": "^2.0.3", - "pkg-types": "^2.0.0", - "tinyexec": "^0.3.2" + "tinyexec": "^1.0.2" }, "bin": { "nypm": "dist/cli.mjs" }, "engines": { - "node": "^14.16.0 || >=16.10.0" + "node": ">=18" } }, - "node_modules/nypm/node_modules/tinyexec": { - "version": "0.3.2", - "resolved": "https://registry.npmmirror.com/tinyexec/-/tinyexec-0.3.2.tgz", - "integrity": 
"sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", + "node_modules/nypm/node_modules/citty": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/citty/-/citty-0.2.1.tgz", + "integrity": "sha512-kEV95lFBhQgtogAPlQfJJ0WGVSokvLr/UEoFPiKKOXF7pl98HfUVUD0ejsuTCld/9xH9vogSywZ5KqHzXrZpqg==", "license": "MIT" }, "node_modules/object-assign": { @@ -9188,29 +8154,33 @@ "node": ">= 6" } }, - "node_modules/object-inspect": { - "version": "1.13.4", - "resolved": "https://registry.npmmirror.com/object-inspect/-/object-inspect-1.13.4.tgz", - "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } + "node_modules/obug": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/obug/-/obug-2.1.1.tgz", + "integrity": "sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==", + "funding": [ + "https://github.com/sponsors/sxzz", + "https://opencollective.com/debug" + ], + "license": "MIT" }, "node_modules/ofetch": { - "version": "1.4.1", - "resolved": "https://registry.npmmirror.com/ofetch/-/ofetch-1.4.1.tgz", - "integrity": "sha512-QZj2DfGplQAr2oj9KzceK9Hwz6Whxazmn85yYeVuS3u9XTMOGMRx0kO95MQ+vLsj/S/NwBDMMLU5hpxvI6Tklw==", + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/ofetch/-/ofetch-1.5.1.tgz", + "integrity": "sha512-2W4oUZlVaqAPAil6FUg/difl6YhqhUR7x2eZY4bQCko22UXg3hptq9KLQdqFClV+Wu85UX7hNtdGTngi/1BxcA==", "license": "MIT", "dependencies": { - "destr": "^2.0.3", - "node-fetch-native": "^1.6.4", - "ufo": "^1.5.4" + "destr": "^2.0.5", + "node-fetch-native": "^1.6.7", + "ufo": "^1.6.1" } }, + "node_modules/ogl": { + "version": "1.0.11", + "resolved": "https://registry.npmmirror.com/ogl/-/ogl-1.0.11.tgz", + "integrity": 
"sha512-kUpC154AFfxi16pmZUK4jk3J+8zxwTWGPo03EoYA8QPbzikHoaC82n6pNTbd+oEaJonaE8aPWBlX7ad9zrqLsA==", + "license": "Unlicense" + }, "node_modules/ohash": { "version": "2.0.11", "resolved": "https://registry.npmmirror.com/ohash/-/ohash-2.0.11.tgz", @@ -9218,12 +8188,12 @@ "license": "MIT" }, "node_modules/on-change": { - "version": "5.0.1", - "resolved": "https://registry.npmmirror.com/on-change/-/on-change-5.0.1.tgz", - "integrity": "sha512-n7THCP7RkyReRSLkJb8kUWoNsxUIBxTkIp3JKno+sEz6o/9AJ3w3P9fzQkITEkMwyTKJjZciF3v/pVoouxZZMg==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/on-change/-/on-change-6.0.2.tgz", + "integrity": "sha512-08+12qcOVEA0fS9g/VxKS27HaT94nRutUT77J2dr8zv/unzXopvhBuF8tNLWsoLQ5IgrQ6eptGeGqUYat82U1w==", "license": "MIT", "engines": { - "node": ">=18" + "node": ">=20" }, "funding": { "url": "https://github.com/sindresorhus/on-change?sponsor=1" @@ -9250,18 +8220,9 @@ "wrappy": "1" } }, - "node_modules/one-time": { - "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/one-time/-/one-time-1.0.0.tgz", - "integrity": "sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g==", - "license": "MIT", - "dependencies": { - "fn.name": "1.x.x" - } - }, "node_modules/onetime": { "version": "6.0.0", - "resolved": "https://registry.npmmirror.com/onetime/-/onetime-6.0.0.tgz", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", "license": "MIT", "dependencies": { @@ -9281,7 +8242,7 @@ }, "node_modules/open": { "version": "8.4.2", - "resolved": "https://registry.npmmirror.com/open/-/open-8.4.2.tgz", + "resolved": "https://registry.npmjs.org/open/-/open-8.4.2.tgz", "integrity": "sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==", "license": "MIT", "dependencies": { @@ -9298,7 +8259,7 @@ }, 
"node_modules/open/node_modules/is-docker": { "version": "2.2.1", - "resolved": "https://registry.npmmirror.com/is-docker/-/is-docker-2.2.1.tgz", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", "license": "MIT", "bin": { @@ -9313,7 +8274,7 @@ }, "node_modules/open/node_modules/is-wsl": { "version": "2.2.0", - "resolved": "https://registry.npmmirror.com/is-wsl/-/is-wsl-2.2.0.tgz", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", "license": "MIT", "dependencies": { @@ -9324,190 +8285,120 @@ } }, "node_modules/oxc-minify": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/oxc-minify/-/oxc-minify-0.77.3.tgz", - "integrity": "sha512-fYCSYazHno31eATVyHNyP2MEEMrVLaKVglac7bIoJC/qlb3x+Vqhv4eUViseOkoGM46rb9k8ZdDwhsEMtFUQhA==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/oxc-minify/-/oxc-minify-0.112.0.tgz", + "integrity": "sha512-rkVSeeIRSt+RYI9uX6xonBpLUpvZyegxIg0UL87ev7YAfUqp7IIZlRjkgQN5Us1lyXD//TOo0Dcuuro/TYOWoQ==", "license": "MIT", "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" }, "funding": { "url": "https://github.com/sponsors/Boshen" }, "optionalDependencies": { - "@oxc-minify/binding-android-arm64": "0.77.3", - "@oxc-minify/binding-darwin-arm64": "0.77.3", - "@oxc-minify/binding-darwin-x64": "0.77.3", - "@oxc-minify/binding-freebsd-x64": "0.77.3", - "@oxc-minify/binding-linux-arm-gnueabihf": "0.77.3", - "@oxc-minify/binding-linux-arm-musleabihf": "0.77.3", - "@oxc-minify/binding-linux-arm64-gnu": "0.77.3", - "@oxc-minify/binding-linux-arm64-musl": "0.77.3", - "@oxc-minify/binding-linux-riscv64-gnu": "0.77.3", - "@oxc-minify/binding-linux-s390x-gnu": "0.77.3", - "@oxc-minify/binding-linux-x64-gnu": "0.77.3", - 
"@oxc-minify/binding-linux-x64-musl": "0.77.3", - "@oxc-minify/binding-wasm32-wasi": "0.77.3", - "@oxc-minify/binding-win32-arm64-msvc": "0.77.3", - "@oxc-minify/binding-win32-x64-msvc": "0.77.3" + "@oxc-minify/binding-android-arm-eabi": "0.112.0", + "@oxc-minify/binding-android-arm64": "0.112.0", + "@oxc-minify/binding-darwin-arm64": "0.112.0", + "@oxc-minify/binding-darwin-x64": "0.112.0", + "@oxc-minify/binding-freebsd-x64": "0.112.0", + "@oxc-minify/binding-linux-arm-gnueabihf": "0.112.0", + "@oxc-minify/binding-linux-arm-musleabihf": "0.112.0", + "@oxc-minify/binding-linux-arm64-gnu": "0.112.0", + "@oxc-minify/binding-linux-arm64-musl": "0.112.0", + "@oxc-minify/binding-linux-ppc64-gnu": "0.112.0", + "@oxc-minify/binding-linux-riscv64-gnu": "0.112.0", + "@oxc-minify/binding-linux-riscv64-musl": "0.112.0", + "@oxc-minify/binding-linux-s390x-gnu": "0.112.0", + "@oxc-minify/binding-linux-x64-gnu": "0.112.0", + "@oxc-minify/binding-linux-x64-musl": "0.112.0", + "@oxc-minify/binding-openharmony-arm64": "0.112.0", + "@oxc-minify/binding-wasm32-wasi": "0.112.0", + "@oxc-minify/binding-win32-arm64-msvc": "0.112.0", + "@oxc-minify/binding-win32-ia32-msvc": "0.112.0", + "@oxc-minify/binding-win32-x64-msvc": "0.112.0" } }, "node_modules/oxc-parser": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/oxc-parser/-/oxc-parser-0.77.3.tgz", - "integrity": "sha512-1h7nXjL0IGRT539tReIadfIjgrPPuuD6HmQGsgKdOxMEZGzfMeBk19bfg+sXMQi462cCnu5s5IGTEhOOlcVt1w==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/oxc-parser/-/oxc-parser-0.112.0.tgz", + "integrity": "sha512-7rQ3QdJwobMQLMZwQaPuPYMEF2fDRZwf51lZ//V+bA37nejjKW5ifMHbbCwvA889Y4RLhT+/wLJpPRhAoBaZYw==", "license": "MIT", "dependencies": { - "@oxc-project/types": "^0.77.3" + "@oxc-project/types": "^0.112.0" }, "engines": { - "node": ">=20.0.0" + "node": "^20.19.0 || >=22.12.0" }, "funding": { "url": "https://github.com/sponsors/Boshen" }, "optionalDependencies": { - 
"@oxc-parser/binding-android-arm64": "0.77.3", - "@oxc-parser/binding-darwin-arm64": "0.77.3", - "@oxc-parser/binding-darwin-x64": "0.77.3", - "@oxc-parser/binding-freebsd-x64": "0.77.3", - "@oxc-parser/binding-linux-arm-gnueabihf": "0.77.3", - "@oxc-parser/binding-linux-arm-musleabihf": "0.77.3", - "@oxc-parser/binding-linux-arm64-gnu": "0.77.3", - "@oxc-parser/binding-linux-arm64-musl": "0.77.3", - "@oxc-parser/binding-linux-riscv64-gnu": "0.77.3", - "@oxc-parser/binding-linux-s390x-gnu": "0.77.3", - "@oxc-parser/binding-linux-x64-gnu": "0.77.3", - "@oxc-parser/binding-linux-x64-musl": "0.77.3", - "@oxc-parser/binding-wasm32-wasi": "0.77.3", - "@oxc-parser/binding-win32-arm64-msvc": "0.77.3", - "@oxc-parser/binding-win32-x64-msvc": "0.77.3" + "@oxc-parser/binding-android-arm-eabi": "0.112.0", + "@oxc-parser/binding-android-arm64": "0.112.0", + "@oxc-parser/binding-darwin-arm64": "0.112.0", + "@oxc-parser/binding-darwin-x64": "0.112.0", + "@oxc-parser/binding-freebsd-x64": "0.112.0", + "@oxc-parser/binding-linux-arm-gnueabihf": "0.112.0", + "@oxc-parser/binding-linux-arm-musleabihf": "0.112.0", + "@oxc-parser/binding-linux-arm64-gnu": "0.112.0", + "@oxc-parser/binding-linux-arm64-musl": "0.112.0", + "@oxc-parser/binding-linux-ppc64-gnu": "0.112.0", + "@oxc-parser/binding-linux-riscv64-gnu": "0.112.0", + "@oxc-parser/binding-linux-riscv64-musl": "0.112.0", + "@oxc-parser/binding-linux-s390x-gnu": "0.112.0", + "@oxc-parser/binding-linux-x64-gnu": "0.112.0", + "@oxc-parser/binding-linux-x64-musl": "0.112.0", + "@oxc-parser/binding-openharmony-arm64": "0.112.0", + "@oxc-parser/binding-wasm32-wasi": "0.112.0", + "@oxc-parser/binding-win32-arm64-msvc": "0.112.0", + "@oxc-parser/binding-win32-ia32-msvc": "0.112.0", + "@oxc-parser/binding-win32-x64-msvc": "0.112.0" } }, "node_modules/oxc-transform": { - "version": "0.77.3", - "resolved": "https://registry.npmmirror.com/oxc-transform/-/oxc-transform-0.77.3.tgz", - "integrity": 
"sha512-cFiyrki2/Tgs9i0GUe8zmnJNZsGrHtNoDcyo1zTHQl/Ak0/04PIBHzurX7ibMadxfRNIn0XG0tpNrrkGDJ3k6g==", + "version": "0.112.0", + "resolved": "https://registry.npmjs.org/oxc-transform/-/oxc-transform-0.112.0.tgz", + "integrity": "sha512-cIRRvZgrHfsAHrkt8LWdAX4+Do8R0MzQSfeo9yzErzHeYiuyNiP4PCTPbOy/wBXL4MYzt3ebrBa5jt3akQkKAg==", "license": "MIT", "engines": { - "node": ">=14.0.0" + "node": "^20.19.0 || >=22.12.0" }, "funding": { "url": "https://github.com/sponsors/Boshen" }, "optionalDependencies": { - "@oxc-transform/binding-android-arm64": "0.77.3", - "@oxc-transform/binding-darwin-arm64": "0.77.3", - "@oxc-transform/binding-darwin-x64": "0.77.3", - "@oxc-transform/binding-freebsd-x64": "0.77.3", - "@oxc-transform/binding-linux-arm-gnueabihf": "0.77.3", - "@oxc-transform/binding-linux-arm-musleabihf": "0.77.3", - "@oxc-transform/binding-linux-arm64-gnu": "0.77.3", - "@oxc-transform/binding-linux-arm64-musl": "0.77.3", - "@oxc-transform/binding-linux-riscv64-gnu": "0.77.3", - "@oxc-transform/binding-linux-s390x-gnu": "0.77.3", - "@oxc-transform/binding-linux-x64-gnu": "0.77.3", - "@oxc-transform/binding-linux-x64-musl": "0.77.3", - "@oxc-transform/binding-wasm32-wasi": "0.77.3", - "@oxc-transform/binding-win32-arm64-msvc": "0.77.3", - "@oxc-transform/binding-win32-x64-msvc": "0.77.3" + "@oxc-transform/binding-android-arm-eabi": "0.112.0", + "@oxc-transform/binding-android-arm64": "0.112.0", + "@oxc-transform/binding-darwin-arm64": "0.112.0", + "@oxc-transform/binding-darwin-x64": "0.112.0", + "@oxc-transform/binding-freebsd-x64": "0.112.0", + "@oxc-transform/binding-linux-arm-gnueabihf": "0.112.0", + "@oxc-transform/binding-linux-arm-musleabihf": "0.112.0", + "@oxc-transform/binding-linux-arm64-gnu": "0.112.0", + "@oxc-transform/binding-linux-arm64-musl": "0.112.0", + "@oxc-transform/binding-linux-ppc64-gnu": "0.112.0", + "@oxc-transform/binding-linux-riscv64-gnu": "0.112.0", + "@oxc-transform/binding-linux-riscv64-musl": "0.112.0", + 
"@oxc-transform/binding-linux-s390x-gnu": "0.112.0", + "@oxc-transform/binding-linux-x64-gnu": "0.112.0", + "@oxc-transform/binding-linux-x64-musl": "0.112.0", + "@oxc-transform/binding-openharmony-arm64": "0.112.0", + "@oxc-transform/binding-wasm32-wasi": "0.112.0", + "@oxc-transform/binding-win32-arm64-msvc": "0.112.0", + "@oxc-transform/binding-win32-ia32-msvc": "0.112.0", + "@oxc-transform/binding-win32-x64-msvc": "0.112.0" } }, "node_modules/oxc-walker": { - "version": "0.4.0", - "resolved": "https://registry.npmmirror.com/oxc-walker/-/oxc-walker-0.4.0.tgz", - "integrity": "sha512-x5TJAZQD3kRnRBGZ+8uryMZUwkTYddwzBftkqyJIcmpBOXmoK/fwriRKATjZroR2d+aS7+2w1B0oz189bBTwfw==", + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/oxc-walker/-/oxc-walker-0.7.0.tgz", + "integrity": "sha512-54B4KUhrzbzc4sKvKwVYm7E2PgeROpGba0/2nlNZMqfDyca+yOor5IMb4WLGBatGDT0nkzYdYuzylg7n3YfB7A==", "license": "MIT", "dependencies": { - "estree-walker": "^3.0.3", "magic-regexp": "^0.10.0" }, "peerDependencies": { - "oxc-parser": ">=0.72.0" - } - }, - "node_modules/p-event": { - "version": "6.0.1", - "resolved": "https://registry.npmmirror.com/p-event/-/p-event-6.0.1.tgz", - "integrity": "sha512-Q6Bekk5wpzW5qIyUP4gdMEujObYstZl6DMMOSenwBvV0BlE5LkDwkjs5yHbZmdCEq2o4RJx4tE1vwxFVf2FG1w==", - "license": "MIT", - "dependencies": { - "p-timeout": "^6.1.2" - }, - "engines": { - "node": ">=16.17" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-limit": { - "version": "4.0.0", - "resolved": "https://registry.npmmirror.com/p-limit/-/p-limit-4.0.0.tgz", - "integrity": "sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==", - "license": "MIT", - "dependencies": { - "yocto-queue": "^1.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-locate": { - "version": 
"6.0.0", - "resolved": "https://registry.npmmirror.com/p-locate/-/p-locate-6.0.0.tgz", - "integrity": "sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw==", - "license": "MIT", - "dependencies": { - "p-limit": "^4.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-map": { - "version": "7.0.3", - "resolved": "https://registry.npmmirror.com/p-map/-/p-map-7.0.3.tgz", - "integrity": "sha512-VkndIv2fIB99swvQoA65bm+fsmt6UNdGeIB0oxBs+WhAhdh08QA04JXpI7rbB9r08/nkbysKoya9rtDERYOYMA==", - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-timeout": { - "version": "6.1.4", - "resolved": "https://registry.npmmirror.com/p-timeout/-/p-timeout-6.1.4.tgz", - "integrity": "sha512-MyIV3ZA/PmyBN/ud8vV9XzwTrNtR4jFrObymZYnZqMmW0zA8Z17vnT0rBgFE/TlohB+YCHqXMgZzb3Csp49vqg==", - "license": "MIT", - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-wait-for": { - "version": "5.0.2", - "resolved": "https://registry.npmmirror.com/p-wait-for/-/p-wait-for-5.0.2.tgz", - "integrity": "sha512-lwx6u1CotQYPVju77R+D0vFomni/AqRfqLmqQ8hekklqZ6gAY9rONh7lBQ0uxWMkC2AuX9b2DVAl8To0NyP1JA==", - "license": "MIT", - "dependencies": { - "p-timeout": "^6.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "oxc-parser": ">=0.98.0" } }, "node_modules/package-json-from-dist": { @@ -9516,60 +8407,6 @@ "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", "license": "BlueOak-1.0.0" }, - "node_modules/package-manager-detector": { - "version": "1.3.0", - "resolved": 
"https://registry.npmmirror.com/package-manager-detector/-/package-manager-detector-1.3.0.tgz", - "integrity": "sha512-ZsEbbZORsyHuO00lY1kV3/t72yp6Ysay6Pd17ZAlNGuGwmWDLCJxFpRs0IzfXfj1o4icJOkUEioexFHzyPurSQ==", - "license": "MIT" - }, - "node_modules/parse-gitignore": { - "version": "2.0.0", - "resolved": "https://registry.npmmirror.com/parse-gitignore/-/parse-gitignore-2.0.0.tgz", - "integrity": "sha512-RmVuCHWsfu0QPNW+mraxh/xjQVw/lhUCUru8Zni3Ctq3AoMhpDTq0OVdKS6iesd6Kqb7viCV3isAL43dciOSog==", - "license": "MIT", - "engines": { - "node": ">=14" - } - }, - "node_modules/parse-json": { - "version": "8.3.0", - "resolved": "https://registry.npmmirror.com/parse-json/-/parse-json-8.3.0.tgz", - "integrity": "sha512-ybiGyvspI+fAoRQbIPRddCcSTV9/LsJbf0e/S85VLowVGzRmokfneg2kwVW/KU5rOXrPSbF1qAKPMgNTqqROQQ==", - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.26.2", - "index-to-position": "^1.1.0", - "type-fest": "^4.39.1" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/parse-path": { - "version": "7.1.0", - "resolved": "https://registry.npmmirror.com/parse-path/-/parse-path-7.1.0.tgz", - "integrity": "sha512-EuCycjZtfPcjWk7KTksnJ5xPMvWGA/6i4zrLYhRG0hGvC3GPU/jGUj3Cy+ZR0v30duV3e23R95T1lE2+lsndSw==", - "license": "MIT", - "dependencies": { - "protocols": "^2.0.0" - } - }, - "node_modules/parse-url": { - "version": "9.2.0", - "resolved": "https://registry.npmmirror.com/parse-url/-/parse-url-9.2.0.tgz", - "integrity": "sha512-bCgsFI+GeGWPAvAiUv63ZorMeif3/U0zaXABGJbOWt5OH2KCaPHF6S+0ok4aqM9RuIPGyZdx9tR9l13PsW4AYQ==", - "license": "MIT", - "dependencies": { - "@types/parse-path": "^7.0.0", - "parse-path": "^7.0.0" - }, - "engines": { - "node": ">=14.13.0" - } - }, "node_modules/parseurl": { "version": "1.3.3", "resolved": "https://registry.npmmirror.com/parseurl/-/parseurl-1.3.3.tgz", @@ -9579,14 +8416,11 @@ "node": ">= 0.8" } }, - "node_modules/path-exists": { - 
"version": "5.0.0", - "resolved": "https://registry.npmmirror.com/path-exists/-/path-exists-5.0.0.tgz", - "integrity": "sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==", - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - } + "node_modules/path-browserify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-1.0.1.tgz", + "integrity": "sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==", + "license": "MIT" }, "node_modules/path-is-absolute": { "version": "1.0.1", @@ -9640,35 +8474,18 @@ "integrity": "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==", "license": "MIT" }, - "node_modules/path-type": { - "version": "6.0.0", - "resolved": "https://registry.npmmirror.com/path-type/-/path-type-6.0.0.tgz", - "integrity": "sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==", - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/pathe": { "version": "2.0.3", "resolved": "https://registry.npmmirror.com/pathe/-/pathe-2.0.3.tgz", "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", "license": "MIT" }, - "node_modules/pend": { - "version": "1.2.0", - "resolved": "https://registry.npmmirror.com/pend/-/pend-1.2.0.tgz", - "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==", - "license": "MIT" - }, "node_modules/perfect-debounce": { "version": "1.0.0", "resolved": "https://registry.npmmirror.com/perfect-debounce/-/perfect-debounce-1.0.0.tgz", "integrity": "sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA==", - "license": "MIT" + "license": "MIT", + "peer": 
true }, "node_modules/picocolors": { "version": "1.1.1", @@ -9739,9 +8556,9 @@ } }, "node_modules/pkg-types": { - "version": "2.2.0", - "resolved": "https://registry.npmmirror.com/pkg-types/-/pkg-types-2.2.0.tgz", - "integrity": "sha512-2SM/GZGAEkPp3KWORxQZns4M+WSeXbC2HEvmOIJe3Cmiv6ieAJvdVhDldtHqM5J1Y7MrR1XhkBT/rMlhh9FdqQ==", + "version": "2.3.0", + "resolved": "https://registry.npmmirror.com/pkg-types/-/pkg-types-2.3.0.tgz", + "integrity": "sha512-SIqCzDRg0s9npO5XQ3tNZioRY1uK06lA41ynBC1YmFTmnY6FjUjVt6s4LoADmwoig1qqD0oK8h1p/8mlMx8Oig==", "license": "MIT", "dependencies": { "confbox": "^0.2.2", @@ -9749,6 +8566,24 @@ "pathe": "^2.0.3" } }, + "node_modules/popmotion": { + "version": "11.0.5", + "resolved": "https://registry.npmmirror.com/popmotion/-/popmotion-11.0.5.tgz", + "integrity": "sha512-la8gPM1WYeFznb/JqF4GiTkRRPZsfaj2+kCxqQgr2MJylMmIKUwBfWW8Wa5fml/8gmtlD5yI01MP1QCZPWmppA==", + "license": "MIT", + "dependencies": { + "framesync": "6.1.2", + "hey-listen": "^1.0.8", + "style-value-types": "5.1.2", + "tslib": "2.4.0" + } + }, + "node_modules/popmotion/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmmirror.com/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==", + "license": "0BSD" + }, "node_modules/portfinder": { "version": "1.0.37", "resolved": "https://registry.npmmirror.com/portfinder/-/portfinder-1.0.37.tgz", @@ -9792,7 +8627,7 @@ }, "node_modules/postcss-calc": { "version": "10.1.1", - "resolved": "https://registry.npmmirror.com/postcss-calc/-/postcss-calc-10.1.1.tgz", + "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-10.1.1.tgz", "integrity": "sha512-NYEsLHh8DgG/PRH2+G9BTuUdtf9ViS+vdoQ0YA5OQdGsfN4ztiwtDWNtBl9EKeqNMFnIu8IKZ0cLxEQ5r5KVMw==", "license": "MIT", "dependencies": { @@ -9807,12 +8642,12 @@ } }, "node_modules/postcss-colormin": { - "version": "7.0.4", - "resolved": 
"https://registry.npmmirror.com/postcss-colormin/-/postcss-colormin-7.0.4.tgz", - "integrity": "sha512-ziQuVzQZBROpKpfeDwmrG+Vvlr0YWmY/ZAk99XD+mGEBuEojoFekL41NCsdhyNUtZI7DPOoIWIR7vQQK9xwluw==", + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-7.0.5.tgz", + "integrity": "sha512-ekIBP/nwzRWhEMmIxHHbXHcMdzd1HIUzBECaj5KEdLz9DVP2HzT065sEhvOx1dkLjYW7jyD0CngThx6bpFi2fA==", "license": "MIT", "dependencies": { - "browserslist": "^4.25.1", + "browserslist": "^4.27.0", "caniuse-api": "^3.0.0", "colord": "^2.9.3", "postcss-value-parser": "^4.2.0" @@ -9825,12 +8660,12 @@ } }, "node_modules/postcss-convert-values": { - "version": "7.0.6", - "resolved": "https://registry.npmmirror.com/postcss-convert-values/-/postcss-convert-values-7.0.6.tgz", - "integrity": "sha512-MD/eb39Mr60hvgrqpXsgbiqluawYg/8K4nKsqRsuDX9f+xN1j6awZCUv/5tLH8ak3vYp/EMXwdcnXvfZYiejCQ==", + "version": "7.0.8", + "resolved": "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-7.0.8.tgz", + "integrity": "sha512-+XNKuPfkHTCEo499VzLMYn94TiL3r9YqRE3Ty+jP7UX4qjewUONey1t7CG21lrlTLN07GtGM8MqFVp86D4uKJg==", "license": "MIT", "dependencies": { - "browserslist": "^4.25.1", + "browserslist": "^4.27.0", "postcss-value-parser": "^4.2.0" }, "engines": { @@ -9841,9 +8676,9 @@ } }, "node_modules/postcss-discard-comments": { - "version": "7.0.4", - "resolved": "https://registry.npmmirror.com/postcss-discard-comments/-/postcss-discard-comments-7.0.4.tgz", - "integrity": "sha512-6tCUoql/ipWwKtVP/xYiFf1U9QgJ0PUvxN7pTcsQ8Ns3Fnwq1pU5D5s1MhT/XySeLq6GXNvn37U46Ded0TckWg==", + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-7.0.5.tgz", + "integrity": "sha512-IR2Eja8WfYgN5n32vEGSctVQ1+JARfu4UH8M7bgGh1bC+xI/obsPJXaBpQF7MAByvgwZinhpHpdrmXtvVVlKcQ==", "license": "MIT", "dependencies": { "postcss-selector-parser": "^7.1.0" @@ -9857,7 +8692,7 @@ }, "node_modules/postcss-discard-duplicates": { "version": 
"7.0.2", - "resolved": "https://registry.npmmirror.com/postcss-discard-duplicates/-/postcss-discard-duplicates-7.0.2.tgz", + "resolved": "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-7.0.2.tgz", "integrity": "sha512-eTonaQvPZ/3i1ASDHOKkYwAybiM45zFIc7KXils4mQmHLqIswXD9XNOKEVxtTFnsmwYzF66u4LMgSr0abDlh5w==", "license": "MIT", "engines": { @@ -9869,7 +8704,7 @@ }, "node_modules/postcss-discard-empty": { "version": "7.0.1", - "resolved": "https://registry.npmmirror.com/postcss-discard-empty/-/postcss-discard-empty-7.0.1.tgz", + "resolved": "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-7.0.1.tgz", "integrity": "sha512-cFrJKZvcg/uxB6Ijr4l6qmn3pXQBna9zyrPC+sK0zjbkDUZew+6xDltSF7OeB7rAtzaaMVYSdbod+sZOCWnMOg==", "license": "MIT", "engines": { @@ -9881,7 +8716,7 @@ }, "node_modules/postcss-discard-overridden": { "version": "7.0.1", - "resolved": "https://registry.npmmirror.com/postcss-discard-overridden/-/postcss-discard-overridden-7.0.1.tgz", + "resolved": "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-7.0.1.tgz", "integrity": "sha512-7c3MMjjSZ/qYrx3uc1940GSOzN1Iqjtlqe8uoSg+qdVPYyRb0TILSqqmtlSFuE4mTDECwsm397Ya7iXGzfF7lg==", "license": "MIT", "engines": { @@ -9908,26 +8743,6 @@ "postcss": "^8.0.0" } }, - "node_modules/postcss-import/node_modules/resolve": { - "version": "1.22.10", - "resolved": "https://registry.npmmirror.com/resolve/-/resolve-1.22.10.tgz", - "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", - "license": "MIT", - "dependencies": { - "is-core-module": "^2.16.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/postcss-js": { "version": "4.0.1", "resolved": 
"https://registry.npmmirror.com/postcss-js/-/postcss-js-4.0.1.tgz", @@ -9984,7 +8799,7 @@ }, "node_modules/postcss-merge-longhand": { "version": "7.0.5", - "resolved": "https://registry.npmmirror.com/postcss-merge-longhand/-/postcss-merge-longhand-7.0.5.tgz", + "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-7.0.5.tgz", "integrity": "sha512-Kpu5v4Ys6QI59FxmxtNB/iHUVDn9Y9sYw66D6+SZoIk4QTz1prC4aYkhIESu+ieG1iylod1f8MILMs1Em3mmIw==", "license": "MIT", "dependencies": { @@ -9999,12 +8814,12 @@ } }, "node_modules/postcss-merge-rules": { - "version": "7.0.6", - "resolved": "https://registry.npmmirror.com/postcss-merge-rules/-/postcss-merge-rules-7.0.6.tgz", - "integrity": "sha512-2jIPT4Tzs8K87tvgCpSukRQ2jjd+hH6Bb8rEEOUDmmhOeTcqDg5fEFK8uKIu+Pvc3//sm3Uu6FRqfyv7YF7+BQ==", + "version": "7.0.7", + "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-7.0.7.tgz", + "integrity": "sha512-njWJrd/Ms6XViwowaaCc+/vqhPG3SmXn725AGrnl+BgTuRPEacjiLEaGq16J6XirMJbtKkTwnt67SS+e2WGoew==", "license": "MIT", "dependencies": { - "browserslist": "^4.25.1", + "browserslist": "^4.27.0", "caniuse-api": "^3.0.0", "cssnano-utils": "^5.0.1", "postcss-selector-parser": "^7.1.0" @@ -10018,7 +8833,7 @@ }, "node_modules/postcss-minify-font-values": { "version": "7.0.1", - "resolved": "https://registry.npmmirror.com/postcss-minify-font-values/-/postcss-minify-font-values-7.0.1.tgz", + "resolved": "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-7.0.1.tgz", "integrity": "sha512-2m1uiuJeTplll+tq4ENOQSzB8LRnSUChBv7oSyFLsJRtUgAAJGP6LLz0/8lkinTgxrmJSPOEhgY1bMXOQ4ZXhQ==", "license": "MIT", "dependencies": { @@ -10033,7 +8848,7 @@ }, "node_modules/postcss-minify-gradients": { "version": "7.0.1", - "resolved": "https://registry.npmmirror.com/postcss-minify-gradients/-/postcss-minify-gradients-7.0.1.tgz", + "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-7.0.1.tgz", 
"integrity": "sha512-X9JjaysZJwlqNkJbUDgOclyG3jZEpAMOfof6PUZjPnPrePnPG62pS17CjdM32uT1Uq1jFvNSff9l7kNbmMSL2A==", "license": "MIT", "dependencies": { @@ -10049,12 +8864,12 @@ } }, "node_modules/postcss-minify-params": { - "version": "7.0.4", - "resolved": "https://registry.npmmirror.com/postcss-minify-params/-/postcss-minify-params-7.0.4.tgz", - "integrity": "sha512-3OqqUddfH8c2e7M35W6zIwv7jssM/3miF9cbCSb1iJiWvtguQjlxZGIHK9JRmc8XAKmE2PFGtHSM7g/VcW97sw==", + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-7.0.5.tgz", + "integrity": "sha512-FGK9ky02h6Ighn3UihsyeAH5XmLEE2MSGH5Tc4tXMFtEDx7B+zTG6hD/+/cT+fbF7PbYojsmmWjyTwFwW1JKQQ==", "license": "MIT", "dependencies": { - "browserslist": "^4.25.1", + "browserslist": "^4.27.0", "cssnano-utils": "^5.0.1", "postcss-value-parser": "^4.2.0" }, @@ -10067,7 +8882,7 @@ }, "node_modules/postcss-minify-selectors": { "version": "7.0.5", - "resolved": "https://registry.npmmirror.com/postcss-minify-selectors/-/postcss-minify-selectors-7.0.5.tgz", + "resolved": "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-7.0.5.tgz", "integrity": "sha512-x2/IvofHcdIrAm9Q+p06ZD1h6FPcQ32WtCRVodJLDR+WMn8EVHI1kvLxZuGKz/9EY5nAmI6lIQIrpo4tBy5+ug==", "license": "MIT", "dependencies": { @@ -10148,7 +8963,7 @@ }, "node_modules/postcss-normalize-charset": { "version": "7.0.1", - "resolved": "https://registry.npmmirror.com/postcss-normalize-charset/-/postcss-normalize-charset-7.0.1.tgz", + "resolved": "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-7.0.1.tgz", "integrity": "sha512-sn413ofhSQHlZFae//m9FTOfkmiZ+YQXsbosqOWRiVQncU2BA3daX3n0VF3cG6rGLSFVc5Di/yns0dFfh8NFgQ==", "license": "MIT", "engines": { @@ -10160,7 +8975,7 @@ }, "node_modules/postcss-normalize-display-values": { "version": "7.0.1", - "resolved": "https://registry.npmmirror.com/postcss-normalize-display-values/-/postcss-normalize-display-values-7.0.1.tgz", + "resolved": 
"https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-7.0.1.tgz", "integrity": "sha512-E5nnB26XjSYz/mGITm6JgiDpAbVuAkzXwLzRZtts19jHDUBFxZ0BkXAehy0uimrOjYJbocby4FVswA/5noOxrQ==", "license": "MIT", "dependencies": { @@ -10175,7 +8990,7 @@ }, "node_modules/postcss-normalize-positions": { "version": "7.0.1", - "resolved": "https://registry.npmmirror.com/postcss-normalize-positions/-/postcss-normalize-positions-7.0.1.tgz", + "resolved": "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-7.0.1.tgz", "integrity": "sha512-pB/SzrIP2l50ZIYu+yQZyMNmnAcwyYb9R1fVWPRxm4zcUFCY2ign7rcntGFuMXDdd9L2pPNUgoODDk91PzRZuQ==", "license": "MIT", "dependencies": { @@ -10190,7 +9005,7 @@ }, "node_modules/postcss-normalize-repeat-style": { "version": "7.0.1", - "resolved": "https://registry.npmmirror.com/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-7.0.1.tgz", + "resolved": "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-7.0.1.tgz", "integrity": "sha512-NsSQJ8zj8TIDiF0ig44Byo3Jk9e4gNt9x2VIlJudnQQ5DhWAHJPF4Tr1ITwyHio2BUi/I6Iv0HRO7beHYOloYQ==", "license": "MIT", "dependencies": { @@ -10205,7 +9020,7 @@ }, "node_modules/postcss-normalize-string": { "version": "7.0.1", - "resolved": "https://registry.npmmirror.com/postcss-normalize-string/-/postcss-normalize-string-7.0.1.tgz", + "resolved": "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-7.0.1.tgz", "integrity": "sha512-QByrI7hAhsoze992kpbMlJSbZ8FuCEc1OT9EFbZ6HldXNpsdpZr+YXC5di3UEv0+jeZlHbZcoCADgb7a+lPmmQ==", "license": "MIT", "dependencies": { @@ -10220,7 +9035,7 @@ }, "node_modules/postcss-normalize-timing-functions": { "version": "7.0.1", - "resolved": "https://registry.npmmirror.com/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-7.0.1.tgz", + "resolved": 
"https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-7.0.1.tgz", "integrity": "sha512-bHifyuuSNdKKsnNJ0s8fmfLMlvsQwYVxIoUBnowIVl2ZAdrkYQNGVB4RxjfpvkMjipqvbz0u7feBZybkl/6NJg==", "license": "MIT", "dependencies": { @@ -10234,12 +9049,12 @@ } }, "node_modules/postcss-normalize-unicode": { - "version": "7.0.4", - "resolved": "https://registry.npmmirror.com/postcss-normalize-unicode/-/postcss-normalize-unicode-7.0.4.tgz", - "integrity": "sha512-LvIURTi1sQoZqj8mEIE8R15yvM+OhbR1avynMtI9bUzj5gGKR/gfZFd8O7VMj0QgJaIFzxDwxGl/ASMYAkqO8g==", + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-7.0.5.tgz", + "integrity": "sha512-X6BBwiRxVaFHrb2WyBMddIeB5HBjJcAaUHyhLrM2FsxSq5TFqcHSsK7Zu1otag+o0ZphQGJewGH1tAyrD0zX1Q==", "license": "MIT", "dependencies": { - "browserslist": "^4.25.1", + "browserslist": "^4.27.0", "postcss-value-parser": "^4.2.0" }, "engines": { @@ -10251,7 +9066,7 @@ }, "node_modules/postcss-normalize-url": { "version": "7.0.1", - "resolved": "https://registry.npmmirror.com/postcss-normalize-url/-/postcss-normalize-url-7.0.1.tgz", + "resolved": "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-7.0.1.tgz", "integrity": "sha512-sUcD2cWtyK1AOL/82Fwy1aIVm/wwj5SdZkgZ3QiUzSzQQofrbq15jWJ3BA7Z+yVRwamCjJgZJN0I9IS7c6tgeQ==", "license": "MIT", "dependencies": { @@ -10266,7 +9081,7 @@ }, "node_modules/postcss-normalize-whitespace": { "version": "7.0.1", - "resolved": "https://registry.npmmirror.com/postcss-normalize-whitespace/-/postcss-normalize-whitespace-7.0.1.tgz", + "resolved": "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-7.0.1.tgz", "integrity": "sha512-vsbgFHMFQrJBJKrUFJNZ2pgBeBkC2IvvoHjz1to0/0Xk7sII24T0qFOiJzG6Fu3zJoq/0yI4rKWi7WhApW+EFA==", "license": "MIT", "dependencies": { @@ -10281,7 +9096,7 @@ }, "node_modules/postcss-ordered-values": { "version": "7.0.2", - "resolved": 
"https://registry.npmmirror.com/postcss-ordered-values/-/postcss-ordered-values-7.0.2.tgz", + "resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-7.0.2.tgz", "integrity": "sha512-AMJjt1ECBffF7CEON/Y0rekRLS6KsePU6PRP08UqYW4UGFRnTXNrByUzYK1h8AC7UWTZdQ9O3Oq9kFIhm0SFEw==", "license": "MIT", "dependencies": { @@ -10296,12 +9111,12 @@ } }, "node_modules/postcss-reduce-initial": { - "version": "7.0.4", - "resolved": "https://registry.npmmirror.com/postcss-reduce-initial/-/postcss-reduce-initial-7.0.4.tgz", - "integrity": "sha512-rdIC9IlMBn7zJo6puim58Xd++0HdbvHeHaPgXsimMfG1ijC5A9ULvNLSE0rUKVJOvNMcwewW4Ga21ngyJjY/+Q==", + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-7.0.5.tgz", + "integrity": "sha512-RHagHLidG8hTZcnr4FpyMB2jtgd/OcyAazjMhoy5qmWJOx1uxKh4ntk0Pb46ajKM0rkf32lRH4C8c9qQiPR6IA==", "license": "MIT", "dependencies": { - "browserslist": "^4.25.1", + "browserslist": "^4.27.0", "caniuse-api": "^3.0.0" }, "engines": { @@ -10313,7 +9128,7 @@ }, "node_modules/postcss-reduce-transforms": { "version": "7.0.1", - "resolved": "https://registry.npmmirror.com/postcss-reduce-transforms/-/postcss-reduce-transforms-7.0.1.tgz", + "resolved": "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-7.0.1.tgz", "integrity": "sha512-MhyEbfrm+Mlp/36hvZ9mT9DaO7dbncU0CvWI8V93LRkY6IYlu38OPg3FObnuKTUxJ4qA8HpurdQOo5CyqqO76g==", "license": "MIT", "dependencies": { @@ -10341,7 +9156,7 @@ }, "node_modules/postcss-svgo": { "version": "7.1.0", - "resolved": "https://registry.npmmirror.com/postcss-svgo/-/postcss-svgo-7.1.0.tgz", + "resolved": "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-7.1.0.tgz", "integrity": "sha512-KnAlfmhtoLz6IuU3Sij2ycusNs4jPW+QoFE5kuuUOK8awR6tMxZQrs5Ey3BUz7nFCzT3eqyFgqkyrHiaU2xx3w==", "license": "MIT", "dependencies": { @@ -10357,7 +9172,7 @@ }, "node_modules/postcss-unique-selectors": { "version": "7.0.4", - "resolved": 
"https://registry.npmmirror.com/postcss-unique-selectors/-/postcss-unique-selectors-7.0.4.tgz", + "resolved": "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-7.0.4.tgz", "integrity": "sha512-pmlZjsmEAG7cHd7uK3ZiNSW6otSZ13RHuZ/4cDN/bVglS5EpF2r2oxY99SuOHa8m7AWoBCelTS3JPpzsIs8skQ==", "license": "MIT", "dependencies": { @@ -10376,23 +9191,6 @@ "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", "license": "MIT" }, - "node_modules/postcss-values-parser": { - "version": "6.0.2", - "resolved": "https://registry.npmmirror.com/postcss-values-parser/-/postcss-values-parser-6.0.2.tgz", - "integrity": "sha512-YLJpK0N1brcNJrs9WatuJFtHaV9q5aAOj+S4DI5S7jgHlRfm0PIbDCAFRYMQD5SHq7Fy6xsDhyutgS0QOAs0qw==", - "license": "MPL-2.0", - "dependencies": { - "color-name": "^1.1.4", - "is-url-superb": "^4.0.0", - "quote-unquote": "^1.0.0" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "postcss": "^8.2.9" - } - }, "node_modules/postcss/node_modules/nanoid": { "version": "3.3.11", "resolved": "https://registry.npmmirror.com/nanoid/-/nanoid-3.3.11.tgz", @@ -10411,51 +9209,13 @@ "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" } }, - "node_modules/precinct": { - "version": "12.2.0", - "resolved": "https://registry.npmmirror.com/precinct/-/precinct-12.2.0.tgz", - "integrity": "sha512-NFBMuwIfaJ4SocE9YXPU/n4AcNSoFMVFjP72nvl3cx69j/ke61/hPOWFREVxLkFhhEGnA8ZuVfTqJBa+PK3b5w==", - "license": "MIT", - "dependencies": { - "@dependents/detective-less": "^5.0.1", - "commander": "^12.1.0", - "detective-amd": "^6.0.1", - "detective-cjs": "^6.0.1", - "detective-es6": "^5.0.1", - "detective-postcss": "^7.0.1", - "detective-sass": "^6.0.1", - "detective-scss": "^5.0.1", - "detective-stylus": "^5.0.1", - "detective-typescript": "^14.0.0", - "detective-vue2": "^2.2.0", - "module-definition": "^6.0.1", - "node-source-walk": "^7.0.1", - "postcss": "^8.5.1", - "typescript": "^5.7.3" - }, - "bin": { - 
"precinct": "bin/cli.js" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/precinct/node_modules/commander": { - "version": "12.1.0", - "resolved": "https://registry.npmmirror.com/commander/-/commander-12.1.0.tgz", - "integrity": "sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==", - "license": "MIT", - "engines": { - "node": ">=18" - } - }, "node_modules/pretty-bytes": { - "version": "6.1.1", - "resolved": "https://registry.npmmirror.com/pretty-bytes/-/pretty-bytes-6.1.1.tgz", - "integrity": "sha512-mQUvGU6aUFQ+rNvTIAcZuWGRT9a6f6Yrg9bHs4ImKF+HZCEK+plBvnAZYSIQztknZF2qnzNtr6F8s0+IuptdlQ==", + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-7.1.0.tgz", + "integrity": "sha512-nODzvTiYVRGRqAOvE84Vk5JDPyyxsVk0/fbA/bq7RqlnhksGpset09XTxbpvLTIjoaF7K8Z8DG8yHtKGTPSYRw==", "license": "MIT", "engines": { - "node": "^14.13.1 || >=16.0.0" + "node": ">=20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -10463,7 +9223,7 @@ }, "node_modules/process": { "version": "0.11.10", - "resolved": "https://registry.npmmirror.com/process/-/process-0.11.10.tgz", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", "license": "MIT", "engines": { @@ -10472,64 +9232,20 @@ }, "node_modules/process-nextick-args": { "version": "2.0.1", - "resolved": "https://registry.npmmirror.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", "license": "MIT" }, - "node_modules/prompts": { - "version": "2.4.2", - "resolved": "https://registry.npmmirror.com/prompts/-/prompts-2.4.2.tgz", - "integrity": 
"sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", - "license": "MIT", - "dependencies": { - "kleur": "^3.0.3", - "sisteransi": "^1.0.5" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/protocols": { - "version": "2.0.2", - "resolved": "https://registry.npmmirror.com/protocols/-/protocols-2.0.2.tgz", - "integrity": "sha512-hHVTzba3wboROl0/aWRRG9dMytgH6ow//STBZh43l/wQgmMhYhOFi0EHWAPtoCz9IAUymsyP0TSBHkhgMEGNnQ==", - "license": "MIT" - }, "node_modules/proxy-from-env": { "version": "1.1.0", "resolved": "https://registry.npmmirror.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz", "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", "license": "MIT" }, - "node_modules/pump": { - "version": "3.0.3", - "resolved": "https://registry.npmmirror.com/pump/-/pump-3.0.3.tgz", - "integrity": "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==", - "license": "MIT", - "dependencies": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, - "node_modules/qs": { - "version": "6.14.0", - "resolved": "https://registry.npmmirror.com/qs/-/qs-6.14.0.tgz", - "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", - "license": "BSD-3-Clause", - "dependencies": { - "side-channel": "^1.1.0" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/quansync": { - "version": "0.2.10", - "resolved": "https://registry.npmmirror.com/quansync/-/quansync-0.2.10.tgz", - "integrity": "sha512-t41VRkMYbkHyCYmOvx/6URnN80H7k4X0lLdBMGsz+maAwrJQYB1djpV6vHrQIBE0WBSGqhtEHrK9U3DWWH8v7A==", + "version": "0.2.11", + "resolved": "https://registry.npmmirror.com/quansync/-/quansync-0.2.11.tgz", + "integrity": "sha512-AifT7QEbW9Nri4tAwR5M/uzpBuqfZf+zwaEM/QkzEjj7NBuFD2rBuy0K3dE+8wltbezDV7JMA0WfnCPYRSYbXA==", 
"funding": [ { "type": "individual", @@ -10562,12 +9278,6 @@ ], "license": "MIT" }, - "node_modules/quote-unquote": { - "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/quote-unquote/-/quote-unquote-1.0.0.tgz", - "integrity": "sha512-twwRO/ilhlG/FIgYeKGFqyHhoEhqgnKVkcmqMKi2r524gz3ZbDTcyFt38E9xjJI2vT+KbRNHVbnJ/e0I25Azwg==", - "license": "MIT" - }, "node_modules/radix3": { "version": "1.1.2", "resolved": "https://registry.npmmirror.com/radix3/-/radix3-1.1.2.tgz", @@ -10576,7 +9286,7 @@ }, "node_modules/randombytes": { "version": "2.1.0", - "resolved": "https://registry.npmmirror.com/randombytes/-/randombytes-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", "license": "MIT", "dependencies": { @@ -10585,7 +9295,7 @@ }, "node_modules/range-parser": { "version": "1.2.1", - "resolved": "https://registry.npmmirror.com/range-parser/-/range-parser-1.2.1.tgz", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", "license": "MIT", "engines": { @@ -10611,45 +9321,9 @@ "pify": "^2.3.0" } }, - "node_modules/read-package-up": { - "version": "11.0.0", - "resolved": "https://registry.npmmirror.com/read-package-up/-/read-package-up-11.0.0.tgz", - "integrity": "sha512-MbgfoNPANMdb4oRBNg5eqLbB2t2r+o5Ua1pNt8BqGp4I0FJZhuVSOj3PaBPni4azWuSzEdNn2evevzVmEk1ohQ==", - "license": "MIT", - "dependencies": { - "find-up-simple": "^1.0.0", - "read-pkg": "^9.0.0", - "type-fest": "^4.6.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/read-pkg": { - "version": "9.0.1", - "resolved": "https://registry.npmmirror.com/read-pkg/-/read-pkg-9.0.1.tgz", - "integrity": 
"sha512-9viLL4/n1BJUCT1NXVTdS1jtm80yDEgR5T4yCelII49Mbj0v1rZdKqj7zCiYdbB0CuCgdrvHcNogAKTFPBocFA==", - "license": "MIT", - "dependencies": { - "@types/normalize-package-data": "^2.4.3", - "normalize-package-data": "^6.0.0", - "parse-json": "^8.0.0", - "type-fest": "^4.6.0", - "unicorn-magic": "^0.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/readable-stream": { "version": "4.7.0", - "resolved": "https://registry.npmmirror.com/readable-stream/-/readable-stream-4.7.0.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.7.0.tgz", "integrity": "sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg==", "license": "MIT", "dependencies": { @@ -10665,7 +9339,7 @@ }, "node_modules/readdir-glob": { "version": "1.1.3", - "resolved": "https://registry.npmmirror.com/readdir-glob/-/readdir-glob-1.1.3.tgz", + "resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-1.1.3.tgz", "integrity": "sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==", "license": "Apache-2.0", "dependencies": { @@ -10673,9 +9347,9 @@ } }, "node_modules/readdir-glob/node_modules/minimatch": { - "version": "5.1.6", - "resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-5.1.6.tgz", - "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "version": "5.1.9", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.9.tgz", + "integrity": "sha512-7o1wEA2RyMP7Iu7GNba9vc0RWWGACJOCZBJX2GJWip0ikV+wcOsgVuY9uE8CPiyQhkGFSlhuSkZPavN7u1c2Fw==", "license": "ISC", "dependencies": { "brace-expansion": "^2.0.1" @@ -10685,12 +9359,12 @@ } }, "node_modules/readdirp": { - "version": "4.1.2", - "resolved": "https://registry.npmmirror.com/readdirp/-/readdirp-4.1.2.tgz", - "integrity": 
"sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-5.0.0.tgz", + "integrity": "sha512-9u/XQ1pvrQtYyMpZe7DXKv2p5CNvyVwzUB6uhLAnQwHMSgKMBR62lc7AHljaeteeHXn11XTAaLLUVZYVZyuRBQ==", "license": "MIT", "engines": { - "node": ">= 14.18.0" + "node": ">= 20.19.0" }, "funding": { "type": "individual", @@ -10699,7 +9373,7 @@ }, "node_modules/redis-errors": { "version": "1.2.0", - "resolved": "https://registry.npmmirror.com/redis-errors/-/redis-errors-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz", "integrity": "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==", "license": "MIT", "engines": { @@ -10708,7 +9382,7 @@ }, "node_modules/redis-parser": { "version": "3.0.0", - "resolved": "https://registry.npmmirror.com/redis-parser/-/redis-parser-3.0.0.tgz", + "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz", "integrity": "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==", "license": "MIT", "dependencies": { @@ -10720,19 +9394,13 @@ }, "node_modules/regexp-tree": { "version": "0.1.27", - "resolved": "https://registry.npmmirror.com/regexp-tree/-/regexp-tree-0.1.27.tgz", + "resolved": "https://registry.npmjs.org/regexp-tree/-/regexp-tree-0.1.27.tgz", "integrity": "sha512-iETxpjK6YoRWJG5o6hXLwvjYAoW+FEZn9os0PD/b6AP6xQwsa/Y7lCVgIixBbUPMfhu+i2LtdeAqVTgGlQarfA==", "license": "MIT", "bin": { "regexp-tree": "bin/regexp-tree" } }, - "node_modules/remove-trailing-separator": { - "version": "1.1.0", - "resolved": "https://registry.npmmirror.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz", - "integrity": "sha512-/hS+Y0u3aOfIETiaiirUFwDBDzmXPvO+jAfKTitUngIPzdKc6Z0LoFjM/CK5PL4C+eKwHohlHAb6H0VFfmmUsw==", - "license": "ISC" - }, "node_modules/replace-in-file": { "version": "6.3.5", 
"resolved": "https://registry.npmmirror.com/replace-in-file/-/replace-in-file-6.3.5.tgz", @@ -10782,9 +9450,9 @@ } }, "node_modules/replace-in-file/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz", + "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==", "license": "ISC", "dependencies": { "brace-expansion": "^1.1.7" @@ -10802,32 +9470,29 @@ "node": ">=0.10.0" } }, - "node_modules/require-package-name": { - "version": "2.0.1", - "resolved": "https://registry.npmmirror.com/require-package-name/-/require-package-name-2.0.1.tgz", - "integrity": "sha512-uuoJ1hU/k6M0779t3VMVIYpb2VMJk05cehCaABFhXaibcbvfgR8wKiozLjVFSzJPmQMRqIcO0HMyTFqfV09V6Q==", - "license": "MIT" - }, "node_modules/resolve": { - "version": "2.0.0-next.5", - "resolved": "https://registry.npmmirror.com/resolve/-/resolve-2.0.0-next.5.tgz", - "integrity": "sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA==", + "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", "license": "MIT", "dependencies": { - "is-core-module": "^2.13.0", + "is-core-module": "^2.16.1", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": { "resolve": "bin/resolve" }, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/resolve-from": { "version": "5.0.0", - "resolved": "https://registry.npmmirror.com/resolve-from/-/resolve-from-5.0.0.tgz", + "resolved": 
"https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", "license": "MIT", "engines": { @@ -10909,9 +9574,9 @@ "license": "MIT" }, "node_modules/rollup": { - "version": "4.45.1", - "resolved": "https://registry.npmmirror.com/rollup/-/rollup-4.45.1.tgz", - "integrity": "sha512-4iya7Jb76fVpQyLoiVpzUrsjQ12r3dM7fIVz+4NwoYvZOShknRmiv+iu9CClZml5ZLGb0XMcYLutK6w9tgxHDw==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.59.0.tgz", + "integrity": "sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==", "license": "MIT", "dependencies": { "@types/estree": "1.0.8" @@ -10924,33 +9589,38 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.45.1", - "@rollup/rollup-android-arm64": "4.45.1", - "@rollup/rollup-darwin-arm64": "4.45.1", - "@rollup/rollup-darwin-x64": "4.45.1", - "@rollup/rollup-freebsd-arm64": "4.45.1", - "@rollup/rollup-freebsd-x64": "4.45.1", - "@rollup/rollup-linux-arm-gnueabihf": "4.45.1", - "@rollup/rollup-linux-arm-musleabihf": "4.45.1", - "@rollup/rollup-linux-arm64-gnu": "4.45.1", - "@rollup/rollup-linux-arm64-musl": "4.45.1", - "@rollup/rollup-linux-loongarch64-gnu": "4.45.1", - "@rollup/rollup-linux-powerpc64le-gnu": "4.45.1", - "@rollup/rollup-linux-riscv64-gnu": "4.45.1", - "@rollup/rollup-linux-riscv64-musl": "4.45.1", - "@rollup/rollup-linux-s390x-gnu": "4.45.1", - "@rollup/rollup-linux-x64-gnu": "4.45.1", - "@rollup/rollup-linux-x64-musl": "4.45.1", - "@rollup/rollup-win32-arm64-msvc": "4.45.1", - "@rollup/rollup-win32-ia32-msvc": "4.45.1", - "@rollup/rollup-win32-x64-msvc": "4.45.1", + "@rollup/rollup-android-arm-eabi": "4.59.0", + "@rollup/rollup-android-arm64": "4.59.0", + "@rollup/rollup-darwin-arm64": "4.59.0", + "@rollup/rollup-darwin-x64": "4.59.0", + "@rollup/rollup-freebsd-arm64": "4.59.0", + 
"@rollup/rollup-freebsd-x64": "4.59.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.59.0", + "@rollup/rollup-linux-arm-musleabihf": "4.59.0", + "@rollup/rollup-linux-arm64-gnu": "4.59.0", + "@rollup/rollup-linux-arm64-musl": "4.59.0", + "@rollup/rollup-linux-loong64-gnu": "4.59.0", + "@rollup/rollup-linux-loong64-musl": "4.59.0", + "@rollup/rollup-linux-ppc64-gnu": "4.59.0", + "@rollup/rollup-linux-ppc64-musl": "4.59.0", + "@rollup/rollup-linux-riscv64-gnu": "4.59.0", + "@rollup/rollup-linux-riscv64-musl": "4.59.0", + "@rollup/rollup-linux-s390x-gnu": "4.59.0", + "@rollup/rollup-linux-x64-gnu": "4.59.0", + "@rollup/rollup-linux-x64-musl": "4.59.0", + "@rollup/rollup-openbsd-x64": "4.59.0", + "@rollup/rollup-openharmony-arm64": "4.59.0", + "@rollup/rollup-win32-arm64-msvc": "4.59.0", + "@rollup/rollup-win32-ia32-msvc": "4.59.0", + "@rollup/rollup-win32-x64-gnu": "4.59.0", + "@rollup/rollup-win32-x64-msvc": "4.59.0", "fsevents": "~2.3.2" } }, "node_modules/rollup-plugin-visualizer": { - "version": "6.0.3", - "resolved": "https://registry.npmmirror.com/rollup-plugin-visualizer/-/rollup-plugin-visualizer-6.0.3.tgz", - "integrity": "sha512-ZU41GwrkDcCpVoffviuM9Clwjy5fcUxlz0oMoTXTYsK+tcIFzbdacnrr2n8TXcHxbGKKXtOdjxM2HUS4HjkwIw==", + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/rollup-plugin-visualizer/-/rollup-plugin-visualizer-6.0.5.tgz", + "integrity": "sha512-9+HlNgKCVbJDs8tVtjQ43US12eqaiHyyiLMdBwQ7vSZPiHMysGNo2E88TAp1si5wx8NAoYriI2A5kuKfIakmJg==", "license": "MIT", "dependencies": { "open": "^8.0.0", @@ -10977,10 +9647,16 @@ } } }, + "node_modules/rou3": { + "version": "0.7.12", + "resolved": "https://registry.npmjs.org/rou3/-/rou3-0.7.12.tgz", + "integrity": "sha512-iFE4hLDuloSWcD7mjdCDhx2bKcIsYbtOTpfH5MHHLSKMOUyjqQXTeZVa289uuwEGEKFoE/BAPbhaU4B774nceg==", + "license": "MIT" + }, "node_modules/run-applescript": { - "version": "7.0.0", - "resolved": "https://registry.npmmirror.com/run-applescript/-/run-applescript-7.0.0.tgz", - "integrity": 
"sha512-9by4Ij99JUr/MCFBUkDKLWK3G9HVXmabKz9U5MlIAIuvuzkiOicRYs8XJLxX+xahD+mLiiCYDqF9dKAgtzKP1A==", + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/run-applescript/-/run-applescript-7.1.0.tgz", + "integrity": "sha512-DPe5pVFaAsinSaV6QjQ6gdiedWDcRCbUuiQfQa2wmWV7+xC9bGulGI8+TdRmoFkAPaBXk8CrAbnlY2ISniJ47Q==", "license": "MIT", "engines": { "node": ">=18" @@ -11049,20 +9725,20 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/safe-stable-stringify": { - "version": "2.5.0", - "resolved": "https://registry.npmmirror.com/safe-stable-stringify/-/safe-stable-stringify-2.5.0.tgz", - "integrity": "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==", - "license": "MIT", - "engines": { - "node": ">=10" - } + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT" }, "node_modules/sax": { - "version": "1.4.1", - "resolved": "https://registry.npmmirror.com/sax/-/sax-1.4.1.tgz", - "integrity": "sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg==", - "license": "ISC" + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.4.4.tgz", + "integrity": "sha512-1n3r/tGXO6b6VXMdFT54SHzT9ytu9yr7TaELowdYpMqY/Ao7EnlQGmAQ1+RatX7Tkkdm6hONI2owqNx2aZj5Sw==", + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=11.0.0" + } }, "node_modules/scule": { "version": "1.3.0", @@ -11071,9 +9747,9 @@ "license": "MIT" }, "node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmmirror.com/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", 
+ "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", "license": "ISC", "bin": { "semver": "bin/semver.js" @@ -11083,39 +9759,77 @@ } }, "node_modules/send": { - "version": "1.2.0", - "resolved": "https://registry.npmmirror.com/send/-/send-1.2.0.tgz", - "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/send/-/send-1.2.1.tgz", + "integrity": "sha512-1gnZf7DFcoIcajTjTwjwuDjzuz4PPcY2StKPlsGAQ1+YH20IRVrBaXSWmdjowTJ6u8Rc01PoYOGHXfP1mYcZNQ==", "license": "MIT", "dependencies": { - "debug": "^4.3.5", + "debug": "^4.4.3", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "fresh": "^2.0.0", - "http-errors": "^2.0.0", - "mime-types": "^3.0.1", + "http-errors": "^2.0.1", + "mime-types": "^3.0.2", "ms": "^2.1.3", "on-finished": "^2.4.1", "range-parser": "^1.2.1", - "statuses": "^2.0.1" + "statuses": "^2.0.2" }, "engines": { "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/send/node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/send/node_modules/mime-types": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.2.tgz", + "integrity": "sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/serialize-javascript": { "version": "6.0.2", - "resolved": 
"https://registry.npmmirror.com/serialize-javascript/-/serialize-javascript-6.0.2.tgz", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz", "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==", "license": "BSD-3-Clause", "dependencies": { "randombytes": "^2.1.0" } }, + "node_modules/seroval": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/seroval/-/seroval-1.5.0.tgz", + "integrity": "sha512-OE4cvmJ1uSPrKorFIH9/w/Qwuvi/IMcGbv5RKgcJ/zjA/IohDLU6SVaxFN9FwajbP7nsX0dQqMDes1whk3y+yw==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, "node_modules/serve-placeholder": { "version": "2.0.2", - "resolved": "https://registry.npmmirror.com/serve-placeholder/-/serve-placeholder-2.0.2.tgz", + "resolved": "https://registry.npmjs.org/serve-placeholder/-/serve-placeholder-2.0.2.tgz", "integrity": "sha512-/TMG8SboeiQbZJWRlfTCqMs2DD3SZgWp0kDQePz9yUuCnDfDh/92gf7/PxGhzXTKBIPASIHxFcZndoNbp6QOLQ==", "license": "MIT", "dependencies": { @@ -11123,9 +9837,9 @@ } }, "node_modules/serve-static": { - "version": "2.2.0", - "resolved": "https://registry.npmmirror.com/serve-static/-/serve-static-2.2.0.tgz", - "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==", + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.1.tgz", + "integrity": "sha512-xRXBn0pPqQTVQiC8wyQrKs2MOlX24zQ0POGaj0kultvoOCstBQM5yvOhAVSUwOMjQtTvsPWoNCHfPGwaaQJhTw==", "license": "MIT", "dependencies": { "encodeurl": "^2.0.0", @@ -11135,6 +9849,10 @@ }, "engines": { "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/setprototypeof": { @@ -11166,7 +9884,7 @@ }, "node_modules/shell-quote": { "version": "1.8.3", - "resolved": "https://registry.npmmirror.com/shell-quote/-/shell-quote-1.8.3.tgz", + "resolved": 
"https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.3.tgz", "integrity": "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==", "license": "MIT", "engines": { @@ -11176,78 +9894,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/side-channel": { - "version": "1.1.0", - "resolved": "https://registry.npmmirror.com/side-channel/-/side-channel-1.1.0.tgz", - "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "object-inspect": "^1.13.3", - "side-channel-list": "^1.0.0", - "side-channel-map": "^1.0.1", - "side-channel-weakmap": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-list": { - "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/side-channel-list/-/side-channel-list-1.0.0.tgz", - "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "object-inspect": "^1.13.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-map": { - "version": "1.0.1", - "resolved": "https://registry.npmmirror.com/side-channel-map/-/side-channel-map-1.0.1.tgz", - "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.5", - "object-inspect": "^1.13.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-weakmap": { - "version": "1.0.2", - "resolved": 
"https://registry.npmmirror.com/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", - "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.5", - "object-inspect": "^1.13.3", - "side-channel-map": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/signal-exit": { "version": "4.1.0", "resolved": "https://registry.npmmirror.com/signal-exit/-/signal-exit-4.1.0.tgz", @@ -11261,9 +9907,9 @@ } }, "node_modules/simple-git": { - "version": "3.28.0", - "resolved": "https://registry.npmmirror.com/simple-git/-/simple-git-3.28.0.tgz", - "integrity": "sha512-Rs/vQRwsn1ILH1oBUy8NucJlXmnnLeLCfcvbSehkPzbv3wwoFWIdtfd6Ndo6ZPhlPsCZ60CPI4rxurnwAa+a2w==", + "version": "3.32.3", + "resolved": "https://registry.npmjs.org/simple-git/-/simple-git-3.32.3.tgz", + "integrity": "sha512-56a5oxFdWlsGygOXHWrG+xjj5w9ZIt2uQbzqiIGdR/6i5iococ7WQ/bNPzWxCJdEUGUCmyMH0t9zMpRJTaKxmw==", "license": "MIT", "dependencies": { "@kwsites/file-exists": "^1.1.1", @@ -11275,19 +9921,10 @@ "url": "https://github.com/steveukx/git-js?sponsor=1" } }, - "node_modules/simple-swizzle": { - "version": "0.2.2", - "resolved": "https://registry.npmmirror.com/simple-swizzle/-/simple-swizzle-0.2.2.tgz", - "integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==", - "license": "MIT", - "dependencies": { - "is-arrayish": "^0.3.1" - } - }, "node_modules/sirv": { - "version": "3.0.1", - "resolved": "https://registry.npmmirror.com/sirv/-/sirv-3.0.1.tgz", - "integrity": "sha512-FoqMu0NCGBLCcAkS1qA+XJIQTR6/JHfQXl+uGteNCQ76T91DMUjPa9xfmeqMY3z80nLSg9yQmNjK0Px6RWsH/A==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/sirv/-/sirv-3.0.2.tgz", + "integrity": 
"sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g==", "license": "MIT", "dependencies": { "@polka/url": "^1.0.0-next.24", @@ -11300,13 +9937,13 @@ }, "node_modules/sisteransi": { "version": "1.0.5", - "resolved": "https://registry.npmmirror.com/sisteransi/-/sisteransi-1.0.5.tgz", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", "license": "MIT" }, "node_modules/slash": { "version": "5.1.0", - "resolved": "https://registry.npmmirror.com/slash/-/slash-5.1.0.tgz", + "resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz", "integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==", "license": "MIT", "engines": { @@ -11317,14 +9954,17 @@ } }, "node_modules/smob": { - "version": "1.5.0", - "resolved": "https://registry.npmmirror.com/smob/-/smob-1.5.0.tgz", - "integrity": "sha512-g6T+p7QO8npa+/hNx9ohv1E5pVCmWrVCUzUXJyLdMmftX6ER0oiWY/w9knEonLpnOp6b6FenKnMfR8gqwWdwig==", - "license": "MIT" + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/smob/-/smob-1.6.1.tgz", + "integrity": "sha512-KAkBqZl3c2GvNgNhcoyJae1aKldDW0LO279wF9bk1PnluRTETKBq0WyzRXxEhoQLk56yHaOY4JCBEKDuJIET5g==", + "license": "MIT", + "engines": { + "node": ">=20.0.0" + } }, "node_modules/source-map": { "version": "0.7.6", - "resolved": "https://registry.npmmirror.com/source-map/-/source-map-0.7.6.tgz", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.6.tgz", "integrity": "sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==", "license": "BSD-3-Clause", "engines": { @@ -11342,7 +9982,7 @@ }, "node_modules/source-map-support": { "version": "0.5.21", - "resolved": "https://registry.npmmirror.com/source-map-support/-/source-map-support-0.5.21.tgz", + "resolved": 
"https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", "license": "MIT", "dependencies": { @@ -11352,45 +9992,13 @@ }, "node_modules/source-map-support/node_modules/source-map": { "version": "0.6.1", - "resolved": "https://registry.npmmirror.com/source-map/-/source-map-0.6.1.tgz", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", "license": "BSD-3-Clause", "engines": { "node": ">=0.10.0" } }, - "node_modules/spdx-correct": { - "version": "3.2.0", - "resolved": "https://registry.npmmirror.com/spdx-correct/-/spdx-correct-3.2.0.tgz", - "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", - "license": "Apache-2.0", - "dependencies": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-exceptions": { - "version": "2.5.0", - "resolved": "https://registry.npmmirror.com/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", - "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", - "license": "CC-BY-3.0" - }, - "node_modules/spdx-expression-parse": { - "version": "3.0.1", - "resolved": "https://registry.npmmirror.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", - "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", - "license": "MIT", - "dependencies": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-license-ids": { - "version": "3.0.21", - "resolved": "https://registry.npmmirror.com/spdx-license-ids/-/spdx-license-ids-3.0.21.tgz", - "integrity": 
"sha512-Bvg/8F5XephndSK3JffaRqdT+gyhfqIPwDHpX80tJrF8QQRYMo8sNMeaZ2Dp5+jhwKnUmIOyFFQfHRkjJm5nXg==", - "license": "CC0-1.0" - }, "node_modules/speakingurl": { "version": "14.0.1", "resolved": "https://registry.npmmirror.com/speakingurl/-/speakingurl-14.0.1.tgz", @@ -11400,24 +10008,27 @@ "node": ">=0.10.0" } }, - "node_modules/stack-trace": { - "version": "0.0.10", - "resolved": "https://registry.npmmirror.com/stack-trace/-/stack-trace-0.0.10.tgz", - "integrity": "sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg==", + "node_modules/srvx": { + "version": "0.11.8", + "resolved": "https://registry.npmjs.org/srvx/-/srvx-0.11.8.tgz", + "integrity": "sha512-2n9t0YnAXPJjinytvxccNgs7rOA5gmE7Wowt/8Dy2dx2fDC6sBhfBpbrCvjYKALlVukPS/Uq3QwkolKNa7P/2Q==", "license": "MIT", + "bin": { + "srvx": "bin/srvx.mjs" + }, "engines": { - "node": "*" + "node": ">=20.16.0" } }, "node_modules/standard-as-callback": { "version": "2.1.0", - "resolved": "https://registry.npmmirror.com/standard-as-callback/-/standard-as-callback-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz", "integrity": "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A==", "license": "MIT" }, "node_modules/statuses": { "version": "2.0.2", - "resolved": "https://registry.npmmirror.com/statuses/-/statuses-2.0.2.tgz", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", "license": "MIT", "engines": { @@ -11425,27 +10036,25 @@ } }, "node_modules/std-env": { - "version": "3.9.0", - "resolved": "https://registry.npmmirror.com/std-env/-/std-env-3.9.0.tgz", - "integrity": "sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==", + "version": "3.10.0", + "resolved": 
"https://registry.npmjs.org/std-env/-/std-env-3.10.0.tgz", + "integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==", "license": "MIT" }, "node_modules/streamx": { - "version": "2.22.1", - "resolved": "https://registry.npmmirror.com/streamx/-/streamx-2.22.1.tgz", - "integrity": "sha512-znKXEBxfatz2GBNK02kRnCXjV+AA4kjZIUxeWSr3UGirZMJfTE9uiwKHobnbgxWyL/JWro8tTq+vOqAK1/qbSA==", + "version": "2.23.0", + "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.23.0.tgz", + "integrity": "sha512-kn+e44esVfn2Fa/O0CPFcex27fjIL6MkVae0Mm6q+E6f0hWv578YCERbv+4m02cjxvDsPKLnmxral/rR6lBMAg==", "license": "MIT", "dependencies": { + "events-universal": "^1.0.0", "fast-fifo": "^1.3.2", "text-decoder": "^1.1.0" - }, - "optionalDependencies": { - "bare-events": "^2.2.0" } }, "node_modules/string_decoder": { "version": "1.3.0", - "resolved": "https://registry.npmmirror.com/string_decoder/-/string_decoder-1.3.0.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", "license": "MIT", "dependencies": { @@ -11550,7 +10159,7 @@ }, "node_modules/strip-final-newline": { "version": "3.0.0", - "resolved": "https://registry.npmmirror.com/strip-final-newline/-/strip-final-newline-3.0.0.tgz", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", "license": "MIT", "engines": { @@ -11561,9 +10170,9 @@ } }, "node_modules/strip-literal": { - "version": "3.0.0", - "resolved": "https://registry.npmmirror.com/strip-literal/-/strip-literal-3.0.0.tgz", - "integrity": "sha512-TcccoMhJOM3OebGhSBEmp3UZ2SfDMZUEBdRA/9ynfLi8yYajyWX3JiXArcJt4Umh4vISpspkQIY8ZZoCqjbviA==", + "version": "3.1.0", + "resolved": 
"https://registry.npmjs.org/strip-literal/-/strip-literal-3.1.0.tgz", + "integrity": "sha512-8r3mkIM/2+PpjHoOtiAW8Rg3jJLHaV7xPwG+YRGrv6FP0wwk/toTpATxWYOW0BKdWwl82VT2tFYi5DlROa0Mxg==", "license": "MIT", "dependencies": { "js-tokens": "^9.0.1" @@ -11574,23 +10183,39 @@ }, "node_modules/strip-literal/node_modules/js-tokens": { "version": "9.0.1", - "resolved": "https://registry.npmmirror.com/js-tokens/-/js-tokens-9.0.1.tgz", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.1.tgz", "integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==", "license": "MIT" }, "node_modules/structured-clone-es": { "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/structured-clone-es/-/structured-clone-es-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/structured-clone-es/-/structured-clone-es-1.0.0.tgz", "integrity": "sha512-FL8EeKFFyNQv5cMnXI31CIMCsFarSVI2bF0U0ImeNE3g/F1IvJQyqzOXxPBRXiwQfyBTlbNe88jh1jFW0O/jiQ==", "license": "ISC" }, + "node_modules/style-value-types": { + "version": "5.1.2", + "resolved": "https://registry.npmmirror.com/style-value-types/-/style-value-types-5.1.2.tgz", + "integrity": "sha512-Vs9fNreYF9j6W2VvuDTP7kepALi7sk0xtk2Tu8Yxi9UoajJdEVpNpCov0HsLTqXvNGKX+Uv09pkozVITi1jf3Q==", + "license": "MIT", + "dependencies": { + "hey-listen": "^1.0.8", + "tslib": "2.4.0" + } + }, + "node_modules/style-value-types/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmmirror.com/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==", + "license": "0BSD" + }, "node_modules/stylehacks": { - "version": "7.0.6", - "resolved": "https://registry.npmmirror.com/stylehacks/-/stylehacks-7.0.6.tgz", - "integrity": "sha512-iitguKivmsueOmTO0wmxURXBP8uqOO+zikLGZ7Mm9e/94R4w5T999Js2taS/KBOnQ/wdC3jN3vNSrkGDrlnqQg==", + "version": "7.0.7", + "resolved": 
"https://registry.npmjs.org/stylehacks/-/stylehacks-7.0.7.tgz", + "integrity": "sha512-bJkD0JkEtbRrMFtwgpJyBbFIwfDDONQ1Ov3sDLZQP8HuJ73kBOyx66H4bOcAbVWmnfLdvQ0AJwXxOMkpujcO6g==", "license": "MIT", "dependencies": { - "browserslist": "^4.25.1", + "browserslist": "^4.27.0", "postcss-selector-parser": "^7.1.0" }, "engines": { @@ -11644,9 +10269,9 @@ } }, "node_modules/supports-color": { - "version": "10.0.0", - "resolved": "https://registry.npmmirror.com/supports-color/-/supports-color-10.0.0.tgz", - "integrity": "sha512-HRVVSbCCMbj7/kdWF9Q+bbckjBHLtHMEoJWlkmYzzdwhYMkjkOwubLM6t7NbWKjgKamGDrWL1++KrjUO1t9oAQ==", + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-10.2.2.tgz", + "integrity": "sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g==", "license": "MIT", "engines": { "node": ">=18" @@ -11669,7 +10294,7 @@ }, "node_modules/svgo": { "version": "4.0.0", - "resolved": "https://registry.npmmirror.com/svgo/-/svgo-4.0.0.tgz", + "resolved": "https://registry.npmjs.org/svgo/-/svgo-4.0.0.tgz", "integrity": "sha512-VvrHQ+9uniE+Mvx3+C9IEe/lWasXCU0nXMY2kZeLrHNICuRiC8uMPyM14UEaMOFA5mhyQqEkB02VoQ16n3DLaw==", "license": "MIT", "dependencies": { @@ -11694,7 +10319,7 @@ }, "node_modules/svgo/node_modules/commander": { "version": "11.1.0", - "resolved": "https://registry.npmmirror.com/commander/-/commander-11.1.0.tgz", + "resolved": "https://registry.npmjs.org/commander/-/commander-11.1.0.tgz", "integrity": "sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ==", "license": "MIT", "engines": { @@ -11703,7 +10328,7 @@ }, "node_modules/system-architecture": { "version": "0.1.0", - "resolved": "https://registry.npmmirror.com/system-architecture/-/system-architecture-0.1.0.tgz", + "resolved": "https://registry.npmjs.org/system-architecture/-/system-architecture-0.1.0.tgz", "integrity": 
"sha512-ulAk51I9UVUyJgxlv9M6lFot2WP3e7t8Kz9+IS6D4rVba1tR9kON+Ey69f+1R4Q8cd45Lod6a4IcJIxnzGc/zA==", "license": "MIT", "engines": { @@ -11713,6 +10338,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/tagged-tag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/tagged-tag/-/tagged-tag-1.0.0.tgz", + "integrity": "sha512-yEFYrVhod+hdNyx7g5Bnkkb0G6si8HJurOoOEgC8B/O0uXLHlaey/65KRv6cuWBNhBgHKAROVpc7QyYqE5gFng==", + "license": "MIT", + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/tailwind-config-viewer": { "version": "2.0.4", "resolved": "https://registry.npmmirror.com/tailwind-config-viewer/-/tailwind-config-viewer-2.0.4.tgz", @@ -11922,37 +10559,16 @@ "node": ">=8.10.0" } }, - "node_modules/tailwindcss/node_modules/resolve": { - "version": "1.22.10", - "resolved": "https://registry.npmmirror.com/resolve/-/resolve-1.22.10.tgz", - "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", - "license": "MIT", - "dependencies": { - "is-core-module": "^2.16.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/tar": { - "version": "7.4.3", - "resolved": "https://registry.npmmirror.com/tar/-/tar-7.4.3.tgz", - "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==", - "license": "ISC", + "version": "7.5.9", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.9.tgz", + "integrity": "sha512-BTLcK0xsDh2+PUe9F6c2TlRp4zOOBMTkoQHQIWSIzI0R7KG46uEwq4OPk2W7bZcprBMsuaeFsqwYr7pjh6CuHg==", + "license": "BlueOak-1.0.0", "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", - "minizlib": 
"^3.0.1", - "mkdirp": "^3.0.1", + "minizlib": "^3.1.0", "yallist": "^5.0.0" }, "engines": { @@ -11961,7 +10577,7 @@ }, "node_modules/tar-stream": { "version": "3.1.7", - "resolved": "https://registry.npmmirror.com/tar-stream/-/tar-stream-3.1.7.tgz", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz", "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==", "license": "MIT", "dependencies": { @@ -11972,7 +10588,7 @@ }, "node_modules/tar/node_modules/yallist": { "version": "5.0.0", - "resolved": "https://registry.npmmirror.com/yallist/-/yallist-5.0.0.tgz", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", "license": "BlueOak-1.0.0", "engines": { @@ -11980,13 +10596,13 @@ } }, "node_modules/terser": { - "version": "5.43.1", - "resolved": "https://registry.npmmirror.com/terser/-/terser-5.43.1.tgz", - "integrity": "sha512-+6erLbBm0+LROX2sPXlUYx/ux5PyE9K/a92Wrt6oA+WDAoFTdpHE5tCYCI5PNzq2y8df4rA+QgHLJuR4jNymsg==", + "version": "5.46.0", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.46.0.tgz", + "integrity": "sha512-jTwoImyr/QbOWFFso3YoU3ik0jBBDJ6JTOQiy/J2YxVJdZCc+5u7skhNwiOR3FQIygFqVUPHl7qbbxtjW2K3Qg==", "license": "BSD-2-Clause", "dependencies": { "@jridgewell/source-map": "^0.3.3", - "acorn": "^8.14.0", + "acorn": "^8.15.0", "commander": "^2.20.0", "source-map-support": "~0.5.20" }, @@ -11999,25 +10615,19 @@ }, "node_modules/terser/node_modules/commander": { "version": "2.20.3", - "resolved": "https://registry.npmmirror.com/commander/-/commander-2.20.3.tgz", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", "license": "MIT" }, "node_modules/text-decoder": { - "version": "1.2.3", - "resolved": 
"https://registry.npmmirror.com/text-decoder/-/text-decoder-1.2.3.tgz", - "integrity": "sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA==", + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.2.7.tgz", + "integrity": "sha512-vlLytXkeP4xvEq2otHeJfSQIRyWxo/oZGEbXrtEEF9Hnmrdly59sUbzZ/QgyWuLYHctCHxFF4tRQZNQ9k60ExQ==", "license": "Apache-2.0", "dependencies": { "b4a": "^1.6.4" } }, - "node_modules/text-hex": { - "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/text-hex/-/text-hex-1.0.0.tgz", - "integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==", - "license": "MIT" - }, "node_modules/thenify": { "version": "3.3.1", "resolved": "https://registry.npmmirror.com/thenify/-/thenify-3.3.1.tgz", @@ -12041,24 +10651,27 @@ }, "node_modules/tiny-invariant": { "version": "1.3.3", - "resolved": "https://registry.npmmirror.com/tiny-invariant/-/tiny-invariant-1.3.3.tgz", + "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==", "license": "MIT" }, "node_modules/tinyexec": { - "version": "1.0.1", - "resolved": "https://registry.npmmirror.com/tinyexec/-/tinyexec-1.0.1.tgz", - "integrity": "sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==", - "license": "MIT" + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.2.tgz", + "integrity": "sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==", + "license": "MIT", + "engines": { + "node": ">=18" + } }, "node_modules/tinyglobby": { - "version": "0.2.14", - "resolved": "https://registry.npmmirror.com/tinyglobby/-/tinyglobby-0.2.14.tgz", - "integrity": 
"sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==", + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", "license": "MIT", "dependencies": { - "fdir": "^6.4.4", - "picomatch": "^4.0.2" + "fdir": "^6.5.0", + "picomatch": "^4.0.3" }, "engines": { "node": ">=12.0.0" @@ -12067,24 +10680,6 @@ "url": "https://github.com/sponsors/SuperchupuDev" } }, - "node_modules/tmp": { - "version": "0.2.3", - "resolved": "https://registry.npmmirror.com/tmp/-/tmp-0.2.3.tgz", - "integrity": "sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w==", - "license": "MIT", - "engines": { - "node": ">=14.14" - } - }, - "node_modules/tmp-promise": { - "version": "3.0.3", - "resolved": "https://registry.npmmirror.com/tmp-promise/-/tmp-promise-3.0.3.tgz", - "integrity": "sha512-RwM7MoPojPxsOBYnyd2hy0bxtIlVrihNs9pj5SUvY8Zz1sQcQG2tG1hSr8PDxfgEB8RNKDhqbIlroIarSNDNsQ==", - "license": "MIT", - "dependencies": { - "tmp": "^0.2.0" - } - }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmmirror.com/to-regex-range/-/to-regex-range-5.0.1.tgz", @@ -12106,15 +10701,9 @@ "node": ">=0.6" } }, - "node_modules/toml": { - "version": "3.0.0", - "resolved": "https://registry.npmmirror.com/toml/-/toml-3.0.0.tgz", - "integrity": "sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w==", - "license": "MIT" - }, "node_modules/totalist": { "version": "3.0.1", - "resolved": "https://registry.npmmirror.com/totalist/-/totalist-3.0.1.tgz", + "resolved": "https://registry.npmjs.org/totalist/-/totalist-3.0.1.tgz", "integrity": "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==", "license": "MIT", "engines": { @@ -12123,31 +10712,10 @@ }, 
"node_modules/tr46": { "version": "0.0.3", - "resolved": "https://registry.npmmirror.com/tr46/-/tr46-0.0.3.tgz", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", "license": "MIT" }, - "node_modules/triple-beam": { - "version": "1.4.1", - "resolved": "https://registry.npmmirror.com/triple-beam/-/triple-beam-1.4.1.tgz", - "integrity": "sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg==", - "license": "MIT", - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/ts-api-utils": { - "version": "2.1.0", - "resolved": "https://registry.npmmirror.com/ts-api-utils/-/ts-api-utils-2.1.0.tgz", - "integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==", - "license": "MIT", - "engines": { - "node": ">=18.12" - }, - "peerDependencies": { - "typescript": ">=4.8.4" - } - }, "node_modules/ts-interface-checker": { "version": "0.1.13", "resolved": "https://registry.npmmirror.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", @@ -12156,9 +10724,10 @@ }, "node_modules/tslib": { "version": "2.8.1", - "resolved": "https://registry.npmmirror.com/tslib/-/tslib-2.8.1.tgz", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", - "license": "0BSD" + "license": "0BSD", + "optional": true }, "node_modules/tsscmp": { "version": "1.0.6", @@ -12170,12 +10739,15 @@ } }, "node_modules/type-fest": { - "version": "4.41.0", - "resolved": "https://registry.npmmirror.com/type-fest/-/type-fest-4.41.0.tgz", - "integrity": "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==", + "version": "5.4.4", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-5.4.4.tgz", + "integrity": 
"sha512-JnTrzGu+zPV3aXIUhnyWJj4z/wigMsdYajGLIYakqyOW1nPllzXEJee0QQbHj+CTIQtXGlAjuK0UY+2xTyjVAw==", "license": "(MIT OR CC0-1.0)", + "dependencies": { + "tagged-tag": "^1.0.0" + }, "engines": { - "node": ">=16" + "node": ">=20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -12194,30 +10766,9 @@ "node": ">= 0.6" } }, - "node_modules/type-is/node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmmirror.com/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/type-is/node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmmirror.com/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "license": "MIT", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, "node_modules/type-level-regexp": { "version": "0.1.17", - "resolved": "https://registry.npmmirror.com/type-level-regexp/-/type-level-regexp-0.1.17.tgz", + "resolved": "https://registry.npmjs.org/type-level-regexp/-/type-level-regexp-0.1.17.tgz", "integrity": "sha512-wTk4DH3cxwk196uGLK/E9pE45aLfeKJacKmcEgEOA/q5dnPGNxXt0cfYdFxb57L+sEpf1oJH4Dnx/pnRcku9jg==", "license": "MIT" }, @@ -12226,6 +10777,8 @@ "resolved": "https://registry.npmmirror.com/typescript/-/typescript-5.8.3.tgz", "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", "license": "Apache-2.0", + "optional": true, + "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -12235,14 +10788,14 @@ } }, "node_modules/ufo": { - "version": "1.6.1", - "resolved": "https://registry.npmmirror.com/ufo/-/ufo-1.6.1.tgz", - "integrity": 
"sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==", + "version": "1.6.3", + "resolved": "https://registry.npmmirror.com/ufo/-/ufo-1.6.3.tgz", + "integrity": "sha512-yDJTmhydvl5lJzBmy/hyOAA0d+aqCBuwl818haVdYCRrWV84o7YyeVm4QlVHStqNrrJSTb6jKuFAVqAFsr+K3Q==", "license": "MIT" }, "node_modules/ultrahtml": { "version": "1.6.0", - "resolved": "https://registry.npmmirror.com/ultrahtml/-/ultrahtml-1.6.0.tgz", + "resolved": "https://registry.npmjs.org/ultrahtml/-/ultrahtml-1.6.0.tgz", "integrity": "sha512-R9fBn90VTJrqqLDwyMph+HGne8eqY1iPfYhPzZrvKpIfwkWZbcYlfpsb8B9dTvBfpy1/hqAD7Wi8EKfP9e8zdw==", "license": "MIT" }, @@ -12253,127 +10806,115 @@ "license": "MIT" }, "node_modules/unctx": { - "version": "2.4.1", - "resolved": "https://registry.npmmirror.com/unctx/-/unctx-2.4.1.tgz", - "integrity": "sha512-AbaYw0Nm4mK4qjhns67C+kgxR2YWiwlDBPzxrN8h8C6VtAdCgditAY5Dezu3IJy4XVqAnbrXt9oQJvsn3fyozg==", + "version": "2.5.0", + "resolved": "https://registry.npmmirror.com/unctx/-/unctx-2.5.0.tgz", + "integrity": "sha512-p+Rz9x0R7X+CYDkT+Xg8/GhpcShTlU8n+cf9OtOEf7zEQsNcCZO1dPKNRDqvUTaq+P32PMMkxWHwfrxkqfqAYg==", "license": "MIT", "dependencies": { - "acorn": "^8.14.0", + "acorn": "^8.15.0", "estree-walker": "^3.0.3", - "magic-string": "^0.30.17", - "unplugin": "^2.1.0" + "magic-string": "^0.30.21", + "unplugin": "^2.3.11" } }, - "node_modules/undici-types": { - "version": "7.8.0", - "resolved": "https://registry.npmmirror.com/undici-types/-/undici-types-7.8.0.tgz", - "integrity": "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw==", - "license": "MIT", - "optional": true - }, - "node_modules/unenv": { - "version": "2.0.0-rc.18", - "resolved": "https://registry.npmmirror.com/unenv/-/unenv-2.0.0-rc.18.tgz", - "integrity": "sha512-O0oVQVJ2X3Q8H4HITJr4e2cWxMYBeZ+p8S25yoKCxVCgDWtIJDcgwWNonYz12tI3ylVQCRyPV/Bdq0KJeXo7AA==", - "license": "MIT", - "dependencies": { - "defu": "^6.1.4", - "exsolve": "^1.0.7", - 
"ohash": "^2.0.11", - "pathe": "^2.0.3", - "ufo": "^1.6.1" + "node_modules/unenv": { + "version": "2.0.0-rc.24", + "resolved": "https://registry.npmjs.org/unenv/-/unenv-2.0.0-rc.24.tgz", + "integrity": "sha512-i7qRCmY42zmCwnYlh9H2SvLEypEFGye5iRmEMKjcGi7zk9UquigRjFtTLz0TYqr0ZGLZhaMHl/foy1bZR+Cwlw==", + "license": "MIT", + "dependencies": { + "pathe": "^2.0.3" } }, "node_modules/unhead": { - "version": "2.0.12", - "resolved": "https://registry.npmmirror.com/unhead/-/unhead-2.0.12.tgz", - "integrity": "sha512-5oo0lwz81XDXCmrHGzgmbaNOxM8R9MZ3FkEs2ROHeW8e16xsrv7qXykENlISrcxr3RLPHQEsD1b6js9P2Oj/Ow==", + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/unhead/-/unhead-2.1.9.tgz", + "integrity": "sha512-4GvP6YeJQzo9J3g9fFZUJOH6jacUp5JgJ0/zC8eZrt8Dwompg9SuOSfrYbZaEzsfMPgQc4fsEjMoY9WzGPOChg==", "license": "MIT", "dependencies": { - "hookable": "^5.5.3" + "hookable": "^6.0.1" }, "funding": { "url": "https://github.com/sponsors/harlan-zw" } }, + "node_modules/unhead/node_modules/hookable": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/hookable/-/hookable-6.0.1.tgz", + "integrity": "sha512-uKGyY8BuzN/a5gvzvA+3FVWo0+wUjgtfSdnmjtrOVwQCZPHpHDH2WRO3VZSOeluYrHoDCiXFffZXs8Dj1ULWtw==", + "license": "MIT" + }, "node_modules/unicorn-magic": { - "version": "0.1.0", - "resolved": "https://registry.npmmirror.com/unicorn-magic/-/unicorn-magic-0.1.0.tgz", - "integrity": "sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==", + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.4.0.tgz", + "integrity": "sha512-wH590V9VNgYH9g3lH9wWjTrUoKsjLF6sGLjhR4sH1LWpLmCOH0Zf7PukhDA8BiS7KHe4oPNkcTHqYkj7SOGUOw==", "license": "MIT", "engines": { - "node": ">=18" + "node": ">=20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/unimport": { - "version": "5.2.0", - "resolved": 
"https://registry.npmmirror.com/unimport/-/unimport-5.2.0.tgz", - "integrity": "sha512-bTuAMMOOqIAyjV4i4UH7P07pO+EsVxmhOzQ2YJ290J6mkLUdozNhb5I/YoOEheeNADC03ent3Qj07X0fWfUpmw==", + "version": "5.7.0", + "resolved": "https://registry.npmjs.org/unimport/-/unimport-5.7.0.tgz", + "integrity": "sha512-njnL6sp8lEA8QQbZrt+52p/g4X0rw3bnGGmUcJnt1jeG8+iiqO779aGz0PirCtydAIVcuTBRlJ52F0u46z309Q==", "license": "MIT", "dependencies": { - "acorn": "^8.15.0", + "acorn": "^8.16.0", "escape-string-regexp": "^5.0.0", "estree-walker": "^3.0.3", - "local-pkg": "^1.1.1", - "magic-string": "^0.30.17", - "mlly": "^1.7.4", + "local-pkg": "^1.1.2", + "magic-string": "^0.30.21", + "mlly": "^1.8.0", "pathe": "^2.0.3", "picomatch": "^4.0.3", - "pkg-types": "^2.2.0", + "pkg-types": "^2.3.0", "scule": "^1.3.0", - "strip-literal": "^3.0.0", - "tinyglobby": "^0.2.14", - "unplugin": "^2.3.5", - "unplugin-utils": "^0.2.4" + "strip-literal": "^3.1.0", + "tinyglobby": "^0.2.15", + "unplugin": "^2.3.11", + "unplugin-utils": "^0.3.1" }, "engines": { "node": ">=18.12.0" } }, - "node_modules/universalify": { - "version": "2.0.1", - "resolved": "https://registry.npmmirror.com/universalify/-/universalify-2.0.1.tgz", - "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", - "license": "MIT", - "engines": { - "node": ">= 10.0.0" - } - }, - "node_modules/unixify": { - "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/unixify/-/unixify-1.0.0.tgz", - "integrity": "sha512-6bc58dPYhCMHHuwxldQxO3RRNZ4eCogZ/st++0+fcC1nr0jiGUtAdBJ2qzmLQWSxbtz42pWt4QQMiZ9HvZf5cg==", + "node_modules/unimport/node_modules/unplugin-utils": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/unplugin-utils/-/unplugin-utils-0.3.1.tgz", + "integrity": "sha512-5lWVjgi6vuHhJ526bI4nlCOmkCIF3nnfXkCMDeMJrtdvxTs6ZFCM8oNufGTsDbKv/tJ/xj8RpvXjRuPBZJuJog==", "license": "MIT", "dependencies": { - "normalize-path": "^2.1.1" + "pathe": "^2.0.3", + "picomatch": "^4.0.3" 
}, "engines": { - "node": ">=0.10.0" + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" } }, - "node_modules/unixify/node_modules/normalize-path": { - "version": "2.1.1", - "resolved": "https://registry.npmmirror.com/normalize-path/-/normalize-path-2.1.1.tgz", - "integrity": "sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w==", + "node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", "license": "MIT", - "dependencies": { - "remove-trailing-separator": "^1.0.1" - }, "engines": { - "node": ">=0.10.0" + "node": ">= 10.0.0" } }, "node_modules/unplugin": { - "version": "2.3.5", - "resolved": "https://registry.npmmirror.com/unplugin/-/unplugin-2.3.5.tgz", - "integrity": "sha512-RyWSb5AHmGtjjNQ6gIlA67sHOsWpsbWpwDokLwTcejVdOjEkJZh7QKu14J00gDDVSh8kGH4KYC/TNBceXFZhtw==", + "version": "2.3.11", + "resolved": "https://registry.npmmirror.com/unplugin/-/unplugin-2.3.11.tgz", + "integrity": "sha512-5uKD0nqiYVzlmCRs01Fhs2BdkEgBS3SAVP6ndrBsuK42iC2+JHyxM05Rm9G8+5mkmRtzMZGY8Ct5+mliZxU/Ww==", "license": "MIT", "dependencies": { - "acorn": "^8.14.1", - "picomatch": "^4.0.2", + "@jridgewell/remapping": "^2.3.5", + "acorn": "^8.15.0", + "picomatch": "^4.0.3", "webpack-virtual-modules": "^0.6.2" }, "engines": { @@ -12397,29 +10938,33 @@ } }, "node_modules/unplugin-vue-router": { - "version": "0.14.0", - "resolved": "https://registry.npmmirror.com/unplugin-vue-router/-/unplugin-vue-router-0.14.0.tgz", - "integrity": "sha512-ipjunvS5e2aFHBAUFuLbHl2aHKbXXXBhTxGT9wZx66fNVPdEQzVVitF8nODr1plANhTTa3UZ+DQu9uyLngMzoQ==", + "version": "0.19.2", + "resolved": "https://registry.npmjs.org/unplugin-vue-router/-/unplugin-vue-router-0.19.2.tgz", + "integrity": 
"sha512-u5dgLBarxE5cyDK/hzJGfpCTLIAyiTXGlo85COuD4Nssj6G7NxS+i9mhCWz/1p/ud1eMwdcUbTXehQe41jYZUA==", + "deprecated": "Merged into vuejs/router. Migrate: https://router.vuejs.org/guide/migration/v4-to-v5.html", "license": "MIT", "dependencies": { - "@vue-macros/common": "3.0.0-beta.15", - "ast-walker-scope": "^0.8.1", - "chokidar": "^4.0.3", - "fast-glob": "^3.3.3", + "@babel/generator": "^7.28.5", + "@vue-macros/common": "^3.1.1", + "@vue/language-core": "^3.2.1", + "ast-walker-scope": "^0.8.3", + "chokidar": "^5.0.0", "json5": "^2.2.3", - "local-pkg": "^1.1.1", - "magic-string": "^0.30.17", - "mlly": "^1.7.4", + "local-pkg": "^1.1.2", + "magic-string": "^0.30.21", + "mlly": "^1.8.0", + "muggle-string": "^0.4.1", "pathe": "^2.0.3", - "picomatch": "^4.0.2", + "picomatch": "^4.0.3", "scule": "^1.3.0", - "unplugin": "^2.3.5", - "unplugin-utils": "^0.2.4", - "yaml": "^2.8.0" + "tinyglobby": "^0.2.15", + "unplugin": "^2.3.11", + "unplugin-utils": "^0.3.1", + "yaml": "^2.8.2" }, "peerDependencies": { "@vue/compiler-sfc": "^3.5.17", - "vue-router": "^4.5.1" + "vue-router": "^4.6.0" }, "peerDependenciesMeta": { "vue-router": { @@ -12427,20 +10972,36 @@ } } }, + "node_modules/unplugin-vue-router/node_modules/unplugin-utils": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/unplugin-utils/-/unplugin-utils-0.3.1.tgz", + "integrity": "sha512-5lWVjgi6vuHhJ526bI4nlCOmkCIF3nnfXkCMDeMJrtdvxTs6ZFCM8oNufGTsDbKv/tJ/xj8RpvXjRuPBZJuJog==", + "license": "MIT", + "dependencies": { + "pathe": "^2.0.3", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + } + }, "node_modules/unstorage": { - "version": "1.16.1", - "resolved": "https://registry.npmmirror.com/unstorage/-/unstorage-1.16.1.tgz", - "integrity": "sha512-gdpZ3guLDhz+zWIlYP1UwQ259tG5T5vYRzDaHMkQ1bBY1SQPutvZnrRjTFaWUUpseErJIgAZS51h6NOcZVZiqQ==", + "version": "1.17.4", + "resolved": 
"https://registry.npmjs.org/unstorage/-/unstorage-1.17.4.tgz", + "integrity": "sha512-fHK0yNg38tBiJKp/Vgsq4j0JEsCmgqH58HAn707S7zGkArbZsVr/CwINoi+nh3h98BRCwKvx1K3Xg9u3VV83sw==", "license": "MIT", "dependencies": { "anymatch": "^3.1.3", - "chokidar": "^4.0.3", + "chokidar": "^5.0.0", "destr": "^2.0.5", - "h3": "^1.15.3", - "lru-cache": "^10.4.3", - "node-fetch-native": "^1.6.6", - "ofetch": "^1.4.1", - "ufo": "^1.6.1" + "h3": "^1.15.5", + "lru-cache": "^11.2.0", + "node-fetch-native": "^1.6.7", + "ofetch": "^1.5.1", + "ufo": "^1.6.3" }, "peerDependencies": { "@azure/app-configuration": "^1.8.0", @@ -12449,13 +11010,14 @@ "@azure/identity": "^4.6.0", "@azure/keyvault-secrets": "^4.9.0", "@azure/storage-blob": "^12.26.0", - "@capacitor/preferences": "^6.0.3 || ^7.0.0", + "@capacitor/preferences": "^6 || ^7 || ^8", "@deno/kv": ">=0.9.0", "@netlify/blobs": "^6.5.0 || ^7.0.0 || ^8.1.0 || ^9.0.0 || ^10.0.0", "@planetscale/database": "^1.19.0", "@upstash/redis": "^1.34.3", "@vercel/blob": ">=0.27.1", - "@vercel/kv": "^1.0.1", + "@vercel/functions": "^2.2.12 || ^3.0.0", + "@vercel/kv": "^1 || ^2 || ^3", "aws4fetch": "^1.0.20", "db0": ">=0.2.1", "idb-keyval": "^6.2.1", @@ -12499,6 +11061,9 @@ "@vercel/blob": { "optional": true }, + "@vercel/functions": { + "optional": true + }, "@vercel/kv": { "optional": true }, @@ -12520,14 +11085,17 @@ } }, "node_modules/unstorage/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmmirror.com/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "license": "ISC" + "version": "11.2.6", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.6.tgz", + "integrity": "sha512-ESL2CrkS/2wTPfuend7Zhkzo2u0daGJ/A2VucJOgQ/C48S/zB8MMeMHSGKYpXhIjbPxfuezITkaBH1wqv00DDQ==", + "license": "BlueOak-1.0.0", + "engines": { + "node": "20 || >=22" + } }, "node_modules/untun": { "version": "0.1.3", - "resolved": 
"https://registry.npmmirror.com/untun/-/untun-0.1.3.tgz", + "resolved": "https://registry.npmjs.org/untun/-/untun-0.1.3.tgz", "integrity": "sha512-4luGP9LMYszMRZwsvyUd9MrxgEGZdZuZgpVQHEEX0lCYFESasVRvZd0EYpCkOIbJKHMuv0LskpXc/8Un+MJzEQ==", "license": "MIT", "dependencies": { @@ -12541,7 +11109,7 @@ }, "node_modules/untun/node_modules/pathe": { "version": "1.1.2", - "resolved": "https://registry.npmmirror.com/pathe/-/pathe-1.1.2.tgz", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-1.1.2.tgz", "integrity": "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==", "license": "MIT" }, @@ -12562,65 +11130,23 @@ } }, "node_modules/unwasm": { - "version": "0.3.9", - "resolved": "https://registry.npmmirror.com/unwasm/-/unwasm-0.3.9.tgz", - "integrity": "sha512-LDxTx/2DkFURUd+BU1vUsF/moj0JsoTvl+2tcg2AUOiEzVturhGGx17/IMgGvKUYdZwr33EJHtChCJuhu9Ouvg==", - "license": "MIT", - "dependencies": { - "knitwork": "^1.0.0", - "magic-string": "^0.30.8", - "mlly": "^1.6.1", - "pathe": "^1.1.2", - "pkg-types": "^1.0.3", - "unplugin": "^1.10.0" - } - }, - "node_modules/unwasm/node_modules/confbox": { - "version": "0.1.8", - "resolved": "https://registry.npmmirror.com/confbox/-/confbox-0.1.8.tgz", - "integrity": "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==", - "license": "MIT" - }, - "node_modules/unwasm/node_modules/pathe": { - "version": "1.1.2", - "resolved": "https://registry.npmmirror.com/pathe/-/pathe-1.1.2.tgz", - "integrity": "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==", - "license": "MIT" - }, - "node_modules/unwasm/node_modules/pkg-types": { - "version": "1.3.1", - "resolved": "https://registry.npmmirror.com/pkg-types/-/pkg-types-1.3.1.tgz", - "integrity": "sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==", - "license": "MIT", - "dependencies": { - "confbox": "^0.1.8", - "mlly": 
"^1.7.4", - "pathe": "^2.0.1" - } - }, - "node_modules/unwasm/node_modules/pkg-types/node_modules/pathe": { - "version": "2.0.3", - "resolved": "https://registry.npmmirror.com/pathe/-/pathe-2.0.3.tgz", - "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", - "license": "MIT" - }, - "node_modules/unwasm/node_modules/unplugin": { - "version": "1.16.1", - "resolved": "https://registry.npmmirror.com/unplugin/-/unplugin-1.16.1.tgz", - "integrity": "sha512-4/u/j4FrCKdi17jaxuJA0jClGxB1AvU2hw/IuayPc4ay1XGaJs/rbb4v5WKwAjNifjmXK9PIFyuPiaK8azyR9w==", + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/unwasm/-/unwasm-0.5.3.tgz", + "integrity": "sha512-keBgTSfp3r6+s9ZcSma+0chwxQdmLbB5+dAD9vjtB21UTMYuKAxHXCU1K2CbCtnP09EaWeRvACnXk0EJtUx+hw==", "license": "MIT", "dependencies": { - "acorn": "^8.14.0", - "webpack-virtual-modules": "^0.6.2" - }, - "engines": { - "node": ">=14.0.0" + "exsolve": "^1.0.8", + "knitwork": "^1.3.0", + "magic-string": "^0.30.21", + "mlly": "^1.8.0", + "pathe": "^2.0.3", + "pkg-types": "^2.3.0" } }, "node_modules/update-browserslist-db": { - "version": "1.1.3", - "resolved": "https://registry.npmmirror.com/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz", - "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==", + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", + "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", "funding": [ { "type": "opencollective", @@ -12649,45 +11175,16 @@ }, "node_modules/uqr": { "version": "0.1.2", - "resolved": "https://registry.npmmirror.com/uqr/-/uqr-0.1.2.tgz", + "resolved": "https://registry.npmjs.org/uqr/-/uqr-0.1.2.tgz", "integrity": "sha512-MJu7ypHq6QasgF5YRTjqscSzQp/W11zoUk6kvmlH+fmWEs63Y0Eib13hYFwAzagRJcVY8WVnlV+eBDUGMJ5IbA==", "license": "MIT" }, - 
"node_modules/urlpattern-polyfill": { - "version": "8.0.2", - "resolved": "https://registry.npmmirror.com/urlpattern-polyfill/-/urlpattern-polyfill-8.0.2.tgz", - "integrity": "sha512-Qp95D4TPJl1kC9SKigDcqgyM2VDVO4RiJc2d4qe5GrYm+zbIQCWWKAFaJNQ4BhdFeDGwBmAxqJBwWSJDb9T3BQ==", - "license": "MIT" - }, "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmmirror.com/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", "license": "MIT" }, - "node_modules/uuid": { - "version": "11.1.0", - "resolved": "https://registry.npmmirror.com/uuid/-/uuid-11.1.0.tgz", - "integrity": "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "bin": { - "uuid": "dist/esm/bin/uuid" - } - }, - "node_modules/validate-npm-package-license": { - "version": "3.0.4", - "resolved": "https://registry.npmmirror.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", - "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", - "license": "Apache-2.0", - "dependencies": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" - } - }, "node_modules/vary": { "version": "1.1.2", "resolved": "https://registry.npmmirror.com/vary/-/vary-1.1.2.tgz", @@ -12698,17 +11195,17 @@ } }, "node_modules/vite": { - "version": "7.0.6", - "resolved": "https://registry.npmmirror.com/vite/-/vite-7.0.6.tgz", - "integrity": "sha512-MHFiOENNBd+Bd9uvc8GEsIzdkn1JxMmEeYX35tI3fv0sJBUTfW5tQsoaOwuY4KhBI09A3dUJ/DXf2yxPVPUceg==", + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.1.tgz", + "integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==", 
"license": "MIT", "dependencies": { - "esbuild": "^0.25.0", - "fdir": "^6.4.6", + "esbuild": "^0.27.0", + "fdir": "^6.5.0", "picomatch": "^4.0.3", "postcss": "^8.5.6", - "rollup": "^4.40.0", - "tinyglobby": "^0.2.14" + "rollup": "^4.43.0", + "tinyglobby": "^0.2.15" }, "bin": { "vite": "bin/vite.js" @@ -12773,7 +11270,7 @@ }, "node_modules/vite-dev-rpc": { "version": "1.1.0", - "resolved": "https://registry.npmmirror.com/vite-dev-rpc/-/vite-dev-rpc-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/vite-dev-rpc/-/vite-dev-rpc-1.1.0.tgz", "integrity": "sha512-pKXZlgoXGoE8sEKiKJSng4hI1sQ4wi5YT24FCrwrLt6opmkjlqPPVmiPWWJn8M8byMxRGzp1CrFuqQs4M/Z39A==", "license": "MIT", "dependencies": { @@ -12789,7 +11286,7 @@ }, "node_modules/vite-hot-client": { "version": "2.1.0", - "resolved": "https://registry.npmmirror.com/vite-hot-client/-/vite-hot-client-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/vite-hot-client/-/vite-hot-client-2.1.0.tgz", "integrity": "sha512-7SpgZmU7R+dDnSmvXE1mfDtnHLHQSisdySVR7lO8ceAXvM0otZeuQQ6C8LrS5d/aYyP/QZ0hI0L+dIPrm4YlFQ==", "license": "MIT", "funding": { @@ -12800,54 +11297,54 @@ } }, "node_modules/vite-node": { - "version": "3.2.4", - "resolved": "https://registry.npmmirror.com/vite-node/-/vite-node-3.2.4.tgz", - "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-5.3.0.tgz", + "integrity": "sha512-8f20COPYJujc3OKPX6OuyBy3ZIv2det4eRRU4GY1y2MjbeGSUmPjedxg1b72KnTagCofwvZ65ThzjxDW2AtQFQ==", "license": "MIT", "dependencies": { "cac": "^6.7.14", - "debug": "^4.4.1", - "es-module-lexer": "^1.7.0", + "es-module-lexer": "^2.0.0", + "obug": "^2.1.1", "pathe": "^2.0.3", - "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" + "vite": "^7.3.1" }, "bin": { - "vite-node": "vite-node.mjs" + "vite-node": "dist/cli.mjs" }, "engines": { - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + "node": "^20.19.0 || >=22.12.0" }, 
"funding": { - "url": "https://opencollective.com/vitest" + "url": "https://opencollective.com/antfu" } }, "node_modules/vite-plugin-checker": { - "version": "0.10.1", - "resolved": "https://registry.npmmirror.com/vite-plugin-checker/-/vite-plugin-checker-0.10.1.tgz", - "integrity": "sha512-imiBsmYTPdjQHIZiEi5BhJ7K8Z/kCjTFMn+Qa4+5ao/a4Yql4yWFcf81FDJqlMiM57iY4Q3Z7PdoEe4KydULYQ==", + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/vite-plugin-checker/-/vite-plugin-checker-0.12.0.tgz", + "integrity": "sha512-CmdZdDOGss7kdQwv73UyVgLPv0FVYe5czAgnmRX2oKljgEvSrODGuClaV3PDR2+3ou7N/OKGauDDBjy2MB07Rg==", "license": "MIT", "dependencies": { "@babel/code-frame": "^7.27.1", "chokidar": "^4.0.3", "npm-run-path": "^6.0.0", "picocolors": "^1.1.1", - "picomatch": "^4.0.2", - "strip-ansi": "^7.1.0", + "picomatch": "^4.0.3", "tiny-invariant": "^1.3.3", - "tinyglobby": "^0.2.14", + "tinyglobby": "^0.2.15", "vscode-uri": "^3.1.0" }, "engines": { - "node": ">=14.16" + "node": ">=16.11" }, "peerDependencies": { "@biomejs/biome": ">=1.7", - "eslint": ">=7", + "eslint": ">=9.39.1", "meow": "^13.2.0", "optionator": "^0.9.4", + "oxlint": ">=1", "stylelint": ">=16", "typescript": "*", - "vite": ">=2.0.0", + "vite": ">=5.4.21", "vls": "*", "vti": "*", "vue-tsc": "~2.2.10 || ^3.0.0" @@ -12865,6 +11362,9 @@ "optionator": { "optional": true }, + "oxlint": { + "optional": true + }, "stylelint": { "optional": true }, @@ -12882,9 +11382,24 @@ } } }, + "node_modules/vite-plugin-checker/node_modules/chokidar": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", + "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", + "license": "MIT", + "dependencies": { + "readdirp": "^4.0.1" + }, + "engines": { + "node": ">= 14.16.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, "node_modules/vite-plugin-checker/node_modules/npm-run-path": { "version": "6.0.0", - 
"resolved": "https://registry.npmmirror.com/npm-run-path/-/npm-run-path-6.0.0.tgz", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-6.0.0.tgz", "integrity": "sha512-9qny7Z9DsQU8Ou39ERsPU4OZQlSTP47ShQzuKZ6PRXpYLtIFgl/DEBYEXKlvcEa+9tHVcK8CF81Y2V72qaZhWA==", "license": "MIT", "dependencies": { @@ -12900,7 +11415,7 @@ }, "node_modules/vite-plugin-checker/node_modules/path-key": { "version": "4.0.0", - "resolved": "https://registry.npmmirror.com/path-key/-/path-key-4.0.0.tgz", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", "license": "MIT", "engines": { @@ -12910,9 +11425,22 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/vite-plugin-checker/node_modules/readdirp": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", + "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", + "license": "MIT", + "engines": { + "node": ">= 14.18.0" + }, + "funding": { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + }, "node_modules/vite-plugin-checker/node_modules/unicorn-magic": { "version": "0.3.0", - "resolved": "https://registry.npmmirror.com/unicorn-magic/-/unicorn-magic-0.3.0.tgz", + "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.3.0.tgz", "integrity": "sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==", "license": "MIT", "engines": { @@ -12923,19 +11451,19 @@ } }, "node_modules/vite-plugin-inspect": { - "version": "11.3.0", - "resolved": "https://registry.npmmirror.com/vite-plugin-inspect/-/vite-plugin-inspect-11.3.0.tgz", - "integrity": "sha512-vmt7K1WVKQkuiwvsM6e5h3HDJ2pSWTnzoj+JP9Kvu3Sh2G+nFap1F1V7tqpyA4qFxM1GQ84ryffWFGQrwShERQ==", + "version": "11.3.3", + "resolved": 
"https://registry.npmjs.org/vite-plugin-inspect/-/vite-plugin-inspect-11.3.3.tgz", + "integrity": "sha512-u2eV5La99oHoYPHE6UvbwgEqKKOQGz86wMg40CCosP6q8BkB6e5xPneZfYagK4ojPJSj5anHCrnvC20DpwVdRA==", "license": "MIT", "dependencies": { "ansis": "^4.1.0", "debug": "^4.4.1", "error-stack-parser-es": "^1.0.5", "ohash": "^2.0.11", - "open": "^10.1.2", - "perfect-debounce": "^1.0.0", + "open": "^10.2.0", + "perfect-debounce": "^2.0.0", "sirv": "^3.0.1", - "unplugin-utils": "^0.2.4", + "unplugin-utils": "^0.3.0", "vite-dev-rpc": "^1.1.0" }, "engines": { @@ -12955,7 +11483,7 @@ }, "node_modules/vite-plugin-inspect/node_modules/define-lazy-prop": { "version": "3.0.0", - "resolved": "https://registry.npmmirror.com/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz", "integrity": "sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==", "license": "MIT", "engines": { @@ -12967,7 +11495,7 @@ }, "node_modules/vite-plugin-inspect/node_modules/open": { "version": "10.2.0", - "resolved": "https://registry.npmmirror.com/open/-/open-10.2.0.tgz", + "resolved": "https://registry.npmjs.org/open/-/open-10.2.0.tgz", "integrity": "sha512-YgBpdJHPyQ2UE5x+hlSXcnejzAvD0b22U2OuAP+8OnlJT+PjWPxtgmGqKKc+RgTM63U9gN0YzrYc71R2WT/hTA==", "license": "MIT", "dependencies": { @@ -12983,15 +11511,37 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/vite-plugin-inspect/node_modules/perfect-debounce": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-2.1.0.tgz", + "integrity": "sha512-LjgdTytVFXeUgtHZr9WYViYSM/g8MkcTPYDlPa3cDqMirHjKiSZPYd6DoL7pK8AJQr+uWkQvCjHNdiMqsrJs+g==", + "license": "MIT" + }, + "node_modules/vite-plugin-inspect/node_modules/unplugin-utils": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/unplugin-utils/-/unplugin-utils-0.3.1.tgz", + "integrity": 
"sha512-5lWVjgi6vuHhJ526bI4nlCOmkCIF3nnfXkCMDeMJrtdvxTs6ZFCM8oNufGTsDbKv/tJ/xj8RpvXjRuPBZJuJog==", + "license": "MIT", + "dependencies": { + "pathe": "^2.0.3", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + } + }, "node_modules/vite-plugin-vue-tracer": { - "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/vite-plugin-vue-tracer/-/vite-plugin-vue-tracer-1.0.0.tgz", - "integrity": "sha512-a+UB9IwGx5uwS4uG/a9kM6fCMnxONDkOTbgCUbhFpiGhqfxrrC1+9BibV7sWwUnwj1Dg6MnRxG0trLgUZslDXA==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/vite-plugin-vue-tracer/-/vite-plugin-vue-tracer-1.2.0.tgz", + "integrity": "sha512-a9Z/TLpxwmoE9kIcv28wqQmiszM7ec4zgndXWEsVD/2lEZLRGzcg7ONXmplzGF/UP5W59QNtS809OdywwpUWQQ==", "license": "MIT", "dependencies": { "estree-walker": "^3.0.3", - "exsolve": "^1.0.7", - "magic-string": "^0.30.17", + "exsolve": "^1.0.8", + "magic-string": "^0.30.21", "pathe": "^2.0.3", "source-map-js": "^1.2.1" }, @@ -13005,21 +11555,21 @@ }, "node_modules/vscode-uri": { "version": "3.1.0", - "resolved": "https://registry.npmmirror.com/vscode-uri/-/vscode-uri-3.1.0.tgz", + "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.1.0.tgz", "integrity": "sha512-/BpdSx+yCQGnCvecbyXdxHDkuk55/G3xwnC0GqY4gmQ3j+A+g8kzzgB4Nk/SINjqn6+waqw3EgbVF2QKExkRxQ==", "license": "MIT" }, "node_modules/vue": { - "version": "3.5.18", - "resolved": "https://registry.npmmirror.com/vue/-/vue-3.5.18.tgz", - "integrity": "sha512-7W4Y4ZbMiQ3SEo+m9lnoNpV9xG7QVMLa+/0RFwwiAVkeYoyGXqWE85jabU4pllJNUzqfLShJ5YLptewhCWUgNA==", + "version": "3.5.29", + "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.29.tgz", + "integrity": "sha512-BZqN4Ze6mDQVNAni0IHeMJ5mwr8VAJ3MQC9FmprRhcBYENw+wOAAjRj8jfmN6FLl0j96OXbR+CjWhmAmM+QGnA==", "license": "MIT", "dependencies": { - "@vue/compiler-dom": "3.5.18", - "@vue/compiler-sfc": "3.5.18", - "@vue/runtime-dom": "3.5.18", - 
"@vue/server-renderer": "3.5.18", - "@vue/shared": "3.5.18" + "@vue/compiler-dom": "3.5.29", + "@vue/compiler-sfc": "3.5.29", + "@vue/runtime-dom": "3.5.29", + "@vue/server-renderer": "3.5.29", + "@vue/shared": "3.5.29" }, "peerDependencies": { "typescript": "*" @@ -13031,24 +11581,24 @@ } }, "node_modules/vue-bundle-renderer": { - "version": "2.1.1", - "resolved": "https://registry.npmmirror.com/vue-bundle-renderer/-/vue-bundle-renderer-2.1.1.tgz", - "integrity": "sha512-+qALLI5cQncuetYOXp4yScwYvqh8c6SMXee3B+M7oTZxOgtESP0l4j/fXdEJoZ+EdMxkGWIj+aSEyjXkOdmd7g==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/vue-bundle-renderer/-/vue-bundle-renderer-2.2.0.tgz", + "integrity": "sha512-sz/0WEdYH1KfaOm0XaBmRZOWgYTEvUDt6yPYaUzl4E52qzgWLlknaPPTTZmp6benaPTlQAI/hN1x3tAzZygycg==", "license": "MIT", "dependencies": { - "ufo": "^1.5.4" + "ufo": "^1.6.1" } }, "node_modules/vue-devtools-stub": { "version": "0.1.0", - "resolved": "https://registry.npmmirror.com/vue-devtools-stub/-/vue-devtools-stub-0.1.0.tgz", + "resolved": "https://registry.npmjs.org/vue-devtools-stub/-/vue-devtools-stub-0.1.0.tgz", "integrity": "sha512-RutnB7X8c5hjq39NceArgXg28WZtZpGc3+J16ljMiYnFhKvd8hITxSWQSQ5bvldxMDU6gG5mkxl1MTQLXckVSQ==", "license": "MIT" }, "node_modules/vue-router": { - "version": "4.5.1", - "resolved": "https://registry.npmmirror.com/vue-router/-/vue-router-4.5.1.tgz", - "integrity": "sha512-ogAF3P97NPm8fJsE4by9dwSYtDwXIY1nFY9T6DyQnGHd1E2Da94w9JIolpe42LJGIl0DwOHBi8TcRPlPGwbTtw==", + "version": "4.6.4", + "resolved": "https://registry.npmjs.org/vue-router/-/vue-router-4.6.4.tgz", + "integrity": "sha512-Hz9q5sa33Yhduglwz6g9skT8OBPii+4bFn88w6J+J4MfEo4KRRpmiNG/hHHkdbRFlLBOqxN8y8gf2Fb0MTUgVg==", "license": "MIT", "dependencies": { "@vue/devtools-api": "^6.6.4" @@ -13057,21 +11607,12 @@ "url": "https://github.com/sponsors/posva" }, "peerDependencies": { - "vue": "^3.2.0" - } - }, - "node_modules/web-streams-polyfill": { - "version": "3.3.3", - "resolved": 
"https://registry.npmmirror.com/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", - "integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==", - "license": "MIT", - "engines": { - "node": ">= 8" + "vue": "^3.5.0" } }, "node_modules/webidl-conversions": { "version": "3.0.1", - "resolved": "https://registry.npmmirror.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", "license": "BSD-2-Clause" }, @@ -13083,7 +11624,7 @@ }, "node_modules/whatwg-url": { "version": "5.0.0", - "resolved": "https://registry.npmmirror.com/whatwg-url/-/whatwg-url-5.0.0.tgz", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", "license": "MIT", "dependencies": { @@ -13093,7 +11634,7 @@ }, "node_modules/which": { "version": "5.0.0", - "resolved": "https://registry.npmmirror.com/which/-/which-5.0.0.tgz", + "resolved": "https://registry.npmjs.org/which/-/which-5.0.0.tgz", "integrity": "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==", "license": "ISC", "dependencies": { @@ -13106,82 +11647,6 @@ "node": "^18.17.0 || >=20.5.0" } }, - "node_modules/winston": { - "version": "3.17.0", - "resolved": "https://registry.npmmirror.com/winston/-/winston-3.17.0.tgz", - "integrity": "sha512-DLiFIXYC5fMPxaRg832S6F5mJYvePtmO5G9v9IgUFPhXm9/GkXarH/TUrBAVzhTCzAj9anE/+GjrgXp/54nOgw==", - "license": "MIT", - "dependencies": { - "@colors/colors": "^1.6.0", - "@dabh/diagnostics": "^2.0.2", - "async": "^3.2.3", - "is-stream": "^2.0.0", - "logform": "^2.7.0", - "one-time": "^1.0.0", - "readable-stream": "^3.4.0", - "safe-stable-stringify": "^2.3.1", - "stack-trace": 
"0.0.x", - "triple-beam": "^1.3.0", - "winston-transport": "^4.9.0" - }, - "engines": { - "node": ">= 12.0.0" - } - }, - "node_modules/winston-transport": { - "version": "4.9.0", - "resolved": "https://registry.npmmirror.com/winston-transport/-/winston-transport-4.9.0.tgz", - "integrity": "sha512-8drMJ4rkgaPo1Me4zD/3WLfI/zPdA9o2IipKODunnGDcuqbHwjsbB79ylv04LCGGzU0xQ6vTznOMpQGaLhhm6A==", - "license": "MIT", - "dependencies": { - "logform": "^2.7.0", - "readable-stream": "^3.6.2", - "triple-beam": "^1.3.0" - }, - "engines": { - "node": ">= 12.0.0" - } - }, - "node_modules/winston-transport/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmmirror.com/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "license": "MIT", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/winston/node_modules/is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmmirror.com/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/winston/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmmirror.com/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "license": "MIT", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/wrap-ansi": { "version": "8.1.0", "resolved": 
"https://registry.npmmirror.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz", @@ -13291,23 +11756,10 @@ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", "license": "ISC" }, - "node_modules/write-file-atomic": { - "version": "6.0.0", - "resolved": "https://registry.npmmirror.com/write-file-atomic/-/write-file-atomic-6.0.0.tgz", - "integrity": "sha512-GmqrO8WJ1NuzJ2DrziEI2o57jKAVIQNf8a18W3nCYU3H7PNWqCCVTeH6/NQE93CIllIgQS98rrmVkYgTX9fFJQ==", - "license": "ISC", - "dependencies": { - "imurmurhash": "^0.1.4", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, "node_modules/ws": { - "version": "8.18.3", - "resolved": "https://registry.npmmirror.com/ws/-/ws-8.18.3.tgz", - "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==", + "version": "8.19.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.19.0.tgz", + "integrity": "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==", "license": "MIT", "engines": { "node": ">=10.0.0" @@ -13327,7 +11779,7 @@ }, "node_modules/wsl-utils": { "version": "0.1.0", - "resolved": "https://registry.npmmirror.com/wsl-utils/-/wsl-utils-0.1.0.tgz", + "resolved": "https://registry.npmjs.org/wsl-utils/-/wsl-utils-0.1.0.tgz", "integrity": "sha512-h3Fbisa2nKGPxCpm89Hk33lBLsnaGBvctQopaBSOW/uIs6FTe1ATyAnKFJrzVs9vpGdsTe73WF3V4lIsk4Gacw==", "license": "MIT", "dependencies": { @@ -13351,20 +11803,23 @@ }, "node_modules/yallist": { "version": "3.1.1", - "resolved": "https://registry.npmmirror.com/yallist/-/yallist-3.1.1.tgz", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", "license": "ISC" }, "node_modules/yaml": { - "version": "2.8.0", - "resolved": "https://registry.npmmirror.com/yaml/-/yaml-2.8.0.tgz", - "integrity": 
"sha512-4lLa/EcQCB0cJkyts+FpIRx5G/llPxfP6VQU5KByHEhLxY3IJCH0f0Hy1MHI8sClTvsIb8qwRJ6R/ZdlDJ/leQ==", + "version": "2.8.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz", + "integrity": "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==", "license": "ISC", "bin": { "yaml": "bin.mjs" }, "engines": { "node": ">= 14.6" + }, + "funding": { + "url": "https://github.com/sponsors/eemeli" } }, "node_modules/yargs": { @@ -13435,25 +11890,6 @@ "node": ">=8" } }, - "node_modules/yauzl": { - "version": "2.10.0", - "resolved": "https://registry.npmmirror.com/yauzl/-/yauzl-2.10.0.tgz", - "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", - "license": "MIT", - "dependencies": { - "buffer-crc32": "~0.2.3", - "fd-slicer": "~1.1.0" - } - }, - "node_modules/yauzl/node_modules/buffer-crc32": { - "version": "0.2.13", - "resolved": "https://registry.npmmirror.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz", - "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", - "license": "MIT", - "engines": { - "node": "*" - } - }, "node_modules/ylru": { "version": "1.4.0", "resolved": "https://registry.npmmirror.com/ylru/-/ylru-1.4.0.tgz", @@ -13463,34 +11899,22 @@ "node": ">= 4.0.0" } }, - "node_modules/yocto-queue": { - "version": "1.2.1", - "resolved": "https://registry.npmmirror.com/yocto-queue/-/yocto-queue-1.2.1.tgz", - "integrity": "sha512-AyeEbWOu/TAXdxlV9wmGcR0+yh2j3vYPGOECcIj2S7MkrLyC7ne+oye2BKTItt0ii2PHk4cDy+95+LshzbXnGg==", - "license": "MIT", - "engines": { - "node": ">=12.20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/youch": { - "version": "4.1.0-beta.10", - "resolved": "https://registry.npmmirror.com/youch/-/youch-4.1.0-beta.10.tgz", - "integrity": 
"sha512-rLfVLB4FgQneDr0dv1oddCVZmKjcJ6yX6mS4pU82Mq/Dt9a3cLZQ62pDBL4AUO+uVrCvtWz3ZFUL2HFAFJ/BXQ==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/youch/-/youch-4.1.0.tgz", + "integrity": "sha512-cYekNh2tUoU+voS11X0D0UQntVCSO6LQ1h10VriQGmfbpf0mnGTruwZICts23UUNiZCXm8H8hQBtRrdsbhuNNg==", "license": "MIT", "dependencies": { - "@poppinss/colors": "^4.1.5", - "@poppinss/dumper": "^0.6.4", - "@speed-highlight/core": "^1.2.7", - "cookie": "^1.0.2", + "@poppinss/colors": "^4.1.6", + "@poppinss/dumper": "^0.7.0", + "@speed-highlight/core": "^1.2.14", + "cookie-es": "^2.0.0", "youch-core": "^0.3.3" } }, "node_modules/youch-core": { "version": "0.3.3", - "resolved": "https://registry.npmmirror.com/youch-core/-/youch-core-0.3.3.tgz", + "resolved": "https://registry.npmjs.org/youch-core/-/youch-core-0.3.3.tgz", "integrity": "sha512-ho7XuGjLaJ2hWHoK8yFnsUGy2Y5uDpqSTq1FkHLK4/oqKtyUU1AFbOOxY4IpC9f0fTLjwYbslUz0Po5BpD1wrA==", "license": "MIT", "dependencies": { @@ -13500,7 +11924,7 @@ }, "node_modules/zip-stream": { "version": "6.0.1", - "resolved": "https://registry.npmmirror.com/zip-stream/-/zip-stream-6.0.1.tgz", + "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-6.0.1.tgz", "integrity": "sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA==", "license": "MIT", "dependencies": { @@ -13511,15 +11935,6 @@ "engines": { "node": ">= 14" } - }, - "node_modules/zod": { - "version": "3.25.76", - "resolved": "https://registry.npmmirror.com/zod/-/zod-3.25.76.tgz", - "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/colinhacks" - } } } } diff --git a/frontend/package.json b/frontend/package.json index 685e338..b527734 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -12,9 +12,15 @@ "dependencies": { "@nuxtjs/tailwindcss": "^6.14.0", "@pinia/nuxt": 
"^0.11.2", + "@vueuse/motion": "^3.0.3", "axios": "^1.11.0", + "gsap": "^3.14.2", "nuxt": "^4.0.1", + "ogl": "^1.0.11", "vue": "^3.5.17", "vue-router": "^4.5.1" + }, + "devDependencies": { + "tailwindcss": "3.4.17" } } diff --git a/frontend/pages/chat/[[username]].vue b/frontend/pages/chat/[[username]].vue index 48ed479..3188acc 100644 --- a/frontend/pages/chat/[[username]].vue +++ b/frontend/pages/chat/[[username]].vue @@ -1,6003 +1,729 @@ - - diff --git a/frontend/pages/contacts.vue b/frontend/pages/contacts.vue new file mode 100644 index 0000000..9059632 --- /dev/null +++ b/frontend/pages/contacts.vue @@ -0,0 +1,501 @@ + + + diff --git a/frontend/pages/decrypt.vue b/frontend/pages/decrypt.vue index fb76aec..ffe0c49 100644 --- a/frontend/pages/decrypt.vue +++ b/frontend/pages/decrypt.vue @@ -26,24 +26,40 @@
-
- -
- {{ formData.key.length }}/64 + +
+
+ +
+ {{ formData.key.length }}/64 +
+ +

@@ -55,7 +71,7 @@ - 使用 wx_key 等工具获取的64位十六进制字符串 + 点击按钮将自动获取【数据库】与【图片】双重密钥。您也可以手动输入已知的64位密钥(使用wx_key等工具获取)。

@@ -109,6 +125,40 @@
+ + +
+
+
+ {{ dbDecryptProgress.message || (loading ? '解密中...' : '') }} +
+
+ {{ dbDecryptProgress.current }} / {{ dbDecryptProgress.total }} +
+
+ +
+
+
+ +
+ {{ dbDecryptProgress.current_file }} +
+ +
+
+
{{ dbDecryptProgress.success_count }}
+
成功
+
+
+
{{ dbDecryptProgress.fail_count }}
+
失败
+
+
+
@@ -131,35 +181,39 @@
+ +
+ 此步骤将为您解密微信聊天中的图片 +
+

+ + + + 如果您在第一步使用了“一键获取”或触发了云端解析,下方输入框已被自动填充。您也可可以使用wx_key等工具手动获取。 +

+

{{ manualKeyErrors.xor_key }}

{{ manualKeyErrors.aes_key }}

- -

- - - - 使用 wx_key 获取图片密钥;AES 可选(V4-V2 需要) -

@@ -325,6 +379,19 @@ + + + +
+ + + +
+

温馨提示

+

{{ warning }}

+
+
+
@@ -364,15 +431,17 @@ + + + + + diff --git a/frontend/pages/index.vue b/frontend/pages/index.vue index cf06df5..499e5d3 100644 --- a/frontend/pages/index.vue +++ b/frontend/pages/index.vue @@ -49,6 +49,17 @@ 聊天预览 + + + + + + + + + 年度总结 + @@ -57,17 +68,12 @@ diff --git a/frontend/pages/wrapped/index.vue b/frontend/pages/wrapped/index.vue new file mode 100644 index 0000000..3719c78 --- /dev/null +++ b/frontend/pages/wrapped/index.vue @@ -0,0 +1,669 @@ + + + + + diff --git a/frontend/plugins/api-check.client.js b/frontend/plugins/api-check.client.js index bda4bdc..5f86269 100644 --- a/frontend/plugins/api-check.client.js +++ b/frontend/plugins/api-check.client.js @@ -1,7 +1,8 @@ // 客户端插件:检查API连接状态 -export default defineNuxtPlugin(async (nuxtApp) => { +export default defineNuxtPlugin((nuxtApp) => { const { healthCheck } = useApi() const appStore = useAppStore() + let intervalId = 0 // 检查API连接 const checkApiConnection = async () => { @@ -17,10 +18,14 @@ export default defineNuxtPlugin(async (nuxtApp) => { console.error('API连接失败:', error) } } - - // 初始检查 - await checkApiConnection() - - // 定期检查(每30秒) - setInterval(checkApiConnection, 30000) -}) \ No newline at end of file + + nuxtApp.hook('app:mounted', () => { + void checkApiConnection() + + if (!intervalId) { + intervalId = window.setInterval(() => { + void checkApiConnection() + }, 30000) + } + }) +}) diff --git a/frontend/public/AnnualSummary1.png b/frontend/public/AnnualSummary1.png new file mode 100644 index 0000000..1d05ca4 Binary files /dev/null and b/frontend/public/AnnualSummary1.png differ diff --git a/frontend/public/AnnualSummary2.png b/frontend/public/AnnualSummary2.png new file mode 100644 index 0000000..e01d13b Binary files /dev/null and b/frontend/public/AnnualSummary2.png differ diff --git a/frontend/public/AnnualSummary3.png b/frontend/public/AnnualSummary3.png new file mode 100644 index 0000000..cc57330 Binary files /dev/null and b/frontend/public/AnnualSummary3.png differ diff --git 
a/frontend/public/AnnualSummary4.gif b/frontend/public/AnnualSummary4.gif new file mode 100644 index 0000000..14f6a3d Binary files /dev/null and b/frontend/public/AnnualSummary4.gif differ diff --git a/frontend/public/AnnualSummary5.gif b/frontend/public/AnnualSummary5.gif new file mode 100644 index 0000000..bf35baf Binary files /dev/null and b/frontend/public/AnnualSummary5.gif differ diff --git a/frontend/public/AnnualSummary6.png b/frontend/public/AnnualSummary6.png new file mode 100644 index 0000000..3a15932 Binary files /dev/null and b/frontend/public/AnnualSummary6.png differ diff --git a/frontend/public/AnnualSummary7.png b/frontend/public/AnnualSummary7.png new file mode 100644 index 0000000..37177c0 Binary files /dev/null and b/frontend/public/AnnualSummary7.png differ diff --git a/frontend/public/AnnualSummary8.png b/frontend/public/AnnualSummary8.png new file mode 100644 index 0000000..1c370dd Binary files /dev/null and b/frontend/public/AnnualSummary8.png differ diff --git a/frontend/public/Contact.png b/frontend/public/Contact.png new file mode 100644 index 0000000..a32a652 Binary files /dev/null and b/frontend/public/Contact.png differ diff --git a/frontend/public/QQImage_1770190010691_1103312318341691201.jpg b/frontend/public/QQImage_1770190010691_1103312318341691201.jpg new file mode 100644 index 0000000..0d9c087 Binary files /dev/null and b/frontend/public/QQImage_1770190010691_1103312318341691201.jpg differ diff --git a/frontend/public/RealTimeMessages.gif b/frontend/public/RealTimeMessages.gif new file mode 100644 index 0000000..5186298 Binary files /dev/null and b/frontend/public/RealTimeMessages.gif differ diff --git a/frontend/public/assets/images/LuckyBlock.png b/frontend/public/assets/images/LuckyBlock.png new file mode 100644 index 0000000..92d8a98 Binary files /dev/null and b/frontend/public/assets/images/LuckyBlock.png differ diff --git a/frontend/public/assets/images/wechat/overdue.png b/frontend/public/assets/images/wechat/overdue.png 
new file mode 100644 index 0000000..9bf8016 Binary files /dev/null and b/frontend/public/assets/images/wechat/overdue.png differ diff --git a/frontend/public/assets/images/wechat/wechat-trans-icon2.png b/frontend/public/assets/images/wechat/wechat-trans-icon2.png index b9f72da..6d500a2 100644 Binary files a/frontend/public/assets/images/wechat/wechat-trans-icon2.png and b/frontend/public/assets/images/wechat/wechat-trans-icon2.png differ diff --git a/frontend/public/edit.gif b/frontend/public/edit.gif new file mode 100644 index 0000000..48547a6 Binary files /dev/null and b/frontend/public/edit.gif differ diff --git a/frontend/public/export.png b/frontend/public/export.png index 59345dd..b9de9a9 100644 Binary files a/frontend/public/export.png and b/frontend/public/export.png differ diff --git a/frontend/public/message.png b/frontend/public/message.png index a693d5d..304ae11 100644 Binary files a/frontend/public/message.png and b/frontend/public/message.png differ diff --git a/frontend/public/search.png b/frontend/public/search.png index 1d42332..663b583 100644 Binary files a/frontend/public/search.png and b/frontend/public/search.png differ diff --git a/frontend/public/setting.png b/frontend/public/setting.png new file mode 100644 index 0000000..23315a8 Binary files /dev/null and b/frontend/public/setting.png differ diff --git a/frontend/public/sns.png b/frontend/public/sns.png new file mode 100644 index 0000000..bdd3216 Binary files /dev/null and b/frontend/public/sns.png differ diff --git a/frontend/public/style1.png b/frontend/public/style1.png new file mode 100644 index 0000000..49f2b58 Binary files /dev/null and b/frontend/public/style1.png differ diff --git a/frontend/stores/chatAccounts.js b/frontend/stores/chatAccounts.js new file mode 100644 index 0000000..517cd94 --- /dev/null +++ b/frontend/stores/chatAccounts.js @@ -0,0 +1,111 @@ +import { defineStore } from 'pinia' + +const SELECTED_ACCOUNT_KEY = 'ui.selected_account' + +export const 
useChatAccountsStore = defineStore('chatAccounts', () => { + const accounts = ref([]) + const selectedAccount = ref(null) + const loading = ref(false) + const error = ref('') + const loaded = ref(false) + + // Capture apiBase during synchronous store setup when Nuxt context is available. + // useApiBase() calls useRuntimeConfig() which requires the Nuxt app context; + // that context can be lost inside deferred async functions (e.g. onMounted callbacks). + const _apiBase = useApiBase() + + let loadPromise = null + + const readSelectedAccount = () => { + if (!process.client) return null + try { + const raw = localStorage.getItem(SELECTED_ACCOUNT_KEY) + const v = String(raw || '').trim() + return v || null + } catch { + return null + } + } + + const writeSelectedAccount = (value) => { + if (!process.client) return + try { + const v = String(value || '').trim() + if (!v) { + localStorage.removeItem(SELECTED_ACCOUNT_KEY) + return + } + localStorage.setItem(SELECTED_ACCOUNT_KEY, v) + } catch {} + } + + const setSelectedAccount = (next) => { + selectedAccount.value = next ? String(next) : null + writeSelectedAccount(selectedAccount.value) + } + + if (process.client) { + watch(selectedAccount, (next) => { + writeSelectedAccount(next) + }) + } + + const ensureLoaded = async ({ force = false } = {}) => { + if (!process.client) return + if (loaded.value && !force) return + + if (loadPromise && !force) { + await loadPromise + return + } + + loadPromise = (async () => { + loading.value = true + error.value = '' + + if (!selectedAccount.value) { + const cached = readSelectedAccount() + if (cached) selectedAccount.value = cached + } + + try { + const resp = await $fetch('/chat/accounts', { baseURL: _apiBase }) + const nextAccounts = Array.isArray(resp?.accounts) ? 
resp.accounts : [] + accounts.value = nextAccounts + + const preferred = String(selectedAccount.value || '').trim() + const defaultAccount = String(resp?.default_account || '').trim() + const fallback = defaultAccount || nextAccounts[0] || '' + const nextSelected = preferred && nextAccounts.includes(preferred) ? preferred : (fallback || null) + + selectedAccount.value = nextSelected + writeSelectedAccount(nextSelected) + loaded.value = true + } catch (e) { + accounts.value = [] + selectedAccount.value = null + writeSelectedAccount(null) + loaded.value = true + error.value = e?.message || '加载账号失败' + } finally { + loading.value = false + } + })() + + try { + await loadPromise + } finally { + loadPromise = null + } + } + + return { + accounts, + selectedAccount, + loading, + error, + loaded, + ensureLoaded, + setSelectedAccount, + } +}) diff --git a/frontend/stores/chatRealtime.js b/frontend/stores/chatRealtime.js new file mode 100644 index 0000000..2d1a015 --- /dev/null +++ b/frontend/stores/chatRealtime.js @@ -0,0 +1,225 @@ +import { defineStore } from 'pinia' + +import { useChatAccountsStore } from '~/stores/chatAccounts' + +export const useChatRealtimeStore = defineStore('chatRealtime', () => { + const chatAccounts = useChatAccountsStore() + + const enabled = ref(false) + const available = ref(false) + const checking = ref(false) + const statusInfo = ref(null) + const statusError = ref('') + const toggling = ref(false) + const toggleSeq = ref(0) + const lastToggleAction = ref('') + const changeSeq = ref(0) + const priorityUsername = ref('') + + let eventSource = null + let changeDebounceTimer = null + + const getAccount = () => String(chatAccounts.selectedAccount || '').trim() + + const setPriorityUsername = (username) => { + priorityUsername.value = String(username || '').trim() + } + + const ensureReadyAccount = async () => { + if (!process.client) return false + await chatAccounts.ensureLoaded() + return !!getAccount() + } + + const fetchStatus = async () => { 
+ if (!process.client) return + const account = getAccount() + if (!account) { + available.value = false + statusInfo.value = null + statusError.value = '未检测到已解密账号,请先解密数据库。' + return + } + + const api = useApi() + checking.value = true + statusError.value = '' + try { + const resp = await api.getChatRealtimeStatus({ account }) + available.value = !!resp?.available + statusInfo.value = resp?.realtime || null + statusError.value = '' + } catch (e) { + available.value = false + statusInfo.value = null + statusError.value = e?.message || '实时状态获取失败' + } finally { + checking.value = false + } + } + + const stopStream = () => { + if (eventSource) { + try { + eventSource.close() + } catch {} + eventSource = null + } + if (changeDebounceTimer) { + try { + clearTimeout(changeDebounceTimer) + } catch {} + changeDebounceTimer = null + } + } + + const bumpChangeSeqDebounced = () => { + if (changeDebounceTimer) return + changeDebounceTimer = setTimeout(() => { + changeDebounceTimer = null + changeSeq.value += 1 + }, 500) + } + + const startStream = () => { + stopStream() + if (!process.client || typeof window === 'undefined') return + if (!enabled.value) return + const account = getAccount() + if (!account) return + if (typeof EventSource === 'undefined') return + + const apiBase = useApiBase() + const url = `${apiBase}/chat/realtime/stream?account=${encodeURIComponent(account)}` + + try { + eventSource = new EventSource(url) + } catch { + eventSource = null + return + } + + eventSource.onmessage = (ev) => { + try { + const data = JSON.parse(String(ev.data || '{}')) + if (String(data?.type || '') === 'change') { + bumpChangeSeqDebounced() + } + } catch {} + } + + eventSource.onerror = () => { + // Keep `enabled` as-is; same behavior as the old in-page implementation. 
+ stopStream() + } + } + + const enable = async ({ silent = false } = {}) => { + if (toggling.value) return false + toggling.value = true + try { + const ok = await ensureReadyAccount() + if (!ok) { + if (!silent && process.client && typeof window !== 'undefined') { + window.alert('未检测到已解密账号,请先解密数据库。') + } + statusError.value = '未检测到已解密账号,请先解密数据库。' + return false + } + + await fetchStatus() + if (!available.value) { + if (!silent && process.client && typeof window !== 'undefined') { + window.alert(statusError.value || '实时模式不可用:缺少密钥或 db_storage 路径。') + } + return false + } + + enabled.value = true + startStream() + lastToggleAction.value = 'enabled' + toggleSeq.value += 1 + return true + } finally { + toggling.value = false + } + } + + const disable = async ({ silent = false } = {}) => { + if (toggling.value) return false + toggling.value = true + try { + const account = getAccount() + enabled.value = false + stopStream() + + if (!account) { + lastToggleAction.value = 'disabled' + toggleSeq.value += 1 + return true + } + + try { + const api = useApi() + await api.syncChatRealtimeAll({ + account, + max_scan: 200, + priority_username: priorityUsername.value || '', + priority_max_scan: 5000, + include_hidden: true, + include_official: true, + }) + } catch (e) { + if (!silent && process.client && typeof window !== 'undefined') { + window.alert(e?.message || '关闭实时模式时同步失败') + } + } + + lastToggleAction.value = 'disabled' + toggleSeq.value += 1 + return true + } finally { + toggling.value = false + } + } + + const toggle = async (opts = {}) => { + return enabled.value ? 
await disable(opts) : await enable(opts) + } + + if (process.client) { + watch( + () => chatAccounts.selectedAccount, + async () => { + setPriorityUsername('') + await fetchStatus() + if (enabled.value) { + startStream() + } + }, + { immediate: true } + ) + } + + return { + enabled, + available, + checking, + statusInfo, + statusError, + toggling, + toggleSeq, + lastToggleAction, + changeSeq, + priorityUsername, + + setPriorityUsername, + ensureReadyAccount, + fetchStatus, + startStream, + stopStream, + enable, + disable, + toggle, + } +}) diff --git a/frontend/stores/privacy.js b/frontend/stores/privacy.js new file mode 100644 index 0000000..d32a331 --- /dev/null +++ b/frontend/stores/privacy.js @@ -0,0 +1,31 @@ +import { defineStore } from 'pinia' + +import { readPrivacyMode, writePrivacyMode } from '~/lib/privacy-mode' + +export const usePrivacyStore = defineStore('privacy', () => { + const privacyMode = ref(false) + const initialized = ref(false) + + const init = () => { + if (initialized.value) return + initialized.value = true + privacyMode.value = readPrivacyMode(false) + } + + const set = (enabled) => { + privacyMode.value = !!enabled + writePrivacyMode(privacyMode.value) + } + + const toggle = () => { + set(!privacyMode.value) + } + + return { + privacyMode, + init, + set, + toggle, + } +}) + diff --git a/generate_config_template.py b/generate_config_template.py index 6e5d88a..87a6b42 100644 --- a/generate_config_template.py +++ b/generate_config_template.py @@ -6,6 +6,7 @@ import sqlite3 import json +import argparse from pathlib import Path from typing import Dict, List, Any from collections import defaultdict @@ -127,6 +128,82 @@ def analyze_database_structure(self, db_path: Path) -> Dict[str, Any]: try: cursor = conn.cursor() + + def parse_columns_from_create_sql(create_sql: str) -> list[tuple[str, str]]: + """ + 从建表 SQL 中尽力解析列名(用于 FTS5/缺失 tokenizer 扩展导致 PRAGMA 失败的情况)。 + 返回 (name, type);类型缺失时默认 TEXT。 + """ + out: list[tuple[str, str]] = [] + if not 
create_sql: + return out + try: + start = create_sql.find("(") + end = create_sql.rfind(")") + if start == -1 or end == -1 or end <= start: + return out + inner = create_sql[start + 1:end] + + parts: list[str] = [] + buf = "" + depth = 0 + for ch in inner: + if ch == "(": + depth += 1 + elif ch == ")": + depth -= 1 + if ch == "," and depth == 0: + parts.append(buf.strip()) + buf = "" + else: + buf += ch + if buf.strip(): + parts.append(buf.strip()) + + for part in parts: + token = part.strip() + if not token: + continue + low = token.lower() + # 跳过约束/外键等 + if low.startswith(("constraint", "primary", "unique", "foreign", "check")): + continue + # fts5 选项(tokenize/prefix/content/content_rowid 等) + if "=" in token: + key = token.split("=", 1)[0].strip().lower() + if key in ("tokenize", "prefix", "content", "content_rowid", "compress", "uncompress"): + continue + tokens = token.split() + if not tokens: + continue + name = tokens[0].strip("`\"[]") + typ = tokens[1].upper() if len(tokens) > 1 and "=" not in tokens[1] else "TEXT" + out.append((name, typ)) + except Exception: + return out + return out + + def get_table_columns(table_name: str) -> list[tuple[str, str]]: + # 先尝试 PRAGMA + try: + cursor.execute(f"PRAGMA table_info({table_name})") + columns = cursor.fetchall() + if columns: + return [(col[1], col[2]) for col in columns] + except Exception: + pass + + # 兜底:从 sqlite_master.sql 解析 + try: + cursor.execute( + "SELECT sql FROM sqlite_master WHERE type='table' AND name=?", + (table_name,), + ) + row = cursor.fetchone() + create_sql = row[0] if row and len(row) > 0 else "" + return parse_columns_from_create_sql(create_sql or "") + except Exception: + return [] # 获取所有表名 cursor.execute("SELECT name FROM sqlite_master WHERE type='table'") @@ -152,13 +229,10 @@ def analyze_database_structure(self, db_path: Path) -> Dict[str, Any]: table_key = f"{prefix}_*" # 使用模式名 # 获取代表表的字段信息 - cursor.execute(f"PRAGMA table_info({representative_table})") - columns = cursor.fetchall() + 
columns = get_table_columns(representative_table) fields = {} - for col in columns: - field_name = col[1] - field_type = col[2] + for field_name, field_type in columns: fields[field_name] = { "type": field_type, "meaning": "", # 留空供用户填写 @@ -188,13 +262,10 @@ def analyze_database_structure(self, db_path: Path) -> Dict[str, Any]: try: # 获取表字段信息 - cursor.execute(f"PRAGMA table_info({table_name})") - columns = cursor.fetchall() + columns = get_table_columns(table_name) fields = {} - for col in columns: - field_name = col[1] - field_type = col[2] + for field_name, field_type in columns: fields[field_name] = { "type": field_type, "meaning": "", # 留空供用户填写 @@ -219,16 +290,23 @@ def analyze_database_structure(self, db_path: Path) -> Dict[str, Any]: finally: conn.close() - def generate_template(self, output_file: str = "wechat_db_config_template.json"): + def generate_template( + self, + output_file: str = "wechat_db_config_template.json", + *, + include_excluded: bool = False, + include_message_shards: bool = False, + exclude_db_stems: set[str] | None = None, + ): """生成配置模板""" print("开始生成微信数据库配置模板...") # 定义要排除的数据库模式和描述 - excluded_patterns = { - r'biz_message_\d+\.db$': '企业微信聊天记录数据库', - r'bizchat\.db$': '企业微信联系人数据库', - r'contact_fts\.db$': '搜索联系人数据库', - r'favorite_fts\.db$': '搜索收藏数据库' + excluded_patterns = {} if include_excluded else { + r'biz_message_\d+\.db$': '公众号/企业微信聊天记录数据库(通常不参与个人聊天分析)', + r'bizchat\.db$': '企业微信联系人/会话数据库(通常不参与个人聊天分析)', + r'contact_fts\.db$': '联系人搜索索引数据库(FTS)', + r'favorite_fts\.db$': '收藏搜索索引数据库(FTS)' } # 查找所有数据库文件 @@ -263,29 +341,38 @@ def generate_template(self, output_file: str = "wechat_db_config_template.json") for excluded_file, description in excluded_files: print(f" - {excluded_file.name} ({description})") + # 显式排除指定 stem(不含 .db) + if exclude_db_stems: + before = len(db_files) + db_files = [p for p in db_files if p.stem not in exclude_db_stems] + after = len(db_files) + if before != after: + print(f"\n按 --exclude-db-stem 排除 {before - after} 
个数据库: {sorted(exclude_db_stems)}") + print(f"\n实际处理 {len(db_files)} 个数据库文件") # 过滤message数据库,只保留倒数第二个(与主脚本逻辑一致) - message_numbered_dbs = [] - message_other_dbs = [] - - for db in db_files: - if re.match(r'message_\d+$', db.stem): # message_{数字}.db - message_numbered_dbs.append(db) - elif db.stem.startswith('message_'): # message_fts.db, message_resource.db等 - message_other_dbs.append(db) - - if len(message_numbered_dbs) > 1: - # 按数字编号排序(提取数字进行排序) - message_numbered_dbs.sort(key=lambda x: int(re.search(r'message_(\d+)', x.stem).group(1))) - # 选择倒数第二个(按编号排序) - selected_message_db = message_numbered_dbs[-2] # 倒数第二个 - print(f"检测到 {len(message_numbered_dbs)} 个message_{{数字}}.db数据库") - print(f"选择倒数第二个: {selected_message_db.name}") - - # 从db_files中移除其他message_{数字}.db数据库,但保留message_fts.db等 - db_files = [db for db in db_files if not re.match(r'message_\d+$', db.stem)] - db_files.append(selected_message_db) + if not include_message_shards: + message_numbered_dbs = [] + message_other_dbs = [] + + for db in db_files: + if re.match(r'message_\d+$', db.stem): # message_{数字}.db + message_numbered_dbs.append(db) + elif db.stem.startswith('message_'): # message_fts.db, message_resource.db等 + message_other_dbs.append(db) + + if len(message_numbered_dbs) > 1: + # 按数字编号排序(提取数字进行排序) + message_numbered_dbs.sort(key=lambda x: int(re.search(r'message_(\d+)', x.stem).group(1))) + # 选择倒数第二个(按编号排序) + selected_message_db = message_numbered_dbs[-2] # 倒数第二个 + print(f"检测到 {len(message_numbered_dbs)} 个message_{{数字}}.db数据库") + print(f"选择倒数第二个: {selected_message_db.name}") + + # 从db_files中移除其他message_{数字}.db数据库,但保留message_fts.db等 + db_files = [db for db in db_files if not re.match(r'message_\d+$', db.stem)] + db_files.append(selected_message_db) print(f"实际分析 {len(db_files)} 个数据库文件") @@ -370,11 +457,24 @@ def generate_template(self, output_file: str = "wechat_db_config_template.json") def main(): """主函数""" + parser = argparse.ArgumentParser(description="微信数据库字段配置模板生成器") + 
parser.add_argument("--databases-path", default="output/databases", help="解密后的数据库根目录(按账号分目录)") + parser.add_argument("--output", default="wechat_db_config_template.json", help="输出 JSON 模板路径") + parser.add_argument("--include-excluded", action="store_true", help="包含默认会被排除的数据库(如 bizchat/contact_fts/favorite_fts 等)") + parser.add_argument("--include-message-shards", action="store_true", help="包含所有 message_{n}.db(否则仅保留倒数第二个作代表)") + parser.add_argument("--exclude-db-stem", action="append", default=[], help="按 stem(不含 .db)排除数据库,可重复,例如: --exclude-db-stem digital_twin") + args = parser.parse_args() + print("微信数据库配置模板生成器") print("=" * 50) - - generator = ConfigTemplateGenerator() - generator.generate_template() + + generator = ConfigTemplateGenerator(databases_path=args.databases_path) + generator.generate_template( + output_file=args.output, + include_excluded=bool(args.include_excluded), + include_message_shards=bool(args.include_message_shards), + exclude_db_stems=set(args.exclude_db_stem or []), + ) if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/main.py b/main.py index 426d786..924dc94 100644 --- a/main.py +++ b/main.py @@ -5,23 +5,30 @@ 使用方法: uv run main.py -默认在8000端口启动API服务 +默认在10392端口启动API服务 """ import uvicorn import os from pathlib import Path +from wechat_decrypt_tool.runtime_settings import read_effective_backend_port def main(): """启动微信解密工具API服务""" host = os.environ.get("WECHAT_TOOL_HOST", "127.0.0.1") - port = int(os.environ.get("WECHAT_TOOL_PORT", "8000")) + port, port_source = read_effective_backend_port(default=10392) access_host = "127.0.0.1" if host in {"0.0.0.0", "::"} else host print("=" * 60) print("微信解密工具 API 服务") print("=" * 60) print("正在启动服务...") + if port_source == "env": + print("端口来源: 环境变量 WECHAT_TOOL_PORT") + elif port_source == "settings": + print("端口来源: 配置文件 output/runtime_settings.json(由网页/桌面设置写入)") + else: + print("端口来源: 默认值") print(f"API文档: http://{access_host}:{port}/docs") print(f"健康检查: 
http://{access_host}:{port}/api/health") print("按 Ctrl+C 停止服务") diff --git a/pyproject.toml b/pyproject.toml index 78cd6c2..e37fa6d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "wechat-decrypt-tool" -version = "0.1.0" +version = "1.3.0" description = "Modern WeChat database decryption tool with React frontend" readme = "README.md" requires-python = ">=3.11" @@ -18,6 +18,11 @@ dependencies = [ "loguru>=0.7.0", "zstandard>=0.23.0", "pilk>=0.2.4", + "pypinyin>=0.53.0", + "jieba>=0.42.1", + "wx_key>=1.1.0", + "packaging", + "httpx", ] [project.optional-dependencies] @@ -39,3 +44,6 @@ include = [ "src/wechat_decrypt_tool/native/wcdb_api.dll", "src/wechat_decrypt_tool/native/WCDB.dll", ] + +[tool.uv] +find-links = ["./tools/key_wheels/"] diff --git a/src/wechat_decrypt_tool/__init__.py b/src/wechat_decrypt_tool/__init__.py index f0cef38..eb13771 100644 --- a/src/wechat_decrypt_tool/__init__.py +++ b/src/wechat_decrypt_tool/__init__.py @@ -1,5 +1,5 @@ """微信数据库解密工具 """ -__version__ = "0.1.0" -__author__ = "WeChat Decrypt Tool" \ No newline at end of file +__version__ = "1.3.0" +__author__ = "WeChat Decrypt Tool" diff --git a/src/wechat_decrypt_tool/api.py b/src/wechat_decrypt_tool/api.py index df3773f..b8866c5 100644 --- a/src/wechat_decrypt_tool/api.py +++ b/src/wechat_decrypt_tool/api.py @@ -5,30 +5,42 @@ from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware +from starlette.requests import Request from starlette.exceptions import HTTPException as StarletteHTTPException from starlette.responses import FileResponse from starlette.staticfiles import StaticFiles from .logging_config import setup_logging, get_logger + +# 初始化日志系统 +setup_logging() +logger = get_logger(__name__) +request_logger = get_logger("wechat_decrypt_tool.request") + +from . 
import __version__ as APP_VERSION from .path_fix import PathFixRoute +from .chat_realtime_autosync import CHAT_REALTIME_AUTOSYNC from .routers.chat import router as _chat_router +from .routers.chat_contacts import router as _chat_contacts_router from .routers.chat_export import router as _chat_export_router from .routers.chat_media import router as _chat_media_router from .routers.decrypt import router as _decrypt_router from .routers.health import router as _health_router +from .routers.admin import router as _admin_router from .routers.keys import router as _keys_router from .routers.media import router as _media_router +from .routers.sns import router as _sns_router +from .routers.sns_export import router as _sns_export_router from .routers.wechat_detection import router as _wechat_detection_router +from .routers.wrapped import router as _wrapped_router +from .request_logging import log_server_errors_middleware +from .sns_stage_timing import add_sns_stage_timing_headers from .wcdb_realtime import WCDB_REALTIME, shutdown as _wcdb_shutdown -# 初始化日志系统 -setup_logging() -logger = get_logger(__name__) - app = FastAPI( title="微信数据库解密工具", description="现代化的微信数据库解密工具,支持微信信息检测和数据库解密功能", - version="0.1.0", + version=APP_VERSION, ) # 设置自定义路由类 @@ -41,16 +53,49 @@ allow_credentials=True, allow_methods=["*"], allow_headers=["*"], + expose_headers=["X-SNS-Source", "X-SNS-Hit-Type", "X-SNS-X-Enc"], ) + +@app.middleware("http") +async def _add_sns_stage_timing_headers(request: Request, call_next): + """Expose SNS stage metadata to the frontend without extra requests. + + `` elements can't read response headers, but browsers can surface `Server-Timing` + via `performance.getEntriesByName(...).serverTiming` when `Timing-Allow-Origin` is set. 
+ """ + + response = await call_next(request) + try: + add_sns_stage_timing_headers( + response.headers, + source=str(response.headers.get("X-SNS-Source") or ""), + hit_type=str(response.headers.get("X-SNS-Hit-Type") or ""), + x_enc=str(response.headers.get("X-SNS-X-Enc") or ""), + ) + except Exception: + pass + return response + + +@app.middleware("http") +async def _log_server_errors(request: Request, call_next): + return await log_server_errors_middleware(request_logger, request, call_next) + + app.include_router(_health_router) +app.include_router(_admin_router) app.include_router(_wechat_detection_router) app.include_router(_decrypt_router) app.include_router(_keys_router) app.include_router(_media_router) app.include_router(_chat_router) +app.include_router(_chat_contacts_router) app.include_router(_chat_export_router) app.include_router(_chat_media_router) +app.include_router(_sns_router) +app.include_router(_sns_export_router) +app.include_router(_wrapped_router) class _SPAStaticFiles(StaticFiles): @@ -61,9 +106,36 @@ def __init__(self, *args, **kwargs): self._fallback_200 = Path(str(self.directory)) / "200.html" self._fallback_index = Path(str(self.directory)) / "index.html" + @staticmethod + def _normalize_path(path: str) -> str: + return str(path or "").strip().lstrip("/") + + @classmethod + def _is_shell_path(cls, path: str) -> bool: + normalized = cls._normalize_path(path) + return normalized in {"", "index.html", "200.html", "_payload.json"} or normalized.startswith( + "_payload.json/" + ) + + @classmethod + def _apply_cache_headers(cls, path: str, response): + normalized = cls._normalize_path(path) + try: + if cls._is_shell_path(normalized): + response.headers["Cache-Control"] = "no-store, no-cache, must-revalidate" + response.headers["Pragma"] = "no-cache" + response.headers["Expires"] = "0" + elif normalized.startswith("_nuxt/"): + response.headers.setdefault("Cache-Control", "public, max-age=31536000, immutable") + except Exception: + pass + 
return response + async def get_response(self, path: str, scope): # type: ignore[override] + normalized = self._normalize_path(path) try: - return await super().get_response(path, scope) + response = await super().get_response(path, scope) + return self._apply_cache_headers(normalized, response) except StarletteHTTPException as exc: if exc.status_code != 404: raise @@ -74,8 +146,8 @@ async def get_response(self, path: str, scope): # type: ignore[override] raise if self._fallback_200.exists(): - return FileResponse(str(self._fallback_200)) - return FileResponse(str(self._fallback_index)) + return self._apply_cache_headers("200.html", FileResponse(str(self._fallback_200))) + return self._apply_cache_headers("index.html", FileResponse(str(self._fallback_index))) def _maybe_mount_frontend() -> None: @@ -117,21 +189,48 @@ def _maybe_mount_frontend() -> None: _maybe_mount_frontend() +@app.on_event("startup") +async def _startup_background_jobs() -> None: + try: + CHAT_REALTIME_AUTOSYNC.start() + except Exception: + logger.exception("Failed to start realtime autosync service") + + @app.on_event("shutdown") async def _shutdown_wcdb_realtime() -> None: try: - WCDB_REALTIME.close_all() + CHAT_REALTIME_AUTOSYNC.stop() except Exception: pass + close_ok = False + lock_timeout_s: float | None = 0.2 try: - _wcdb_shutdown() + raw = str(os.environ.get("WECHAT_TOOL_WCDB_SHUTDOWN_LOCK_TIMEOUT_S", "0.2") or "").strip() + lock_timeout_s = float(raw) if raw else 0.2 + if lock_timeout_s <= 0: + lock_timeout_s = None except Exception: - pass + lock_timeout_s = 0.2 + try: + close_ok = WCDB_REALTIME.close_all(lock_timeout_s=lock_timeout_s) + except Exception: + close_ok = False + if close_ok: + try: + _wcdb_shutdown() + except Exception: + pass + else: + # If some conn locks were busy, other threads may still be running WCDB calls; avoid shutting down the lib. 
+ logger.warning("[wcdb] close_all not fully completed; skip wcdb_shutdown") if __name__ == "__main__": import uvicorn + from .runtime_settings import read_effective_backend_port + host = os.environ.get("WECHAT_TOOL_HOST", "127.0.0.1") - port = int(os.environ.get("WECHAT_TOOL_PORT", "8000")) + port, _ = read_effective_backend_port(default=10392) uvicorn.run(app, host=host, port=port) diff --git a/src/wechat_decrypt_tool/avatar_cache.py b/src/wechat_decrypt_tool/avatar_cache.py new file mode 100644 index 0000000..c37eaee --- /dev/null +++ b/src/wechat_decrypt_tool/avatar_cache.py @@ -0,0 +1,454 @@ +from __future__ import annotations + +import hashlib +import os +import re +import sqlite3 +import time +from email.utils import formatdate +from pathlib import Path +from typing import Any, Optional +from urllib.parse import urlsplit, urlunsplit + +from .app_paths import get_output_dir +from .logging_config import get_logger + +logger = get_logger(__name__) + +AVATAR_CACHE_TTL_SECONDS = 7 * 24 * 60 * 60 + + +def is_avatar_cache_enabled() -> bool: + v = str(os.environ.get("WECHAT_TOOL_AVATAR_CACHE_ENABLED", "1") or "").strip().lower() + return v not in {"", "0", "false", "off", "no"} + + +def get_avatar_cache_root_dir() -> Path: + return get_output_dir() / "avatar_cache" + + +def _safe_segment(value: str) -> str: + cleaned = re.sub(r"[^0-9A-Za-z._-]+", "_", str(value or "").strip()) + cleaned = cleaned.strip("._-") + return cleaned or "default" + + +def _account_layout(account: str) -> tuple[Path, Path, Path, Path]: + account_dir = get_avatar_cache_root_dir() / _safe_segment(account) + files_dir = account_dir / "files" + tmp_dir = account_dir / "tmp" + db_path = account_dir / "avatar_cache.db" + return account_dir, files_dir, tmp_dir, db_path + + +def _ensure_account_layout(account: str) -> tuple[Path, Path, Path, Path]: + account_dir, files_dir, tmp_dir, db_path = _account_layout(account) + account_dir.mkdir(parents=True, exist_ok=True) + files_dir.mkdir(parents=True, 
exist_ok=True) + tmp_dir.mkdir(parents=True, exist_ok=True) + return account_dir, files_dir, tmp_dir, db_path + + +def _connect(account: str) -> sqlite3.Connection: + _, _, _, db_path = _ensure_account_layout(account) + conn = sqlite3.connect(str(db_path), timeout=5) + conn.row_factory = sqlite3.Row + _ensure_schema(conn) + return conn + + +def _ensure_schema(conn: sqlite3.Connection) -> None: + conn.execute( + """ + CREATE TABLE IF NOT EXISTS avatar_cache_entries ( + account TEXT NOT NULL, + cache_key TEXT NOT NULL, + source_kind TEXT NOT NULL, + username TEXT NOT NULL DEFAULT '', + source_url TEXT NOT NULL DEFAULT '', + source_md5 TEXT NOT NULL DEFAULT '', + source_update_time INTEGER NOT NULL DEFAULT 0, + rel_path TEXT NOT NULL DEFAULT '', + media_type TEXT NOT NULL DEFAULT 'application/octet-stream', + size_bytes INTEGER NOT NULL DEFAULT 0, + etag TEXT NOT NULL DEFAULT '', + last_modified TEXT NOT NULL DEFAULT '', + fetched_at INTEGER NOT NULL DEFAULT 0, + checked_at INTEGER NOT NULL DEFAULT 0, + expires_at INTEGER NOT NULL DEFAULT 0, + PRIMARY KEY (account, cache_key) + ) + """ + ) + conn.execute( + "CREATE INDEX IF NOT EXISTS idx_avatar_cache_entries_account_username ON avatar_cache_entries(account, username)" + ) + conn.execute( + "CREATE INDEX IF NOT EXISTS idx_avatar_cache_entries_account_source ON avatar_cache_entries(account, source_kind, source_url)" + ) + conn.commit() + + +def _row_to_dict(row: Optional[sqlite3.Row]) -> Optional[dict[str, Any]]: + if row is None: + return None + out: dict[str, Any] = {} + for k in row.keys(): + out[str(k)] = row[k] + return out + + +def normalize_avatar_source_url(url: str) -> str: + raw = str(url or "").strip() + if not raw: + return "" + try: + p = urlsplit(raw) + except Exception: + return raw + scheme = str(p.scheme or "").lower() + host = str(p.hostname or "").lower() + if not scheme or not host: + return raw + netloc = host + if p.port: + netloc = f"{host}:{int(p.port)}" + path = p.path or "/" + return 
urlunsplit((scheme, netloc, path, p.query or "", "")) + + +def cache_key_for_avatar_user(username: str) -> str: + u = str(username or "").strip() + return hashlib.sha1(f"user:{u}".encode("utf-8", errors="ignore")).hexdigest() + + +def cache_key_for_avatar_url(url: str) -> str: + u = normalize_avatar_source_url(url) + return hashlib.sha1(f"url:{u}".encode("utf-8", errors="ignore")).hexdigest() + + +def get_avatar_cache_entry(account: str, cache_key: str) -> Optional[dict[str, Any]]: + if (not is_avatar_cache_enabled()) or (not cache_key): + return None + try: + conn = _connect(account) + except Exception: + return None + try: + row = conn.execute( + "SELECT * FROM avatar_cache_entries WHERE account = ? AND cache_key = ? LIMIT 1", + (str(account or ""), str(cache_key or "")), + ).fetchone() + return _row_to_dict(row) + except Exception: + return None + finally: + try: + conn.close() + except Exception: + pass + + +def get_avatar_cache_user_entry(account: str, username: str) -> Optional[dict[str, Any]]: + if not username: + return None + return get_avatar_cache_entry(account, cache_key_for_avatar_user(username)) + + +def get_avatar_cache_url_entry(account: str, source_url: str) -> Optional[dict[str, Any]]: + if not source_url: + return None + return get_avatar_cache_entry(account, cache_key_for_avatar_url(source_url)) + + +def resolve_avatar_cache_entry_path(account: str, entry: Optional[dict[str, Any]]) -> Optional[Path]: + if not entry: + return None + rel = str(entry.get("rel_path") or "").strip().replace("\\", "/") + if not rel: + return None + account_dir, _, _, _ = _account_layout(account) + p = account_dir / rel + try: + account_dir_resolved = account_dir.resolve() + p_resolved = p.resolve() + if p_resolved != account_dir_resolved and account_dir_resolved not in p_resolved.parents: + return None + return p_resolved + except Exception: + return p + + +def avatar_cache_entry_file_exists(account: str, entry: Optional[dict[str, Any]]) -> Optional[Path]: + p = 
resolve_avatar_cache_entry_path(account, entry) + if not p: + return None + try: + if p.exists() and p.is_file(): + return p + except Exception: + return None + return None + + +def avatar_cache_entry_is_fresh(entry: Optional[dict[str, Any]], now_ts: Optional[int] = None) -> bool: + if not entry: + return False + try: + expires = int(entry.get("expires_at") or 0) + except Exception: + expires = 0 + if expires <= 0: + return False + now0 = int(now_ts or time.time()) + return expires > now0 + + +def _guess_ext(media_type: str) -> str: + mt = str(media_type or "").strip().lower() + if mt == "image/jpeg": + return "jpg" + if mt == "image/png": + return "png" + if mt == "image/gif": + return "gif" + if mt == "image/webp": + return "webp" + if mt == "image/bmp": + return "bmp" + if mt == "image/svg+xml": + return "svg" + if mt == "image/avif": + return "avif" + if mt.startswith("image/"): + return mt.split("/", 1)[1].split("+", 1)[0].split(";", 1)[0] or "img" + return "dat" + + +def _http_date_from_ts(ts: Optional[int]) -> str: + try: + t = int(ts or 0) + except Exception: + t = 0 + if t <= 0: + return "" + try: + return formatdate(timeval=float(t), usegmt=True) + except Exception: + return "" + + +def upsert_avatar_cache_entry( + account: str, + *, + cache_key: str, + source_kind: str, + username: str = "", + source_url: str = "", + source_md5: str = "", + source_update_time: int = 0, + rel_path: str = "", + media_type: str = "application/octet-stream", + size_bytes: int = 0, + etag: str = "", + last_modified: str = "", + fetched_at: Optional[int] = None, + checked_at: Optional[int] = None, + expires_at: Optional[int] = None, +) -> Optional[dict[str, Any]]: + if (not is_avatar_cache_enabled()) or (not cache_key): + return None + + acct = str(account or "").strip() + ck = str(cache_key or "").strip() + sk = str(source_kind or "").strip().lower() + if not acct or not ck or not sk: + return None + + source_url_norm = normalize_avatar_source_url(source_url) if source_url 
else "" + + now_ts = int(time.time()) + fetched = int(fetched_at if fetched_at is not None else now_ts) + checked = int(checked_at if checked_at is not None else now_ts) + expire_ts = int(expires_at if expires_at is not None else (checked + AVATAR_CACHE_TTL_SECONDS)) + + try: + conn = _connect(acct) + except Exception as e: + logger.warning(f"[avatar_cache_error] open db failed account={acct} err={e}") + return None + try: + conn.execute( + """ + INSERT INTO avatar_cache_entries ( + account, cache_key, source_kind, username, source_url, + source_md5, source_update_time, rel_path, media_type, size_bytes, + etag, last_modified, fetched_at, checked_at, expires_at + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + ON CONFLICT(account, cache_key) DO UPDATE SET + source_kind=excluded.source_kind, + username=excluded.username, + source_url=excluded.source_url, + source_md5=excluded.source_md5, + source_update_time=excluded.source_update_time, + rel_path=excluded.rel_path, + media_type=excluded.media_type, + size_bytes=excluded.size_bytes, + etag=excluded.etag, + last_modified=excluded.last_modified, + fetched_at=excluded.fetched_at, + checked_at=excluded.checked_at, + expires_at=excluded.expires_at + """, + ( + acct, + ck, + sk, + str(username or "").strip(), + source_url_norm, + str(source_md5 or "").strip().lower(), + int(source_update_time or 0), + str(rel_path or "").strip().replace("\\", "/"), + str(media_type or "application/octet-stream").strip() or "application/octet-stream", + int(size_bytes or 0), + str(etag or "").strip(), + str(last_modified or "").strip(), + fetched, + checked, + expire_ts, + ), + ) + conn.commit() + row = conn.execute( + "SELECT * FROM avatar_cache_entries WHERE account = ? AND cache_key = ? 
LIMIT 1", + (acct, ck), + ).fetchone() + return _row_to_dict(row) + except Exception as e: + logger.warning(f"[avatar_cache_error] upsert failed account={acct} cache_key={ck} err={e}") + return None + finally: + try: + conn.close() + except Exception: + pass + + +def touch_avatar_cache_entry(account: str, cache_key: str, *, ttl_seconds: int = AVATAR_CACHE_TTL_SECONDS) -> bool: + if (not is_avatar_cache_enabled()) or (not cache_key): + return False + now_ts = int(time.time()) + try: + conn = _connect(account) + except Exception: + return False + try: + conn.execute( + "UPDATE avatar_cache_entries SET checked_at = ?, expires_at = ? WHERE account = ? AND cache_key = ?", + (now_ts, now_ts + max(60, int(ttl_seconds or AVATAR_CACHE_TTL_SECONDS)), str(account or ""), str(cache_key or "")), + ) + conn.commit() + return True + except Exception: + return False + finally: + try: + conn.close() + except Exception: + pass + + +def write_avatar_cache_payload( + account: str, + *, + source_kind: str, + username: str = "", + source_url: str = "", + payload: bytes, + media_type: str, + source_md5: str = "", + source_update_time: int = 0, + etag: str = "", + last_modified: str = "", + ttl_seconds: int = AVATAR_CACHE_TTL_SECONDS, +) -> tuple[Optional[dict[str, Any]], Optional[Path]]: + if (not is_avatar_cache_enabled()) or (not payload): + return None, None + + acct = str(account or "").strip() + sk = str(source_kind or "").strip().lower() + if not acct or sk not in {"user", "url"}: + return None, None + + source_url_norm = normalize_avatar_source_url(source_url) if source_url else "" + if sk == "user": + cache_key = cache_key_for_avatar_user(username) + else: + cache_key = cache_key_for_avatar_url(source_url_norm) + + digest = hashlib.sha1(bytes(payload)).hexdigest() + ext = _guess_ext(media_type) + rel_path = f"files/{digest[:2]}/{digest}.{ext}" + + try: + account_dir, _, tmp_dir, _ = _ensure_account_layout(acct) + except Exception as e: + logger.warning(f"[avatar_cache_error] 
ensure dirs failed account={acct} err={e}") + return None, None + + abs_path = account_dir / rel_path + try: + abs_path.parent.mkdir(parents=True, exist_ok=True) + if (not abs_path.exists()) or (int(abs_path.stat().st_size) != len(payload)): + tmp_path = tmp_dir / f"{digest}.{time.time_ns()}.tmp" + tmp_path.write_bytes(payload) + os.replace(str(tmp_path), str(abs_path)) + except Exception as e: + logger.warning(f"[avatar_cache_error] write file failed account={acct} path={abs_path} err={e}") + return None, None + + if (not etag) and digest: + etag = f'"{digest}"' + if (not last_modified) and source_update_time: + last_modified = _http_date_from_ts(source_update_time) + if not last_modified: + last_modified = _http_date_from_ts(int(time.time())) + + entry = upsert_avatar_cache_entry( + acct, + cache_key=cache_key, + source_kind=sk, + username=username, + source_url=source_url_norm, + source_md5=source_md5, + source_update_time=int(source_update_time or 0), + rel_path=rel_path, + media_type=media_type, + size_bytes=len(payload), + etag=etag, + last_modified=last_modified, + fetched_at=int(time.time()), + checked_at=int(time.time()), + expires_at=int(time.time()) + max(60, int(ttl_seconds or AVATAR_CACHE_TTL_SECONDS)), + ) + if not entry: + return None, None + return entry, abs_path + + +def build_avatar_cache_response_headers( + entry: Optional[dict[str, Any]], *, max_age: int = AVATAR_CACHE_TTL_SECONDS +) -> dict[str, str]: + headers: dict[str, str] = { + "Cache-Control": f"public, max-age={int(max_age)}", + } + if not entry: + return headers + etag = str(entry.get("etag") or "").strip() + last_modified = str(entry.get("last_modified") or "").strip() + if etag: + headers["ETag"] = etag + if last_modified: + headers["Last-Modified"] = last_modified + return headers + diff --git a/src/wechat_decrypt_tool/backend_entry.py b/src/wechat_decrypt_tool/backend_entry.py index bbfbf3b..4f0d5e3 100644 --- a/src/wechat_decrypt_tool/backend_entry.py +++ 
b/src/wechat_decrypt_tool/backend_entry.py @@ -9,11 +9,12 @@ import uvicorn from wechat_decrypt_tool.api import app +from wechat_decrypt_tool.runtime_settings import read_effective_backend_port def main() -> None: host = os.environ.get("WECHAT_TOOL_HOST", "127.0.0.1") - port = int(os.environ.get("WECHAT_TOOL_PORT", "8000")) + port, _ = read_effective_backend_port(default=10392) uvicorn.run(app, host=host, port=port, log_level="info") diff --git a/src/wechat_decrypt_tool/chat_edit_store.py b/src/wechat_decrypt_tool/chat_edit_store.py new file mode 100644 index 0000000..9149dd9 --- /dev/null +++ b/src/wechat_decrypt_tool/chat_edit_store.py @@ -0,0 +1,514 @@ +from __future__ import annotations + +import json +import re +import sqlite3 +import time +from pathlib import Path +from typing import Any, Optional + +from .app_paths import get_output_dir + +_HEX_RE = re.compile(r"^[0-9a-fA-F]+$") + + +def _db_path() -> Path: + return get_output_dir() / "message_edits.db" + + +def _connect() -> sqlite3.Connection: + db_path = _db_path() + db_path.parent.mkdir(parents=True, exist_ok=True) + conn = sqlite3.connect(str(db_path), timeout=5) + conn.row_factory = sqlite3.Row + _ensure_schema(conn) + return conn + + +def ensure_schema() -> None: + conn: Optional[sqlite3.Connection] = None + try: + conn = _connect() + finally: + try: + if conn is not None: + conn.close() + except Exception: + pass + + +def _ensure_schema(conn: sqlite3.Connection) -> None: + conn.execute( + """ + CREATE TABLE IF NOT EXISTS message_edits ( + account TEXT NOT NULL, + session_id TEXT NOT NULL, + db TEXT NOT NULL, + table_name TEXT NOT NULL, + local_id INTEGER NOT NULL, + first_edited_at INTEGER NOT NULL, + last_edited_at INTEGER NOT NULL, + edit_count INTEGER NOT NULL, + original_msg_json TEXT NOT NULL, + original_resource_json TEXT, + edited_cols_json TEXT, + PRIMARY KEY (account, session_id, db, table_name, local_id) + ) + """ + ) + conn.execute( + "CREATE INDEX IF NOT EXISTS 
idx_message_edits_account_session ON message_edits(account, session_id)" + ) + conn.execute("CREATE INDEX IF NOT EXISTS idx_message_edits_account_last ON message_edits(account, last_edited_at)") + + # Backwards-compatible migrations for existing DBs. + try: + cols = { + str(r[1] or "").strip().lower() + for r in conn.execute("PRAGMA table_info(message_edits)").fetchall() + if r and len(r) > 1 and r[1] + } + if "edited_cols_json" not in cols: + conn.execute("ALTER TABLE message_edits ADD COLUMN edited_cols_json TEXT") + except Exception: + pass + conn.commit() + + +def _now_ms() -> int: + return int(time.time() * 1000) + + +def format_message_id(db: str, table_name: str, local_id: int) -> str: + return f"{str(db or '').strip()}:{str(table_name or '').strip()}:{int(local_id or 0)}" + + +def parse_message_id(message_id: str) -> tuple[str, str, int]: + parts = str(message_id or "").split(":", 2) + if len(parts) != 3: + raise ValueError("Invalid message_id format.") + db = str(parts[0] or "").strip() + table_name = str(parts[1] or "").strip() + try: + local_id = int(parts[2] or 0) + except Exception: + raise ValueError("Invalid message_id format.") + if not db or not table_name or local_id <= 0: + raise ValueError("Invalid message_id format.") + return db, table_name, local_id + + +def _bytes_to_hex(value: bytes) -> str: + return "0x" + value.hex() + + +def _hex_to_bytes(value: str) -> Optional[bytes]: + s = str(value or "").strip() + if not s.startswith("0x"): + return None + hex_part = s[2:] + if (not hex_part) or (len(hex_part) % 2 != 0) or (_HEX_RE.match(hex_part) is None): + return None + try: + return bytes.fromhex(hex_part) + except Exception: + return None + + +def _jsonify_blobs(obj: Any) -> Any: + if obj is None: + return None + if isinstance(obj, (bytes, bytearray, memoryview)): + return _bytes_to_hex(bytes(obj)) + if isinstance(obj, dict): + return {str(k): _jsonify_blobs(v) for k, v in obj.items()} + if isinstance(obj, (list, tuple)): + return 
[_jsonify_blobs(v) for v in obj] + return obj + + +def _dejsonify_blobs(obj: Any) -> Any: + if obj is None: + return None + if isinstance(obj, str): + b = _hex_to_bytes(obj) + return b if b is not None else obj + if isinstance(obj, dict): + return {str(k): _dejsonify_blobs(v) for k, v in obj.items()} + if isinstance(obj, list): + return [_dejsonify_blobs(v) for v in obj] + return obj + + +def dumps_json_with_blobs(obj: Any) -> str: + return json.dumps(_jsonify_blobs(obj), ensure_ascii=False, separators=(",", ":")) + + +def loads_json_with_blobs(payload: str) -> Any: + return _dejsonify_blobs(json.loads(str(payload or "") or "null")) + + +def upsert_original_once( + *, + account: str, + session_id: str, + db: str, + table_name: str, + local_id: int, + original_msg: dict[str, Any], + original_resource: Optional[dict[str, Any]], + now_ms: Optional[int] = None, +) -> None: + """Insert the original snapshot for a message only once, then bump counters on subsequent edits.""" + a = str(account or "").strip() + sid = str(session_id or "").strip() + db_norm = str(db or "").strip() + t = str(table_name or "").strip() + lid = int(local_id or 0) + if not a or not sid or not db_norm or not t or lid <= 0: + raise ValueError("Missing required keys for message edit store.") + + ts = int(now_ms if now_ms is not None else _now_ms()) + msg_json = dumps_json_with_blobs(original_msg or {}) + res_json = dumps_json_with_blobs(original_resource) if original_resource is not None else None + + conn: Optional[sqlite3.Connection] = None + try: + conn = _connect() + existing = conn.execute( + """ + SELECT 1 + FROM message_edits + WHERE account = ? AND session_id = ? AND db = ? AND table_name = ? AND local_id = ? 
+ LIMIT 1 + """, + (a, sid, db_norm, t, lid), + ).fetchone() + if existing is None: + conn.execute( + """ + INSERT INTO message_edits( + account, session_id, db, table_name, local_id, + first_edited_at, last_edited_at, edit_count, + original_msg_json, original_resource_json, edited_cols_json + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + """, + (a, sid, db_norm, t, lid, ts, ts, 1, msg_json, res_json, None), + ) + else: + conn.execute( + """ + UPDATE message_edits + SET last_edited_at = ?, edit_count = edit_count + 1 + WHERE account = ? AND session_id = ? AND db = ? AND table_name = ? AND local_id = ? + """, + (ts, a, sid, db_norm, t, lid), + ) + conn.commit() + finally: + try: + if conn is not None: + conn.close() + except Exception: + pass + + +def _parse_json_str_list(payload: Any) -> list[str]: + if payload is None: + return [] + if isinstance(payload, (list, tuple)): + return [str(x or "").strip() for x in payload if str(x or "").strip()] + s = str(payload or "").strip() + if not s: + return [] + try: + v = json.loads(s) + except Exception: + return [] + if not isinstance(v, list): + return [] + return [str(x or "").strip() for x in v if str(x or "").strip()] + + +def merge_edited_columns( + *, + account: str, + session_id: str, + db: str, + table_name: str, + local_id: int, + columns: list[str], +) -> bool: + """Merge edited message column names into the per-message edit record. + + This allows reset to restore only the fields actually modified by the tool. 
+ """ + a = str(account or "").strip() + sid = str(session_id or "").strip() + db_norm = str(db or "").strip() + t = str(table_name or "").strip() + lid = int(local_id or 0) + if not a or not sid or not db_norm or not t or lid <= 0: + return False + + cols_in = [str(x or "").strip() for x in (columns or []) if str(x or "").strip()] + if not cols_in: + return True + + conn: Optional[sqlite3.Connection] = None + try: + conn = _connect() + row = conn.execute( + """ + SELECT edited_cols_json + FROM message_edits + WHERE account = ? AND session_id = ? AND db = ? AND table_name = ? AND local_id = ? + LIMIT 1 + """, + (a, sid, db_norm, t, lid), + ).fetchone() + if row is None: + return False + + existing = _parse_json_str_list(row[0] if row and len(row) else None) + merged = {c.lower() for c in existing if c} | {c.lower() for c in cols_in if c} + merged_list = sorted(merged) + payload = json.dumps(merged_list, ensure_ascii=False, separators=(",", ":")) + conn.execute( + """ + UPDATE message_edits + SET edited_cols_json = ? + WHERE account = ? AND session_id = ? AND db = ? AND table_name = ? AND local_id = ? + """, + (payload, a, sid, db_norm, t, lid), + ) + conn.commit() + return True + finally: + try: + if conn is not None: + conn.close() + except Exception: + pass + + +def _row_to_dict(row: Optional[sqlite3.Row]) -> Optional[dict[str, Any]]: + if row is None: + return None + out: dict[str, Any] = {} + for k in row.keys(): + out[str(k)] = row[k] + return out + + +def list_sessions(account: str) -> list[dict[str, Any]]: + a = str(account or "").strip() + if not a: + return [] + + conn: Optional[sqlite3.Connection] = None + try: + conn = _connect() + rows = conn.execute( + """ + SELECT session_id, COUNT(*) AS msg_count, MAX(last_edited_at) AS last_edited_at + FROM message_edits + WHERE account = ? 
+ GROUP BY session_id + ORDER BY last_edited_at DESC + """, + (a,), + ).fetchall() + out: list[dict[str, Any]] = [] + for r in rows: + try: + sid = str(r["session_id"] or "").strip() + except Exception: + sid = "" + if not sid: + continue + out.append( + { + "session_id": sid, + "msg_count": int(r["msg_count"] or 0), + "last_edited_at": int(r["last_edited_at"] or 0), + } + ) + return out + finally: + try: + if conn is not None: + conn.close() + except Exception: + pass + + +def list_messages(account: str, session_id: str) -> list[dict[str, Any]]: + a = str(account or "").strip() + sid = str(session_id or "").strip() + if not a or not sid: + return [] + + conn: Optional[sqlite3.Connection] = None + try: + conn = _connect() + rows = conn.execute( + """ + SELECT * + FROM message_edits + WHERE account = ? AND session_id = ? + ORDER BY last_edited_at ASC, local_id ASC + """, + (a, sid), + ).fetchall() + out: list[dict[str, Any]] = [] + for r in rows: + item = _row_to_dict(r) or {} + try: + item["message_id"] = format_message_id(item.get("db") or "", item.get("table_name") or "", item.get("local_id") or 0) + except Exception: + item["message_id"] = "" + out.append(item) + return out + finally: + try: + if conn is not None: + conn.close() + except Exception: + pass + + +def get_message_edit(account: str, session_id: str, message_id: str) -> Optional[dict[str, Any]]: + a = str(account or "").strip() + sid = str(session_id or "").strip() + if not a or not sid or not message_id: + return None + try: + db, table_name, local_id = parse_message_id(message_id) + except Exception: + return None + + conn: Optional[sqlite3.Connection] = None + try: + conn = _connect() + row = conn.execute( + """ + SELECT * + FROM message_edits + WHERE account = ? AND session_id = ? AND db = ? AND table_name = ? AND local_id = ? 
+ LIMIT 1 + """, + (a, sid, db, table_name, int(local_id)), + ).fetchone() + item = _row_to_dict(row) + if not item: + return None + item["message_id"] = format_message_id(db, table_name, local_id) + return item + finally: + try: + if conn is not None: + conn.close() + except Exception: + pass + + +def delete_message_edit(account: str, session_id: str, message_id: str) -> bool: + a = str(account or "").strip() + sid = str(session_id or "").strip() + if not a or not sid or not message_id: + return False + try: + db, table_name, local_id = parse_message_id(message_id) + except Exception: + return False + + conn: Optional[sqlite3.Connection] = None + try: + conn = _connect() + cur = conn.execute( + """ + DELETE FROM message_edits + WHERE account = ? AND session_id = ? AND db = ? AND table_name = ? AND local_id = ? + """, + (a, sid, db, table_name, int(local_id)), + ) + conn.commit() + return int(getattr(cur, "rowcount", 0) or 0) > 0 + finally: + try: + if conn is not None: + conn.close() + except Exception: + pass + + +def update_message_edit_local_id( + *, + account: str, + session_id: str, + db: str, + table_name: str, + old_local_id: int, + new_local_id: int, +) -> bool: + """Update the primary key local_id for an existing edit record (unsafe operations may change Msg.local_id).""" + a = str(account or "").strip() + sid = str(session_id or "").strip() + db_norm = str(db or "").strip() + t = str(table_name or "").strip() + old_lid = int(old_local_id or 0) + new_lid = int(new_local_id or 0) + if not a or not sid or not db_norm or not t or old_lid <= 0 or new_lid <= 0: + return False + if old_lid == new_lid: + return True + + conn: Optional[sqlite3.Connection] = None + try: + conn = _connect() + cur = conn.execute( + """ + UPDATE message_edits + SET local_id = ? + WHERE account = ? AND session_id = ? AND db = ? AND table_name = ? AND local_id = ? 
+ """, + (new_lid, a, sid, db_norm, t, old_lid), + ) + conn.commit() + return int(getattr(cur, "rowcount", 0) or 0) > 0 + except Exception: + return False + finally: + try: + if conn is not None: + conn.close() + except Exception: + pass + + +def delete_account_edits(account: str) -> int: + a = str(account or "").strip() + if not a: + return 0 + + conn: Optional[sqlite3.Connection] = None + try: + conn = _connect() + cur = conn.execute( + """ + DELETE FROM message_edits + WHERE account = ? + """, + (a,), + ) + conn.commit() + return int(getattr(cur, "rowcount", 0) or 0) + except Exception: + return 0 + finally: + try: + if conn is not None: + conn.close() + except Exception: + pass diff --git a/src/wechat_decrypt_tool/chat_export_service.py b/src/wechat_decrypt_tool/chat_export_service.py index ed5d345..46a11f0 100644 --- a/src/wechat_decrypt_tool/chat_export_service.py +++ b/src/wechat_decrypt_tool/chat_export_service.py @@ -1,9 +1,16 @@ from __future__ import annotations +import functools +import base64 +import hashlib import heapq +import html +import ipaddress import json +import os import re import sqlite3 +import socket import tempfile import threading import time @@ -13,6 +20,9 @@ from datetime import datetime from pathlib import Path from typing import Any, Iterable, Literal, Optional +from urllib.parse import urljoin, urlparse + +import requests from .chat_helpers import ( _decode_message_content, @@ -21,13 +31,17 @@ _extract_xml_attr, _extract_xml_tag_or_attr, _extract_xml_tag_text, + _format_session_time, _infer_message_brief_by_local_type, _infer_transfer_status_text, _iter_message_db_paths, _list_decrypted_accounts, _load_contact_rows, + _load_latest_message_previews, _lookup_resource_md5, _parse_app_message, + _parse_location_message, + _parse_system_message_content, _parse_pat_message, _pick_display_name, _quote_ident, @@ -39,9 +53,10 @@ ) from .logging_config import get_logger from .media_helpers import ( - _convert_silk_to_wav, + 
_convert_silk_to_browser_audio, _detect_image_media_type, _fallback_search_media_by_file_id, + _read_and_maybe_decrypt_media, _resolve_account_db_storage_dir, _resolve_account_wxid_dir, _resolve_media_path_for_kind, @@ -50,7 +65,7 @@ logger = get_logger(__name__) -ExportFormat = Literal["json", "txt"] +ExportFormat = Literal["json", "txt", "html"] ExportScope = Literal["selected", "all", "groups", "singles"] ExportStatus = Literal["queued", "running", "done", "error", "cancelled"] MediaKind = Literal["image", "emoji", "video", "video_thumb", "voice", "file"] @@ -74,6 +89,2238 @@ def _safe_name(s: str, max_len: int = 80) -> str: return t +def _resolve_export_output_dir(account_dir: Path, output_dir_raw: Any) -> Path: + text = str(output_dir_raw or "").strip() + if not text: + default_dir = account_dir.parents[1] / "exports" / account_dir.name + default_dir.mkdir(parents=True, exist_ok=True) + return default_dir + + out_dir = Path(text).expanduser() + if not out_dir.is_absolute(): + raise ValueError("output_dir must be an absolute path.") + + try: + out_dir.mkdir(parents=True, exist_ok=True) + except Exception as e: + raise ValueError(f"Failed to prepare output_dir: {e}") from e + + return out_dir.resolve() + + +def _resolve_ui_public_dir() -> Optional[Path]: + """Best-effort resolve Nuxt generated public directory for exporting UI CSS. + + Priority: + 1) `WECHAT_TOOL_UI_DIR` env + 2) repo default `frontend/.output/public` + """ + + ui_dir_env = os.environ.get("WECHAT_TOOL_UI_DIR", "").strip() + candidates: list[Path] = [] + if ui_dir_env: + candidates.append(Path(ui_dir_env)) + + # Repo defaults: generated Nuxt output or checked-in desktop UI assets. 
+ repo_root = Path(__file__).resolve().parents[2] + candidates.append(repo_root / "frontend" / ".output" / "public") + candidates.append(repo_root / "desktop" / "resources" / "ui") + + for p in candidates: + try: + nuxt_dir = p / "_nuxt" + if nuxt_dir.is_dir() and any(nuxt_dir.glob("entry.*.css")): + return p + except Exception: + continue + return None + + +def _load_ui_entry_css(ui_public_dir: Path) -> str: + """Load Nuxt `entry.*.css` content (choose largest file if multiple).""" + + nuxt_dir = Path(ui_public_dir) / "_nuxt" + try: + css_files = list(nuxt_dir.glob("entry.*.css")) + except Exception: + css_files = [] + + if not css_files: + return "" + + def sort_key(p: Path) -> int: + try: + return int(p.stat().st_size) + except Exception: + return 0 + + css_files.sort(key=sort_key, reverse=True) + best = css_files[0] + try: + return best.read_text(encoding="utf-8") + except Exception: + try: + return best.read_text(encoding="utf-8", errors="ignore") + except Exception: + return "" + + +_VUE_SCOPED_ATTR_RE = re.compile(r"\[data-v-[0-9a-f]{8}\]", flags=re.IGNORECASE) +_CHAT_HISTORY_MD5_TAG_RE = re.compile( + r"(?i)<(?:fullmd5|thumbfullmd5|md5|emoticonmd5|emojimd5|cdnthumbmd5)>([0-9a-f]{32})<" +) +_CHAT_HISTORY_URL_TAG_RE = re.compile(r"(?i)<(?:sourceheadurl|cdnurlstring|encrypturlstring|externurl)>(https?://[^<\s]+)<") +_CHAT_HISTORY_SERVER_ID_TAG_RE = re.compile(r"(?i)\s*(\d+)\s*<") + + +def _strip_vue_scoped_attrs(css: str) -> str: + """Strip Vue SFC scoped attribute selectors like `[data-v-xxxxxxxx]`.""" + + if not css: + return "" + try: + return _VUE_SCOPED_ATTR_RE.sub("", css) + except Exception: + return css + + +def _load_ui_css_bundle(*, ui_public_dir: Optional[Path], report: dict[str, Any]) -> str: + """Load Nuxt CSS bundle for offline HTML export. 
+ + Includes: + - `_nuxt/entry.*.css` (base + tailwind utilities) + - Chat page chunks `_nuxt/*_username_*.css` (scoped selectors stripped) + - `_HTML_EXPORT_CSS_PATCH` appended last + + Falls back to `_HTML_EXPORT_CSS_FALLBACK` when entry css is missing. + + Note: We only bundle chat-related chunks because stripping Vue SFC scoped selectors (`[data-v-...]`) can + otherwise leak scoped utility overrides (e.g. `.text-sm[data-v-...]`) into global rules in the export. + """ + + if ui_public_dir is None: + try: + report["errors"].append("WARN: Nuxt UI dir not found; export HTML will use fallback styles.") + except Exception: + pass + return _HTML_EXPORT_CSS_FALLBACK + "\n\n" + _HTML_EXPORT_CSS_PATCH + + entry_css = _load_ui_entry_css(ui_public_dir) + if not entry_css: + try: + report["errors"].append("WARN: Nuxt UI CSS not found; export HTML will use fallback styles.") + except Exception: + pass + return _HTML_EXPORT_CSS_FALLBACK + "\n\n" + _HTML_EXPORT_CSS_PATCH + + entry_css = _strip_vue_scoped_attrs(entry_css) + + nuxt_dir = Path(ui_public_dir) / "_nuxt" + chat_css_paths: list[Path] = [] + try: + chat_css_paths = [p for p in nuxt_dir.glob("*_username_*.css") if p.is_file()] + except Exception: + chat_css_paths = [] + + chat_css_paths.sort(key=lambda p: p.name) + + if not chat_css_paths: + try: + report["errors"].append( + "WARN: Nuxt chat CSS chunk not found (*_username_*.css); some message styles may be missing." 
+ ) + except Exception: + pass + + extra_chunks: list[str] = [] + for p in chat_css_paths: + try: + extra_chunks.append(_strip_vue_scoped_attrs(p.read_text(encoding="utf-8"))) + except Exception: + try: + extra_chunks.append(_strip_vue_scoped_attrs(p.read_text(encoding="utf-8", errors="ignore"))) + except Exception: + continue + + parts = [entry_css] + if extra_chunks: + parts.append("\n\n".join(extra_chunks)) + parts.append(_HTML_EXPORT_CSS_PATCH) + return "\n\n".join(parts) + + +_TS_WECHAT_EMOJI_ENTRY_RE = re.compile(r'^\s*"(?P[^"]+)"\s*:\s*"(?P[^"]+)"\s*,?\s*$') + + +@functools.lru_cache(maxsize=1) +def _load_wechat_emoji_table() -> dict[str, str]: + repo_root = Path(__file__).resolve().parents[2] + path = repo_root / "frontend" / "utils" / "wechat-emojis.ts" + try: + text = path.read_text(encoding="utf-8") + except Exception: + return {} + + table: dict[str, str] = {} + for line in text.splitlines(): + stripped = line.strip() + if not stripped or stripped.startswith("//"): + continue + match = _TS_WECHAT_EMOJI_ENTRY_RE.match(line) + if match: + key = str(match.group("key") or "") + value = str(match.group("value") or "") + if key and value: + table[key] = value + return table + + +@functools.lru_cache(maxsize=1) +def _load_wechat_emoji_regex() -> Optional[re.Pattern[str]]: + table = _load_wechat_emoji_table() + if not table: + return None + + keys = sorted(table.keys(), key=len, reverse=True) + escaped = [re.escape(k) for k in keys if k] + if not escaped: + return None + + try: + return re.compile(f"({'|'.join(escaped)})") + except Exception: + return None + + +def _zip_write_tree( + *, + zf: zipfile.ZipFile, + src_dir: Path, + dest_prefix: str, + written: set[str], +) -> int: + """Recursively add a directory tree to the zip under `dest_prefix`. + + Skips any file whose `arcname` already exists in `written`. + Returns number of files written. 
+ """ + + try: + if not src_dir.exists() or (not src_dir.is_dir()): + return 0 + except Exception: + return 0 + + prefix = str(dest_prefix or "").strip().strip("/").replace("\\", "/") + count = 0 + try: + for p in src_dir.rglob("*"): + try: + if not p.is_file(): + continue + except Exception: + continue + try: + rel = p.relative_to(src_dir).as_posix() + except Exception: + rel = p.name + arc = f"{prefix}/{rel}" if prefix else rel + arc = arc.lstrip("/").replace("\\", "/") + if not arc or arc in written: + continue + try: + zf.write(str(p), arcname=arc) + except Exception: + continue + written.add(arc) + count += 1 + except Exception: + return count + return count + + +_REMOTE_IMAGE_MAX_BYTES = 5 * 1024 * 1024 +_REMOTE_IMAGE_TIMEOUT = (5, 10) +_REMOTE_IMAGE_ALLOWED_CT: dict[str, str] = { + "image/jpeg": "jpg", + "image/png": "png", + "image/gif": "gif", + "image/webp": "webp", +} + + +def _is_public_ip(ip_text: str) -> bool: + try: + ip = ipaddress.ip_address(str(ip_text or "").strip()) + except Exception: + return False + return bool(getattr(ip, "is_global", False)) + + +def _is_safe_remote_host(hostname: str, port: Optional[int]) -> bool: + host = str(hostname or "").strip().lower().rstrip(".") + if not host: + return False + if host == "localhost" or host.endswith(".localhost"): + return False + try: + if _is_public_ip(host): + return True + if re.fullmatch(r"[0-9a-f:]+", host) and ":" in host and (not _is_public_ip(host)): + return False + except Exception: + pass + + try: + infos = socket.getaddrinfo(host, int(port or 443), type=socket.SOCK_STREAM) + except Exception: + return False + + for info in infos: + try: + sockaddr = info[4] + ip_text = str(sockaddr[0] or "") + except Exception: + ip_text = "" + if not _is_public_ip(ip_text): + return False + return True + + +def _download_remote_image_to_zip( + *, + zf: zipfile.ZipFile, + url: str, + remote_written: dict[str, str], + report: dict[str, Any], +) -> str: + raw = str(url or "").strip() + if not raw: + 
return "" + + cached = remote_written.get(raw) + if cached is not None: + return cached + + current = raw + last_error = "" + + for _ in range(4): # 0..3 redirects + parsed = urlparse(current) + if parsed.scheme not in {"http", "https"}: + last_error = f"unsupported scheme: {parsed.scheme}" + break + host = parsed.hostname or "" + if not host: + last_error = "missing hostname" + break + if not _is_safe_remote_host(host, parsed.port): + last_error = f"blocked host: {host}" + break + + resp = None + try: + resp = requests.get( + current, + stream=True, + timeout=_REMOTE_IMAGE_TIMEOUT, + allow_redirects=False, + headers={ + "User-Agent": "wechat-chat-export/1.0", + "Accept": "image/*", + }, + ) + + if int(resp.status_code) in {301, 302, 303, 307, 308}: + loc = str(resp.headers.get("Location") or "").strip() + if not loc: + last_error = f"redirect without Location ({resp.status_code})" + break + current = urljoin(current, loc) + continue + + if int(resp.status_code) != 200: + last_error = f"http {resp.status_code}" + break + + ct = str(resp.headers.get("Content-Type") or "").split(";", 1)[0].strip().lower() + ext = _REMOTE_IMAGE_ALLOWED_CT.get(ct, "") + + cl = str(resp.headers.get("Content-Length") or "").strip() + if cl: + try: + if int(cl) > _REMOTE_IMAGE_MAX_BYTES: + last_error = f"remote image too large: {cl} bytes" + break + except Exception: + pass + + buf = bytearray() + too_large = False + for chunk in resp.iter_content(chunk_size=65536): + if not chunk: + continue + buf.extend(chunk) + if len(buf) > _REMOTE_IMAGE_MAX_BYTES: + too_large = True + break + + if too_large: + last_error = f"remote image too large: >{_REMOTE_IMAGE_MAX_BYTES} bytes" + break + + if not ext: + # Some WeChat CDN endpoints return `application/octet-stream` even for images. + # Detect by magic bytes to improve offline exports for merged-forward emojis/avatars. 
+ try: + mt2 = _detect_image_media_type(bytes(buf[:32])) + except Exception: + mt2 = "" + ext = _REMOTE_IMAGE_ALLOWED_CT.get(str(mt2 or "").strip().lower(), "") + if not ext: + last_error = f"unsupported content-type: {ct or 'unknown'}" + break + + h = hashlib.sha256(raw.encode("utf-8", errors="ignore")).hexdigest() + arc = f"media/remote/{h[:32]}.{ext}" + zf.writestr(arc, bytes(buf)) + remote_written[raw] = arc + return arc + except Exception as e: + last_error = f"request failed: {e}" + break + finally: + try: + if resp is not None: + resp.close() + except Exception: + pass + + try: + clipped = raw if len(raw) <= 260 else (raw[:257] + "...") + report["errors"].append(f"WARN: Remote image download skipped/failed: {clipped} ({last_error})") + except Exception: + pass + remote_written[raw] = "" + return "" + + +_HTML_EXPORT_CSS_FALLBACK = """ +/* Fallback styles for chat export HTML (Nuxt build CSS not found). */ +html, body { height: 100%; } +body { + margin: 0; + font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "PingFang SC", "Hiragino Sans GB", "Microsoft YaHei", + "Helvetica Neue", Helvetica, Arial, sans-serif; + background: #EDEDED; + color: #111827; +} +a { color: inherit; } +""" + + +_HTML_EXPORT_CSS_PATCH = """ +/* Offline HTML viewer patch */ +:root { + /* Keep aligned with frontend defaults (see `frontend/app.vue`). */ + --dpr: 1; + --message-radius: 4px; + --sidebar-rail-step: 48px; + --sidebar-rail-btn: 32px; + --sidebar-rail-icon: 24px; +} +html, body { height: 100%; } +body { background: #EDEDED; } + +/* Layout helpers (used by exported HTML). 
*/ +.wce-root { height: 100vh; display: flex; overflow: hidden; background: #EDEDED; } +.wce-rail { width: 60px; min-width: 60px; max-width: 60px; background: #e8e7e7; border-right: 1px solid #e5e7eb; display: flex; flex-direction: column; } +.wce-session-panel { width: calc(var(--session-list-width, 295px) / var(--dpr)); min-width: calc(var(--session-list-width, 295px) / var(--dpr)); max-width: calc(var(--session-list-width, 295px) / var(--dpr)); background: #F7F7F7; border-right: 1px solid #e5e7eb; display: flex; flex-direction: column; min-height: 0; } +.wce-chat-area { flex: 1; display: flex; flex-direction: column; min-height: 0; background: #EDEDED; } +.wce-chat-main { flex: 1; display: flex; min-height: 0; } +.wce-chat-col { flex: 1; display: flex; flex-direction: column; min-height: 0; min-width: 0; position: relative; } +.wce-chat-header { height: calc(56px / var(--dpr)); padding: 0 calc(20px / var(--dpr)); display: flex; align-items: center; border-bottom: 1px solid #e5e7eb; background: #EDEDED; } +.wce-chat-title { font-size: 1rem; font-weight: 500; color: #111827; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; } +.wce-filter-select { font-size: 0.75rem; padding: calc(6px / var(--dpr)) calc(8px / var(--dpr)); border: 0; border-radius: calc(8px / var(--dpr)); background: transparent; color: #374151; } +.wce-message-container { flex: 1; overflow: auto; padding: 16px; min-height: 0; } +.wce-pager { display: flex; align-items: center; justify-content: center; gap: calc(12px / var(--dpr)); padding: calc(6px / var(--dpr)) 0 calc(12px / var(--dpr)); } +.wce-pager-btn { font-size: 0.75rem; padding: calc(6px / var(--dpr)) calc(10px / var(--dpr)); border-radius: calc(8px / var(--dpr)); border: 1px solid #e5e7eb; background: #fff; color: #374151; cursor: pointer; } +.wce-pager-btn:hover { background: #f9fafb; } +.wce-pager-btn:disabled { opacity: 0.6; cursor: not-allowed; } +.wce-pager-status { font-size: 0.75rem; color: #6b7280; } + +/* Single 
session item (middle column). */ +.wce-session-item { display: flex; align-items: center; gap: 12px; padding: 0 12px; height: 80px; border-bottom: 1px solid #f3f4f6; background: #DEDEDE; text-decoration: none; color: inherit; } +.wce-session-avatar { width: 45px; height: 45px; border-radius: 6px; overflow: hidden; background: #d1d5db; flex-shrink: 0; } +.wce-session-avatar img { width: 100%; height: 100%; object-fit: cover; display: block; } +.wce-session-meta { min-width: 0; flex: 1; } +.wce-session-name { font-size: 0.875rem; font-weight: 600; color: #111827; white-space: nowrap; overflow: hidden; text-overflow: ellipsis; } +.wce-session-sub { font-size: 0.75rem; color: #6b7280; white-space: nowrap; overflow: hidden; text-overflow: ellipsis; margin-top: calc(2px / var(--dpr)); } + +/* Message rows (right column). */ +.wce-msg-row { display: flex; align-items: flex-start; margin-bottom: 24px; } +.wce-msg-row-sent { justify-content: flex-end; } +.wce-msg-row-received { justify-content: flex-start; } +.wce-msg { display: flex; align-items: flex-start; max-width: 640px; } +.wce-msg-sent { flex-direction: row-reverse; } +.wce-avatar { width: calc(42px / var(--dpr)); height: calc(42px / var(--dpr)); border-radius: 6px; overflow: hidden; background: #d1d5db; flex-shrink: 0; } +.wce-avatar img { width: 100%; height: 100%; object-fit: cover; display: block; } +.wce-avatar-sent { margin-left: 12px; } +.wce-avatar-received { margin-right: 12px; } +.wce-sender-name { font-size: 0.75rem; color: #6b7280; margin-bottom: calc(4px / var(--dpr)); max-width: calc(320px / var(--dpr)); overflow: hidden; text-overflow: ellipsis; white-space: nowrap; } + +/* Bubble basics (tailwind classes may override when Nuxt CSS is present). 
*/ +.wce-bubble { padding: calc(8px / var(--dpr)) calc(12px / var(--dpr)); border-radius: var(--message-radius); font-size: 0.875rem; line-height: 1.6; white-space: pre-wrap; word-break: break-word; max-width: calc(320px / var(--dpr)); position: relative; } +.wce-bubble-sent { background: #95EC69; color: #000; } +.wce-bubble-received { background: #fff; color: #1f2937; } + +/* WeChat-like bubble tail (fallback). */ +.bubble-tail-l, .bubble-tail-r { position: relative; } +.bubble-tail-l::after { + content: ''; + position: absolute; + left: -4px; + top: 12px; + width: 12px; + height: 12px; + background: #FFFFFF; + transform: rotate(45deg); + border-radius: 2px; +} +.bubble-tail-r::after { + content: ''; + position: absolute; + right: -4px; + top: 12px; + width: 12px; + height: 12px; + background: #95EC69; + transform: rotate(45deg); + border-radius: 2px; +} + +/* System messages. */ +.wce-system { display: flex; justify-content: center; margin: 16px 0; } +.wce-system > div { font-size: 0.75rem; color: #9e9e9e; padding: calc(4px / var(--dpr)) 0; } + +/* Media blocks. 
*/ +.wce-media-img { max-width: 240px; max-height: 240px; border-radius: var(--message-radius); display: block; object-fit: cover; } +.wce-emoji-img { width: 96px; height: 96px; object-fit: contain; display: block; } +.wce-video-wrap { position: relative; display: inline-block; border-radius: var(--message-radius); overflow: hidden; background: rgba(0,0,0,0.05); } +.wce-video-thumb { display: block; width: 220px; max-width: 260px; height: auto; max-height: 260px; object-fit: cover; } +.wce-video-play { position: absolute; inset: 0; display: flex; align-items: center; justify-content: center; } +.wce-video-play > div { width: 48px; height: 48px; border-radius: 9999px; background: rgba(0,0,0,0.45); display: flex; align-items: center; justify-content: center; } + +.wce-file { border: 1px solid #e5e7eb; border-radius: 10px; padding: 10px 12px; background: #fff; max-width: 320px; } +.wce-file-name { font-size: 0.8125rem; color: #111827; word-break: break-all; } +.wce-file-meta { font-size: 0.75rem; color: #6b7280; margin-top: calc(4px / var(--dpr)); } +.wce-file-actions { margin-top: 8px; } +.wce-file-actions a { font-size: 0.75rem; color: #07c160; text-decoration: none; } +.wce-file-actions a:hover { text-decoration: underline; } + +.wce-audio { width: 260px; max-width: 92vw; } +.wce-audio-actions { margin-top: 6px; } +.wce-audio-actions a { font-size: 0.75rem; color: #07c160; text-decoration: none; } +.wce-audio-actions a:hover { text-decoration: underline; } + +/* Voice message fallback styles (keep close to `frontend/pages/chat/[[username]].vue`). 
*/ +.wechat-voice-wrapper { display: flex; width: 100%; position: relative; } +.wechat-voice-bubble { + border-radius: var(--message-radius); + position: relative; + transition: opacity 0.15s ease; + min-width: 80px; + max-width: 200px; + cursor: pointer; +} +.wechat-voice-bubble:hover { opacity: 0.85; } +.wechat-voice-bubble:active { opacity: 0.7; } +.wechat-voice-sent { background: #95EC69; } +.wechat-voice-sent::after { + content: ''; + position: absolute; + top: 50%; + right: -4px; + transform: translateY(-50%) rotate(45deg); + width: 10px; + height: 10px; + background: #95EC69; + border-radius: 2px; +} +.wechat-voice-received { background: #fff; } +.wechat-voice-received::before { + content: ''; + position: absolute; + top: 50%; + left: -4px; + transform: translateY(-50%) rotate(45deg); + width: 10px; + height: 10px; + background: #fff; + border-radius: 2px; +} +.wechat-voice-content { display: flex; align-items: center; padding: 8px 12px; gap: 8px; } +.wechat-voice-icon { width: 18px; height: 18px; flex-shrink: 0; color: #1a1a1a; } +.wechat-quote-voice-icon { width: 14px; height: 14px; color: inherit; } +.voice-icon-sent { transform: scaleX(-1); } +.wechat-voice-icon.voice-playing .voice-wave-2 { animation: voice-wave-2 1s infinite; } +.wechat-voice-icon.voice-playing .voice-wave-3 { animation: voice-wave-3 1s infinite; } +@keyframes voice-wave-2 { + 0%, 33% { opacity: 0; } + 34%, 100% { opacity: 1; } +} +@keyframes voice-wave-3 { + 0%, 66% { opacity: 0; } + 67%, 100% { opacity: 1; } +} +.wechat-voice-duration { font-size: 14px; color: #1a1a1a; } +.wechat-voice-unread { + position: absolute; + top: 50%; + right: -20px; + transform: translateY(-50%); + width: 8px; + height: 8px; + border-radius: 50%; + background: #e75e58; +} + +/* Index page helpers. 
*/ +.wce-index { min-height: 100vh; background: #EDEDED; } +.wce-index-container { max-width: 880px; margin: 0 auto; padding: 24px; } +.wce-index-card { background: #fff; border: 1px solid #e5e7eb; border-radius: 12px; overflow: hidden; } +.wce-index-item { display: flex; align-items: center; gap: 12px; padding: 12px 14px; border-bottom: 1px solid #f3f4f6; text-decoration: none; color: inherit; } +.wce-index-item:last-child { border-bottom: 0; } +.wce-index-item:hover { background: #f9fafb; } +.wce-index-title { font-size: 1.125rem; font-weight: 700; color: #111827; margin: 0 0 calc(6px / var(--dpr)) 0; } +.wce-index-sub { font-size: 0.75rem; color: #6b7280; margin: 0 0 calc(16px / var(--dpr)) 0; } +""" + + +_HTML_EXPORT_JS = r""" +(() => { + const updateDprVar = () => { + try { + document.documentElement.style.setProperty('--dpr', '1') + } catch {} + } + + const hideJsMissingBanner = () => { + try { + const el = document.getElementById('wceJsMissing') + if (el) el.style.display = 'none' + } catch {} + } + + const initSessionSearch = () => { + const input = document.getElementById('sessionSearchInput') + if (!input) return + + const clearBtn = document.getElementById('sessionSearchClear') + const items = Array.from(document.querySelectorAll('[data-wce-session-item=\"1\"]')) + + const apply = () => { + const q = String(input.value || '').trim().toLowerCase() + try { if (clearBtn) clearBtn.style.display = q ? '' : 'none' } catch {} + + items.forEach((el) => { + if (!el) return + const isActive = String(el.getAttribute('aria-current') || '') === 'page' + const name = String(el.getAttribute('data-wce-session-name') || '').toLowerCase() + const username = String(el.getAttribute('data-wce-session-username') || '').toLowerCase() + const show = !q || isActive || name.includes(q) || username.includes(q) + try { el.style.display = show ? 
'' : 'none' } catch {} + }) + } + + input.addEventListener('input', apply) + if (clearBtn) { + clearBtn.addEventListener('click', () => { + try { input.value = '' } catch {} + try { input.focus() } catch {} + apply() + }) + } + apply() + } + + const initVoicePlayback = () => { + let activeAudio = null + let activeIcon = null + + const stopAudio = (audio, icon) => { + if (!audio) return + try { audio.pause() } catch {} + try { audio.currentTime = 0 } catch {} + try { if (icon) icon.classList.remove('voice-playing') } catch {} + } + + const bindAudioEnd = (audio) => { + if (!audio) return + try { + if (audio.dataset && audio.dataset.wceVoiceBound === '1') return + if (audio.dataset) audio.dataset.wceVoiceBound = '1' + } catch {} + + try { + audio.addEventListener('ended', () => { + try { + const wrapper = audio.closest('.wechat-voice-wrapper') || audio.parentElement + const icon = wrapper ? wrapper.querySelector('.wechat-voice-icon') : null + if (icon) icon.classList.remove('voice-playing') + } catch {} + + if (activeAudio === audio) { + activeAudio = null + activeIcon = null + } + }) + } catch {} + } + + document.addEventListener('click', (ev) => { + const target = ev && ev.target + + const quoteBtn = target && target.closest ? target.closest('[data-wce-quote-voice-btn=\"1\"]') : null + if (quoteBtn) { + if (quoteBtn.hasAttribute && quoteBtn.hasAttribute('disabled')) return + + const wrapper = quoteBtn.closest ? (quoteBtn.closest('[data-wce-quote-voice-wrapper=\"1\"]') || quoteBtn.parentElement) : quoteBtn.parentElement + if (!wrapper) return + + const audio = wrapper.querySelector ? 
(wrapper.querySelector('audio[data-wce-quote-voice-audio=\"1\"]') || wrapper.querySelector('audio')) : null + if (!audio) return + + bindAudioEnd(audio) + + const icon = (quoteBtn.querySelector && quoteBtn.querySelector('.wechat-voice-icon')) || (wrapper.querySelector && wrapper.querySelector('.wechat-voice-icon')) + + if (activeAudio && activeAudio !== audio) stopAudio(activeAudio, activeIcon) + + const isPlaying = !audio.paused && !audio.ended + if (activeAudio === audio && isPlaying) { + stopAudio(audio, icon) + activeAudio = null + activeIcon = null + return + } + + activeAudio = audio + activeIcon = icon + try { if (icon) icon.classList.add('voice-playing') } catch {} + try { + const p = audio.play() + if (p && typeof p.catch === 'function') { + p.catch(() => { + stopAudio(audio, icon) + if (activeAudio === audio) { + activeAudio = null + activeIcon = null + } + }) + } + } catch { + stopAudio(audio, icon) + if (activeAudio === audio) { + activeAudio = null + activeIcon = null + } + } + return + } + + const bubble = target && target.closest ? 
target.closest('.wechat-voice-bubble') : null + if (!bubble) return + + const wrapper = bubble.closest('.wechat-voice-wrapper') || bubble.parentElement + if (!wrapper) return + + const audio = wrapper.querySelector('audio') + if (!audio) return + + bindAudioEnd(audio) + + const icon = bubble.querySelector('.wechat-voice-icon') || wrapper.querySelector('.wechat-voice-icon') + + if (activeAudio && activeAudio !== audio) stopAudio(activeAudio, activeIcon) + + const isPlaying = !audio.paused && !audio.ended + if (activeAudio === audio && isPlaying) { + stopAudio(audio, icon) + activeAudio = null + activeIcon = null + return + } + + activeAudio = audio + activeIcon = icon + try { if (icon) icon.classList.add('voice-playing') } catch {} + try { + const p = audio.play() + if (p && typeof p.catch === 'function') { + p.catch(() => { + stopAudio(audio, icon) + if (activeAudio === audio) { + activeAudio = null + activeIcon = null + } + }) + } + } catch { + stopAudio(audio, icon) + if (activeAudio === audio) { + activeAudio = null + activeIcon = null + } + } + }) + } + + const applyMessageTypeFilter = () => { + const select = document.getElementById('messageTypeFilter') + if (!select) return + const selected = String(select.value || 'all') + const nodes = document.querySelectorAll('[data-render-type]') + nodes.forEach((el) => { + const rt = String(el.getAttribute('data-render-type') || 'text') + const show = selected === 'all' ? true : rt === selected + el.style.display = show ? 
'' : 'none' + }) + } + + const scrollToBottom = () => { + const container = document.getElementById('messageContainer') + if (!container) return + container.scrollTop = container.scrollHeight + } + + const updateSessionMessageCount = () => { + const el = document.getElementById('sessionMessageCount') + const container = document.getElementById('messageContainer') + if (!el || !container) return + const items = container.querySelectorAll('[data-render-type]') + el.textContent = String(items.length) + } + + const safeJsonParse = (text) => { + try { return JSON.parse(String(text || '')) } catch { return null } + } + + const readMediaIndex = () => { + const el = document.getElementById('wceMediaIndex') + const obj = safeJsonParse(el ? el.textContent : '') + if (!obj || typeof obj !== 'object') return {} + return obj + } + + const readPageMeta = () => { + const el = document.getElementById('wcePageMeta') + const obj = safeJsonParse(el ? el.textContent : '') + if (!obj || typeof obj !== 'object') return null + return obj + } + + const initPagedMessageLoading = () => { + const meta = readPageMeta() + if (!meta) return + + const totalPages = Number(meta.totalPages || 0) + if (!Number.isFinite(totalPages) || totalPages <= 1) return + + const initialPage = Number(meta.initialPage || totalPages || 1) + const padWidth = Number(meta.padWidth || 0) || 0 + const prefix = String(meta.pageFilePrefix || 'pages/page-') + const suffix = String(meta.pageFileSuffix || '.js') + + const container = document.getElementById('messageContainer') + const list = document.getElementById('wceMessageList') || container + const pager = document.getElementById('wcePager') + const btn = document.getElementById('wceLoadPrevBtn') + const status = document.getElementById('wceLoadPrevStatus') + if (!container || !list || !pager || !btn) return + + try { pager.style.display = '' } catch {} + + const loaded = new Set() + loaded.add(initialPage) + let nextPage = initialPage - 1 + let loading = false + + 
const setStatus = (text) => { + try { if (status) status.textContent = String(text || '') } catch {} + } + + const updateUi = (overrideText) => { + if (overrideText != null) { + setStatus(overrideText) + try { btn.disabled = false } catch {} + return + } + if (nextPage < 1) { + setStatus('已到底') + try { btn.disabled = true } catch {} + return + } + if (loading) { + setStatus('加载中...') + try { btn.disabled = true } catch {} + return + } + setStatus('点击加载更早消息') + try { btn.disabled = false } catch {} + } + + const pageSrc = (n) => { + const num = padWidth > 0 ? String(n).padStart(padWidth, '0') : String(n) + return prefix + num + suffix + } + + window.__WCE_PAGE_QUEUE__ = window.__WCE_PAGE_QUEUE__ || [] + window.__WCE_PAGE_LOADED__ = (pageNo, html) => { + const n = Number(pageNo) + if (!Number.isFinite(n) || n < 1) return + if (loaded.has(n)) return + loaded.add(n) + + try { + const prevH = container.scrollHeight + const prevTop = container.scrollTop + list.insertAdjacentHTML('afterbegin', String(html || '')) + const newH = container.scrollHeight + container.scrollTop = prevTop + (newH - prevH) + } catch { + try { list.insertAdjacentHTML('afterbegin', String(html || '')) } catch {} + } + + loading = false + nextPage = n - 1 + try { applyMessageTypeFilter() } catch {} + try { updateSessionMessageCount() } catch {} + updateUi() + } + + // Flush any queued pages (should be rare, but keeps behavior robust). 
+ try { + const q = window.__WCE_PAGE_QUEUE__ + if (Array.isArray(q) && q.length) { + const items = q.slice(0) + q.length = 0 + items.forEach((it) => { + try { + if (it && it.length >= 2) window.__WCE_PAGE_LOADED__(it[0], it[1]) + } catch {} + }) + } + } catch {} + + const requestLoad = () => { + if (loading) return + if (nextPage < 1) return + const n = nextPage + + loading = true + updateUi() + + const s = document.createElement('script') + s.async = true + s.src = pageSrc(n) + s.onerror = () => { + loading = false + updateUi('加载失败,可重试') + } + try { document.body.appendChild(s) } catch { + loading = false + updateUi('加载失败,可重试') + } + } + + btn.addEventListener('click', () => requestLoad()) + + let lastScrollAt = 0 + container.addEventListener('scroll', () => { + const now = Date.now() + if (now - lastScrollAt < 200) return + lastScrollAt = now + if (container.scrollTop < 120) requestLoad() + }) + + updateUi() + } + + const isMaybeMd5 = (value) => /^[0-9a-f]{32}$/i.test(String(value || '').trim()) + const pickFirstMd5 = (...values) => { + for (const v of values) { + const s = String(v || '').trim() + if (isMaybeMd5(s)) return s.toLowerCase() + } + return '' + } + + const normalizeChatHistoryUrl = (value) => String(value || '').trim().replace(/\s+/g, '') + + const decodeBase64Utf8 = (b64) => { + try { + const bin = atob(String(b64 || '')) + const bytes = new Uint8Array(bin.length) + for (let i = 0; i < bin.length; i++) bytes[i] = bin.charCodeAt(i) + if (typeof TextDecoder !== 'undefined') { + return new TextDecoder('utf-8', { fatal: false }).decode(bytes) + } + let out = '' + for (let i = 0; i < bytes.length; i++) out += String.fromCharCode(bytes[i]) + return out + } catch { + return '' + } + } + + const resolveMd5Any = (index, md5) => { + const key = String(md5 || '').trim().toLowerCase() + if (!key) return '' + const maps = [ + index && index.images, + index && index.emojis, + index && index.videos, + index && index.videoThumbs, + ] + for (const m of maps) { + 
try { + if (m && m[key]) return String(m[key] || '') + } catch {} + } + return '' + } + + const resolveServerMd5 = (index, serverId) => { + const key = String(serverId || '').trim() + if (!key) return '' + try { + const v = index && index.serverMd5 && index.serverMd5[key] + return isMaybeMd5(v) ? String(v || '').trim().toLowerCase() : '' + } catch {} + return '' + } + + const resolveRemoteAny = (index, ...urls) => { + for (const u0 of urls) { + const u = normalizeChatHistoryUrl(u0) + if (!u) continue + try { + const local = index && index.remote && index.remote[u] + if (local) return String(local || '') + } catch {} + const ul = String(u || '').trim().toLowerCase() + if (ul.startsWith('http://') || ul.startsWith('https://')) return u + } + return '' + } + + const parseChatHistoryRecord = (recordItemXml) => { + const xml = String(recordItemXml || '').trim() + if (!xml) return { info: null, items: [] } + + const normalized = xml + .replace(/ /g, ' ') + .replace(/[\u0000-\u0008\u000B\u000C\u000E-\u001F]/g, '') + .replace(/&(?!amp;|lt;|gt;|quot;|apos;|#\d+;|#x[\da-fA-F]+;)/g, '&') + + let doc + try { + doc = new DOMParser().parseFromString(normalized, 'text/xml') + } catch { + return { info: null, items: [] } + } + + const parserErrors = doc.getElementsByTagName('parsererror') + if (parserErrors && parserErrors.length) return { info: null, items: [] } + + const getText = (node, tag) => { + try { + if (!node) return '' + const els = Array.from(node.getElementsByTagName(tag) || []) + const direct = els.find((el) => el && el.parentNode === node) + const el = direct || els[0] + return String(el?.textContent || '').trim() + } catch { + return '' + } + } + + const getDirectChildXml = (node, tag) => { + try { + if (!node) return '' + const children = Array.from(node.children || []) + const el = children.find((c) => String(c?.tagName || '').toLowerCase() === String(tag || '').toLowerCase()) + if (!el) return '' + + const raw = String(el.textContent || '').trim() + if (raw && 
raw.startsWith('<') && raw.endsWith('>')) return raw + + if (typeof XMLSerializer !== 'undefined') { + return new XMLSerializer().serializeToString(el) + } + } catch {} + return '' + } + + const getAnyXml = (node, tag) => { + try { + if (!node) return '' + const els = Array.from(node.getElementsByTagName(tag) || []) + const direct = els.find((el) => el && el.parentNode === node) + const el = direct || els[0] + if (!el) return '' + + const raw = String(el.textContent || '').trim() + if (raw && raw.startsWith('<') && raw.endsWith('>')) return raw + if (typeof XMLSerializer !== 'undefined') return new XMLSerializer().serializeToString(el) + } catch {} + return '' + } + + const sameTag = (el, tag) => String(el?.tagName || '').toLowerCase() === String(tag || '').toLowerCase() + + const closestAncestorByTag = (node, tag) => { + const lower = String(tag || '').toLowerCase() + let cur = node + while (cur) { + if (cur.nodeType === 1 && String(cur.tagName || '').toLowerCase() === lower) return cur + cur = cur.parentNode + } + return null + } + + const root = doc?.documentElement + const isChatRoom = String(getText(root, 'isChatRoom') || '').trim() === '1' + const title = getText(root, 'title') + const desc = getText(root, 'desc') || getText(root, 'info') + + const datalist = (() => { + try { + const all = Array.from(doc.getElementsByTagName('datalist') || []) + const top = root ? 
all.find((el) => closestAncestorByTag(el, 'recorditem') === root) : null + return top || all[0] || null + } catch { + return null + } + })() + + const itemNodes = (() => { + if (datalist) return Array.from(datalist.children || []).filter((el) => sameTag(el, 'dataitem')) + return Array.from(root?.children || []).filter((el) => sameTag(el, 'dataitem')) + })() + + const parsed = itemNodes.map((node, idx) => { + const datatype = String(node.getAttribute('datatype') || getText(node, 'datatype') || '').trim() + const dataid = String(node.getAttribute('dataid') || getText(node, 'dataid') || '').trim() || String(idx) + + const sourcename = getText(node, 'sourcename') + const sourcetime = getText(node, 'sourcetime') + const sourceheadurl = normalizeChatHistoryUrl(getText(node, 'sourceheadurl')) + const datatitle = getText(node, 'datatitle') + const datadesc = getText(node, 'datadesc') + const link = normalizeChatHistoryUrl(getText(node, 'link') || getText(node, 'dataurl') || getText(node, 'url')) + const datafmt = getText(node, 'datafmt') + const duration = getText(node, 'duration') + + const fullmd5 = getText(node, 'fullmd5') + const thumbfullmd5 = getText(node, 'thumbfullmd5') + const md5 = getText(node, 'md5') || getText(node, 'emoticonmd5') || getText(node, 'emojimd5') || getText(node, 'emojiMd5') + const cdnthumbmd5 = getText(node, 'cdnthumbmd5') + const cdnurlstring = normalizeChatHistoryUrl(getText(node, 'cdnurlstring')) + const encrypturlstring = normalizeChatHistoryUrl(getText(node, 'encrypturlstring')) + const externurl = normalizeChatHistoryUrl(getText(node, 'externurl')) + const aeskey = getText(node, 'aeskey') + const fromnewmsgid = getText(node, 'fromnewmsgid') + const srcMsgLocalid = getText(node, 'srcMsgLocalid') + const srcMsgCreateTime = getText(node, 'srcMsgCreateTime') + const nestedRecordItem = ( + getAnyXml(node, 'recorditem') + || getDirectChildXml(node, 'recorditem') + || getText(node, 'recorditem') + || getAnyXml(node, 'recordxml') + || 
getDirectChildXml(node, 'recordxml') + || getText(node, 'recordxml') + ) + + let content = datatitle || datadesc + if (!content) { + if (datatype === '4') content = '[视频]' + else if (datatype === '2' || datatype === '3') content = '[图片]' + else if (datatype === '47' || datatype === '37') content = '[表情]' + else if (datatype) content = `[消息 ${datatype}]` + else content = '[消息]' + } + + const fmt = String(datafmt || '').trim().toLowerCase().replace(/^\./, '') + const imageFormats = new Set(['jpg', 'jpeg', 'png', 'gif', 'webp', 'bmp', 'heic', 'heif']) + + let renderType = 'text' + if (datatype === '17') { + renderType = 'chatHistory' + } else if (datatype === '5' || link) { + renderType = 'link' + } else if (datatype === '4' || String(duration || '').trim() || fmt === 'mp4') { + renderType = 'video' + } else if (datatype === '47' || datatype === '37') { + renderType = 'emoji' + } else if ( + datatype === '2' + || datatype === '3' + || imageFormats.has(fmt) + || (datatype !== '1' && isMaybeMd5(fullmd5)) + ) { + renderType = 'image' + } else if (isMaybeMd5(md5) && /表情/.test(String(content || ''))) { + renderType = 'emoji' + } + + let outTitle = '' + let outUrl = '' + let recordItem = '' + if (renderType === 'chatHistory') { + outTitle = datatitle || content || '聊天记录' + content = datadesc || '' + recordItem = nestedRecordItem + } else if (renderType === 'link') { + outTitle = datatitle || content || '' + outUrl = link || externurl || '' + // datadesc can be an invisible filler; only keep as description when meaningful. 
+ const cleanDesc = String(datadesc || '').replace(/[\\u3164\\u2800]/g, '').trim() + const cleanTitle = String(outTitle || '').replace(/[\\u3164\\u2800]/g, '').trim() + if (!cleanDesc || (cleanTitle && cleanDesc === cleanTitle)) content = '' + else content = String(datadesc || '').trim() + } + + return { + id: dataid, + datatype, + sourcename, + sourcetime, + sourceheadurl, + datafmt, + duration, + fullmd5, + thumbfullmd5, + md5, + cdnthumbmd5, + cdnurlstring, + encrypturlstring, + externurl, + aeskey, + fromnewmsgid, + srcMsgLocalid, + srcMsgCreateTime, + renderType, + title: outTitle, + recordItem, + url: outUrl, + content + } + }) + + return { + info: { isChatRoom, title, desc }, + items: parsed + } + } + + const initChatHistoryModal = () => { + const modal = document.getElementById('chatHistoryModal') + const titleEl = document.getElementById('chatHistoryModalTitle') + const closeBtn = document.getElementById('chatHistoryModalClose') + const emptyEl = document.getElementById('chatHistoryModalEmpty') + const listEl = document.getElementById('chatHistoryModalList') + if (!modal || !titleEl || !closeBtn || !emptyEl || !listEl) return + + const mediaIndex = readMediaIndex() + let historyStack = [] + let currentState = null + let backBtn = null + + const updateBackVisibility = () => { + if (!backBtn) return + const show = Array.isArray(historyStack) && historyStack.length > 0 + try { backBtn.classList.toggle('hidden', !show) } catch {} + } + + // Add a back button next to the title (created at runtime to avoid changing the HTML template). 
+ try { + const header = titleEl.parentElement + if (header) { + const wrap = document.createElement('div') + wrap.className = 'flex items-center gap-2 min-w-0' + + backBtn = document.createElement('button') + backBtn.type = 'button' + backBtn.className = 'p-2 rounded hover:bg-black/5 flex-shrink-0 hidden' + try { backBtn.setAttribute('aria-label', '返回') } catch {} + try { backBtn.setAttribute('title', '返回') } catch {} + backBtn.innerHTML = '' + + header.insertBefore(wrap, titleEl) + wrap.appendChild(backBtn) + wrap.appendChild(titleEl) + } + } catch {} + + const close = () => { + try { modal.classList.add('hidden') } catch {} + try { modal.style.display = 'none' } catch {} + try { modal.setAttribute('aria-hidden', 'true') } catch {} + try { document.body.style.overflow = '' } catch {} + try { titleEl.textContent = '聊天记录' } catch {} + try { listEl.textContent = '' } catch {} + try { emptyEl.style.display = '' } catch {} + historyStack = [] + currentState = null + updateBackVisibility() + } + + const buildChatHistoryState = (payload) => { + const title = String(payload?.title || '聊天记录').trim() || '聊天记录' + const xml = String(payload?.recordItem || '').trim() + const parsed = parseChatHistoryRecord(xml) + const info = (parsed && parsed.info) ? parsed.info : { isChatRoom: false } + let records = (parsed && Array.isArray(parsed.items)) ? parsed.items : [] + + if (!records.length) { + const lines = Array.isArray(payload?.fallbackLines) + ? 
payload.fallbackLines + : String(payload?.content || '').trim().split(/\r?\n/).map((x) => String(x || '').trim()).filter(Boolean) + records = lines.map((line, idx) => ({ id: String(idx), renderType: 'text', content: line, sourcename: '', sourcetime: '' })) + } + + return { title, info, records } + } + + const renderRecordRow = (rec, info) => { + const row = document.createElement('div') + row.className = 'px-4 py-3 flex gap-3 border-b border-gray-100' + + const avatarWrap = document.createElement('div') + avatarWrap.className = 'w-9 h-9 rounded-md overflow-hidden bg-gray-200 flex-shrink-0' + const name0 = String(rec?.sourcename || '').trim() || '?' + const avatarUrlRaw = normalizeChatHistoryUrl(rec?.sourceheadurl) + const avatarLocal = (mediaIndex && mediaIndex.remote && mediaIndex.remote[avatarUrlRaw]) ? String(mediaIndex.remote[avatarUrlRaw] || '') : '' + const avatarUrlLower = String(avatarUrlRaw || '').trim().toLowerCase() + const avatarUrl = avatarLocal || ((avatarUrlLower.startsWith('http://') || avatarUrlLower.startsWith('https://')) ? 
avatarUrlRaw : '') + if (avatarUrl) { + const img = document.createElement('img') + img.src = avatarUrl + img.alt = '头像' + img.className = 'w-full h-full object-cover' + try { img.referrerPolicy = 'no-referrer' } catch {} + img.onerror = () => { + try { avatarWrap.textContent = '' } catch {} + const fb = document.createElement('div') + fb.className = 'w-full h-full flex items-center justify-center text-xs font-bold text-gray-600' + fb.textContent = String(name0.charAt(0) || '?') + avatarWrap.appendChild(fb) + } + avatarWrap.appendChild(img) + } else { + const fb = document.createElement('div') + fb.className = 'w-full h-full flex items-center justify-center text-xs font-bold text-gray-600' + fb.textContent = String(name0.charAt(0) || '?') + avatarWrap.appendChild(fb) + } + + const main = document.createElement('div') + main.className = 'min-w-0 flex-1' + + const header = document.createElement('div') + header.className = 'flex items-start gap-2' + + const headerLeft = document.createElement('div') + headerLeft.className = 'min-w-0 flex-1' + const senderName = String(rec?.sourcename || '').trim() + if (info && info.isChatRoom && senderName) { + const sn = document.createElement('div') + sn.className = 'text-xs text-gray-500 leading-none truncate mb-1' + sn.textContent = senderName + headerLeft.appendChild(sn) + } + + const headerRight = document.createElement('div') + headerRight.className = 'text-xs text-gray-400 flex-shrink-0 leading-none' + const timeText = String(rec?.sourcetime || '').trim() + headerRight.textContent = timeText + + header.appendChild(headerLeft) + if (timeText) header.appendChild(headerRight) + + const body = document.createElement('div') + body.className = 'mt-1' + + const rt = String(rec?.renderType || 'text') + const content = String(rec?.content || '').trim() + const serverId = String(rec?.fromnewmsgid || '').trim() + const serverMd5 = resolveServerMd5(mediaIndex, serverId) + + if (rt === 'chatHistory') { + const card = 
document.createElement('div') + card.className = 'wechat-chat-history-card wechat-special-card msg-radius' + + const chBody = document.createElement('div') + chBody.className = 'wechat-chat-history-body' + + const chTitle = document.createElement('div') + chTitle.className = 'wechat-chat-history-title' + chTitle.textContent = String(rec?.title || '聊天记录') + chBody.appendChild(chTitle) + + const raw = String(rec?.content || '').trim() + const lines = raw ? raw.split(/\r?\n/).map((x) => String(x || '').trim()).filter(Boolean).slice(0, 4) : [] + if (lines.length) { + const preview = document.createElement('div') + preview.className = 'wechat-chat-history-preview' + for (const line of lines) { + const el = document.createElement('div') + el.className = 'wechat-chat-history-line' + el.textContent = line + preview.appendChild(el) + } + chBody.appendChild(preview) + } + + card.appendChild(chBody) + + const bottom = document.createElement('div') + bottom.className = 'wechat-chat-history-bottom' + const label = document.createElement('span') + label.textContent = '聊天记录' + bottom.appendChild(label) + card.appendChild(bottom) + + const nestedXml = String(rec?.recordItem || '').trim() + if (nestedXml) { + card.classList.add('cursor-pointer') + card.addEventListener('click', (ev) => { + try { ev.preventDefault() } catch {} + try { ev.stopPropagation() } catch {} + openNestedChatHistory(rec) + }) + } + + body.appendChild(card) + } else if (rt === 'link') { + const href = normalizeChatHistoryUrl(rec?.url) || normalizeChatHistoryUrl(rec?.externurl) + const heading = String(rec?.title || '').trim() || content || href || '链接' + const desc = String(rec?.content || '').trim() + + const thumbMd5 = pickFirstMd5(rec?.fullmd5, rec?.thumbfullmd5, rec?.cdnthumbmd5, rec?.md5, rec?.id) + let previewUrl = resolveMd5Any(mediaIndex, thumbMd5) + if (!previewUrl && serverMd5) previewUrl = resolveMd5Any(mediaIndex, serverMd5) + if (!previewUrl) previewUrl = resolveRemoteAny(mediaIndex, 
rec?.externurl, rec?.cdnurlstring, rec?.encrypturlstring) + + const card = document.createElement(href ? 'a' : 'div') + card.className = 'wechat-link-card wechat-special-card msg-radius cursor-pointer' + if (href) { + card.href = href + card.target = '_blank' + card.rel = 'noreferrer noopener' + } + try { card.style.textDecoration = 'none' } catch {} + try { card.style.outline = 'none' } catch {} + + const linkContent = document.createElement('div') + linkContent.className = 'wechat-link-content' + + const linkInfo = document.createElement('div') + linkInfo.className = 'wechat-link-info' + const titleEl = document.createElement('div') + titleEl.className = 'wechat-link-title' + titleEl.textContent = heading + linkInfo.appendChild(titleEl) + if (desc) { + const descEl = document.createElement('div') + descEl.className = 'wechat-link-desc' + descEl.textContent = desc + linkInfo.appendChild(descEl) + } + linkContent.appendChild(linkInfo) + + if (previewUrl) { + const thumb = document.createElement('div') + thumb.className = 'wechat-link-thumb' + const img = document.createElement('img') + img.src = previewUrl + img.alt = heading || '链接预览' + img.className = 'wechat-link-thumb-img' + try { img.referrerPolicy = 'no-referrer' } catch {} + thumb.appendChild(img) + linkContent.appendChild(thumb) + } + + card.appendChild(linkContent) + + const fromRow = document.createElement('div') + fromRow.className = 'wechat-link-from' + const fromText = (() => { + const f0 = String(rec?.from || '').trim() + if (f0) return f0 + try { return href ? (new URL(href).hostname || '') : '' } catch { return '' } + })() + const fromAvatarText = fromText ? 
(Array.from(fromText)[0] || '') : '' + const fromAvatar = document.createElement('div') + fromAvatar.className = 'wechat-link-from-avatar' + fromAvatar.textContent = fromAvatarText || '\u200B' + const fromName = document.createElement('div') + fromName.className = 'wechat-link-from-name' + fromName.textContent = fromText || '\u200B' + fromRow.appendChild(fromAvatar) + fromRow.appendChild(fromName) + card.appendChild(fromRow) + + body.appendChild(card) + } else if (rt === 'video') { + const videoMd5 = pickFirstMd5(rec?.fullmd5, rec?.md5, rec?.id) + const thumbMd5 = pickFirstMd5(rec?.thumbfullmd5, rec?.cdnthumbmd5) || videoMd5 + let videoUrl = resolveMd5Any(mediaIndex, videoMd5) + if (!videoUrl && serverMd5) videoUrl = resolveMd5Any(mediaIndex, serverMd5) + if (!videoUrl) videoUrl = resolveRemoteAny(mediaIndex, rec?.externurl, rec?.cdnurlstring, rec?.encrypturlstring) + + let thumbUrl = resolveMd5Any(mediaIndex, thumbMd5) + if (!thumbUrl && serverMd5) thumbUrl = resolveMd5Any(mediaIndex, serverMd5) + if (!thumbUrl) thumbUrl = resolveRemoteAny(mediaIndex, rec?.externurl, rec?.cdnurlstring, rec?.encrypturlstring) + + const wrap = document.createElement('div') + wrap.className = 'msg-radius overflow-hidden relative bg-black/5 inline-block' + + if (thumbUrl) { + const img = document.createElement('img') + img.src = thumbUrl + img.alt = '视频' + img.className = 'block w-[220px] max-w-[260px] h-auto max-h-[260px] object-cover' + wrap.appendChild(img) + } else { + const t = document.createElement('div') + t.className = 'px-3 py-2 text-sm text-gray-700' + t.textContent = content || '[视频]' + wrap.appendChild(t) + } + + if (thumbUrl) { + const overlay = document.createElement(videoUrl ? 
'a' : 'div') + if (videoUrl) { + overlay.href = videoUrl + overlay.target = '_blank' + overlay.rel = 'noreferrer noopener' + } + overlay.className = 'absolute inset-0 flex items-center justify-center' + const btn = document.createElement('div') + btn.className = 'w-12 h-12 rounded-full bg-black/45 flex items-center justify-center' + btn.innerHTML = '' + overlay.appendChild(btn) + wrap.appendChild(overlay) + } + + body.appendChild(wrap) + } else if (rt === 'image') { + const imageMd5 = pickFirstMd5(rec?.fullmd5, rec?.thumbfullmd5, rec?.cdnthumbmd5, rec?.md5, rec?.id) + let imgUrl = resolveMd5Any(mediaIndex, imageMd5) + if (!imgUrl && serverMd5) imgUrl = resolveMd5Any(mediaIndex, serverMd5) + if (!imgUrl) imgUrl = resolveRemoteAny(mediaIndex, rec?.externurl, rec?.cdnurlstring, rec?.encrypturlstring) + if (imgUrl) { + const outer = document.createElement('div') + outer.className = 'msg-radius overflow-hidden cursor-pointer inline-block' + const a = document.createElement('a') + a.href = imgUrl + a.target = '_blank' + a.rel = 'noreferrer noopener' + const img = document.createElement('img') + img.src = imgUrl + img.alt = '图片' + img.className = 'max-w-[240px] max-h-[240px] object-cover' + a.appendChild(img) + outer.appendChild(a) + body.appendChild(outer) + } else { + const t = document.createElement('div') + t.className = 'px-3 py-2 text-sm text-gray-700 whitespace-pre-wrap break-words' + t.textContent = content || '[图片]' + body.appendChild(t) + } + } else if (rt === 'emoji') { + const emojiMd5 = pickFirstMd5(rec?.md5, rec?.fullmd5, rec?.thumbfullmd5, rec?.cdnthumbmd5, rec?.id) + let emojiUrl = resolveMd5Any(mediaIndex, emojiMd5) + if (!emojiUrl && serverMd5) emojiUrl = resolveMd5Any(mediaIndex, serverMd5) + if (!emojiUrl) emojiUrl = resolveRemoteAny(mediaIndex, rec?.externurl, rec?.cdnurlstring, rec?.encrypturlstring) + if (emojiUrl) { + const img = document.createElement('img') + img.src = emojiUrl + img.alt = '表情' + img.className = 'w-24 h-24 object-contain' + 
body.appendChild(img) + } else { + const t = document.createElement('div') + t.className = 'px-3 py-2 text-sm text-gray-700 whitespace-pre-wrap break-words' + t.textContent = content || '[表情]' + body.appendChild(t) + } + } else { + const t = document.createElement('div') + t.className = 'px-3 py-2 text-sm text-gray-700 whitespace-pre-wrap break-words' + t.textContent = content || '' + body.appendChild(t) + } + + main.appendChild(header) + main.appendChild(body) + + row.appendChild(avatarWrap) + row.appendChild(main) + return row + } + + const applyChatHistoryState = (state) => { + currentState = state + const title = String(state?.title || '聊天记录').trim() || '聊天记录' + const info = state?.info || { isChatRoom: false } + const records = Array.isArray(state?.records) ? state.records : [] + + try { titleEl.textContent = title } catch {} + try { listEl.textContent = '' } catch {} + + if (!records.length) { + try { emptyEl.style.display = '' } catch {} + } else { + try { emptyEl.style.display = 'none' } catch {} + for (const rec of records) { + try { + listEl.appendChild(renderRecordRow(rec, info)) + } catch {} + } + } + + updateBackVisibility() + } + + const openNestedChatHistory = (rec) => { + const xml = String(rec?.recordItem || '').trim() + if (!xml) return + if (currentState) { + historyStack = [...historyStack, currentState] + } + const state = buildChatHistoryState({ + title: String(rec?.title || '聊天记录'), + recordItem: xml, + content: String(rec?.content || ''), + }) + applyChatHistoryState(state) + } + + if (backBtn) { + backBtn.addEventListener('click', (ev) => { + try { ev.preventDefault() } catch {} + if (!Array.isArray(historyStack) || !historyStack.length) return + const prev = historyStack[historyStack.length - 1] + historyStack = historyStack.slice(0, -1) + applyChatHistoryState(prev) + }) + } + + const openFromCard = (card) => { + const title = String(card?.getAttribute('data-title') || '聊天记录').trim() || '聊天记录' + const b64 = 
String(card?.getAttribute('data-record-item-b64') || '').trim() + const xml = decodeBase64Utf8(b64) + const lines = Array.from(card.querySelectorAll('.wechat-chat-history-line') || []) + .map((el) => String(el?.textContent || '').trim()) + .filter(Boolean) + + historyStack = [] + const state = buildChatHistoryState({ title, recordItem: xml, fallbackLines: lines }) + applyChatHistoryState(state) + + try { modal.classList.remove('hidden') } catch {} + try { modal.style.display = 'flex' } catch {} + try { modal.setAttribute('aria-hidden', 'false') } catch {} + try { document.body.style.overflow = 'hidden' } catch {} + } + + closeBtn.addEventListener('click', (ev) => { + try { ev.preventDefault() } catch {} + close() + }) + modal.addEventListener('click', (ev) => { + const t = ev && ev.target + if (t === modal) close() + }) + + document.addEventListener('keydown', (ev) => { + const key = String(ev?.key || '') + if (key === 'Escape' && !modal.classList.contains('hidden')) close() + + if ((key === 'Enter' || key === ' ') && modal.classList.contains('hidden')) { + const target = ev && ev.target + const card = target && target.closest ? target.closest('[data-wce-chat-history=\"1\"]') : null + if (!card) return + try { ev.preventDefault() } catch {} + openFromCard(card) + } + }, true) + + document.addEventListener('click', (ev) => { + const target = ev && ev.target + const card = target && target.closest ? 
target.closest('[data-wce-chat-history=\"1\"]') : null + if (!card) return + try { ev.preventDefault() } catch {} + openFromCard(card) + }, true) + } + + const initChatHistoryFloatingWindows = () => { + const mediaIndex = readMediaIndex() + let zIndex = 1000 + let cascade = 0 + let idSeed = 0 + + const clampNumber = (value, min, max) => { + const n = Number(value) + if (!Number.isFinite(n)) return min + return Math.min(max, Math.max(min, n)) + } + + const getViewport = () => { + const w = Math.max(320, window.innerWidth || 0) + const h = Math.max(240, window.innerHeight || 0) + return { w, h } + } + + const getPoint = (ev) => { + try { + return (ev && ev.touches && ev.touches[0]) ? ev.touches[0] : ev + } catch { + return ev + } + } + + const buildChatHistoryState = (payload) => { + const title = String(payload?.title || '聊天记录').trim() || '聊天记录' + const xml = String(payload?.recordItem || '').trim() + const parsed = parseChatHistoryRecord(xml) + const info = (parsed && parsed.info) ? parsed.info : { isChatRoom: false } + let records = (parsed && Array.isArray(parsed.items)) ? parsed.items : [] + + if (!records.length) { + const lines = Array.isArray(payload?.fallbackLines) + ? payload.fallbackLines + : String(payload?.content || '').trim().split(/\r?\n/).map((x) => String(x || '').trim()).filter(Boolean) + records = lines.map((line, idx) => ({ id: String(idx), renderType: 'text', content: line, sourcename: '', sourcetime: '' })) + } + + return { title, info, records } + } + + const renderRecordRow = (rec, info, onOpenNested) => { + const row = document.createElement('div') + row.className = 'px-4 py-3 flex gap-3 border-b border-gray-100 bg-[#f7f7f7]' + + const avatarWrap = document.createElement('div') + avatarWrap.className = 'w-9 h-9 rounded-md overflow-hidden bg-gray-200 flex-shrink-0' + const name0 = String(rec?.sourcename || '').trim() || '?' 
+ const avatarUrlRaw = normalizeChatHistoryUrl(rec?.sourceheadurl) + const avatarLocal = (mediaIndex && mediaIndex.remote && mediaIndex.remote[avatarUrlRaw]) ? String(mediaIndex.remote[avatarUrlRaw] || '') : '' + const avatarUrlLower = String(avatarUrlRaw || '').trim().toLowerCase() + const avatarUrl = avatarLocal || ((avatarUrlLower.startsWith('http://') || avatarUrlLower.startsWith('https://')) ? avatarUrlRaw : '') + if (avatarUrl) { + const img = document.createElement('img') + img.src = avatarUrl + img.alt = '头像' + img.className = 'w-full h-full object-cover' + try { img.referrerPolicy = 'no-referrer' } catch {} + img.onerror = () => { + try { avatarWrap.textContent = '' } catch {} + const fb = document.createElement('div') + fb.className = 'w-full h-full flex items-center justify-center text-xs font-bold text-gray-600' + fb.textContent = String(name0.charAt(0) || '?') + avatarWrap.appendChild(fb) + } + avatarWrap.appendChild(img) + } else { + const fb = document.createElement('div') + fb.className = 'w-full h-full flex items-center justify-center text-xs font-bold text-gray-600' + fb.textContent = String(name0.charAt(0) || '?') + avatarWrap.appendChild(fb) + } + + const main = document.createElement('div') + main.className = 'min-w-0 flex-1' + + const header = document.createElement('div') + header.className = 'flex items-start gap-2' + + const headerLeft = document.createElement('div') + headerLeft.className = 'min-w-0 flex-1' + const senderName = String(rec?.sourcename || '').trim() + if (info && info.isChatRoom && senderName) { + const sn = document.createElement('div') + sn.className = 'text-xs text-gray-500 leading-none truncate mb-1' + sn.textContent = senderName + headerLeft.appendChild(sn) + } + + const headerRight = document.createElement('div') + headerRight.className = 'text-xs text-gray-400 flex-shrink-0 leading-none' + const timeText = String(rec?.sourcetime || '').trim() + headerRight.textContent = timeText + + header.appendChild(headerLeft) + if 
(timeText) header.appendChild(headerRight) + + const body = document.createElement('div') + body.className = 'mt-1' + + const rt = String(rec?.renderType || 'text') + const content = String(rec?.content || '').trim() + const serverId = String(rec?.fromnewmsgid || '').trim() + const serverMd5 = resolveServerMd5(mediaIndex, serverId) + + if (rt === 'chatHistory') { + const card = document.createElement('div') + card.className = 'wechat-chat-history-card wechat-special-card msg-radius' + + const chBody = document.createElement('div') + chBody.className = 'wechat-chat-history-body' + + const chTitle = document.createElement('div') + chTitle.className = 'wechat-chat-history-title' + chTitle.textContent = String(rec?.title || '聊天记录') + chBody.appendChild(chTitle) + + const raw = String(rec?.content || '').trim() + const lines = raw ? raw.split(/\r?\n/).map((x) => String(x || '').trim()).filter(Boolean).slice(0, 4) : [] + if (lines.length) { + const preview = document.createElement('div') + preview.className = 'wechat-chat-history-preview' + for (const line of lines) { + const el = document.createElement('div') + el.className = 'wechat-chat-history-line' + el.textContent = line + preview.appendChild(el) + } + chBody.appendChild(preview) + } + + card.appendChild(chBody) + + const bottom = document.createElement('div') + bottom.className = 'wechat-chat-history-bottom' + const label = document.createElement('span') + label.textContent = '聊天记录' + bottom.appendChild(label) + card.appendChild(bottom) + + const nestedXml = String(rec?.recordItem || '').trim() + if (nestedXml) { + card.classList.add('cursor-pointer') + card.addEventListener('click', (ev) => { + try { ev.preventDefault() } catch {} + try { ev.stopPropagation() } catch {} + if (typeof onOpenNested === 'function') onOpenNested(rec) + }) + } + + body.appendChild(card) + } else if (rt === 'link') { + const href = normalizeChatHistoryUrl(rec?.url) || normalizeChatHistoryUrl(rec?.externurl) + const heading = 
String(rec?.title || '').trim() || content || href || '链接' + const desc = String(rec?.content || '').trim() + + const thumbMd5 = pickFirstMd5(rec?.fullmd5, rec?.thumbfullmd5, rec?.cdnthumbmd5, rec?.md5, rec?.id) + let previewUrl = resolveMd5Any(mediaIndex, thumbMd5) + if (!previewUrl && serverMd5) previewUrl = resolveMd5Any(mediaIndex, serverMd5) + if (!previewUrl) previewUrl = resolveRemoteAny(mediaIndex, rec?.externurl, rec?.cdnurlstring, rec?.encrypturlstring) + + const card = document.createElement(href ? 'a' : 'div') + card.className = 'wechat-link-card wechat-special-card msg-radius cursor-pointer' + if (href) { + card.href = href + card.target = '_blank' + card.rel = 'noreferrer noopener' + } + try { card.style.textDecoration = 'none' } catch {} + try { card.style.outline = 'none' } catch {} + + const linkContent = document.createElement('div') + linkContent.className = 'wechat-link-content' + + const linkInfo = document.createElement('div') + linkInfo.className = 'wechat-link-info' + const titleEl = document.createElement('div') + titleEl.className = 'wechat-link-title' + titleEl.textContent = heading + linkInfo.appendChild(titleEl) + if (desc) { + const descEl = document.createElement('div') + descEl.className = 'wechat-link-desc' + descEl.textContent = desc + linkInfo.appendChild(descEl) + } + linkContent.appendChild(linkInfo) + + if (previewUrl) { + const thumb = document.createElement('div') + thumb.className = 'wechat-link-thumb' + const img = document.createElement('img') + img.src = previewUrl + img.alt = heading || '链接预览' + img.className = 'wechat-link-thumb-img' + try { img.referrerPolicy = 'no-referrer' } catch {} + thumb.appendChild(img) + linkContent.appendChild(thumb) + } + + card.appendChild(linkContent) + + const fromRow = document.createElement('div') + fromRow.className = 'wechat-link-from' + const fromAvatar = document.createElement('div') + fromAvatar.className = 'wechat-link-from-avatar' + + const fromUrlRaw = 
normalizeChatHistoryUrl(rec?.sourceheadurl) + const fromLocal = (mediaIndex && mediaIndex.remote && mediaIndex.remote[fromUrlRaw]) ? String(mediaIndex.remote[fromUrlRaw] || '') : '' + const fromLower = String(fromUrlRaw || '').trim().toLowerCase() + const fromUrl = fromLocal || ((fromLower.startsWith('http://') || fromLower.startsWith('https://')) ? fromUrlRaw : '') + const fromText = String(rec?.sourcename || '').trim() + if (fromUrl) { + const img = document.createElement('img') + img.src = fromUrl + img.alt = '' + img.className = 'wechat-link-from-avatar-img' + try { img.referrerPolicy = 'no-referrer' } catch {} + img.onerror = () => { + try { fromAvatar.textContent = '' } catch {} + const span = document.createElement('span') + span.textContent = String(fromText ? fromText.charAt(0) : '\u200B') + fromAvatar.appendChild(span) + } + fromAvatar.appendChild(img) + } else { + const span = document.createElement('span') + span.textContent = String(fromText ? fromText.charAt(0) : '\u200B') + fromAvatar.appendChild(span) + } + const fromName = document.createElement('div') + fromName.className = 'wechat-link-from-name' + fromName.textContent = fromText || '\u200B' + fromRow.appendChild(fromAvatar) + fromRow.appendChild(fromName) + card.appendChild(fromRow) + + body.appendChild(card) + } else if (rt === 'video') { + const videoMd5 = pickFirstMd5(rec?.fullmd5, rec?.md5, rec?.id) + const thumbMd5 = pickFirstMd5(rec?.thumbfullmd5, rec?.cdnthumbmd5) || videoMd5 + let videoUrl = resolveMd5Any(mediaIndex, videoMd5) + if (!videoUrl && serverMd5) videoUrl = resolveMd5Any(mediaIndex, serverMd5) + if (!videoUrl) videoUrl = resolveRemoteAny(mediaIndex, rec?.externurl, rec?.cdnurlstring, rec?.encrypturlstring) + + let thumbUrl = resolveMd5Any(mediaIndex, thumbMd5) + if (!thumbUrl && serverMd5) thumbUrl = resolveMd5Any(mediaIndex, serverMd5) + if (!thumbUrl) thumbUrl = resolveRemoteAny(mediaIndex, rec?.externurl, rec?.cdnurlstring, rec?.encrypturlstring) + + const wrap = 
document.createElement('div') + wrap.className = 'msg-radius overflow-hidden relative bg-black/5 inline-block' + + if (thumbUrl) { + const img = document.createElement('img') + img.src = thumbUrl + img.alt = '视频' + img.className = 'block w-[220px] max-w-[260px] h-auto max-h-[260px] object-cover' + wrap.appendChild(img) + } else { + const t = document.createElement('div') + t.className = 'px-3 py-2 text-sm text-gray-700' + t.textContent = content || '[视频]' + wrap.appendChild(t) + } + + if (thumbUrl) { + const overlay = document.createElement(videoUrl ? 'a' : 'div') + if (videoUrl) { + overlay.href = videoUrl + overlay.target = '_blank' + overlay.rel = 'noreferrer noopener' + } + overlay.className = 'absolute inset-0 flex items-center justify-center' + const btn = document.createElement('div') + btn.className = 'w-12 h-12 rounded-full bg-black/45 flex items-center justify-center' + btn.innerHTML = '' + overlay.appendChild(btn) + wrap.appendChild(overlay) + } + + body.appendChild(wrap) + } else if (rt === 'image') { + const imageMd5 = pickFirstMd5(rec?.fullmd5, rec?.thumbfullmd5, rec?.cdnthumbmd5, rec?.md5, rec?.id) + let imgUrl = resolveMd5Any(mediaIndex, imageMd5) + if (!imgUrl && serverMd5) imgUrl = resolveMd5Any(mediaIndex, serverMd5) + if (!imgUrl) imgUrl = resolveRemoteAny(mediaIndex, rec?.externurl, rec?.cdnurlstring, rec?.encrypturlstring) + if (imgUrl) { + const outer = document.createElement('div') + outer.className = 'msg-radius overflow-hidden cursor-pointer inline-block' + const a = document.createElement('a') + a.href = imgUrl + a.target = '_blank' + a.rel = 'noreferrer noopener' + const img = document.createElement('img') + img.src = imgUrl + img.alt = '图片' + img.className = 'max-w-[240px] max-h-[240px] object-cover' + a.appendChild(img) + outer.appendChild(a) + body.appendChild(outer) + } else { + const t = document.createElement('div') + t.className = 'px-3 py-2 text-sm text-gray-700 whitespace-pre-wrap break-words' + t.textContent = content || '[图片]' 
+ body.appendChild(t) + } + } else if (rt === 'emoji') { + const emojiMd5 = pickFirstMd5(rec?.md5, rec?.fullmd5, rec?.thumbfullmd5, rec?.cdnthumbmd5, rec?.id) + let emojiUrl = resolveMd5Any(mediaIndex, emojiMd5) + if (!emojiUrl && serverMd5) emojiUrl = resolveMd5Any(mediaIndex, serverMd5) + if (!emojiUrl) emojiUrl = resolveRemoteAny(mediaIndex, rec?.externurl, rec?.cdnurlstring, rec?.encrypturlstring) + if (emojiUrl) { + const img = document.createElement('img') + img.src = emojiUrl + img.alt = '表情' + img.className = 'w-24 h-24 object-contain' + body.appendChild(img) + } else { + const t = document.createElement('div') + t.className = 'px-3 py-2 text-sm text-gray-700 whitespace-pre-wrap break-words' + t.textContent = content || '[表情]' + body.appendChild(t) + } + } else { + const t = document.createElement('div') + t.className = 'px-3 py-2 text-sm text-gray-700 whitespace-pre-wrap break-words' + t.textContent = content || '' + body.appendChild(t) + } + + main.appendChild(header) + main.appendChild(body) + + row.appendChild(avatarWrap) + row.appendChild(main) + return row + } + + const focusWindow = (wrap) => { + zIndex += 1 + try { wrap.style.zIndex = String(zIndex) } catch {} + } + + const openChatHistoryWindow = (payload, opts) => { + const state = buildChatHistoryState(payload || {}) + const info = state.info || { isChatRoom: false } + const records = Array.isArray(state.records) ? 
state.records : [] + + const vp = getViewport() + const width = Math.min(560, Math.max(320, Math.floor(vp.w * 0.92))) + const height = Math.min(560, Math.max(240, Math.floor(vp.h * 0.8))) + + let x = Math.max(8, Math.floor((vp.w - width) / 2)) + let y = Math.max(8, Math.floor((vp.h - height) / 2)) + + const spawnFrom = opts && opts.spawnFrom + if (spawnFrom) { + x = Number(spawnFrom.x || x) + 24 + y = Number(spawnFrom.y || y) + 24 + } else { + x += cascade + y += cascade + cascade = (cascade + 24) % 120 + } + + x = clampNumber(x, 8, Math.max(8, vp.w - width - 8)) + y = clampNumber(y, 8, Math.max(8, vp.h - height - 8)) + + const win = { id: String(++idSeed), x, y, width, height } + + const wrap = document.createElement('div') + wrap.className = 'fixed' + wrap.style.left = `${win.x}px` + wrap.style.top = `${win.y}px` + wrap.style.zIndex = String(++zIndex) + + const box = document.createElement('div') + box.className = 'bg-[#f7f7f7] rounded-xl shadow-xl overflow-hidden border border-gray-200 flex flex-col' + box.style.width = `${win.width}px` + box.style.height = `${win.height}px` + wrap.appendChild(box) + + const header = document.createElement('div') + header.className = 'px-3 py-2 bg-[#f7f7f7] border-b border-gray-200 flex items-center justify-between select-none cursor-move' + box.appendChild(header) + + const titleEl = document.createElement('div') + titleEl.className = 'text-sm text-[#161616] truncate min-w-0' + titleEl.textContent = String(state.title || '聊天记录') + header.appendChild(titleEl) + + const closeBtn = document.createElement('button') + closeBtn.type = 'button' + closeBtn.className = 'p-2 rounded hover:bg-black/5 flex-shrink-0' + try { closeBtn.setAttribute('aria-label', '关闭') } catch {} + try { closeBtn.setAttribute('title', '关闭') } catch {} + closeBtn.innerHTML = '' + header.appendChild(closeBtn) + + const body = document.createElement('div') + body.className = 'flex-1 overflow-auto bg-[#f7f7f7]' + box.appendChild(body) + + if (!records.length) { + 
const empty = document.createElement('div') + empty.className = 'text-sm text-gray-500 text-center py-10' + empty.textContent = '没有可显示的聊天记录' + body.appendChild(empty) + } else { + const onOpenNested = (rec) => { + const xml = String(rec?.recordItem || '').trim() + if (!xml) return + openChatHistoryWindow({ + title: String(rec?.title || '聊天记录'), + recordItem: xml, + content: String(rec?.content || ''), + }, { spawnFrom: win }) + } + for (const rec of records) { + try { + body.appendChild(renderRecordRow(rec, info, onOpenNested)) + } catch {} + } + } + + const updatePos = () => { + try { wrap.style.left = `${win.x}px` } catch {} + try { wrap.style.top = `${win.y}px` } catch {} + } + + closeBtn.addEventListener('click', (ev) => { + try { ev.preventDefault() } catch {} + try { ev.stopPropagation() } catch {} + try { wrap.remove() } catch { + try { if (wrap.parentElement) wrap.parentElement.removeChild(wrap) } catch {} + } + }) + + const startDrag = (ev) => { + const t = ev && ev.target + if (t && t.closest && t.closest('button')) return + + focusWindow(wrap) + const p0 = getPoint(ev) + const ox = Number(p0?.clientX || 0) - win.x + const oy = Number(p0?.clientY || 0) - win.y + + const onMove = (e2) => { + const p = getPoint(e2) + if (!p) return + try { if (e2 && typeof e2.preventDefault === 'function') e2.preventDefault() } catch {} + + const vp2 = getViewport() + const nx = Number(p.clientX || 0) - ox + const ny = Number(p.clientY || 0) - oy + win.x = clampNumber(nx, 8, Math.max(8, vp2.w - win.width - 8)) + win.y = clampNumber(ny, 8, Math.max(8, vp2.h - win.height - 8)) + updatePos() + } + + const stop = () => { + try { document.removeEventListener('mousemove', onMove) } catch {} + try { document.removeEventListener('touchmove', onMove) } catch {} + } + + try { document.addEventListener('mousemove', onMove) } catch {} + try { document.addEventListener('mouseup', () => stop(), { once: true }) } catch {} + try { document.addEventListener('touchmove', onMove, { passive: 
false }) } catch {} + try { document.addEventListener('touchend', () => stop(), { once: true }) } catch {} + + try { ev.preventDefault() } catch {} + } + + header.addEventListener('mousedown', startDrag) + header.addEventListener('touchstart', startDrag, { passive: false }) + + wrap.addEventListener('mousedown', () => focusWindow(wrap)) + wrap.addEventListener('touchstart', () => focusWindow(wrap), { passive: true }) + + try { document.body.appendChild(wrap) } catch {} + return win + } + + document.addEventListener('keydown', (ev) => { + const key = String(ev?.key || '') + if (key !== 'Enter' && key !== ' ') return + const target = ev && ev.target + const card = target && target.closest ? target.closest('[data-wce-chat-history=\"1\"]') : null + if (!card) return + try { ev.preventDefault() } catch {} + const title = String(card?.getAttribute('data-title') || '聊天记录').trim() || '聊天记录' + const b64 = String(card?.getAttribute('data-record-item-b64') || '').trim() + const xml = decodeBase64Utf8(b64) + const lines = Array.from(card.querySelectorAll('.wechat-chat-history-line') || []) + .map((el) => String(el?.textContent || '').trim()) + .filter(Boolean) + openChatHistoryWindow({ title, recordItem: xml, fallbackLines: lines }) + }, true) + + document.addEventListener('click', (ev) => { + const target = ev && ev.target + const card = target && target.closest ? 
target.closest('[data-wce-chat-history=\"1\"]') : null + if (!card) return + try { ev.preventDefault() } catch {} + const title = String(card?.getAttribute('data-title') || '聊天记录').trim() || '聊天记录' + const b64 = String(card?.getAttribute('data-record-item-b64') || '').trim() + const xml = decodeBase64Utf8(b64) + const lines = Array.from(card.querySelectorAll('.wechat-chat-history-line') || []) + .map((el) => String(el?.textContent || '').trim()) + .filter(Boolean) + openChatHistoryWindow({ title, recordItem: xml, fallbackLines: lines }) + }, true) + } + + document.addEventListener('DOMContentLoaded', () => { + hideJsMissingBanner() + updateDprVar() + try { + window.addEventListener('resize', updateDprVar) + } catch {} + + initSessionSearch() + initVoicePlayback() + initChatHistoryFloatingWindows() + initPagedMessageLoading() + + const select = document.getElementById('messageTypeFilter') + if (select) { + select.addEventListener('change', applyMessageTypeFilter) + applyMessageTypeFilter() + } + + updateSessionMessageCount() + scrollToBottom() + try { + window.addEventListener('load', () => { + updateSessionMessageCount() + scrollToBottom() + setTimeout(scrollToBottom, 60) + }) + } catch {} + }) + + // Best-effort: defer scripts execute after the DOM is parsed, so we can hide the banner immediately. 
+ hideJsMissingBanner() +})() +""" + + def _format_ts(ts: int) -> str: if not ts: return "" @@ -99,43 +2346,58 @@ def _normalize_render_type_key(value: Any) -> str: return lower -def _render_types_to_local_types(render_types: set[str]) -> Optional[set[int]]: - rt = {str(x or "").strip() for x in (render_types or set())} - rt = {x for x in rt if x} - if not rt: +def _is_render_type_selected(render_type: Any, selected_render_types: Optional[set[str]]) -> bool: + if selected_render_types is None: + return True + rt = _normalize_render_type_key(render_type) or "text" + return rt in selected_render_types + + +def _media_kinds_from_selected_types(selected_render_types: Optional[set[str]]) -> Optional[set[MediaKind]]: + if selected_render_types is None: return None - out: set[int] = set() - for k in rt: - if k == "text": - out.add(1) - elif k == "image": - out.add(3) - elif k == "voice": - out.add(34) - elif k == "video": - out.update({43, 62}) - elif k == "emoji": - out.add(47) - elif k == "voip": - out.add(50) - elif k == "system": - out.update({10000, 266287972401}) - elif k == "quote": - out.add(244813135921) - out.add(49) # Some quote messages are embedded as appmsg (local_type=49). - elif k in {"link", "file", "transfer", "redpacket"}: - out.add(49) - else: - # Unknown type: cannot safely prefilter by local_type. - return None + out: set[MediaKind] = set() + # Merged-forward chat history items can contain arbitrary media types; enable packing those + # even when users only select `chatHistory` in the renderType filter. 
+ if "chathistory" in selected_render_types: + out.update({"image", "emoji", "video", "video_thumb", "voice", "file"}) + if "image" in selected_render_types: + out.add("image") + if "emoji" in selected_render_types: + out.add("emoji") + if "video" in selected_render_types: + out.add("video") + out.add("video_thumb") + if "voice" in selected_render_types: + out.add("voice") + if "file" in selected_render_types: + out.add("file") return out -def _should_estimate_by_local_type(render_types: set[str]) -> bool: - # Only estimate counts when every requested type maps 1:1 to local_type. - # App messages (local_type=49) are heterogeneous and cannot be counted accurately without parsing. - return not bool(render_types & {"link", "file", "transfer", "redpacket", "quote"}) +def _resolve_effective_media_kinds( + *, + include_media: bool, + media_kinds: list[MediaKind], + selected_render_types: Optional[set[str]], + privacy_mode: bool, +) -> tuple[bool, list[MediaKind]]: + if privacy_mode or (not include_media): + return False, [] + + kinds = [k for k in media_kinds if k in {"image", "emoji", "video", "video_thumb", "voice", "file"}] + if not kinds: + return False, [] + + selected_media_kinds = _media_kinds_from_selected_types(selected_render_types) + if selected_media_kinds is not None: + kinds = [k for k in kinds if k in selected_media_kinds] + + kinds = list(dict.fromkeys(kinds)) + if not kinds: + return False, [] + return True, kinds @dataclass @@ -235,7 +2497,10 @@ def create_job( include_media: bool, media_kinds: list[MediaKind], message_types: list[str], + output_dir: Optional[str], allow_process_key_extract: bool, + download_remote_media: bool, + html_page_size: int = 1000, privacy_mode: bool, file_name: Optional[str], ) -> ExportJob: @@ -257,7 +2522,10 @@ def create_job( "includeMedia": bool(include_media), "mediaKinds": media_kinds, "messageTypes": list(dict.fromkeys([str(t or "").strip() for t in (message_types or []) if str(t or "").strip()])), + "outputDir": 
str(output_dir or "").strip(), "allowProcessKeyExtract": bool(allow_process_key_extract), + "downloadRemoteMedia": bool(download_remote_media), + "htmlPageSize": int(html_page_size) if int(html_page_size or 0) > 0 else int(html_page_size or 0), "privacyMode": bool(privacy_mode), "fileName": str(file_name or "").strip(), }, @@ -299,12 +2567,22 @@ def _run_job(self, job: ExportJob, account_dir: Path) -> None: opts = dict(job.options or {}) scope: ExportScope = str(opts.get("scope") or "selected") # type: ignore[assignment] - export_format: ExportFormat = str(opts.get("format") or "json") # type: ignore[assignment] + export_format_raw = str(opts.get("format") or "json").strip() or "json" + if export_format_raw not in {"json", "txt", "html"}: + raise ValueError(f"Unsupported export format: {export_format_raw}") + export_format: ExportFormat = export_format_raw # type: ignore[assignment] include_hidden = bool(opts.get("includeHidden")) include_official = bool(opts.get("includeOfficial")) include_media = bool(opts.get("includeMedia")) allow_process_key_extract = bool(opts.get("allowProcessKeyExtract")) + download_remote_media = bool(opts.get("downloadRemoteMedia")) privacy_mode = bool(opts.get("privacyMode")) + try: + html_page_size = int(opts.get("htmlPageSize") or 1000) + except Exception: + html_page_size = 1000 + if html_page_size < 0: + html_page_size = 0 media_kinds_raw = opts.get("mediaKinds") or [] media_kinds: list[MediaKind] = [] @@ -313,10 +2591,6 @@ def _run_job(self, job: ExportJob, account_dir: Path) -> None: if ks in {"image", "emoji", "video", "video_thumb", "voice", "file"}: media_kinds.append(ks) # type: ignore[arg-type] - if privacy_mode: - include_media = False - media_kinds = [] - st = int(opts.get("startTime") or 0) or None et = int(opts.get("endTime") or 0) or None @@ -328,9 +2602,15 @@ def _run_job(self, job: ExportJob, account_dir: Path) -> None: if want: want_types = want - local_types = _render_types_to_local_types(want_types) if want_types 
else None - can_estimate = (want_types is None) or _should_estimate_by_local_type(want_types) - estimate_local_types = local_types if (want_types and can_estimate) else None + include_media, media_kinds = _resolve_effective_media_kinds( + include_media=include_media, + media_kinds=media_kinds, + selected_render_types=want_types, + privacy_mode=privacy_mode, + ) + + local_types = None + estimate_local_types = None target_usernames = _resolve_export_targets( account_dir=account_dir, @@ -342,8 +2622,7 @@ def _run_job(self, job: ExportJob, account_dir: Path) -> None: if not target_usernames: raise ValueError("No target conversations to export.") - exports_root = account_dir.parents[1] / "exports" / account_dir.name - exports_root.mkdir(parents=True, exist_ok=True) + exports_root = _resolve_export_output_dir(account_dir, opts.get("outputDir")) ts = datetime.now().strftime("%Y%m%d_%H%M%S") base_name = str(opts.get("fileName") or "").strip() @@ -438,6 +2717,138 @@ def resolve_display_name(u: str) -> str: pass with zipfile.ZipFile(tmp_zip, mode="w", compression=zipfile.ZIP_DEFLATED, compresslevel=6) as zf: + html_index_items: list[dict[str, Any]] = [] + self_avatar_path = "" + session_items: list[dict[str, Any]] = [] + remote_written: dict[str, str] = {} + remote_download_enabled = bool(download_remote_media) and (export_format == "html") and include_media and (not privacy_mode) + if export_format == "html": + ui_public_dir = _resolve_ui_public_dir() + css_payload = _load_ui_css_bundle(ui_public_dir=ui_public_dir, report=report) + zf.writestr("assets/wechat-chat-export.css", css_payload) + zf.writestr("assets/wechat-chat-export.js", _HTML_EXPORT_JS) + + # Bundle UI static assets so the HTML works offline. 
+ repo_root = Path(__file__).resolve().parents[2] + static_written: set[str] = { + "assets/wechat-chat-export.css", + "assets/wechat-chat-export.js", + } + + if ui_public_dir is not None: + _zip_write_tree( + zf=zf, + src_dir=Path(ui_public_dir) / "fonts", + dest_prefix="fonts", + written=static_written, + ) + _zip_write_tree( + zf=zf, + src_dir=Path(ui_public_dir) / "wxemoji", + dest_prefix="wxemoji", + written=static_written, + ) + _zip_write_tree( + zf=zf, + src_dir=Path(ui_public_dir) / "assets" / "images" / "wechat", + dest_prefix="assets/images/wechat", + written=static_written, + ) + + _zip_write_tree( + zf=zf, + src_dir=repo_root / "frontend" / "public" / "assets" / "images" / "wechat", + dest_prefix="assets/images/wechat", + written=static_written, + ) + _zip_write_tree( + zf=zf, + src_dir=repo_root / "frontend" / "assets" / "images" / "wechat", + dest_prefix="assets/images/wechat", + written=static_written, + ) + + preview_by_username: dict[str, str] = {} + last_ts_by_username: dict[str, int] = {} + + if not privacy_mode: + self_avatar_path = _materialize_avatar( + zf=zf, + head_image_conn=head_image_conn, + username=account_dir.name, + avatar_written=avatar_written, + ) + + try: + preview_by_username = _load_latest_message_previews(account_dir, target_usernames) + except Exception: + preview_by_username = {} + + session_db_path = Path(account_dir) / "session.db" + if session_db_path.exists(): + sconn = sqlite3.connect(str(session_db_path)) + sconn.row_factory = sqlite3.Row + try: + uniq = list(dict.fromkeys([u for u in target_usernames if u])) + chunk_size = 900 + for i in range(0, len(uniq), chunk_size): + chunk = uniq[i : i + chunk_size] + placeholders = ",".join(["?"] * len(chunk)) + try: + rows = sconn.execute( + f"SELECT username, sort_timestamp, last_timestamp FROM SessionTable WHERE username IN ({placeholders})", + chunk, + ).fetchall() + for r in rows: + u = str(r["username"] or "").strip() + if not u: + continue + ts = int(r["sort_timestamp"] or 
0) + if ts <= 0: + ts = int(r["last_timestamp"] or 0) + last_ts_by_username[u] = int(ts or 0) + except sqlite3.OperationalError: + rows = sconn.execute( + f"SELECT username, last_timestamp FROM SessionTable WHERE username IN ({placeholders})", + chunk, + ).fetchall() + for r in rows: + u = str(r["username"] or "").strip() + if not u: + continue + last_ts_by_username[u] = int(r["last_timestamp"] or 0) + except Exception: + last_ts_by_username = {} + finally: + sconn.close() + + for idx, conv_username in enumerate(target_usernames, start=1): + conv_row = contact_row_cache.get(conv_username) + conv_name = _pick_display_name(conv_row, conv_username) + conv_is_group = bool(conv_username.endswith("@chatroom")) + conv_dir = f"conversations/{_conversation_dir_name(idx, conv_name, conv_username, conv_is_group, privacy_mode)}" + + conv_avatar_path = "" + if not privacy_mode: + conv_avatar_path = _materialize_avatar( + zf=zf, + head_image_conn=head_image_conn, + username=conv_username, + avatar_written=avatar_written, + ) + + session_items.append( + { + "username": "" if privacy_mode else conv_username, + "displayName": (f"会话 {idx:04d}" if privacy_mode else conv_name), + "isGroup": bool(conv_is_group), + "convDir": conv_dir, + "avatarPath": "" if privacy_mode else conv_avatar_path, + "lastTimeText": ("" if privacy_mode else _format_session_time(last_ts_by_username.get(conv_username))), + "previewText": ("" if privacy_mode else str(preview_by_username.get(conv_username) or "")), + } + ) + for idx, conv_username in enumerate(target_usernames, start=1): if self._should_cancel(job): raise _JobCancelled() @@ -456,16 +2867,13 @@ def resolve_display_name(u: str) -> str: job.progress.current_conversation_messages_total = 0 try: - if not can_estimate: - estimated_total = 0 - else: - estimated_total = _estimate_conversation_message_count( - account_dir=account_dir, - conv_username=conv_username, - start_time=st, - end_time=et, - local_types=estimate_local_types, - ) + estimated_total = 
_estimate_conversation_message_count( + account_dir=account_dir, + conv_username=conv_username, + start_time=st, + end_time=et, + local_types=estimate_local_types, + ) except Exception: estimated_total = 0 @@ -516,6 +2924,39 @@ def resolve_display_name(u: str) -> str: job=job, lock=self._lock, ) + elif export_format == "html": + exported_count = _write_conversation_html( + zf=zf, + conv_dir=conv_dir, + account_dir=account_dir, + conv_username=conv_username, + conv_name=conv_name, + conv_avatar_path=conv_avatar_path, + conv_is_group=conv_is_group, + self_avatar_path=self_avatar_path, + session_items=session_items, + download_remote_media=remote_download_enabled, + remote_written=remote_written, + html_page_size=html_page_size, + start_time=st, + end_time=et, + want_types=want_types, + local_types=local_types, + resource_conn=resource_conn, + resource_chat_id=chat_id, + head_image_conn=head_image_conn, + resolve_display_name=resolve_display_name, + privacy_mode=privacy_mode, + include_media=include_media, + media_kinds=media_kinds, + media_written=media_written, + avatar_written=avatar_written, + report=report, + allow_process_key_extract=allow_process_key_extract, + media_db_path=media_db_path, + job=job, + lock=self._lock, + ) else: exported_count = _write_conversation_json( zf=zf, @@ -555,10 +2996,80 @@ def resolve_display_name(u: str) -> str: "messageCount": int(exported_count), } zf.writestr(f"{conv_dir}/meta.json", json.dumps(meta, ensure_ascii=False, indent=2)) + if export_format == "html": + html_index_items.append({"convDir": conv_dir, "meta": meta}) with self._lock: + job.progress.current_conversation_messages_exported = int(exported_count) + job.progress.current_conversation_messages_total = int(exported_count) job.progress.conversations_done += 1 + if export_format == "html": + def esc_text(v: Any) -> str: + return html.escape(str(v or ""), quote=False) + + def esc_attr(v: Any) -> str: + return html.escape(str(v or ""), quote=True) + + parts: list[str] = 
[] + parts.append("\n") + parts.append('\n') + parts.append("\n") + parts.append(' \n') + parts.append(' \n') + parts.append(" 聊天记录导出\n") + parts.append(' \n') + parts.append(' \n') + parts.append("\n") + parts.append("\n") + parts.append( + '
' + "提示:此页面需要 JavaScript 才能使用“合并聊天记录”等交互功能。若该提示一直存在,请确认已完整解压导出目录,并检查 wechat-chat-export.js 是否能加载(位于 assets/)。
\n" + ) + parts.append('
\n') + parts.append('
\n') + parts.append('

聊天记录导出(HTML)

\n') + parts.append( + f'

账号: {esc_text("hidden" if privacy_mode else account_dir.name)} · 会话数: {len(html_index_items)} · 导出时间: {esc_text(_now_iso())}

\n' + ) + parts.append('
\n') + + for item in html_index_items: + conv_dir0 = str(item.get("convDir") or "").strip() + meta0 = item.get("meta") or {} + display_name = str(meta0.get("displayName") or "会话").strip() or "会话" + avatar_path = str(meta0.get("avatarPath") or "").strip() + try: + msg_count = int(meta0.get("messageCount") or 0) + except Exception: + msg_count = 0 + + href = f"{conv_dir0}/messages.html" if conv_dir0 else "" + parts.append(f' \n') + parts.append(' \n") + parts.append('
\n') + parts.append(f'
{esc_text(display_name)}
\n') + parts.append(f'
共 {msg_count} 条消息
\n') + parts.append("
\n") + parts.append("
\n") + + parts.append("
\n") + parts.append('

提示:解压后直接打开本文件;媒体文件位于 media/ 目录。

\n') + parts.append("
\n") + parts.append("
\n") + parts.append("\n") + parts.append("\n") + zf.writestr("index.html", "".join(parts)) + manifest = { "schemaVersion": 1, "exportedAt": _now_iso(), @@ -577,6 +3088,8 @@ def resolve_display_name(u: str) -> str: "includeMedia": include_media, "mediaKinds": media_kinds, "allowProcessKeyExtract": allow_process_key_extract, + "downloadRemoteMedia": bool(download_remote_media), + "htmlPageSize": int(html_page_size) if export_format == "html" else None, "privacyMode": privacy_mode, }, "stats": { @@ -895,9 +3408,14 @@ def _parse_message_for_export( title = "" url = "" from_name = "" + from_username = "" + link_type = "" + link_style = "" record_item = "" image_md5 = "" + image_md5_candidates: list[str] = [] image_file_id = "" + image_file_id_candidates: list[str] = [] emoji_md5 = "" emoji_url = "" thumb_url = "" @@ -909,6 +3427,11 @@ def _parse_message_for_export( video_url = "" video_thumb_url = "" voice_length = "" + quote_username = "" + quote_server_id = "" + quote_type = "" + quote_thumb_url = "" + quote_voice_length = "" quote_title = "" quote_content = "" amount = "" @@ -919,16 +3442,14 @@ def _parse_message_for_export( file_md5 = "" transfer_id = "" voip_type = "" + location_lat: Optional[float] = None + location_lng: Optional[float] = None + location_poiname = "" + location_label = "" if local_type == 10000: render_type = "system" - if "revokemsg" in raw_text: - content_text = "撤回了一条消息" - else: - import re as _re - - content_text = _re.sub(r"]*>", "", raw_text) - content_text = _re.sub(r"\\s+", " ", content_text).strip() or "[系统消息]" + content_text = _parse_system_message_content(raw_text) elif local_type == 49: parsed = _parse_app_message(raw_text) render_type = str(parsed.get("renderType") or "text") @@ -936,7 +3457,15 @@ def _parse_message_for_export( title = str(parsed.get("title") or "") url = str(parsed.get("url") or "") from_name = str(parsed.get("from") or "") + from_username = str(parsed.get("fromUsername") or "") + link_type = 
str(parsed.get("linkType") or "") + link_style = str(parsed.get("linkStyle") or "") record_item = str(parsed.get("recordItem") or "") + quote_username = str(parsed.get("quoteUsername") or "") + quote_server_id = str(parsed.get("quoteServerId") or "") + quote_type = str(parsed.get("quoteType") or "") + quote_thumb_url = str(parsed.get("quoteThumbUrl") or "") + quote_voice_length = str(parsed.get("quoteVoiceLength") or "") quote_title = str(parsed.get("quoteTitle") or "") quote_content = str(parsed.get("quoteContent") or "") amount = str(parsed.get("amount") or "") @@ -969,51 +3498,98 @@ def _parse_message_for_export( render_type = "quote" parsed = _parse_app_message(raw_text) content_text = str(parsed.get("content") or "[引用消息]") + quote_username = str(parsed.get("quoteUsername") or "") + quote_server_id = str(parsed.get("quoteServerId") or "") + quote_type = str(parsed.get("quoteType") or "") + quote_thumb_url = str(parsed.get("quoteThumbUrl") or "") + quote_voice_length = str(parsed.get("quoteVoiceLength") or "") quote_title = str(parsed.get("quoteTitle") or "") quote_content = str(parsed.get("quoteContent") or "") + elif local_type == 48: + parsed = _parse_location_message(raw_text) + render_type = str(parsed.get("renderType") or "location") + content_text = str(parsed.get("content") or "[Location]") + location_lat = parsed.get("locationLat") + location_lng = parsed.get("locationLng") + location_poiname = str(parsed.get("locationPoiname") or "") + location_label = str(parsed.get("locationLabel") or "") elif local_type == 3: render_type = "image" - image_md5 = _extract_xml_attr(raw_text, "md5") or _extract_xml_tag_text(raw_text, "md5") - if not image_md5: - for k in [ - "cdnthumbmd5", - "cdnthumd5", - "cdnmidimgmd5", - "cdnbigimgmd5", - "hdmd5", - "hevc_mid_md5", - "hevc_md5", - "imgmd5", - "filemd5", - ]: - image_md5 = _extract_xml_attr(raw_text, k) or _extract_xml_tag_text(raw_text, k) - if image_md5: - break + def add_md5(v: Any) -> None: + s = str(v or 
"").strip().lower() + if _is_md5(s) and s not in image_md5_candidates: + image_md5_candidates.append(s) + + for k in [ + "md5", + "hdmd5", + "hevc_md5", + "hevc_mid_md5", + "cdnbigimgmd5", + "cdnmidimgmd5", + "cdnthumbmd5", + "cdnthumd5", + "imgmd5", + "filemd5", + ]: + add_md5(_extract_xml_attr(raw_text, k)) + add_md5(_extract_xml_tag_text(raw_text, k)) + + # Prefer message_resource.db md5 for local files: XML md5 frequently differs from the on-disk *.dat basename + # (especially for *_t.dat thumbnails), causing offline media materialization to miss. + if resource_conn is not None: + try: + md5_hit = _lookup_resource_md5( + resource_conn, + resource_chat_id, + message_local_type=local_type, + server_id=int(row.server_id or 0), + local_id=int(row.local_id or 0), + create_time=int(row.create_time or 0), + ) + except Exception: + md5_hit = "" + + md5_hit = str(md5_hit or "").strip().lower() + if _is_md5(md5_hit): + try: + image_md5_candidates.remove(md5_hit) + except ValueError: + pass + image_md5_candidates.insert(0, md5_hit) - _cdn_url_or_id = ( - _extract_xml_attr(raw_text, "cdnthumburl") - or _extract_xml_attr(raw_text, "cdnthumurl") - or _extract_xml_attr(raw_text, "cdnmidimgurl") - or _extract_xml_attr(raw_text, "cdnbigimgurl") - or _extract_xml_tag_text(raw_text, "cdnthumburl") - or _extract_xml_tag_text(raw_text, "cdnthumurl") - or _extract_xml_tag_text(raw_text, "cdnmidimgurl") - or _extract_xml_tag_text(raw_text, "cdnbigimgurl") - ) - _cdn_url_or_id = str(_cdn_url_or_id or "").strip() - image_url = _cdn_url_or_id if _cdn_url_or_id.startswith(("http://", "https://")) else "" - if (not image_url) and _cdn_url_or_id: - image_file_id = _cdn_url_or_id + image_md5 = image_md5_candidates[0] if image_md5_candidates else "" - if (not image_md5) and resource_conn is not None: - image_md5 = _lookup_resource_md5( - resource_conn, - resource_chat_id, - message_local_type=local_type, - server_id=int(row.server_id or 0), - local_id=int(row.local_id or 0), - 
create_time=int(row.create_time or 0), - ) + url_or_id_candidates: list[str] = [] + + def add_url_or_id(v: Any) -> None: + s = str(v or "").strip() + if s: + try: + s = html.unescape(s).strip() + except Exception: + pass + if s and s not in url_or_id_candidates: + url_or_id_candidates.append(s) + + for k in ["cdnthumburl", "cdnthumurl", "cdnmidimgurl", "cdnbigimgurl"]: + add_url_or_id(_extract_xml_attr(raw_text, k)) + add_url_or_id(_extract_xml_tag_text(raw_text, k)) + + for v in url_or_id_candidates: + low = str(v or "").strip().lower() + if low.startswith(("http://", "https://")): + if not image_url: + image_url = str(v).strip() + continue + if str(v).startswith("//"): + if not image_url: + image_url = "https:" + str(v).strip() + continue + if v and v not in image_file_id_candidates: + image_file_id_candidates.append(v) + + image_file_id = image_file_id_candidates[0] if image_file_id_candidates else "" content_text = "[图片]" elif local_type == 34: render_type = "voice" @@ -1107,7 +3683,16 @@ def _parse_message_for_export( content_text = str(parsed.get("content") or content_text) title = str(parsed.get("title") or title) url = str(parsed.get("url") or url) + from_name = str(parsed.get("from") or from_name) + from_username = str(parsed.get("fromUsername") or from_username) + link_type = str(parsed.get("linkType") or link_type) + link_style = str(parsed.get("linkStyle") or link_style) record_item = str(parsed.get("recordItem") or record_item) + quote_username = str(parsed.get("quoteUsername") or quote_username) + quote_server_id = str(parsed.get("quoteServerId") or quote_server_id) + quote_type = str(parsed.get("quoteType") or quote_type) + quote_thumb_url = str(parsed.get("quoteThumbUrl") or quote_thumb_url) + quote_voice_length = str(parsed.get("quoteVoiceLength") or quote_voice_length) quote_title = str(parsed.get("quoteTitle") or quote_title) quote_content = str(parsed.get("quoteContent") or quote_content) amount = str(parsed.get("amount") or amount) @@ -1165,10 
+3750,15 @@ def _parse_message_for_export( "title": title, "url": url, "from": from_name, + "fromUsername": from_username, + "linkType": link_type, + "linkStyle": link_style, "recordItem": record_item, "thumbUrl": thumb_url, "imageMd5": image_md5, "imageFileId": image_file_id, + "imageMd5Candidates": image_md5_candidates, + "imageFileIdCandidates": image_file_id_candidates, "imageUrl": image_url, "emojiMd5": emoji_md5, "emojiUrl": emoji_url, @@ -1179,6 +3769,11 @@ def _parse_message_for_export( "videoUrl": video_url, "videoThumbUrl": video_thumb_url, "voiceLength": voice_length, + "quoteUsername": quote_username, + "quoteServerId": quote_server_id, + "quoteType": quote_type, + "quoteThumbUrl": quote_thumb_url, + "quoteVoiceLength": quote_voice_length, "quoteTitle": quote_title, "quoteContent": quote_content, "amount": amount, @@ -1189,6 +3784,10 @@ def _parse_message_for_export( "transferStatus": transfer_status, "transferId": transfer_id, "voipType": voip_type, + "locationLat": location_lat, + "locationLng": location_lng, + "locationPoiname": location_poiname, + "locationLabel": location_label, } @@ -1325,12 +3924,8 @@ def lookup_alias(username: str) -> str: resource_chat_id=resource_chat_id, sender_alias=sender_alias, ) - if want_types: - rt_key = _normalize_render_type_key(msg.get("renderType")) - if rt_key not in want_types: - if scanned % 500 == 0 and job.cancel_requested: - raise _JobCancelled() - continue + if not _is_render_type_selected(msg.get("renderType"), want_types): + continue su = str(msg.get("senderUsername") or "").strip() if privacy_mode: @@ -1438,43 +4033,815 @@ def lookup_alias(username: str) -> str: if u in alias_cache: return alias_cache[u] - alias = "" + alias = "" + try: + r = contact_conn.execute("SELECT alias FROM contact WHERE username = ? LIMIT 1", (u,)).fetchone() + if r is not None and r[0] is not None: + alias = str(r[0] or "").strip() + if not alias: + r = contact_conn.execute("SELECT alias FROM stranger WHERE username = ? 
LIMIT 1", (u,)).fetchone() + if r is not None and r[0] is not None: + alias = str(r[0] or "").strip() + except Exception: + alias = "" + + alias_cache[u] = alias + return alias + + # Same as JSON: write to temp file first to avoid zip interleaving writes. + with tempfile.TemporaryDirectory(prefix="wechat_chat_export_") as tmp_dir: + tmp_path = Path(tmp_dir) / "messages.txt" + with open(tmp_path, "w", encoding="utf-8", newline="\n") as tw: + if privacy_mode: + tw.write("会话: 已隐藏\n") + tw.write("账号: hidden\n") + else: + tw.write(f"会话: {conv_name} ({conv_username})\n") + tw.write(f"账号: {account_dir.name}\n") + if conv_avatar_path: + tw.write(f"会话头像: {conv_avatar_path}\n") + if start_time or end_time: + st = _format_ts(int(start_time)) if start_time else "不限" + et = _format_ts(int(end_time)) if end_time else "不限" + tw.write(f"时间范围: {st} ~ {et}\n") + if want_types: + tw.write(f"消息类型: {', '.join(sorted(want_types))}\n") + tw.write(f"导出时间: {_now_iso()}\n") + tw.write("\n") + + sender_alias_map: dict[str, int] = {} + scanned = 0 + prev_ts = 0 + for row in _iter_rows_for_conversation( + account_dir=account_dir, + conv_username=conv_username, + start_time=start_time, + end_time=end_time, + local_types=local_types, + ): + scanned += 1 + sender_alias = "" + if conv_is_group and row.raw_text and (not row.raw_text.startswith("<")) and (not row.raw_text.startswith('"<')): + sep = row.raw_text.find(":\n") + if sep > 0: + prefix = row.raw_text[:sep].strip() + su = str(row.sender_username or "").strip() + if prefix and su and prefix != su: + strong_hint = prefix.startswith("wxid_") or prefix.endswith("@chatroom") or "@" in prefix + if not strong_hint: + body_probe = row.raw_text[sep + 2 :].lstrip("\n").lstrip() + body_is_xml = body_probe.startswith("<") or body_probe.startswith('"<') + if not body_is_xml: + sender_alias = lookup_alias(su) + + msg = _parse_message_for_export( + row=row, + conv_username=conv_username, + is_group=conv_is_group, + resource_conn=resource_conn, + 
resource_chat_id=resource_chat_id, + sender_alias=sender_alias, + ) + if not _is_render_type_selected(msg.get("renderType"), want_types): + continue + + su = str(msg.get("senderUsername") or "").strip() + if privacy_mode: + _privacy_scrub_message(msg, conv_is_group=conv_is_group, sender_alias_map=sender_alias_map) + else: + msg["senderDisplayName"] = resolve_display_name(su) if su else "" + msg["senderAvatarPath"] = ( + _materialize_avatar( + zf=zf, + head_image_conn=head_image_conn, + username=su, + avatar_written=avatar_written, + ) + if (su and head_image_conn is not None) + else "" + ) + + if include_media: + _attach_offline_media( + zf=zf, + account_dir=account_dir, + conv_username=conv_username, + msg=msg, + media_written=media_written, + report=report, + media_kinds=media_kinds, + allow_process_key_extract=allow_process_key_extract, + media_db_path=media_db_path, + lock=lock, + job=job, + ) + + tw.write(_format_message_line_txt(msg=msg) + "\n") + + exported += 1 + with lock: + job.progress.messages_exported += 1 + job.progress.current_conversation_messages_exported = exported + + if scanned % 500 == 0 and job.cancel_requested: + raise _JobCancelled() + + tw.flush() + + zf.write(str(tmp_path), arcname) + if contact_conn is not None: + try: + contact_conn.close() + except Exception: + pass + + return exported + + +def _write_conversation_html( + *, + zf: zipfile.ZipFile, + conv_dir: str, + account_dir: Path, + conv_username: str, + conv_name: str, + conv_avatar_path: str, + conv_is_group: bool, + self_avatar_path: str, + session_items: list[dict[str, Any]], + download_remote_media: bool, + remote_written: dict[str, str], + html_page_size: int = 1000, + start_time: Optional[int], + end_time: Optional[int], + want_types: Optional[set[str]], + local_types: Optional[set[int]], + resource_conn: Optional[sqlite3.Connection], + resource_chat_id: Optional[int], + head_image_conn: Optional[sqlite3.Connection], + resolve_display_name: Any, + privacy_mode: bool, + 
include_media: bool, + media_kinds: list[MediaKind], + media_written: dict[str, str], + avatar_written: dict[str, str], + report: dict[str, Any], + allow_process_key_extract: bool, + media_db_path: Path, + job: ExportJob, + lock: threading.Lock, +) -> int: + arcname = f"{conv_dir}/messages.html" + exported = 0 + + rel_root = "../../" + css_href = rel_root + "assets/wechat-chat-export.css" + js_src = rel_root + "assets/wechat-chat-export.js" + + def esc_text(v: Any) -> str: + return html.escape(str(v or ""), quote=False) + + def esc_attr(v: Any) -> str: + return html.escape(str(v or ""), quote=True) + + def is_http_url(u: str) -> bool: + s = str(u or "").strip().lower() + return s.startswith("http://") or s.startswith("https://") + + def rel_path(p: Any) -> str: + s = str(p or "").strip().lstrip("/").replace("\\", "/") + if not s: + return "" + return rel_root + s + + def offline_path(msg: dict[str, Any], kind: str) -> str: + media = msg.get("offlineMedia") or [] + if not isinstance(media, list): + return "" + for item in media: + try: + k = str(item.get("kind") or "").strip() + except Exception: + k = "" + if k != kind: + continue + try: + p = str(item.get("path") or "").strip() + except Exception: + p = "" + if p: + return rel_path(p) + return "" + + def maybe_download_remote_image(url: str) -> str: + if not download_remote_media: + return "" + u = str(url or "").strip() + if u: + try: + u = html.unescape(u).strip() + except Exception: + pass + try: + u = re.sub(r"\s+", "", u) + except Exception: + pass + if not is_http_url(u): + return "" + arc = _download_remote_image_to_zip( + zf=zf, + url=u, + remote_written=remote_written, + report=report, + ) + if not arc: + return "" + local = rel_path(arc) + try: + page_media_index.setdefault("remote", {})[u] = local + except Exception: + pass + return local + + emoji_table = _load_wechat_emoji_table() + emoji_regex = _load_wechat_emoji_regex() + + def render_text_with_emojis(v: Any) -> str: + text = str(v or "") + if not 
text: + return "" + if not emoji_table or emoji_regex is None: + return esc_text(text) + + parts: list[str] = [] + last = 0 + for match in emoji_regex.finditer(text): + start = match.start() + end = match.end() + if start > last: + parts.append(esc_text(text[last:start])) + + key = match.group(0) + value = str(emoji_table.get(key) or "") + if value: + src = rel_path(f"wxemoji/{value}") + parts.append( + f'' + ) + else: + parts.append(esc_text(key)) + last = end + + if last < len(text): + parts.append(esc_text(text[last:])) + return "".join(parts) + + def build_avatar_html(*, src: str, fallback_text: str, extra_class: str) -> str: + safe_fallback = esc_text((fallback_text or "?")[:1] or "?") + if src: + return ( + f'
' + f'avatar' + f"
" + ) + return ( + f'
' + f'
{safe_fallback}
' + f"
" + ) + + def wechat_icon(name: str) -> str: + return rel_path(f"assets/images/wechat/{name}") + + def format_file_size(size: Any) -> str: + if not size: + return "" + s = str(size).strip() + try: + num = float(s) + except Exception: + return s + + if num < 0: + return s + + def fmt_num(n: float) -> str: + if float(n).is_integer(): + return str(int(n)) + txt = f"{n:.2f}" + return txt.rstrip("0").rstrip(".") + + if num < 1024: + return f"{fmt_num(num)} B" + if num < 1024 * 1024: + return f"{(num / 1024):.2f} KB" + return f"{(num / 1024 / 1024):.2f} MB" + + def format_transfer_amount(amount: Any) -> str: + s = str(amount if amount is not None else "").strip() + if not s: + return "" + return re.sub(r"[¥¥]", "", s).strip() + + def get_red_packet_text(message: dict[str, Any]) -> str: + text = str(message.get("content") if message is not None else "").strip() + if (not text) or text == "[Red Packet]": + return "恭喜发财,大吉大利" + return text + + def is_transfer_returned(message: dict[str, Any]) -> bool: + pay_sub_type = str(message.get("paySubType") or "").strip() + if pay_sub_type in {"4", "9"}: + return True + st = str(message.get("transferStatus") or "").strip() + c = str(message.get("content") or "").strip() + text = f"{st} {c}".strip() + if not text: + return False + return ("退回" in text) or ("退还" in text) + + def is_transfer_overdue(message: dict[str, Any]) -> bool: + pay_sub_type = str(message.get("paySubType") or "").strip() + if pay_sub_type == "10": + return True + st = str(message.get("transferStatus") or "").strip() + c = str(message.get("content") or "").strip() + text = f"{st} {c}".strip() + if not text: + return False + return "过期" in text + + def is_transfer_received(message: dict[str, Any]) -> bool: + pay_sub_type = str(message.get("paySubType") or "").strip() + if pay_sub_type == "3": + return True + st = str(message.get("transferStatus") or "").strip() + if not st: + return False + return ("已收款" in st) or ("已被接收" in st) + + def get_transfer_title(message: 
dict[str, Any], *, is_sent: bool) -> str: + pay_sub_type = str(message.get("paySubType") or "").strip() + transfer_status = str(message.get("transferStatus") or "").strip() + if transfer_status: + return transfer_status + if pay_sub_type == "1": + return "转账" + if pay_sub_type == "3": + return "已被接收" if is_sent else "已收款" + if pay_sub_type == "8": + return "发起转账" + if pay_sub_type == "4": + return "已退还" + if pay_sub_type == "9": + return "已被退还" + if pay_sub_type == "10": + return "已过期" + content = str(message.get("content") or "").strip() + if content and content not in {"转账", "[转账]"}: + return content + return "转账" + + def get_voice_duration_in_seconds(duration_ms: Any) -> int: + try: + ms = int(str(duration_ms or "0").strip() or "0") + except Exception: + ms = 0 + return int(round(ms / 1000.0)) + + def get_voice_width(duration_ms: Any) -> str: + seconds = get_voice_duration_in_seconds(duration_ms) + min_width = 80 + max_width = 200 + width = min(max_width, min_width + seconds * 4) + return f"{width}px" + + def get_chat_history_preview_lines(message: dict[str, Any]) -> list[str]: + raw = str(message.get("content") or "").strip() + if not raw: + return [] + lines = [ln.strip() for ln in raw.splitlines()] + lines = [ln for ln in lines if ln] + return lines[:4] + + def get_file_icon_url(file_name: str) -> str: + ext = "" + try: + ext = (str(file_name or "").rsplit(".", 1)[-1] or "").lower().strip() + except Exception: + ext = "" + + if ext == "pdf": + return wechat_icon("pdf.png") + if ext in {"zip", "rar", "7z", "tar", "gz"}: + return wechat_icon("zip.png") + if ext in {"doc", "docx"}: + return wechat_icon("word.png") + if ext in {"xls", "xlsx", "csv"}: + return wechat_icon("excel.png") + return wechat_icon("zip.png") + + def get_link_from_text(message: dict[str, Any], *, url: str) -> str: + raw = str(message.get("from") or "").strip() + if raw: + return raw + try: + from urllib.parse import urlparse + + host = urlparse(str(url or "")).hostname + return str(host or 
"").strip() + except Exception: + return "" + + def first_glyph(text: str) -> str: + t = str(text or "").strip() + if not t: + return "" + try: + return next(iter(t)) or "" + except Exception: + return t[:1] + + page_media_index: dict[str, Any] = { + "images": {}, + "emojis": {}, + "videos": {}, + "videoThumbs": {}, + "serverMd5": {}, + "remote": {}, + } + chat_history_md5_done: set[str] = set() + + def _remember_offline_media(message: dict[str, Any]) -> None: + media = message.get("offlineMedia") or [] + if not isinstance(media, list): + return + for item in media: + try: + kind = str(item.get("kind") or "").strip() + except Exception: + kind = "" + try: + md5 = str(item.get("md5") or "").strip().lower() + except Exception: + md5 = "" + try: + path0 = str(item.get("path") or "").strip() + except Exception: + path0 = "" + if (not md5) or (not path0): + continue + url0 = rel_path(path0) + if kind == "image": + page_media_index["images"][md5] = url0 + elif kind == "emoji": + page_media_index["emojis"][md5] = url0 + elif kind == "video": + page_media_index["videos"][md5] = url0 + elif kind == "video_thumb": + page_media_index["videoThumbs"][md5] = url0 + + def _ensure_chat_history_md5(md5: str) -> str: + m = str(md5 or "").strip().lower() + if (not m) or (not _is_md5(m)): + return "" + if m in chat_history_md5_done: + for k in ("images", "emojis", "videos", "videoThumbs"): + try: + hit = str((page_media_index.get(k) or {}).get(m) or "").strip() + except Exception: + hit = "" + if hit: + return hit + return "" + chat_history_md5_done.add(m) + + arc = "" + is_new = False + + for try_kind in ("image", "emoji", "video_thumb", "video"): + arc, is_new = _materialize_media( + zf=zf, + account_dir=account_dir, + conv_username=conv_username, + kind=try_kind, # type: ignore[arg-type] + md5=m, + file_id="", + media_written=media_written, + suggested_name="", + ) + if arc: + break + + if not arc: + return "" + + url0 = rel_path(arc) try: - r = contact_conn.execute("SELECT alias 
FROM contact WHERE username = ? LIMIT 1", (u,)).fetchone() - if r is not None and r[0] is not None: - alias = str(r[0] or "").strip() - if not alias: - r = contact_conn.execute("SELECT alias FROM stranger WHERE username = ? LIMIT 1", (u,)).fetchone() - if r is not None and r[0] is not None: - alias = str(r[0] or "").strip() + page_media_index["images"].setdefault(m, url0) + page_media_index["emojis"].setdefault(m, url0) + page_media_index["videoThumbs"].setdefault(m, url0) + if arc.lower().endswith(".mp4"): + page_media_index["videos"][m] = url0 except Exception: - alias = "" + pass - alias_cache[u] = alias - return alias + if is_new: + with lock: + job.progress.media_copied += 1 + return url0 + + chat_title = "已隐藏" if privacy_mode else (conv_name or conv_username or "会话") + page_title = chat_title + + options = [ + ("all", "全部"), + ("text", "文本"), + ("image", "图片"), + ("emoji", "表情"), + ("video", "视频"), + ("voice", "语音"), + ("chatHistory", "聊天记录"), + ("transfer", "转账"), + ("redPacket", "红包"), + ("file", "文件"), + ("link", "链接"), + ("quote", "引用"), + ("system", "系统"), + ("voip", "通话"), + ] + + page_size = 0 + try: + page_size = int(html_page_size or 0) + except Exception: + page_size = 0 + if page_size < 0: + page_size = 0 - # Same as JSON: write to temp file first to avoid zip interleaving writes. + # NOTE: write to a temp file first to avoid zip interleaving writes. 
with tempfile.TemporaryDirectory(prefix="wechat_chat_export_") as tmp_dir: - tmp_path = Path(tmp_dir) / "messages.txt" - with open(tmp_path, "w", encoding="utf-8", newline="\n") as tw: - if privacy_mode: - tw.write("会话: 已隐藏\n") - tw.write("账号: hidden\n") + tmp_path = Path(tmp_dir) / "messages.html" + pages_frag_dir = Path(tmp_dir) / "pages_fragments" + page_frag_paths: list[Path] = [] + paged_old_page_paths: list[Path] = [] + paged_total_pages = 1 + paged_pad_width = 4 + with open(tmp_path, "w", encoding="utf-8", newline="\n") as hw: + class _WriteProxy: + def __init__(self, default_target): + self._default = default_target + self._target = default_target + + def set_target(self, target) -> None: + self._target = target or self._default + + def write(self, s: str) -> Any: + return self._target.write(s) + + def flush(self) -> None: + try: + if self._target is not self._default: + self._target.flush() + except Exception: + pass + try: + self._default.flush() + except Exception: + pass + + tw = _WriteProxy(hw) + tw.write("\n") + tw.write('\n') + tw.write("\n") + tw.write(' \n') + tw.write(' \n') + tw.write(f" {esc_text(page_title)}\n") + tw.write(f' \n') + tw.write(f' \n') + tw.write("\n") + tw.write("\n") + tw.write( + '
' + "提示:此页面需要 JavaScript 才能使用“合并聊天记录”等交互功能。若该提示一直存在,请确认已完整解压导出目录,并检查 wechat-chat-export.js 是否能加载(位于 assets/)。
\n" + ) + + # Root + tw.write('
\n') + + # Left rail (avatar + chat icon) + tw.write( + '
\n' + ) + + self_avatar_src = "" if privacy_mode else rel_path(self_avatar_path) + tw.write('
\n') + tw.write('
\n') + if self_avatar_src: + tw.write( + f' avatar\n' + ) else: - tw.write(f"会话: {conv_name} ({conv_username})\n") - tw.write(f"账号: {account_dir.name}\n") - if conv_avatar_path: - tw.write(f"会话头像: {conv_avatar_path}\n") - if start_time or end_time: - st = _format_ts(int(start_time)) if start_time else "不限" - et = _format_ts(int(end_time)) if end_time else "不限" - tw.write(f"时间范围: {st} ~ {et}\n") - if want_types: - tw.write(f"消息类型: {', '.join(sorted(want_types))}\n") - tw.write(f"导出时间: {_now_iso()}\n") - tw.write("\n") + tw.write( + '
\n' + ) + tw.write("
\n") + tw.write("
\n") + + tw.write( + f' \n' + ) + tw.write( + '
\n' + ) + tw.write('
\n') + tw.write(' \n") + tw.write("
\n") + tw.write("
\n") + tw.write("
\n") + tw.write("
\n") + + # Middle session list (all exported conversations) + tw.write( + '
\n' + ) + tw.write('
\n') + tw.write( + '
\n' + ) + tw.write('
\n') + tw.write(' \n') + tw.write( + ' \n' + ) + tw.write( + ' \n' + ) + tw.write(" \n") + search_input_cls = "contact-search-input" + if privacy_mode: + search_input_cls += " privacy-blur" + tw.write( + f' \n' + ) + tw.write( + ' \n") + tw.write("
\n") + tw.write("
\n") + tw.write("
\n") + tw.write('
\n') + + conv_dir_norm = str(conv_dir or "").strip().strip("/").replace("\\", "/") + for item in session_items: + item_conv_dir = str(item.get("convDir") or "").strip().strip("/").replace("\\", "/") + if not item_conv_dir: + continue + + href = f"{rel_root}{item_conv_dir}/messages.html" + item_display_name = str(item.get("displayName") or "").strip() or "会话" + item_avatar_path = str(item.get("avatarPath") or "").strip() + item_avatar_src = rel_path(item_avatar_path) if item_avatar_path else "" + item_last_time = str(item.get("lastTimeText") or "").strip() + item_preview = str(item.get("previewText") or "").strip() + + is_active = False + try: + is_active = (str(item.get("username") or "").strip() == conv_username) or (item_conv_dir == conv_dir_norm) + except Exception: + is_active = item_conv_dir == conv_dir_norm + + safe_char = (item_display_name[:1] or "?").strip() or "?" + classes = ( + "px-3 cursor-pointer transition-colors duration-150 border-b border-gray-100 " + "h-[calc(80px/var(--dpr))] flex items-center" + ) + if is_active: + classes += " bg-[#DEDEDE]" + else: + classes += " hover:bg-[#F5F5F5]" + + item_username = str(item.get("username") or "").strip() + tw.write( + f' \n") + tw.write('
\n') + tw.write( + '
\n' + ) + if item_avatar_src and (not privacy_mode): + tw.write( + f' {esc_attr(item_display_name)}\n' + ) + else: + tw.write( + f'
{esc_text(safe_char)}
\n' + ) + tw.write("
\n") + tw.write("
\n") + tw.write('
\n') + tw.write('
\n') + tw.write( + f'

{esc_text(item_display_name)}

\n' + ) + tw.write('
\n') + tw.write(f' {esc_text(item_last_time)}\n') + tw.write("
\n") + tw.write("
\n") + tw.write( + f'

{render_text_with_emojis(item_preview)}

\n' + ) + tw.write("
\n") + tw.write("
\n") + + tw.write("
\n") + tw.write("
\n") + + # Right chat area + tw.write('
\n') + tw.write('
\n') + tw.write('
\n') + tw.write('
\n') + + tw.write('
\n') + tw.write('
\n') + tw.write(f'

{esc_text(chat_title)}

\n') + tw.write("
\n") + tw.write('
\n') + tw.write(f' \n") + tw.write("
\n") + tw.write("
\n") + + tw.write('
\n') + tw.write(' \n") + tw.write('
\n') + + page_fp = None + page_fp_path: Optional[Path] = None + page_no = 1 + page_msg_count = 0 + + def _open_page_fp() -> Any: + nonlocal page_fp, page_fp_path + pages_frag_dir.mkdir(parents=True, exist_ok=True) + page_fp_path = pages_frag_dir / f"page_{page_no}.htmlfrag" + page_fp = open(page_fp_path, "w", encoding="utf-8", newline="\n") + return page_fp + + def _close_page_fp() -> None: + nonlocal page_fp, page_fp_path + if page_fp is None: + page_fp_path = None + return + try: + page_fp.flush() + except Exception: + pass + try: + page_fp.close() + except Exception: + pass + if page_fp_path is not None: + page_frag_paths.append(page_fp_path) + page_fp = None + page_fp_path = None + tw.set_target(hw) + + def _mark_exported() -> None: + nonlocal exported, page_no, page_msg_count + exported += 1 + with lock: + job.progress.messages_exported += 1 + job.progress.current_conversation_messages_exported = exported + if page_size > 0: + page_msg_count += 1 + if page_msg_count >= page_size: + _close_page_fp() + page_no += 1 + page_msg_count = 0 sender_alias_map: dict[str, int] = {} + prev_ts = 0 scanned = 0 for row in _iter_rows_for_conversation( account_dir=account_dir, @@ -1484,19 +4851,6 @@ def lookup_alias(username: str) -> str: local_types=local_types, ): scanned += 1 - sender_alias = "" - if conv_is_group and row.raw_text and (not row.raw_text.startswith("<")) and (not row.raw_text.startswith('"<')): - sep = row.raw_text.find(":\n") - if sep > 0: - prefix = row.raw_text[:sep].strip() - su = str(row.sender_username or "").strip() - if prefix and su and prefix != su: - strong_hint = prefix.startswith("wxid_") or prefix.endswith("@chatroom") or "@" in prefix - if not strong_hint: - body_probe = row.raw_text[sep + 2 :].lstrip("\n").lstrip() - body_is_xml = body_probe.startswith("<") or body_probe.startswith('"<') - if not body_is_xml: - sender_alias = lookup_alias(su) msg = _parse_message_for_export( row=row, @@ -1504,28 +4858,24 @@ def lookup_alias(username: str) -> 
str: is_group=conv_is_group, resource_conn=resource_conn, resource_chat_id=resource_chat_id, - sender_alias=sender_alias, + sender_alias="", ) - if want_types: - rt_key = _normalize_render_type_key(msg.get("renderType")) - if rt_key not in want_types: - if scanned % 500 == 0 and job.cancel_requested: - raise _JobCancelled() - continue + if not _is_render_type_selected(msg.get("renderType"), want_types): + continue - su = str(msg.get("senderUsername") or "").strip() + sender_username = str(msg.get("senderUsername") or "").strip() if privacy_mode: _privacy_scrub_message(msg, conv_is_group=conv_is_group, sender_alias_map=sender_alias_map) else: - msg["senderDisplayName"] = resolve_display_name(su) if su else "" + msg["senderDisplayName"] = resolve_display_name(sender_username) if sender_username else "" msg["senderAvatarPath"] = ( _materialize_avatar( zf=zf, head_image_conn=head_image_conn, - username=su, + username=sender_username, avatar_written=avatar_written, ) - if (su and head_image_conn is not None) + if (sender_username and head_image_conn is not None) else "" ) @@ -1543,25 +4893,692 @@ def lookup_alias(username: str) -> str: lock=lock, job=job, ) + _remember_offline_media(msg) - tw.write(_format_message_line_txt(msg=msg) + "\n") + rt = str(msg.get("renderType") or "text").strip() or "text" + create_time_text = str(msg.get("createTimeText") or "").strip() + try: + ts = int(msg.get("createTime") or 0) + except Exception: + ts = 0 + + show_divider = False + if ts and ((prev_ts == 0) or (abs(ts - prev_ts) >= 300)): + show_divider = True + + if page_size > 0: + if page_fp is None: + _open_page_fp() + tw.set_target(page_fp) + + if show_divider: + divider_text = _format_session_time(ts) + if divider_text: + tw.write('
\n') + tw.write(f'
{esc_text(divider_text)}
\n') + tw.write("
\n") + + # Wrapper (for filter) + tw.write(f'
\n') + + if rt == "system": + tw.write('
\n') + tw.write(f'
{esc_text(msg.get("content") or "")}
\n') + tw.write("
\n") + tw.write("
\n") + _mark_exported() + if ts: + prev_ts = ts + continue - exported += 1 - with lock: - job.progress.messages_exported += 1 - job.progress.current_conversation_messages_exported = exported + is_sent = bool(msg.get("isSent")) + row_cls = "wce-msg-row wce-msg-row-sent flex items-center justify-end" if is_sent else "wce-msg-row wce-msg-row-received flex items-center justify-start" + msg_cls = "wce-msg wce-msg-sent flex items-start max-w-md flex-row-reverse" if is_sent else "wce-msg flex items-start max-w-md" + avatar_extra = "wce-avatar-sent ml-3" if is_sent else "wce-avatar-received mr-3" + + tw.write(f'
\n') + tw.write(f'
\n') + + avatar_src = rel_path(str(msg.get("senderAvatarPath") or "").strip()) + display_name = str(msg.get("senderDisplayName") or "").strip() + fallback_char = (display_name or sender_username or "?")[:1] + tw.write(" " + build_avatar_html(src=avatar_src, fallback_text=fallback_char, extra_class=avatar_extra) + "\n") + + align_cls = "items-end" if is_sent else "items-start" + tw.write(f'
\n') + if conv_is_group and (not is_sent) and display_name: + tw.write(f'
{esc_text(display_name)}
\n') + + pos_cls = "right-0" if is_sent else "left-0" + tw.write( + '
{esc_text(create_time_text)}
\n' + ) + + # Message body + bubble_dir_cls = "bg-[#95EC69] text-black bubble-tail-r" if is_sent else "bg-white text-gray-800 bubble-tail-l" + bubble_base_cls = "px-3 py-2 text-sm max-w-sm relative msg-bubble whitespace-pre-wrap break-words leading-relaxed" + bubble_unknown_cls = ( + "px-3 py-2 text-xs max-w-sm relative msg-bubble whitespace-pre-wrap break-words leading-relaxed text-gray-700" + ) + + if rt == "image": + src = offline_path(msg, "image") + if not src: + url = str(msg.get("imageUrl") or "").strip() + src = url if is_http_url(url) else "" + if src: + tw.write('
\n') + tw.write('
\n') + tw.write(f' \n') + tw.write(f' 图片\n') + tw.write(" \n") + tw.write("
\n") + tw.write("
\n") + else: + tw.write(f'
{render_text_with_emojis(msg.get("content") or "")}
\n') + elif rt == "emoji": + src = offline_path(msg, "emoji") + if not src: + url = str(msg.get("emojiUrl") or "").strip() + src = url if is_http_url(url) else "" + if src: + emoji_dir = " flex-row-reverse" if is_sent else "" + tw.write(f'
\n') + tw.write(f' 表情\n') + tw.write("
\n") + else: + tw.write(f'
{render_text_with_emojis(msg.get("content") or "")}
\n') + elif rt == "video": + thumb = offline_path(msg, "video_thumb") + if not thumb: + url = str(msg.get("videoThumbUrl") or "").strip() + thumb = url if is_http_url(url) else "" + video = offline_path(msg, "video") + if not video: + url = str(msg.get("videoUrl") or "").strip() + video = url if is_http_url(url) else "" + if thumb: + tw.write('
\n') + tw.write('
\n') + tw.write(f' 视频\n') + if video: + tw.write(f' \n') + tw.write('
\n') + tw.write(' \n') + tw.write("
\n") + tw.write("
\n") + else: + tw.write('
\n') + tw.write('
\n') + tw.write(' \n') + tw.write("
\n") + tw.write("
\n") + tw.write("
\n") + tw.write("
\n") + else: + tw.write(f'
{render_text_with_emojis(msg.get("content") or "")}
\n') + elif rt == "voice": + voice = offline_path(msg, "voice") + duration_ms = msg.get("voiceLength") + width = get_voice_width(duration_ms) + seconds = get_voice_duration_in_seconds(duration_ms) + voice_dir_cls = "wechat-voice-sent" if is_sent else "wechat-voice-received" + content_dir_cls = " flex-row-reverse" if is_sent else "" + icon_dir_cls = "voice-icon-sent" if is_sent else "voice-icon-received" + voice_id = str(msg.get("id") or "").strip() + + tw.write('
\n') + tw.write( + f'
\n' + ) + tw.write(f'
\n') + tw.write( + f' \n' + ) + tw.write( + ' \n' + ) + tw.write( + ' \n' + ) + tw.write( + ' \n' + ) + tw.write(" \n") + tw.write(f' {esc_text(seconds)}"\n') + tw.write("
\n") + tw.write("
\n") + if voice: + tw.write(f' \n') + tw.write("
\n") + elif rt == "file": + fsrc = offline_path(msg, "file") + title = str(msg.get("title") or msg.get("content") or "文件").strip() + size = str(msg.get("fileSize") or "").strip() + size_text = format_file_size(size) + sent_side_cls = " wechat-special-sent-side" if is_sent else "" + cls = f"wechat-redpacket-card wechat-special-card wechat-file-card msg-radius{sent_side_cls}" + tag = "a" if fsrc else "div" + attrs = f' href="{esc_attr(fsrc)}" download' if fsrc else "" + tw.write(f' <{tag}{attrs} class="{esc_attr(cls)}">\n') + tw.write('
\n') + tw.write('
\n') + tw.write(f' {esc_text(title or "文件")}\n') + if size_text: + tw.write(f' {esc_text(size_text)}\n') + tw.write("
\n") + tw.write(f' \n') + tw.write("
\n") + tw.write('
\n') + tw.write(f' \n') + tw.write(" 微信电脑版\n") + tw.write("
\n") + tw.write(f" \n") + elif rt == "link": + url = str(msg.get("url") or "").strip() + safe_url = url if is_http_url(url) else "" + if safe_url: + heading = str(msg.get("title") or msg.get("content") or safe_url).strip() + abstract = str(msg.get("content") or "").strip() + preview = str(msg.get("thumbUrl") or "").strip() + preview_url = "" + if is_http_url(preview): + local = maybe_download_remote_image(preview) + preview_url = local or preview + variant = str(msg.get("linkStyle") or "").strip().lower() + + from_text = get_link_from_text(msg, url=safe_url) + from_avatar_text = first_glyph(from_text) or "\u200B" + from_text = from_text or "\u200B" + sent_side_cls = " wechat-special-sent-side" if is_sent else "" + + if variant == "cover": + cls = f"wechat-link-card-cover wechat-special-card msg-radius{sent_side_cls}" + tw.write( + f' \n' + ) + if preview_url: + tw.write(' \n") + else: + tw.write(' \n") + tw.write(f' \n') + tw.write(" \n") + else: + cls = f"wechat-link-card wechat-special-card msg-radius{sent_side_cls}" + tw.write( + f' \n' + ) + tw.write(' \n") + tw.write(' \n") + tw.write(" \n") + else: + tw.write(f'
{render_text_with_emojis(msg.get("content") or "")}
\n') + elif rt == "voip": + voip_dir_cls = "wechat-voip-sent" if is_sent else "wechat-voip-received" + content_dir_cls = " flex-row-reverse" if is_sent else "" + voip_type = str(msg.get("voipType") or "").strip().lower() + icon = "wechat-video-light.png" if voip_type == "video" else "wechat-audio-light.png" + tw.write(f'
\n') + tw.write(f'
\n') + tw.write(f' \n') + tw.write(f' {esc_text(msg.get("content") or "通话")}\n') + tw.write("
\n") + tw.write("
\n") + elif rt == "quote": + tw.write( + f'
{render_text_with_emojis(msg.get("content") or "")}
\n' + ) + + qt = str(msg.get("quoteTitle") or "").strip() + qc = str(msg.get("quoteContent") or "").strip() + qthumb = str(msg.get("quoteThumbUrl") or "").strip() + qtype = str(msg.get("quoteType") or "").strip() + qsid_raw = str(msg.get("quoteServerId") or "").strip() + qsid = int(qsid_raw) if qsid_raw.isdigit() else 0 + + def is_quoted_voice() -> bool: + if qtype == "34": + return True + return (qc == "[语音]") and bool(qsid_raw) + + def is_quoted_image() -> bool: + if qtype == "3": + return True + return (qc == "[图片]") and bool(qsid_raw) + + def is_quoted_link() -> bool: + if qtype == "49": + return True + return bool(re.match(r"^\[链接\]\s*", qc)) + + def get_quoted_link_text() -> str: + if not qc: + return "" + return re.sub(r"^\[链接\]\s*", "", qc).strip() or qc + + quoted_voice = is_quoted_voice() + quoted_image = is_quoted_image() + quoted_link = is_quoted_link() + + quote_voice_url = "" + if include_media and ("voice" in media_kinds) and quoted_voice and qsid: + try: + arc, is_new = _materialize_voice( + zf=zf, + media_db_path=media_db_path, + server_id=int(qsid), + media_written=media_written, + ) + except Exception: + arc, is_new = "", False + if arc: + quote_voice_url = rel_path(arc) + if is_new: + with lock: + job.progress.media_copied += 1 + + quote_image_url = "" + if include_media and ("image" in media_kinds) and quoted_image and qsid and resource_conn is not None: + md5_hit = "" + try: + md5_hit = _lookup_resource_md5( + resource_conn, + resource_chat_id, + message_local_type=3, + server_id=int(qsid), + local_id=0, + create_time=0, + ) + except Exception: + md5_hit = "" + + if md5_hit: + try: + arc, is_new = _materialize_media( + zf=zf, + account_dir=account_dir, + conv_username=conv_username, + kind="image", + md5=str(md5_hit or "").strip().lower(), + file_id="", + media_written=media_written, + suggested_name="", + ) + except Exception: + arc, is_new = "", False + if arc: + quote_image_url = rel_path(arc) + if is_new: + with lock: + 
job.progress.media_copied += 1 + + qthumb_url = "" + if is_http_url(qthumb): + qthumb_local = maybe_download_remote_image(qthumb) if download_remote_media else "" + qthumb_url = qthumb_local or qthumb + + if qt or qc: + tw.write( + '
\n' + ) + tw.write('
\n') + if quoted_voice: + seconds = get_voice_duration_in_seconds(msg.get("quoteVoiceLength")) + disabled = not bool(quote_voice_url) + btn_cls = "flex items-center gap-1 min-w-0 hover:opacity-80" + if disabled: + btn_cls += " opacity-60 cursor-not-allowed" + dis_attr = " disabled" if disabled else "" + tw.write('
\n') + if qt: + tw.write(f' {esc_text(qt)}:\n') + tw.write( + f' \n") + if quote_voice_url: + tw.write( + f' \n' + ) + tw.write("
\n") + else: + tw.write('
\n') + if quoted_link: + link_text = get_quoted_link_text() + tw.write('
\n') + if qt: + tw.write(f' {esc_text(qt)}:\n') + if link_text: + ml = ' class="ml-1"' if qt else "" + tw.write(f' 🔗 {esc_text(link_text)}\n') + tw.write("
\n") + else: + hide_qc = quoted_image and qt and bool(quote_image_url) + tw.write('
\n') + if qt: + tw.write(f' {esc_text(qt)}:\n') + if qc and (not hide_qc): + ml = ' class="ml-1"' if qt else "" + tw.write(f' {esc_text(qc)}\n') + tw.write("
\n") + tw.write("
\n") + tw.write("
\n") + + if quoted_link and qthumb_url: + tw.write( + f' \n' + ) + tw.write( + f' 引用链接缩略图\n' + ) + tw.write(" \n") + + if (not quoted_link) and quoted_image and quote_image_url: + tw.write( + f' \n' + ) + tw.write( + f' 引用图片\n' + ) + tw.write(" \n") + + tw.write("
\n") + elif rt == "chatHistory": + title = str(msg.get("title") or "").strip() or "聊天记录" + record_item = str(msg.get("recordItem") or "").strip() + record_item_b64 = "" + if record_item: + try: + record_item_b64 = base64.b64encode(record_item.encode("utf-8", errors="replace")).decode("ascii") + except Exception: + record_item_b64 = "" + + if record_item and include_media and (not privacy_mode): + try: + for m in _CHAT_HISTORY_MD5_TAG_RE.findall(record_item): + _ensure_chat_history_md5(m) + except Exception: + pass + if resource_conn is not None: + try: + server_map = page_media_index.get("serverMd5") + if not isinstance(server_map, dict): + server_map = {} + page_media_index["serverMd5"] = server_map + + for sid_raw in _CHAT_HISTORY_SERVER_ID_TAG_RE.findall(record_item): + sid_text = str(sid_raw or "").strip() + if not sid_text or sid_text in server_map: + continue + if (len(sid_text) > 24) or (not sid_text.isdigit()): + continue + sid = int(sid_text) + if sid <= 0: + continue + + md5_hit = "" + try: + md5_hit = _lookup_resource_md5( + resource_conn, + None, # do NOT filter by chat_id: merged-forward records come from other chats + 0, # do NOT filter by local_type + int(sid), + 0, + 0, + ) + except Exception: + md5_hit = "" + + md5_hit = str(md5_hit or "").strip().lower() + if not _is_md5(md5_hit): + continue + if _ensure_chat_history_md5(md5_hit): + server_map[sid_text] = md5_hit + except Exception: + pass + if download_remote_media: + try: + for u in _CHAT_HISTORY_URL_TAG_RE.findall(record_item): + maybe_download_remote_image(u) + except Exception: + pass + + lines = get_chat_history_preview_lines(msg) + sent_side_cls = " wechat-special-sent-side" if is_sent else "" + cls = f"wechat-chat-history-card wechat-special-card msg-radius{sent_side_cls} cursor-pointer" + tw.write( + f'
\n' + ) + tw.write('
\n') + tw.write(f'
{esc_text(title)}
\n') + if lines: + tw.write('
\n') + for line in lines: + tw.write(f'
{esc_text(line)}
\n') + tw.write("
\n") + tw.write("
\n") + tw.write('
聊天记录
\n') + tw.write("
\n") + elif rt == "transfer": + received = is_transfer_received(msg) + returned = is_transfer_returned(msg) + overdue = is_transfer_overdue(msg) + side_cls = "wechat-transfer-sent-side" if is_sent else "wechat-transfer-received-side" + cls_parts = ["wechat-transfer-card", "msg-radius", side_cls] + if received: + cls_parts.append("wechat-transfer-received") + if returned: + cls_parts.append("wechat-transfer-returned") + if overdue: + cls_parts.append("wechat-transfer-overdue") + cls = " ".join(cls_parts) + if returned: + icon = "wechat-returned.png" + elif overdue: + icon = "overdue.png" + elif received: + icon = "wechat-trans-icon2.png" + else: + icon = "wechat-trans-icon1.png" + amount = format_transfer_amount(msg.get("amount")) + status = get_transfer_title(msg, is_sent=is_sent) + tw.write(f'
\n') + tw.write('
\n') + tw.write(f' \n') + tw.write('
\n') + if amount: + tw.write(f' ¥{esc_text(amount)}\n') + tw.write(f' {esc_text(status)}\n') + tw.write("
\n") + tw.write("
\n") + tw.write('
微信转账
\n') + tw.write("
\n") + elif rt == "redPacket": + received = False + cls_parts = ["wechat-redpacket-card", "wechat-special-card", "msg-radius"] + if received: + cls_parts.append("wechat-redpacket-received") + if is_sent: + cls_parts.append("wechat-special-sent-side") + icon = "wechat-trans-icon4.png" if received else "wechat-trans-icon3.png" + tw.write(f'
\n') + tw.write('
\n') + tw.write(f' \n') + tw.write('
\n') + tw.write(f' {esc_text(get_red_packet_text(msg))}\n') + if received: + tw.write(' 已领取\n') + tw.write("
\n") + tw.write("
\n") + tw.write('
微信红包
\n') + tw.write("
\n") + elif rt == "text": + tw.write(f'
{render_text_with_emojis(msg.get("content") or "")}
\n') + else: + content = str(msg.get("content") or "").strip() + if not content: + content = f"[{str(msg.get('type') or 'unknown')}] 消息" + tw.write(f'
{render_text_with_emojis(content)}
\n') + + tw.write("
\n") + tw.write("
\n") + tw.write("
\n") + tw.write("
\n") + + _mark_exported() + if ts: + prev_ts = ts if scanned % 500 == 0 and job.cancel_requested: raise _JobCancelled() + if page_size > 0: + _close_page_fp() + paged_total_pages = max(1, len(page_frag_paths)) + paged_pad_width = max(4, len(str(paged_total_pages))) + if page_frag_paths: + paged_old_page_paths = list(page_frag_paths[:-1]) + tw.set_target(hw) + try: + tw.write(page_frag_paths[-1].read_text(encoding="utf-8")) + except Exception: + try: + tw.write(page_frag_paths[-1].read_text(encoding="utf-8", errors="ignore")) + except Exception: + pass + else: + paged_old_page_paths = [] + tw.set_target(hw) + + # Close message list + container + tw.set_target(hw) + tw.write("
\n") + tw.write("
\n") + + if page_size > 0 and paged_total_pages > 1: + page_meta = { + "schemaVersion": 1, + "pageSize": int(page_size), + "totalPages": int(paged_total_pages), + "initialPage": int(paged_total_pages), + "totalMessages": int(exported), + "padWidth": int(paged_pad_width), + "pageFilePrefix": "pages/page-", + "pageFileSuffix": ".js", + "inlinedPages": [int(paged_total_pages)], + } + try: + page_meta_payload = json.dumps(page_meta, ensure_ascii=False) + except Exception: + page_meta_payload = "{}" + page_meta_payload = page_meta_payload.replace("{page_meta_payload}\n') + + tw.write("
\n") + tw.write("
\n") + tw.write("
\n") + tw.write("
\n") + tw.write("\n") + + try: + media_index_payload = json.dumps(page_media_index, ensure_ascii=False) + except Exception: + media_index_payload = "{}" + media_index_payload = media_index_payload.replace("{media_index_payload}\n') + + tw.write("\n") + tw.write("\n") tw.flush() zf.write(str(tmp_path), arcname) - if contact_conn is not None: - try: - contact_conn.close() - except Exception: - pass + + if page_size > 0 and paged_old_page_paths: + for page_no, frag_path in enumerate(paged_old_page_paths, start=1): + try: + frag_text = frag_path.read_text(encoding="utf-8") + except Exception: + try: + frag_text = frag_path.read_text(encoding="utf-8", errors="ignore") + except Exception: + frag_text = "" + + try: + frag_json = json.dumps(frag_text, ensure_ascii=False) + except Exception: + frag_json = json.dumps("", ensure_ascii=False) + + num = str(page_no).zfill(int(paged_pad_width or 4)) + arc_js = f"{conv_dir}/pages/page-{num}.js" + js_payload = ( + "(() => {\n" + f" const pageNo = {int(page_no)};\n" + f" const html = {frag_json};\n" + " try {\n" + " const fn = window.__WCE_PAGE_LOADED__;\n" + " if (typeof fn === 'function') fn(pageNo, html);\n" + " else {\n" + " const q = (window.__WCE_PAGE_QUEUE__ = window.__WCE_PAGE_QUEUE__ || []);\n" + " q.push([pageNo, html]);\n" + " }\n" + " } catch {}\n" + "})();\n" + ) + zf.writestr(arc_js, js_payload) return exported @@ -1666,9 +5683,16 @@ def _privacy_scrub_message( for k in ( "title", "url", + "from", + "fromUsername", + "linkType", + "linkStyle", "thumbUrl", + "recordItem", "imageMd5", "imageFileId", + "imageMd5Candidates", + "imageFileIdCandidates", "imageUrl", "emojiMd5", "emojiUrl", @@ -1679,6 +5703,11 @@ def _privacy_scrub_message( "videoUrl", "videoThumbUrl", "voiceLength", + "quoteUsername", + "quoteServerId", + "quoteType", + "quoteThumbUrl", + "quoteVoiceLength", "quoteTitle", "quoteContent", "amount", @@ -1733,25 +5762,88 @@ def record_missing(kind: str, ident: str) -> None: offline: list[dict[str, Any]] = [] if 
rt == "image" and "image" in media_kinds: - md5 = str(msg.get("imageMd5") or "").strip().lower() - file_id = str(msg.get("imageFileId") or "").strip() - arc, is_new = _materialize_media( - zf=zf, - account_dir=account_dir, - conv_username=conv_username, - kind="image", - md5=md5 if _is_md5(md5) else "", - file_id=file_id, - media_written=media_written, - suggested_name="", - ) + primary_md5 = str(msg.get("imageMd5") or "").strip().lower() + primary_file_id = str(msg.get("imageFileId") or "").strip() + + md5_candidates_raw = msg.get("imageMd5Candidates") or [] + file_id_candidates_raw = msg.get("imageFileIdCandidates") or [] + md5_candidates = md5_candidates_raw if isinstance(md5_candidates_raw, list) else [] + file_id_candidates = file_id_candidates_raw if isinstance(file_id_candidates_raw, list) else [] + + md5s: list[str] = [] + file_ids: list[str] = [] + + def add_md5(v: Any) -> None: + s = str(v or "").strip().lower() + if _is_md5(s) and s not in md5s: + md5s.append(s) + + def add_file_id(v: Any) -> None: + s = str(v or "").strip() + if s and s not in file_ids: + file_ids.append(s) + + add_md5(primary_md5) + for v in md5_candidates: + add_md5(v) + + add_file_id(primary_file_id) + for v in file_id_candidates: + add_file_id(v) + + arc = "" + is_new = False + used_md5 = "" + used_file_id = "" + + # Prefer md5-based resolution first (more reliable), then fall back to file_id search. 
+ for md5 in md5s: + arc, is_new = _materialize_media( + zf=zf, + account_dir=account_dir, + conv_username=conv_username, + kind="image", + md5=md5, + file_id="", + media_written=media_written, + suggested_name="", + ) + if arc: + used_md5 = md5 + break + + if not arc: + for file_id in file_ids: + arc, is_new = _materialize_media( + zf=zf, + account_dir=account_dir, + conv_username=conv_username, + kind="image", + md5="", + file_id=file_id, + media_written=media_written, + suggested_name="", + ) + if arc: + used_file_id = file_id + break + if arc: - offline.append({"kind": "image", "path": arc, "md5": md5, "fileId": file_id}) + # Keep primary fields in sync with what actually resolved. + try: + if used_md5: + msg["imageMd5"] = used_md5 + if used_file_id: + msg["imageFileId"] = used_file_id + except Exception: + pass + + offline.append({"kind": "image", "path": arc, "md5": used_md5 or primary_md5, "fileId": used_file_id or primary_file_id}) if is_new: with lock: job.progress.media_copied += 1 else: - record_missing("image", md5 or file_id) + record_missing("image", primary_md5 or primary_file_id) if rt == "emoji" and "emoji" in media_kinds: md5 = str(msg.get("emojiMd5") or "").strip().lower() @@ -1951,13 +6043,9 @@ def _materialize_voice( if not isinstance(data, (bytes, bytearray)): data = bytes(data) - wav = _convert_silk_to_wav(data) - if wav != data and wav[:4] == b"RIFF": - ext = "wav" - payload = wav - else: - ext = "silk" - payload = data + payload, ext, _media_type = _convert_silk_to_browser_audio(data, preferred_format="mp3") + if not payload: + return "", False arc = f"media/voices/voice_{int(server_id)}.{ext}" zf.writestr(arc, payload) @@ -2026,20 +6114,27 @@ def _materialize_media( except Exception: return "", False + try: + with open(src, "rb") as f: + head = f.read(64) + except Exception: + head = b"" + + head_mt = _detect_image_media_type(head[:32]) + looks_like_mp4 = len(head) >= 8 and head[4:8] == b"ftyp" + ext = src.suffix.lstrip(".").lower() if not 
ext: - try: - head = src.read_bytes()[:32] - except Exception: - head = b"" - mt = _detect_image_media_type(head) - if mt.startswith("image/"): - ext = mt.split("/", 1)[-1] - elif len(head) >= 8 and head[4:8] == b"ftyp": + if head_mt.startswith("image/"): + ext = head_mt.split("/", 1)[-1] + elif looks_like_mp4: ext = "mp4" else: ext = "dat" + if ext == "jpeg": + ext = "jpg" + folder = "misc" if kind == "image": folder = "images" @@ -2061,10 +6156,62 @@ def _materialize_media( arc_name = arc_name[:160] arc = f"media/{folder}/{arc_name}" - try: - zf.write(src, arcname=arc) - except Exception: - return "", False + should_stream_copy = False + if kind == "file": + should_stream_copy = True + elif kind in {"image", "emoji", "video_thumb"}: + should_stream_copy = ( + (ext == "jpg" and head_mt == "image/jpeg") + or (ext == "png" and head_mt == "image/png") + or (ext == "gif" and head_mt == "image/gif") + or (ext == "webp" and head_mt == "image/webp") + ) + elif kind == "video": + should_stream_copy = ext == "mp4" and looks_like_mp4 + + if should_stream_copy or (kind not in {"image", "emoji", "video", "video_thumb"}): + try: + zf.write(src, arcname=arc) + except Exception: + return "", False + else: + try: + data, mt = _read_and_maybe_decrypt_media(src, account_dir=account_dir) + except Exception: + try: + zf.write(src, arcname=arc) + except Exception: + return "", False + media_written[key] = arc + return arc, True + + mt = str(mt or "").strip() + if mt == "image/png": + ext2 = "png" + elif mt == "image/jpeg": + ext2 = "jpg" + elif mt == "image/gif": + ext2 = "gif" + elif mt == "image/webp": + ext2 = "webp" + elif mt == "video/mp4": + ext2 = "mp4" + else: + ext2 = "dat" if mt == "application/octet-stream" else (ext or "dat") + + if ext2 != ext: + if nice and kind == "file": + arc_name = f"{nice}_{ident}.{ext2}" if ext2 else f"{nice}_{ident}" + else: + arc_name = f"{ident}.{ext2}" if ext2 else ident + if len(arc_name) > 160: + arc_name = arc_name[:160] + arc = 
f"media/{folder}/{arc_name}" + + try: + zf.writestr(arc, data) + except Exception: + return "", False media_written[key] = arc return arc, True diff --git a/src/wechat_decrypt_tool/chat_helpers.py b/src/wechat_decrypt_tool/chat_helpers.py index 57f44c2..2926205 100644 --- a/src/wechat_decrypt_tool/chat_helpers.py +++ b/src/wechat_decrypt_tool/chat_helpers.py @@ -8,7 +8,7 @@ from datetime import datetime from pathlib import Path from typing import Any, Optional -from urllib.parse import quote +from urllib.parse import parse_qs, quote, urlparse from fastapi import HTTPException @@ -618,6 +618,73 @@ def _normalize_xml_url(url: str) -> str: return u.replace("&", "&").strip() +def _is_mp_weixin_article_url(url: str) -> bool: + u = str(url or "").strip() + if not u: + return False + + try: + host = str(urlparse(u).hostname or "").strip().lower() + if host == "mp.weixin.qq.com" or host.endswith(".mp.weixin.qq.com"): + return True + except Exception: + pass + + lu = u.lower() + return "mp.weixin.qq.com/" in lu + + +def _is_mp_weixin_feed_article_url(url: str) -> bool: + """Detect WeChat's PC feed/recommendation mp.weixin.qq.com share URLs. + + These links often carry an `exptype` like: + masonry_feed_brief_content_elite_for_pcfeeds_u2i + + WeChat desktop tends to render them in a cover-card style (image + bottom title), + so we use this as a hint to choose the 'cover' linkStyle. 
+ """ + + u = str(url or "").strip() + if not u: + return False + + try: + parsed = urlparse(u) + q = parse_qs(parsed.query or "") + for v in (q.get("exptype") or []): + if "masonry_feed" in str(v or "").lower(): + return True + except Exception: + pass + + return "exptype=masonry_feed" in u.lower() + + +def _classify_link_share(*, app_type: int, url: str, source_username: str, desc: str) -> tuple[str, str]: + src = str(source_username or "").strip().lower() + is_official_article = bool( + app_type in (5, 68) + and (_is_mp_weixin_article_url(url) or src.startswith("gh_")) + ) + + link_type = "official_article" if is_official_article else "web_link" + + d = str(desc or "").strip() + hashtag_count = len(re.findall(r"#[^#\s]+", d)) + + # 公众号文章中「封面图 + 底栏标题」卡片特征:摘要以 #话题# 风格为主。 + cover_like = bool( + is_official_article + and ( + d.startswith("#") + or hashtag_count >= 2 + or _is_mp_weixin_feed_article_url(url) + ) + ) + link_style = "cover" if cover_like else "default" + return link_type, link_style + + def _extract_xml_tag_text(xml_text: str, tag: str) -> str: if not xml_text or not tag: return "" @@ -645,6 +712,105 @@ def _extract_xml_tag_or_attr(xml_text: str, name: str) -> str: return _extract_xml_attr(xml_text, name) +def _parse_location_message(text: str) -> dict[str, Any]: + raw = html.unescape(str(text or "").strip()) + + def _clean(value: Any) -> str: + candidate = _strip_cdata(str(value or "").strip()) + if not candidate: + return "" + candidate = html.unescape(candidate) + candidate = re.sub(r"\s+", " ", candidate).strip() + return candidate + + def _to_float(value: Any) -> Optional[float]: + s = str(value or "").strip() + if not s: + return None + try: + num = float(s) + except Exception: + return None + if not (-180.0 <= num <= 180.0): + return None + return num + + poiname = _clean( + _extract_xml_tag_or_attr(raw, "poiname") + or _extract_xml_tag_or_attr(raw, "poiName") + or _extract_xml_tag_or_attr(raw, "name") + ) + label = _clean( + 
_extract_xml_tag_or_attr(raw, "label") + or _extract_xml_tag_or_attr(raw, "labelname") + or _extract_xml_tag_or_attr(raw, "address") + ) + + lat = _to_float( + _extract_xml_tag_or_attr(raw, "x") + or _extract_xml_tag_or_attr(raw, "latitude") + or _extract_xml_tag_or_attr(raw, "lat") + ) + lng = _to_float( + _extract_xml_tag_or_attr(raw, "y") + or _extract_xml_tag_or_attr(raw, "longitude") + or _extract_xml_tag_or_attr(raw, "lng") + or _extract_xml_tag_or_attr(raw, "lon") + ) + + if lat is not None and not (-90.0 <= lat <= 90.0): + lat = None + if lng is not None and not (-180.0 <= lng <= 180.0): + lng = None + + title = poiname or label or "位置" + return { + "renderType": "location", + "content": title or "[Location]", + "locationLat": lat, + "locationLng": lng, + "locationPoiname": poiname, + "locationLabel": label, + } + + +def _parse_system_message_content(raw_text: str) -> str: + text = str(raw_text or "").strip() + if not text: + return "[系统消息]" + + def _clean_system_text(value: str) -> str: + candidate = str(value or "").strip() + if not candidate: + return "" + + nested_content = _extract_xml_tag_text(candidate, "content") + if nested_content: + candidate = nested_content + + candidate = re.sub(r"", "", candidate) + candidate = re.sub(r"]*>", "", candidate) + candidate = re.sub(r"\s+", " ", candidate).strip() + return candidate + + if "revokemsg" in text.lower(): + replace_msg = _extract_xml_tag_text(text, "replacemsg") + cleaned_replace_msg = _clean_system_text(replace_msg) + if cleaned_replace_msg: + return cleaned_replace_msg + + revoke_msg = _extract_xml_tag_text(text, "revokemsg") + cleaned_revoke_msg = _clean_system_text(revoke_msg) + if cleaned_revoke_msg: + return cleaned_revoke_msg + + return "撤回了一条消息" + + content_text = _clean_system_text(text) + return content_text or "[系统消息]" + + def _extract_refermsg_block(xml_text: str) -> str: if not xml_text: return "" @@ -652,6 +818,65 @@ def _extract_refermsg_block(xml_text: str) -> str: return (m.group(1) 
or "").strip() if m else "" +def _extract_refermsg_content(refer_block: str) -> str: + if not refer_block: + return "" + + cdata_match = re.search( + r"]*>\s*\s*", + refer_block, + flags=re.IGNORECASE | re.DOTALL, + ) + if cdata_match: + return str(cdata_match.group(1) or "").strip() + + return _extract_xml_tag_text(refer_block, "content") + + +def _summarize_nested_quote_content(raw_content: str) -> str: + candidate = str(raw_content or "").strip() + if not candidate: + return "" + + lower = candidate.lower() + if " str: + candidate = str(raw_content or "").strip() + if not candidate: + return "" + + probes = [candidate] + + if candidate.startswith("wxid_"): + colon = candidate.find(":") + if 0 < colon <= 64: + rest = candidate[colon + 1 :].strip() + if rest: + probes.append(rest) + + for probe in probes: + for key in ("thumburl", "cdnthumburl", "cdnthumurl", "coverurl", "cover"): + value = _normalize_xml_url(_extract_xml_tag_or_attr(probe, key)) + if value: + return value + + return "" + + def _infer_transfer_status_text( is_sent: bool, paysubtype: str, @@ -665,7 +890,7 @@ def _infer_transfer_status_text( rs = str(receivestatus or "").strip() if rs == "1": - return "已收款" + return "已被接收" if is_sent else "已收款" if rs == "2": return "已退还" if rs == "3": @@ -681,7 +906,7 @@ def _infer_transfer_status_text( if t == "8": return "发起转账" if t == "3": - return "已收款" if is_sent else "已被接收" + return "已被接收" if is_sent else "已收款" if t == "1": return "转账" @@ -733,10 +958,22 @@ def _extract_sender_from_group_xml(xml_text: str) -> str: if not xml_text: return "" - v = _extract_xml_tag_text(xml_text, "fromusername") + probe_text = xml_text + try: + # Avoid picking nested quoted-message sender from . 
+ probe_text = re.sub( + r"(]*>.*?)", + "", + xml_text, + flags=re.IGNORECASE | re.DOTALL, + ) + except Exception: + probe_text = xml_text + + v = _extract_xml_tag_text(probe_text, "fromusername") if v: return v - v = _extract_xml_attr(xml_text, "fromusername") + v = _extract_xml_attr(probe_text, "fromusername") if v: return v return "" @@ -766,11 +1003,40 @@ def _parse_quote_message(text: str) -> str: def _parse_app_message(text: str) -> dict[str, Any]: - app_type_raw = _extract_xml_tag_text(text, "type") - try: - app_type = int(str(app_type_raw or "0").strip() or "0") - except Exception: - app_type = 0 + def _extract_appmsg_type(xml_text: str) -> int: + """提取 直系子节点的 ,避免被 refermsg/recorditem/weappinfo 等嵌套块里的 干扰。""" + + probe = str(xml_text or "") + try: + m = re.search(r"]*>(.*?)", probe, flags=re.IGNORECASE | re.DOTALL) + except Exception: + m = None + + if m: + inner = str(m.group(1) or "") + # 一些嵌套块内部也会出现 ,先剔除再提取。 + try: + inner = re.sub(r"(]*>.*?)", "", inner, flags=re.IGNORECASE | re.DOTALL) + inner = re.sub(r"(]*>.*?)", "", inner, flags=re.IGNORECASE | re.DOTALL) + inner = re.sub(r"(]*>.*?)", "", inner, flags=re.IGNORECASE | re.DOTALL) + inner = re.sub(r"(]*>.*?)", "", inner, flags=re.IGNORECASE | re.DOTALL) + inner = re.sub(r"(]*>.*?)", "", inner, flags=re.IGNORECASE | re.DOTALL) + except Exception: + pass + + t = _extract_xml_tag_text(inner, "type") + try: + return int(str(t or "0").strip() or "0") + except Exception: + return 0 + + t = _extract_xml_tag_text(probe, "type") + try: + return int(str(t or "0").strip() or "0") + except Exception: + return 0 + + app_type = _extract_appmsg_type(text) title = _extract_xml_tag_text(text, "title") des = _extract_xml_tag_text(text, "des") url = _normalize_xml_url(_extract_xml_tag_text(text, "url")) @@ -807,8 +1073,18 @@ def _parse_app_message(text: str) -> dict[str, Any]: "recordItem": record_item or "", } - if app_type in (5, 68) and url: - thumb_url = _normalize_xml_url(_extract_xml_tag_text(text, "thumburl")) + if 
app_type in (4, 5, 68) and url: + # Many appmsg link cards (notably Bilibili shares with 4) include a metadata block. + # DO NOT treat " dict[str, Any]: "thumbUrl": thumb_url or "", "from": str(source_display_name or "").strip(), "fromUsername": str(source_username or "").strip(), + "linkType": link_type, + "linkStyle": link_style, + } + + if app_type in (33, 36): + # 小程序分享(WeChat v4 常见:local_type = 49 + (33<<32) / 49 + (36<<32)) + # 注:部分 payload 的 为空;前端会按需渲染为不可点击卡片。 + weapp_block = _extract_xml_tag_text(text, "weappinfo") or _extract_xml_tag_text(text, "wxaappinfo") + weapp_username = _extract_xml_tag_text(weapp_block, "username") if weapp_block else "" + weapp_icon = _normalize_xml_url( + _extract_xml_tag_or_attr(weapp_block, "weappiconurl") if weapp_block else "" + ) or _normalize_xml_url(_extract_xml_tag_or_attr(text, "weappiconurl")) + + thumb_url = _normalize_xml_url( + _extract_xml_tag_or_attr(text, "thumburl") + or _extract_xml_tag_or_attr(text, "cdnthumburl") + or _extract_xml_tag_or_attr(text, "coverurl") + or _extract_xml_tag_or_attr(text, "cover") + or weapp_icon + ) + + from_display = str(source_display_name or "").strip() + if not from_display and weapp_block: + from_display = ( + _extract_xml_tag_text(weapp_block, "nickname") + or _extract_xml_tag_text(weapp_block, "appname") + or "" + ) + if not from_display: + from_display = str(_extract_xml_tag_text(text, "sourcename") or "").strip() + + from_u = str(weapp_username or source_username or "").strip() + + content_text = (des or title or "[Mini Program]").strip() or "[Mini Program]" + title_text = (title or des or "").strip() + return { + "renderType": "link", + "content": content_text, + "title": title_text or content_text, + "url": url or "", + "thumbUrl": thumb_url or "", + "from": from_display, + "fromUsername": from_u, + "linkType": "mini_program", + "linkStyle": "default", } if app_type in (6, 74): @@ -870,7 +1191,7 @@ def _parse_app_message(text: str) -> dict[str, Any]: or "" ) refer_svrid = 
_extract_xml_tag_or_attr(refer_block, "svrid") - refer_content = _extract_xml_tag_text(refer_block, "content") + refer_content = _extract_refermsg_content(refer_block) refer_type = _extract_xml_tag_or_attr(refer_block, "type") rt = (reply_text or "").strip() @@ -887,6 +1208,7 @@ def _parse_app_message(text: str) -> dict[str, Any]: refer_content = rest t = str(refer_type or "").strip() + quote_thumb_url = "" quote_voice_length = "" if t == "3": refer_content = "[图片]" @@ -907,8 +1229,29 @@ def _parse_app_message(text: str) -> dict[str, Any]: except Exception: quote_voice_length = "" refer_content = "[语音]" - elif t == "49" and refer_content: - refer_content = f"[链接] {refer_content}".strip() + elif t == "57": + summarized = _summarize_nested_quote_content(str(refer_content or "")) + if summarized: + refer_content = summarized + elif str(refer_content or "").lstrip().startswith("<"): + refer_content = "[引用消息]" + elif t in {"49", "5", "68"}: + raw_link_content = str(refer_content or "").strip() + summarized = _summarize_nested_quote_content(raw_link_content) + link_text = str(summarized or raw_link_content).strip() + quote_thumb_url = _extract_nested_quote_thumb_url(raw_link_content) + + if link_text.startswith("wxid_"): + colon = link_text.find(":") + if 0 < colon <= 64: + maybe_rest = link_text[colon + 1 :].strip() + if maybe_rest: + second_try = _summarize_nested_quote_content(maybe_rest) + link_text = str(second_try or maybe_rest).strip() + if not quote_thumb_url: + quote_thumb_url = _extract_nested_quote_thumb_url(maybe_rest) + + refer_content = f"[链接] {link_text}".strip() if link_text else "[链接]" return { "renderType": "quote", @@ -917,11 +1260,15 @@ def _parse_app_message(text: str) -> dict[str, Any]: "quoteTitle": refer_displayname or "", "quoteContent": refer_content or "", "quoteType": t, + "quoteThumbUrl": quote_thumb_url, "quoteServerId": str(refer_svrid or "").strip(), "quoteVoiceLength": quote_voice_length, } - if app_type == 62 or ".... 
+ # Be strict here: lots of non-pat appmsg payloads still carry a nested ... metadata block. + patmsg_attr = bool(re.search(r"<(sysmsg|appmsg)\b[^>]*\btype=['\"]patmsg['\"]", lower)) + if app_type == 62 or patmsg_attr: return {"renderType": "system", "content": "[拍一拍]"} if app_type == 2000 or ( @@ -1053,11 +1400,7 @@ def _build_latest_message_preview( content_text = "" if local_type == 10000: - if "revokemsg" in raw_text: - content_text = "撤回了一条消息" - else: - content_text = re.sub(r"]*>", "", raw_text) - content_text = re.sub(r"\s+", " ", content_text).strip() or "[系统消息]" + content_text = _parse_system_message_content(raw_text) elif local_type == 244813135921: parsed = _parse_app_message(raw_text) qt = str(parsed.get("quoteTitle") or "").strip() @@ -1093,7 +1436,15 @@ def _build_latest_message_preview( elif local_type == 43 or local_type == 62: content_text = "[视频]" elif local_type == 47: - content_text = "[表情]" + content_text = "[动画表情]" + elif local_type == 48: + parsed = _parse_location_message(raw_text) + location_name = ( + str(parsed.get("locationPoiname") or "").strip() + or str(parsed.get("locationLabel") or "").strip() + or str(parsed.get("content") or "").strip() + ) + content_text = f"[位置]{location_name}" if location_name else "[位置]" else: if raw_text and (not raw_text.startswith("<")) and (not raw_text.startswith('"<')): content_text = raw_text @@ -1107,6 +1458,102 @@ def _build_latest_message_preview( return content_text +def _extract_group_preview_sender_username(preview_text: str) -> str: + text = str(preview_text or "").strip() + if not text: + return "" + + match = re.match(r"^([^:\s]{1,128}):\s*.+$", text) + if not match: + return "" + + sender = str(match.group(1) or "").strip() + if not sender: + return "" + + if sender.startswith("wxid_") or sender.endswith("@chatroom") or ("@" in sender): + return sender + if re.fullmatch(r"[A-Za-z][A-Za-z0-9_-]{1,127}", sender): + return sender + return "" + + +def _normalize_session_preview_text( + 
preview_text: str, + *, + is_group: bool, + sender_display_names: Optional[dict[str, str]] = None, +) -> str: + text = re.sub(r"\s+", " ", str(preview_text or "").strip()).strip() + if not text: + return "" + + text = text.replace("[表情]", "[动画表情]") + text = re.sub(r"\[location\]", "[位置]", text, flags=re.IGNORECASE) + if (not is_group) or text.startswith("[草稿]"): + return text + + match = re.match(r"^([^:\s]{1,128}):\s*(.+)$", text) + if not match: + return text + + sender_username = str(match.group(1) or "").strip() + body = str(match.group(2) or "").strip() + if (not sender_username) or (not body): + return text + + display_name = str((sender_display_names or {}).get(sender_username) or "").strip() + if display_name and display_name != sender_username: + return f"{display_name}: {body}" + return text + + +def _replace_preview_sender_prefix(preview_text: str, sender_display_name: str) -> str: + text = re.sub(r"\s+", " ", str(preview_text or "").strip()).strip() + if not text: + return "" + + display_name = str(sender_display_name or "").strip() + if (not display_name) or text.startswith("[草稿]"): + return text + + match = re.match(r"^([^:\n]{1,128}):\s*(.+)$", text) + if not match: + return text + + body = re.sub(r"\s+", " ", str(match.group(2) or "").strip()).strip() + if not body: + return text + return f"{display_name}: {body}" + + +def _build_group_sender_display_name_map( + contact_db_path: Path, + previews: dict[str, str], +) -> dict[str, str]: + group_sender_usernames: set[str] = set() + for conv_username, preview_text in previews.items(): + if not str(conv_username or "").endswith("@chatroom"): + continue + sender_username = _extract_group_preview_sender_username(preview_text) + if sender_username: + group_sender_usernames.add(sender_username) + + if not group_sender_usernames: + return {} + + display_names: dict[str, str] = {} + sender_contact_rows = _load_contact_rows(contact_db_path, list(group_sender_usernames)) + for sender_username in 
group_sender_usernames: + row = sender_contact_rows.get(sender_username) + if row is None: + continue + display_name = _pick_display_name(row, sender_username) + if display_name and display_name != sender_username: + display_names[sender_username] = display_name + return display_names + + def _load_latest_message_previews(account_dir: Path, usernames: list[str]) -> dict[str, str]: if not usernames: return {} @@ -1338,6 +1785,208 @@ def query_table(table: str, targets: list[str]) -> None: conn.close() +def _load_group_nickname_map_from_contact_db( + contact_db_path: Path, + chatroom_id: str, + sender_usernames: list[str], +) -> dict[str, str]: + """Best-effort mapping for group member nickname (aka group card) from contact.db. + + WeChat stores per-chatroom member nicknames in `contact.db.chat_room.ext_buffer` as a protobuf-like blob. + This helper parses that blob and returns { sender_username -> group_nickname } for the requested senders. + + Notes: + - Best-effort: never raises; returns {} on any failure. + - Only resolves usernames included in `sender_usernames` to keep parsing cheap. 
+ """ + + chatroom = str(chatroom_id or "").strip() + if not chatroom.endswith("@chatroom"): + return {} + + targets = list(dict.fromkeys([str(x or "").strip() for x in sender_usernames if str(x or "").strip()])) + if not targets: + return {} + target_set = set(targets) + + def decode_varint(raw: bytes, offset: int) -> tuple[Optional[int], int]: + value = 0 + shift = 0 + pos = int(offset) + n = len(raw) + while pos < n: + byte = raw[pos] + pos += 1 + value |= (byte & 0x7F) << shift + if (byte & 0x80) == 0: + return value, pos + shift += 7 + if shift > 63: + return None, n + return None, n + + def iter_fields(raw: bytes): + idx = 0 + n = len(raw) + while idx < n: + tag, idx_next = decode_varint(raw, idx) + if tag is None or idx_next <= idx: + break + idx = idx_next + field_no = int(tag) >> 3 + wire_type = int(tag) & 0x7 + + if wire_type == 0: + _, idx_next = decode_varint(raw, idx) + if idx_next <= idx: + break + idx = idx_next + continue + + if wire_type == 2: + size, idx_next = decode_varint(raw, idx) + if size is None or idx_next <= idx: + break + idx = idx_next + end = idx + int(size) + if end > n: + break + chunk = raw[idx:end] + idx = end + yield field_no, wire_type, chunk + continue + + if wire_type == 1: + idx += 8 + continue + if wire_type == 5: + idx += 4 + continue + break + + def is_strong_username_hint(s: str) -> bool: + v = str(s or "").strip() + return v.startswith("wxid_") or v.endswith("@chatroom") or v.startswith("gh_") or ("@" in v) + + def looks_like_username(s: str) -> bool: + v = str(s or "").strip() + if not v: + return False + if is_strong_username_hint(v): + return True + # Common alias-style WeChat IDs are ASCII-ish and do not contain whitespace. 
+ if len(v) < 6 or len(v) > 32: + return False + if re.search(r"\s", v): + return False + if not re.match(r"^[A-Za-z][A-Za-z0-9_-]+$", v): + return False + if v.isdigit(): + return False + return True + + def pick_display(strings: list[tuple[int, str]], target: str) -> str: + best_score = -1 + best = "" + for i, (fno, value) in enumerate(strings): + v = str(value or "").strip() + if (not v) or v == target: + continue + if is_strong_username_hint(v): + continue + if "\n" in v or "\r" in v: + continue + if len(v) > 64: + continue + + score = 0 + if int(fno) == 2: + score += 100 + if not looks_like_username(v): + score += 20 + score += max(0, 32 - len(v)) + # Stable tie-breaker: prefer earlier appearance. + score = score * 1000 - i + if score > best_score: + best_score = score + best = v + return best + + try: + conn = sqlite3.connect(str(contact_db_path)) + except Exception: + return {} + + try: + row = conn.execute( + "SELECT ext_buffer FROM chat_room WHERE username = ? LIMIT 1", + (chatroom,), + ).fetchone() + if row is None: + return {} + + ext = row[0] + if ext is None: + return {} + if isinstance(ext, memoryview): + ext_buf = ext.tobytes() + elif isinstance(ext, (bytes, bytearray)): + ext_buf = bytes(ext) + else: + return {} + if not ext_buf: + return {} + + out: dict[str, str] = {} + for _, wire_type, chunk in iter_fields(ext_buf): + if wire_type != 2 or (not chunk): + continue + + # Parse submessage and collect UTF-8 strings. 
+ strings: list[tuple[int, str]] = [] + try: + for sfno, swire, sval in iter_fields(chunk): + if swire != 2: + continue + if not sval: + continue + if len(sval) > 256: + continue + try: + txt = bytes(sval).decode("utf-8", errors="strict") + except Exception: + continue + txt = txt.strip() + if not txt: + continue + strings.append((int(sfno), txt)) + except Exception: + continue + + if not strings: + continue + + present = [v for _, v in strings if v in target_set and v not in out] + if not present: + continue + + for target in present: + disp = pick_display(strings, target) + if disp: + out[target] = disp + if len(out) >= len(target_set): + break + + return out + except Exception: + return {} + finally: + try: + conn.close() + except Exception: + pass + + def _load_usernames_by_display_names(contact_db_path: Path, names: list[str]) -> dict[str, str]: """Best-effort mapping from display name -> username using contact.db. @@ -1488,10 +2137,10 @@ def _row_to_search_hit( if is_group and raw_text and (not raw_text.startswith("<")) and (not raw_text.startswith('"<')): sender_prefix, raw_text = _split_group_sender_prefix(raw_text, sender_username) - if is_group and sender_prefix: + if is_group and sender_prefix and (not sender_username): sender_username = sender_prefix - if is_group and raw_text and (raw_text.startswith("<") or raw_text.startswith('"<')): + if is_group and (not sender_username) and raw_text and (raw_text.startswith("<") or raw_text.startswith('"<')): xml_sender = _extract_sender_from_group_xml(raw_text) if xml_sender: sender_username = xml_sender @@ -1508,18 +2157,21 @@ def _row_to_search_hit( quote_username = "" quote_title = "" quote_content = "" + quote_thumb_url = "" + link_type = "" + link_style = "" amount = "" pay_sub_type = "" transfer_status = "" voip_type = "" + location_lat: Optional[float] = None + location_lng: Optional[float] = None + location_poiname = "" + location_label = "" if local_type == 10000: render_type = "system" - if "revokemsg" 
in raw_text: - content_text = "撤回了一条消息" - else: - content_text = re.sub(r"]*>", "", raw_text) - content_text = re.sub(r"\s+", " ", content_text).strip() or "[系统消息]" + content_text = _parse_system_message_content(raw_text) elif local_type == 49: parsed = _parse_app_message(raw_text) render_type = str(parsed.get("renderType") or "text") @@ -1528,6 +2180,9 @@ def _row_to_search_hit( url = str(parsed.get("url") or "") quote_title = str(parsed.get("quoteTitle") or "") quote_content = str(parsed.get("quoteContent") or "") + quote_thumb_url = str(parsed.get("quoteThumbUrl") or "") + link_type = str(parsed.get("linkType") or "") + link_style = str(parsed.get("linkStyle") or "") quote_username = str(parsed.get("quoteUsername") or "") amount = str(parsed.get("amount") or "") pay_sub_type = str(parsed.get("paySubType") or "") @@ -1552,6 +2207,7 @@ def _row_to_search_hit( content_text = str(parsed.get("content") or "[引用消息]") quote_title = str(parsed.get("quoteTitle") or "") quote_content = str(parsed.get("quoteContent") or "") + quote_thumb_url = str(parsed.get("quoteThumbUrl") or "") quote_username = str(parsed.get("quoteUsername") or "") elif local_type == 3: render_type = "image" @@ -1566,6 +2222,14 @@ def _row_to_search_hit( elif local_type == 47: render_type = "emoji" content_text = "[表情]" + elif local_type == 48: + parsed = _parse_location_message(raw_text) + render_type = str(parsed.get("renderType") or "location") + content_text = str(parsed.get("content") or "[Location]") + location_lat = parsed.get("locationLat") + location_lng = parsed.get("locationLng") + location_poiname = str(parsed.get("locationPoiname") or "") + location_label = str(parsed.get("locationLabel") or "") elif local_type == 50: render_type = "voip" try: @@ -1601,6 +2265,9 @@ def _row_to_search_hit( url = str(parsed.get("url") or url) quote_title = str(parsed.get("quoteTitle") or quote_title) quote_content = str(parsed.get("quoteContent") or quote_content) + quote_thumb_url = 
str(parsed.get("quoteThumbUrl") or quote_thumb_url) + link_type = str(parsed.get("linkType") or link_type) + link_style = str(parsed.get("linkStyle") or link_style) amount = str(parsed.get("amount") or amount) pay_sub_type = str(parsed.get("paySubType") or pay_sub_type) quote_username = str(parsed.get("quoteUsername") or quote_username) @@ -1640,11 +2307,18 @@ def _row_to_search_hit( "content": content_text, "title": title, "url": url, + "linkType": link_type, + "linkStyle": link_style, "quoteUsername": quote_username, "quoteTitle": quote_title, "quoteContent": quote_content, + "quoteThumbUrl": quote_thumb_url, "amount": amount, "paySubType": pay_sub_type, "transferStatus": transfer_status, "voipType": voip_type, + "locationLat": location_lat, + "locationLng": location_lng, + "locationPoiname": location_poiname, + "locationLabel": location_label, } diff --git a/src/wechat_decrypt_tool/chat_realtime_autosync.py b/src/wechat_decrypt_tool/chat_realtime_autosync.py new file mode 100644 index 0000000..55021b8 --- /dev/null +++ b/src/wechat_decrypt_tool/chat_realtime_autosync.py @@ -0,0 +1,331 @@ +"""Background auto-sync from WCDB realtime (db_storage) into decrypted sqlite. + +Why: +- The UI can read "latest" messages from WCDB realtime (`source=realtime`), but most APIs default to the + decrypted sqlite snapshot (`source=decrypted`). +- Previously we only synced realtime -> decrypted when the UI toggled realtime off, which caused `/api/chat/messages` + to lag behind while realtime was enabled. + +This module runs a lightweight background poller that watches db_storage mtime changes and triggers an incremental +sync_all into decrypted sqlite. It is intentionally conservative (debounced + rate-limited) to avoid hammering the +backend or the sqlite files. 
+""" + +from __future__ import annotations + +import os +import threading +import time +from dataclasses import dataclass +from pathlib import Path +from typing import Any, Optional + +from fastapi import HTTPException + +from .chat_helpers import _list_decrypted_accounts, _resolve_account_dir +from .logging_config import get_logger +from .wcdb_realtime import WCDB_REALTIME + +logger = get_logger(__name__) + + +def _env_bool(name: str, default: bool) -> bool: + raw = str(os.environ.get(name, "") or "").strip().lower() + if not raw: + return default + return raw not in {"0", "false", "no", "off"} + + +def _env_int(name: str, default: int, *, min_v: int, max_v: int) -> int: + raw = str(os.environ.get(name, "") or "").strip() + try: + v = int(raw) + except Exception: + v = int(default) + if v < min_v: + v = min_v + if v > max_v: + v = max_v + return v + + +def _scan_db_storage_mtime_ns(db_storage_dir: Path) -> int: + """Best-effort scan of db_storage for a "latest mtime" signal. + + We intentionally restrict to common database buckets to reduce walk cost. 
+ """ + + try: + base = str(db_storage_dir) + except Exception: + return 0 + + max_ns = 0 + try: + for root, dirs, files in os.walk(base): + if root == base: + allow = {"message", "session", "contact", "head_image", "bizchat", "sns", "general", "favorite"} + dirs[:] = [d for d in dirs if str(d or "").lower() in allow] + + for fn in files: + name = str(fn or "").lower() + if not name.endswith((".db", ".db-wal", ".db-shm")): + continue + if not ( + ("message" in name) + or ("session" in name) + or ("contact" in name) + or ("name2id" in name) + or ("head_image" in name) + ): + continue + + try: + st = os.stat(os.path.join(root, fn)) + m_ns = int(getattr(st, "st_mtime_ns", 0) or 0) + if m_ns <= 0: + m_ns = int(float(getattr(st, "st_mtime", 0.0) or 0.0) * 1_000_000_000) + if m_ns > max_ns: + max_ns = m_ns + except Exception: + continue + except Exception: + return 0 + + return max_ns + + +@dataclass +class _AccountState: + last_mtime_ns: int = 0 + due_at: float = 0.0 + last_sync_end_at: float = 0.0 + thread: Optional[threading.Thread] = None + + +class ChatRealtimeAutoSyncService: + def __init__(self) -> None: + self._enabled = _env_bool("WECHAT_TOOL_REALTIME_AUTOSYNC", True) + self._interval_ms = _env_int("WECHAT_TOOL_REALTIME_AUTOSYNC_INTERVAL_MS", 1000, min_v=200, max_v=10_000) + self._debounce_ms = _env_int("WECHAT_TOOL_REALTIME_AUTOSYNC_DEBOUNCE_MS", 600, min_v=0, max_v=10_000) + self._min_sync_interval_ms = _env_int( + "WECHAT_TOOL_REALTIME_AUTOSYNC_MIN_SYNC_INTERVAL_MS", 800, min_v=0, max_v=60_000 + ) + self._workers = _env_int("WECHAT_TOOL_REALTIME_AUTOSYNC_WORKERS", 1, min_v=1, max_v=4) + + # Sync strategy defaults: cheap incremental write into decrypted sqlite. 
+ self._sync_max_scan = _env_int("WECHAT_TOOL_REALTIME_AUTOSYNC_MAX_SCAN", 200, min_v=20, max_v=5000) + self._priority_max_scan = _env_int("WECHAT_TOOL_REALTIME_AUTOSYNC_PRIORITY_MAX_SCAN", 600, min_v=20, max_v=5000) + self._backfill_limit = _env_int("WECHAT_TOOL_REALTIME_AUTOSYNC_BACKFILL_LIMIT", 0, min_v=0, max_v=5000) + # Default to the same conservative filtering as the chat UI sidebar (avoid hammering gh_/hidden sessions). + self._include_hidden = _env_bool("WECHAT_TOOL_REALTIME_AUTOSYNC_INCLUDE_HIDDEN", False) + self._include_official = _env_bool("WECHAT_TOOL_REALTIME_AUTOSYNC_INCLUDE_OFFICIAL", False) + + self._mu = threading.Lock() + self._states: dict[str, _AccountState] = {} + self._stop = threading.Event() + self._thread: Optional[threading.Thread] = None + + def start(self) -> None: + if not self._enabled: + logger.info("[realtime-autosync] disabled by env WECHAT_TOOL_REALTIME_AUTOSYNC=0") + return + + with self._mu: + if self._thread is not None and self._thread.is_alive(): + return + self._stop.clear() + self._thread = threading.Thread(target=self._run, name="realtime-autosync", daemon=True) + self._thread.start() + + logger.info( + "[realtime-autosync] started interval_ms=%s debounce_ms=%s min_sync_interval_ms=%s max_scan=%s backfill_limit=%s workers=%s", + int(self._interval_ms), + int(self._debounce_ms), + int(self._min_sync_interval_ms), + int(self._sync_max_scan), + int(self._backfill_limit), + int(self._workers), + ) + + def stop(self) -> None: + with self._mu: + th = self._thread + self._thread = None + + if th is None: + return + + self._stop.set() + try: + th.join(timeout=5.0) + except Exception: + pass + + logger.info("[realtime-autosync] stopped") + + def _run(self) -> None: + while not self._stop.is_set(): + tick_t0 = time.perf_counter() + try: + self._tick() + except Exception: + logger.exception("[realtime-autosync] tick failed") + + # Avoid busy looping on exceptions; keep a minimum sleep. 
+ elapsed_ms = (time.perf_counter() - tick_t0) * 1000.0 + sleep_ms = max(100.0, float(self._interval_ms) - elapsed_ms) + self._stop.wait(timeout=sleep_ms / 1000.0) + + def _tick(self) -> None: + accounts = _list_decrypted_accounts() + now = time.time() + + if not accounts: + return + + for acc in accounts: + if self._stop.is_set(): + break + + try: + account_dir = _resolve_account_dir(acc) + except HTTPException: + continue + except Exception: + continue + + info = WCDB_REALTIME.get_status(account_dir) + available = bool(info.get("dll_present") and info.get("key_present") and info.get("db_storage_dir")) + if not available: + continue + + db_storage_dir = Path(str(info.get("db_storage_dir") or "").strip()) + if not db_storage_dir.exists() or not db_storage_dir.is_dir(): + continue + + scan_t0 = time.perf_counter() + mtime_ns = _scan_db_storage_mtime_ns(db_storage_dir) + scan_ms = (time.perf_counter() - scan_t0) * 1000.0 + if scan_ms > 2000: + logger.warning("[realtime-autosync] scan slow account=%s ms=%.1f", acc, scan_ms) + + with self._mu: + st = self._states.setdefault(acc, _AccountState()) + if mtime_ns and mtime_ns != st.last_mtime_ns: + st.last_mtime_ns = int(mtime_ns) + st.due_at = now + (float(self._debounce_ms) / 1000.0) + + # Schedule daemon threads. (Important: do NOT use ThreadPoolExecutor here; its threads are non-daemon on + # Windows/Python 3.12 and can prevent Ctrl+C from stopping the process.) + to_start: list[threading.Thread] = [] + with self._mu: + # Drop state for removed accounts to keep memory bounded. + keep = set(accounts) + for acc in list(self._states.keys()): + if acc not in keep: + self._states.pop(acc, None) + + # Clean up finished threads and compute current concurrency. 
+ running = 0 + for st in self._states.values(): + th = st.thread + if th is not None and th.is_alive(): + running += 1 + elif th is not None and (not th.is_alive()): + st.thread = None + + for acc, st in self._states.items(): + if running >= int(self._workers): + break + if st.due_at <= 0 or st.due_at > now: + continue + if st.thread is not None and st.thread.is_alive(): + continue + + since = now - float(st.last_sync_end_at or 0.0) + min_interval = float(self._min_sync_interval_ms) / 1000.0 + if min_interval > 0 and since < min_interval: + st.due_at = now + (min_interval - since) + continue + + st.due_at = 0.0 + th = threading.Thread( + target=self._sync_account_runner, + args=(acc,), + name=f"realtime-autosync-{acc}", + daemon=True, + ) + st.thread = th + to_start.append(th) + running += 1 + + for th in to_start: + if self._stop.is_set(): + break + try: + th.start() + except Exception: + # Best-effort: if a thread fails to start, clear the state so we can retry later. + with self._mu: + for acc, st in self._states.items(): + if st.thread is th: + st.thread = None + break + + def _sync_account_runner(self, account: str) -> None: + account = str(account or "").strip() + try: + if self._stop.is_set() or (not account): + return + res = self._sync_account(account) + inserted = int((res or {}).get("inserted_total") or (res or {}).get("insertedTotal") or 0) + synced = int((res or {}).get("synced") or (res or {}).get("sessionsSynced") or 0) + logger.info("[realtime-autosync] sync done account=%s synced=%s inserted=%s", account, synced, inserted) + except Exception: + logger.exception("[realtime-autosync] sync failed account=%s", account) + finally: + with self._mu: + st = self._states.get(account) + if st is not None: + st.thread = None + st.last_sync_end_at = time.time() + + def _sync_account(self, account: str) -> dict[str, Any]: + """Run a cheap incremental sync_all for one account.""" + + account = str(account or "").strip() + if not account: + return {"status": 
"skipped", "reason": "missing account"} + + try: + account_dir = _resolve_account_dir(account) + except Exception as e: + return {"status": "skipped", "reason": f"resolve account failed: {e}"} + + info = WCDB_REALTIME.get_status(account_dir) + available = bool(info.get("dll_present") and info.get("key_present") and info.get("db_storage_dir")) + if not available: + return {"status": "skipped", "reason": "realtime not available"} + + # Import lazily to avoid any startup import ordering issues. + from .routers.chat import sync_chat_realtime_messages_all + + try: + return sync_chat_realtime_messages_all( + request=None, # not used by the handler logic; we run it as an internal job + account=account, + max_scan=int(self._sync_max_scan), + priority_username=None, + priority_max_scan=int(self._priority_max_scan), + include_hidden=bool(self._include_hidden), + include_official=bool(self._include_official), + backfill_limit=int(self._backfill_limit), + ) + except HTTPException as e: + return {"status": "error", "error": str(e.detail or "")} + except Exception as e: + return {"status": "error", "error": str(e)} + + +CHAT_REALTIME_AUTOSYNC = ChatRealtimeAutoSyncService() diff --git a/src/wechat_decrypt_tool/isaac64.py b/src/wechat_decrypt_tool/isaac64.py new file mode 100644 index 0000000..c0ee1a1 --- /dev/null +++ b/src/wechat_decrypt_tool/isaac64.py @@ -0,0 +1,210 @@ +from __future__ import annotations + +"""ISAAC-64 PRNG (best-effort fallback). + +In this repo, Moments (SNS) *video* decryption uses a keystream generator that +matches WeFlow's WxIsaac64 (WASM) behavior and XORs only the first 128KB of the +MP4. + +This module provides a pure-Python ISAAC-64 implementation so the backend can +still attempt to generate a keystream when the WASM helper is unavailable. + +Notes: +- Moments *image* decryption is handled via `wcdb_api.dll` (`wcdb_decrypt_sns_image`) + because "ISAAC-64 full-file XOR" is not reliably reproducible for images across + different versions/samples. 
+- This ISAAC-64 implementation may not perfectly match WxIsaac64; treat it as + best-effort. +""" + +from typing import Any, Literal + +_MASK_64 = 0xFFFFFFFFFFFFFFFF + + +def _u64(v: int) -> int: + return int(v) & _MASK_64 + + +class Isaac64: + def __init__(self, seed: Any): + seed_text = str(seed).strip() + if not seed_text: + seed_val = 0 + else: + try: + # WeFlow seeds with BigInt(seed), where seed is usually a decimal string. + seed_val = int(seed_text, 0) + except Exception: + seed_val = 0 + + self.mm = [_u64(0) for _ in range(256)] + self.aa = _u64(0) + self.bb = _u64(0) + self.cc = _u64(0) + self.randrsl = [_u64(0) for _ in range(256)] + self.randrsl[0] = _u64(seed_val) + self.randcnt = 0 + self._init(True) + + def _init(self, flag: bool) -> None: + a = b = c = d = e = f = g = h = _u64(0x9E3779B97F4A7C15) + + def mix() -> tuple[int, int, int, int, int, int, int, int]: + nonlocal a, b, c, d, e, f, g, h + a = _u64(a - e) + f = _u64(f ^ (h >> 9)) + h = _u64(h + a) + + b = _u64(b - f) + g = _u64(g ^ _u64(a << 9)) + a = _u64(a + b) + + c = _u64(c - g) + h = _u64(h ^ (b >> 23)) + b = _u64(b + c) + + d = _u64(d - h) + a = _u64(a ^ _u64(c << 15)) + c = _u64(c + d) + + e = _u64(e - a) + b = _u64(b ^ (d >> 14)) + d = _u64(d + e) + + f = _u64(f - b) + c = _u64(c ^ _u64(e << 20)) + e = _u64(e + f) + + g = _u64(g - c) + d = _u64(d ^ (f >> 17)) + f = _u64(f + g) + + h = _u64(h - d) + e = _u64(e ^ _u64(g << 14)) + g = _u64(g + h) + return a, b, c, d, e, f, g, h + + for _ in range(4): + mix() + + for i in range(0, 256, 8): + if flag: + a = _u64(a + self.randrsl[i]) + b = _u64(b + self.randrsl[i + 1]) + c = _u64(c + self.randrsl[i + 2]) + d = _u64(d + self.randrsl[i + 3]) + e = _u64(e + self.randrsl[i + 4]) + f = _u64(f + self.randrsl[i + 5]) + g = _u64(g + self.randrsl[i + 6]) + h = _u64(h + self.randrsl[i + 7]) + mix() + self.mm[i] = a + self.mm[i + 1] = b + self.mm[i + 2] = c + self.mm[i + 3] = d + self.mm[i + 4] = e + self.mm[i + 5] = f + self.mm[i + 6] = g + self.mm[i 
+ 7] = h + + if flag: + for i in range(0, 256, 8): + a = _u64(a + self.mm[i]) + b = _u64(b + self.mm[i + 1]) + c = _u64(c + self.mm[i + 2]) + d = _u64(d + self.mm[i + 3]) + e = _u64(e + self.mm[i + 4]) + f = _u64(f + self.mm[i + 5]) + g = _u64(g + self.mm[i + 6]) + h = _u64(h + self.mm[i + 7]) + mix() + self.mm[i] = a + self.mm[i + 1] = b + self.mm[i + 2] = c + self.mm[i + 3] = d + self.mm[i + 4] = e + self.mm[i + 5] = f + self.mm[i + 6] = g + self.mm[i + 7] = h + + self._isaac64() + self.randcnt = 256 + + def _isaac64(self) -> None: + self.cc = _u64(self.cc + 1) + self.bb = _u64(self.bb + self.cc) + + for i in range(256): + x = self.mm[i] + if (i & 3) == 0: + # aa ^= ~(aa << 21) + self.aa = _u64(self.aa ^ (_u64(self.aa << 21) ^ _MASK_64)) + elif (i & 3) == 1: + self.aa = _u64(self.aa ^ (self.aa >> 5)) + elif (i & 3) == 2: + self.aa = _u64(self.aa ^ _u64(self.aa << 12)) + else: + self.aa = _u64(self.aa ^ (self.aa >> 33)) + + self.aa = _u64(self.mm[(i + 128) & 255] + self.aa) + y = _u64(self.mm[(x >> 3) & 255] + self.aa + self.bb) + self.mm[i] = y + self.bb = _u64(self.mm[(y >> 11) & 255] + x) + self.randrsl[i] = self.bb + + def rand_u64(self) -> int: + """Return the next ISAAC-64 output as an unsigned 64-bit integer. + + Note: The original reference `rand()` consumes `randrsl[]` in reverse order. + """ + if self.randcnt == 0: + self._isaac64() + self.randcnt = 256 + self.randcnt -= 1 + return _u64(self.randrsl[self.randcnt]) + + # Backward-compatible alias (older callers used `get_next()`). + def get_next(self) -> int: # pragma: no cover + return self.rand_u64() + + KeystreamWordFormat = Literal["raw_le", "raw_be", "be_swap32", "le_swap32"] + + @staticmethod + def _raw_to_bytes(raw: int, word_format: KeystreamWordFormat) -> bytes: + """Serialize one 64-bit `rand()` output to 8 bytes. + + - raw_le/raw_be: direct endianness of the 64-bit integer. + - be_swap32: big-endian bytes with 32-bit halves swapped (BE(lo32)||BE(hi32)). 
+ This matches the byte layout implied by the doc's `htonl(hi32)||htonl(lo32)` + pattern when the resulting u64 is read as bytes on little-endian hosts. + - le_swap32: little-endian bytes with 32-bit halves swapped. + """ + v = _u64(raw) + if word_format == "raw_le": + return int(v).to_bytes(8, "little", signed=False) + if word_format == "raw_be": + return int(v).to_bytes(8, "big", signed=False) + if word_format == "be_swap32": + b = int(v).to_bytes(8, "big", signed=False) + return b[4:8] + b[0:4] + if word_format == "le_swap32": + b = int(v).to_bytes(8, "little", signed=False) + return b[4:8] + b[0:4] + raise ValueError(f"Unknown ISAAC64 word_format: {word_format}") + + def generate_keystream(self, size: int, *, word_format: KeystreamWordFormat = "be_swap32") -> bytes: + """Generate a keystream of `size` bytes. + + This mirrors the decryption loop behavior: produce a new 8-byte keyblock + for every 8 bytes of input, and slice for tail bytes. + """ + want = int(size or 0) + if want <= 0: + return b"" + + blocks = (want + 7) // 8 + out = bytearray() + for _ in range(blocks): + out.extend(self._raw_to_bytes(self.rand_u64(), word_format)) + return bytes(out[:want]) diff --git a/src/wechat_decrypt_tool/key_service.py b/src/wechat_decrypt_tool/key_service.py new file mode 100644 index 0000000..215003d --- /dev/null +++ b/src/wechat_decrypt_tool/key_service.py @@ -0,0 +1,292 @@ +# import sys +# import requests + +try: + import wx_key +except ImportError: + print('[!] 
# ====================== Hook-based key extraction ======================

@dataclass
class HookConfig:
    """Byte-signature scan parameters for one WeChat (Weixin 4.x) version range.

    `pattern`/`mask`/`offset` locate the database-key routine inside the
    running process; the optional `md5_*` triple locates the media (image)
    key routine.  A '?' in a mask marks a wildcard byte at the matching
    position of the pattern; `offset` is added to the pattern hit address.
    """
    min_version: str
    pattern: str
    mask: str
    offset: int
    md5_pattern: str = ""
    md5_mask: str = ""
    md5_offset: int = 0

class WeChatKeyFetcher:
    """Drives the native `wx_key` hook to read keys out of a live Weixin.exe."""

    def __init__(self):
        # Process image name to kill/relaunch/attach to.
        self.process_name = "Weixin.exe"
        # Overall budget for the user to complete the login in the spawned client.
        self.timeout_seconds = 60

    @staticmethod
    def _hex_array_to_str(hex_array: List[int]) -> str:
        """Format byte values as the 'AA BB CC' signature string wx_key expects."""
        return " ".join([f"{b:02X}" for b in hex_array])

    def _get_hook_config(self, version_str: str) -> Optional[HookConfig]:
        """Pick the signature set matching `version_str`.

        Returns None only when the version string cannot be parsed; every
        parseable version falls into one of the three ranges below.
        """
        try:
            v_curr = pkg_version.parse(version_str)
        except Exception as e:
            logger.error(f"版本号解析失败: {version_str} || {e}")
            return None

        if v_curr > pkg_version.parse("4.1.6.14"):
            return HookConfig(
                min_version=">4.1.6.14",
                pattern=self._hex_array_to_str([
                    0x24, 0x50, 0x48, 0xC7, 0x45, 0x00, 0xFE, 0xFF, 0xFF, 0xFF,
                    0x44, 0x89, 0xCF, 0x44, 0x89, 0xC3, 0x49, 0x89, 0xD6, 0x48,
                    0x89, 0xCE, 0x48, 0x89
                ]),
                mask="xxxxxxxxxxxxxxxxxxxxxxxx",
                offset=-3,
                md5_pattern="48 8D 4D 00 48 89 4D B0 48 89 45 B8 48 8D 7D 00 48 8D 55 B0 48 89 F9",
                md5_mask="xxx?xxxxxxxxxxx?xxxxxxx",
                md5_offset=4
            )

        if pkg_version.parse("4.1.4") <= v_curr <= pkg_version.parse("4.1.6.14"):
            return HookConfig(
                min_version="4.1.4-4.1.6.14",
                pattern=self._hex_array_to_str([
                    0x24, 0x08, 0x48, 0x89, 0x6c, 0x24, 0x10, 0x48, 0x89, 0x74,
                    0x00, 0x18, 0x48, 0x89, 0x7c, 0x00, 0x20, 0x41, 0x56, 0x48,
                    0x83, 0xec, 0x50, 0x41
                ]),
                mask="xxxxxxxxxx?xxxx?xxxxxxxx",
                offset=-3,
                md5_pattern="48 8D 4D 00 48 89 4D B0 48 89 45 B8 48 8D 7D 00 48 8D 55 B0 48 89 F9",
                md5_mask="xxx?xxxxxxxxxxx?xxxxxxx",
                md5_offset=4
            )

        if v_curr < pkg_version.parse("4.1.4"):
            # NOTE(review): the image-key offsets below are untested on
            # pre-4.1.4 builds and may be wrong (translated from original note).
            return HookConfig(
                min_version="<4.1.4",
                pattern=self._hex_array_to_str([
                    0x24, 0x50, 0x48, 0xc7, 0x45, 0x00, 0xfe, 0xff, 0xff, 0xff,
                    0x44, 0x89, 0xcf, 0x44, 0x89, 0xc3, 0x49, 0x89, 0xd6, 0x48,
                    0x89, 0xce, 0x48, 0x89
                ]),
                mask="xxxxxxxxxxxxxxxxxxxxxxxx",
                offset=-15,  # -0xf
                md5_pattern="48 8D 4D 00 48 89 4D B0 48 89 45 B8 48 8D 7D 00 48 8D 55 B0 48 89 F9",
                md5_mask="xxx?xxxxxxxxxxx?xxxxxxx",
                md5_offset=4
            )

        return None

    def kill_wechat(self):
        """Terminate every running WeChat process so a fresh one can be hooked."""
        killed = False
        for proc in psutil.process_iter(['pid', 'name']):
            try:
                if proc.info['name'] == self.process_name:
                    logger.info(f"Killing WeChat process: {proc.info['pid']}")
                    proc.terminate()
                    killed = True
            except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
                pass

        if killed:
            time.sleep(1)  # give terminated processes a moment to fully exit

    def launch_wechat(self, exe_path: str) -> int:
        """Start WeChat and return the PID of the (newest) Weixin.exe process.

        The spawned process may only be a launcher, so after a short wait the
        newest process named `Weixin.exe` is preferred over `Popen.pid`.

        Raises:
            RuntimeError: if the executable cannot be started.
        """
        try:

            process = subprocess.Popen(exe_path)

            time.sleep(2)  # let the launcher spawn the actual client process
            candidates = []
            for proc in psutil.process_iter(['pid', 'name', 'create_time']):
                if proc.info['name'] == self.process_name:
                    candidates.append(proc)

            if candidates:
                # The most recently created process is most likely ours.
                candidates.sort(key=lambda x: x.info['create_time'], reverse=True)
                target_pid = candidates[0].info['pid']
                return target_pid

            return process.pid

        except Exception as e:
            logger.error(f"启动微信失败: {e}")
            raise RuntimeError(f"无法启动微信: {e}")

    def fetch_key(self) -> dict:
        """Restart WeChat under the native hook and poll both keys.

        Returns:
            dict with `db_key` (database key), `aes_key` and `xor_key`
            (media keys; may be None when the md5 payload is absent).

        Raises:
            RuntimeError: wx_key missing, install not found, hook init failed.
            TimeoutError: user did not finish logging in within the budget.
        """
        if wx_key is None:
            raise RuntimeError("wx_key 模块未安装或加载失败")

        install_info = detect_wechat_installation()
        exe_path = install_info.get('wechat_exe_path')
        version = install_info.get('wechat_version')

        if not exe_path or not version:
            raise RuntimeError("无法自动定位微信安装路径或版本")

        logger.info(f"Detect WeChat: {version} at {exe_path}")

        config = self._get_hook_config(version)
        if not config:
            # NOTE(review): this branch is only reachable when the version
            # string fails to parse — the message text suggests otherwise.
            raise RuntimeError(f"原生获取失败:当前微信版本 ({version}) 过低,为保证稳定性,仅支持 4.1.5 及以上版本使用原生获取。")

        self.kill_wechat()
        pid = self.launch_wechat(exe_path)
        logger.info(f"WeChat launched, PID: {pid}")

        if not wx_key.initialize_hook(pid, "", config.pattern, config.mask, config.offset,
                                      config.md5_pattern, config.md5_mask, config.md5_offset):
            err = wx_key.get_last_error_msg()
            raise RuntimeError(f"Hook初始化失败: {err}")

        start_time = time.time()
        found_db_key = None
        found_md5_data = None

        try:
            while True:
                if time.time() - start_time > self.timeout_seconds:
                    raise TimeoutError("获取密钥超时 (60s),请确保在弹出的微信中完成登录。")

                # Non-blocking poll: the hook pushes keys as it observes them.
                key_data = wx_key.poll_key_data()
                if key_data:
                    if 'key' in key_data:
                        found_db_key = key_data['key']
                    if 'md5' in key_data:
                        found_md5_data = key_data['md5']

                if found_db_key and found_md5_data:
                    break

                # Drain any queued status messages; level 2 means error.
                while True:
                    msg, level = wx_key.get_status_message()
                    if msg is None:
                        break
                    if level == 2:
                        logger.error(f"[Hook Error] {msg}")

                time.sleep(0.1)
        finally:
            logger.info("Cleaning up hook...")
            wx_key.cleanup_hook()

        aes_key = None  # media AES key (hex string) parsed from the md5 payload
        xor_key = None

        # The md5 hook reports "aes|xor_decimal"; normalize xor to 0xNN form.
        if found_md5_data and "|" in found_md5_data:
            aes_key, xor_key_dec = found_md5_data.split("|")
            xor_key = f"0x{int(xor_key_dec):02X}"

        return {
            "db_key": found_db_key,
            "aes_key": aes_key,
            "xor_key": xor_key
        }

def get_db_key_workflow():
    """Convenience wrapper: run the full hook workflow and return the keys."""
    fetcher = WeChatKeyFetcher()
    return fetcher.fetch_key()


# ============================== Image-key helpers =====================================

def get_wechat_internal_global_config(wx_dir: Path, file_name1: str) -> bytes:
    """Read `all_users/config/<file_name1>` from the xwechat_files tree.

    `wx_dir` is the per-account wxid directory; its parent is the
    xwechat_files root that also holds the shared `all_users` folder.

    Raises:
        FileNotFoundError: when the expected config file is missing.
    """
    xwechat_files_root = wx_dir.parent
    target_path = os.path.join(xwechat_files_root, "all_users", "config", file_name1)
    if not os.path.exists(target_path):
        raise FileNotFoundError(f"找不到配置文件: {target_path},请确认微信数据目录结构是否完整")
    return Path(target_path).read_bytes()
async def fetch_and_save_remote_keys(account: Optional[str] = None) -> Dict[str, Any]:
    """Fetch fallback image keys for `account` from the cloud key service.

    Uploads the account's `global_config` + `global_config.crc` blobs to the
    remote API, normalizes the returned xor key to `0xNN` form, persists both
    keys in the local key store, and returns a summary dict.

    Args:
        account: decrypted-account directory name; defaults to the first one.

    Returns:
        dict with `wxid`, `xor_key`, `aes_key`, `nick_name`.

    Raises:
        RuntimeError: on local file read failure or remote API failure.
    """
    account_dir = _resolve_account_dir(account)
    wx_id_dir = _resolve_account_wxid_dir(account_dir)
    wxid = wx_id_dir.name

    url = "https://view.free.c3o.re/api/key"
    data = {"weixinIDFolder": wxid}

    logger.info(f"正在为账号 {wxid} 获取云端备选图片密钥...")

    try:
        blob1_bytes = get_wechat_internal_global_config(wx_id_dir, file_name1="global_config")
        blob2_bytes = get_wechat_internal_global_config(wx_id_dir, file_name1="global_config.crc")
    except Exception as e:
        raise RuntimeError(f"读取微信内部文件失败: {e}")

    files = {
        'fileBytes': ('file', blob1_bytes, 'application/octet-stream'),
        'crcBytes': ('file.crc', blob2_bytes, 'application/octet-stream'),
    }

    async with httpx.AsyncClient(timeout=30) as client:
        logger.info("向云端 API 发送请求...")
        response = await client.post(url, data=data, files=files)

    if response.status_code != 200:
        raise RuntimeError(f"云端服务器错误: {response.status_code} - {response.text[:100]}")

    config = response.json()
    if not config:
        raise RuntimeError("云端解析失败: 返回数据为空")

    # The new API may use either camelCase or snake_case field names.
    xor_raw = str(config.get("xorKey", config.get("xor_key", "")))
    aes_val = str(config.get("aesKey", config.get("aes_key", "")))

    # Normalize the xor key to canonical 0xNN form; keep the raw string when
    # it is not numeric.  (Fix: the original used a bare `except:`, which
    # would also swallow KeyboardInterrupt/SystemExit.)
    try:
        if xor_raw.startswith("0x"):
            xor_int = int(xor_raw, 16)
        else:
            xor_int = int(xor_raw)
        xor_hex_str = f"0x{xor_int:02X}"
    except (ValueError, TypeError):
        xor_hex_str = xor_raw

    upsert_account_keys_in_store(
        account=wxid,
        image_xor_key=xor_hex_str,
        image_aes_key=aes_val
    )

    return {
        "wxid": wxid,
        "xor_key": xor_hex_str,
        "aes_key": aes_val,
        "nick_name": config.get("nickName", config.get("nick_name", ""))
    }


# ---- key_store.py ----------------------------------------------------------

def remove_account_keys_from_store(account: str) -> bool:
    """Delete the stored keys for `account`; return True when removed.

    Returns False for a blank account name, a missing entry, or a failed
    atomic write (best-effort persistence, mirroring the upsert helper).
    """
    account = str(account or "").strip()
    if not account:
        return False

    store = load_account_keys_store()
    if account not in store:
        return False

    try:
        store.pop(account, None)
        _atomic_write_json(_KEY_STORE_PATH, store)
        return True
    except Exception:
        return False
WECHAT_TOOL_LOG_LEVEL=DEBUG) + env_level = str(os.environ.get("WECHAT_TOOL_LOG_LEVEL", "") or "").strip() + if env_level: + log_level = env_level + # 创建日志目录 now = datetime.now() - log_dir = Path("output/logs") / str(now.year) / f"{now.month:02d}" / f"{now.day:02d}" + from .app_paths import get_output_dir + + log_dir = get_output_dir() / "logs" / str(now.year) / f"{now.month:02d}" / f"{now.day:02d}" log_dir.mkdir(parents=True, exist_ok=True) # 设置日志文件名 @@ -71,6 +79,10 @@ def setup_logging(self, log_level: str = "INFO"): root_logger = logging.getLogger() for handler in root_logger.handlers[:]: root_logger.removeHandler(handler) + try: + handler.close() + except Exception: + pass # 配置日志格式 # 文件格式(无颜色) @@ -88,47 +100,76 @@ def setup_logging(self, log_level: str = "INFO"): # 文件处理器 file_handler = logging.FileHandler(self.log_file, encoding='utf-8') file_handler.setFormatter(file_formatter) - file_handler.setLevel(getattr(logging, log_level.upper())) + level = getattr(logging, str(log_level or "INFO").upper(), logging.INFO) + file_handler.setLevel(level) # 控制台处理器 console_handler = logging.StreamHandler(sys.stdout) console_handler.setFormatter(console_formatter) - console_handler.setLevel(getattr(logging, log_level.upper())) + console_handler.setLevel(level) # 配置根日志器 - root_logger.setLevel(getattr(logging, log_level.upper())) + root_logger.setLevel(level) root_logger.addHandler(file_handler) root_logger.addHandler(console_handler) # 只为uvicorn日志器添加文件处理器,保持其原有的控制台处理器(带颜色) uvicorn_logger = logging.getLogger("uvicorn") + for handler in uvicorn_logger.handlers[:]: + if isinstance(handler, logging.FileHandler): + uvicorn_logger.removeHandler(handler) + try: + handler.close() + except Exception: + pass uvicorn_logger.addHandler(file_handler) - uvicorn_logger.setLevel(getattr(logging, log_level.upper())) + uvicorn_logger.setLevel(level) # 只为uvicorn.access日志器添加文件处理器 uvicorn_access_logger = logging.getLogger("uvicorn.access") + for handler in uvicorn_access_logger.handlers[:]: + if 
isinstance(handler, logging.FileHandler): + uvicorn_access_logger.removeHandler(handler) + try: + handler.close() + except Exception: + pass uvicorn_access_logger.addHandler(file_handler) - uvicorn_access_logger.setLevel(getattr(logging, log_level.upper())) + uvicorn_access_logger.setLevel(level) # 只为uvicorn.error日志器添加文件处理器 uvicorn_error_logger = logging.getLogger("uvicorn.error") + for handler in uvicorn_error_logger.handlers[:]: + if isinstance(handler, logging.FileHandler): + uvicorn_error_logger.removeHandler(handler) + try: + handler.close() + except Exception: + pass uvicorn_error_logger.addHandler(file_handler) - uvicorn_error_logger.setLevel(getattr(logging, log_level.upper())) + uvicorn_error_logger.setLevel(level) # 配置FastAPI日志器 fastapi_logger = logging.getLogger("fastapi") - fastapi_logger.handlers = [] + for handler in fastapi_logger.handlers[:]: + fastapi_logger.removeHandler(handler) + try: + handler.close() + except Exception: + pass fastapi_logger.addHandler(file_handler) fastapi_logger.addHandler(console_handler) - fastapi_logger.setLevel(getattr(logging, log_level.upper())) + fastapi_logger.setLevel(level) # 记录初始化信息 logger = logging.getLogger(__name__) logger.info("=" * 60) logger.info("微信解密工具日志系统初始化完成") logger.info(f"日志文件: {self.log_file}") - logger.info(f"日志级别: {log_level}") + logger.info(f"日志级别: {logging.getLevelName(level)}") logger.info("=" * 60) + + WeChatLogger._initialized = True return self.log_file @@ -138,6 +179,8 @@ def get_logger(self, name: str) -> logging.Logger: def get_log_file_path(self) -> Path: """获取当前日志文件路径""" + if not hasattr(self, "log_file"): + self.setup_logging() return self.log_file @@ -150,10 +193,14 @@ def setup_logging(log_level: str = "INFO") -> Path: def get_logger(name: str) -> logging.Logger: """获取日志器的便捷函数""" logger_manager = WeChatLogger() + if not WeChatLogger._initialized: + logger_manager.setup_logging() return logger_manager.get_logger(name) def get_log_file_path() -> Path: """获取当前日志文件路径的便捷函数""" 
logger_manager = WeChatLogger() + if not WeChatLogger._initialized: + logger_manager.setup_logging() return logger_manager.get_log_file_path() diff --git a/src/wechat_decrypt_tool/media_helpers.py b/src/wechat_decrypt_tool/media_helpers.py index 59ed9c8..3b247b9 100644 --- a/src/wechat_decrypt_tool/media_helpers.py +++ b/src/wechat_decrypt_tool/media_helpers.py @@ -23,17 +23,17 @@ # 运行时输出目录(桌面端可通过 WECHAT_TOOL_DATA_DIR 指向可写目录) -_OUTPUT_DATABASES_DIR = get_output_databases_dir() _PACKAGE_ROOT = Path(__file__).resolve().parent def _list_decrypted_accounts() -> list[str]: """列出已解密输出的账号目录名(仅保留包含 session.db + contact.db 的账号)""" - if not _OUTPUT_DATABASES_DIR.exists(): + output_db_dir = get_output_databases_dir() + if not output_db_dir.exists(): return [] accounts: list[str] = [] - for p in _OUTPUT_DATABASES_DIR.iterdir(): + for p in output_db_dir.iterdir(): if not p.is_dir(): continue if (p / "session.db").exists() and (p / "contact.db").exists(): @@ -45,6 +45,7 @@ def _list_decrypted_accounts() -> list[str]: def _resolve_account_dir(account: Optional[str]) -> Path: """解析账号目录,并进行路径安全校验(防止路径穿越)""" + output_db_dir = get_output_databases_dir() accounts = _list_decrypted_accounts() if not accounts: raise HTTPException( @@ -53,8 +54,8 @@ def _resolve_account_dir(account: Optional[str]) -> Path: ) selected = account or accounts[0] - base = _OUTPUT_DATABASES_DIR.resolve() - candidate = (_OUTPUT_DATABASES_DIR / selected).resolve() + base = output_db_dir.resolve() + candidate = (output_db_dir / selected).resolve() if candidate != base and base not in candidate.parents: raise HTTPException(status_code=400, detail="Invalid account path.") @@ -1963,6 +1964,114 @@ def _convert_silk_to_wav(silk_data: bytes) -> bytes: return silk_data +def _looks_like_mp3(data: bytes) -> bool: + if not data: + return False + if data.startswith(b"ID3"): + return True + return len(data) >= 2 and data[0] == 0xFF and (data[1] & 0xE0) == 0xE0 + + +@lru_cache(maxsize=1) +def _find_ffmpeg_executable() -> str: 
+ import shutil + + env_value = str(os.environ.get("WECHAT_TOOL_FFMPEG") or "").strip() + if env_value: + resolved = shutil.which(env_value) + if resolved: + return resolved + candidate = Path(env_value).expanduser() + if candidate.is_file(): + return str(candidate) + + return shutil.which("ffmpeg") or "" + + +def _convert_wav_to_mp3(wav_data: bytes) -> bytes: + import subprocess + import tempfile + + if not wav_data or not wav_data.startswith(b"RIFF"): + return b"" + + ffmpeg_exe = _find_ffmpeg_executable() + if not ffmpeg_exe: + return b"" + + try: + with tempfile.TemporaryDirectory() as tmp_dir: + tmp_path = Path(tmp_dir) + wav_path = tmp_path / "voice.wav" + mp3_path = tmp_path / "voice.mp3" + wav_path.write_bytes(wav_data) + + proc = subprocess.run( + [ + ffmpeg_exe, + "-y", + "-hide_banner", + "-loglevel", + "error", + "-i", + str(wav_path), + "-vn", + "-codec:a", + "libmp3lame", + "-q:a", + "4", + str(mp3_path), + ], + check=False, + capture_output=True, + ) + if proc.returncode != 0 or not mp3_path.exists(): + err = proc.stderr.decode("utf-8", errors="ignore").strip() + if err: + logger.warning(f"WAV to MP3 conversion failed: {err}") + return b"" + + mp3_data = mp3_path.read_bytes() + if _looks_like_mp3(mp3_data): + return mp3_data + except Exception as e: + logger.warning(f"WAV to MP3 conversion failed: {e}") + + return b"" + + +def _convert_silk_to_browser_audio( + silk_data: bytes, + *, + preferred_format: str = "mp3", +) -> tuple[bytes, str, str]: + """Convert SILK audio to a browser-friendly format. + + Returns `(payload, ext, media_type)`. 
+ Preference order: + 1) MP3 if ffmpeg is available + 2) WAV if SILK decoding succeeds + 3) original SILK bytes as a last-resort fallback + """ + + data = bytes(silk_data or b"") + if not data: + return b"", "silk", "audio/silk" + + if _looks_like_mp3(data): + return data, "mp3", "audio/mpeg" + + wav_data = data if data.startswith(b"RIFF") else _convert_silk_to_wav(data) + if wav_data.startswith(b"RIFF"): + if str(preferred_format or "").strip().lower() == "mp3": + mp3_data = _convert_wav_to_mp3(wav_data) + if mp3_data: + return mp3_data, "mp3", "audio/mpeg" + return wav_data, "wav", "audio/wav" + + return data, "silk", "audio/silk" + + def _resolve_media_path_for_kind( account_dir: Path, kind: str, diff --git a/src/wechat_decrypt_tool/native/msvcp140.dll b/src/wechat_decrypt_tool/native/msvcp140.dll new file mode 100644 index 0000000..554d2ff Binary files /dev/null and b/src/wechat_decrypt_tool/native/msvcp140.dll differ diff --git a/src/wechat_decrypt_tool/native/msvcp140_1.dll b/src/wechat_decrypt_tool/native/msvcp140_1.dll new file mode 100644 index 0000000..184514f Binary files /dev/null and b/src/wechat_decrypt_tool/native/msvcp140_1.dll differ diff --git a/src/wechat_decrypt_tool/native/vcruntime140.dll b/src/wechat_decrypt_tool/native/vcruntime140.dll new file mode 100644 index 0000000..950b587 Binary files /dev/null and b/src/wechat_decrypt_tool/native/vcruntime140.dll differ diff --git a/src/wechat_decrypt_tool/native/vcruntime140_1.dll b/src/wechat_decrypt_tool/native/vcruntime140_1.dll new file mode 100644 index 0000000..a481970 Binary files /dev/null and b/src/wechat_decrypt_tool/native/vcruntime140_1.dll differ diff --git a/src/wechat_decrypt_tool/native/wcdb_api.dll b/src/wechat_decrypt_tool/native/wcdb_api.dll index 8f5bdb3..7bbb0f4 100644 Binary files a/src/wechat_decrypt_tool/native/wcdb_api.dll and b/src/wechat_decrypt_tool/native/wcdb_api.dll differ diff --git a/src/wechat_decrypt_tool/path_fix.py b/src/wechat_decrypt_tool/path_fix.py index 
c0ddf65..e05ff08 100644 --- a/src/wechat_decrypt_tool/path_fix.py +++ b/src/wechat_decrypt_tool/path_fix.py @@ -32,10 +32,8 @@ def _is_absolute_path(self, path: str) -> bool: def _validate_paths_in_json(self, json_data: dict) -> Optional[str]: """验证JSON中的路径,返回错误信息(如果有)""" logger.info(f"开始验证路径,JSON数据: {json_data}") - # 检查db_storage_path字段(现在是必需的) - if 'db_storage_path' not in json_data: - return "缺少必需的db_storage_path参数,请提供具体的数据库存储路径。" - + # 仅在提供 db_storage_path 时进行校验(例如 /api/decrypt)。 + # 其它 API 的 JSON payload 不一定包含路径字段,不应强制要求。 if 'db_storage_path' in json_data: path = json_data['db_storage_path'] @@ -115,11 +113,16 @@ def _validate_paths_in_json(self, json_data: dict) -> Optional[str]: async def body(self) -> bytes: """重写body方法,预处理JSON中的路径问题""" + cached = getattr(self.state, "_pathfix_body_bytes", None) + if isinstance(cached, (bytes, bytearray)): + return bytes(cached) + body = await super().body() # 只处理JSON请求 content_type = self.headers.get("content-type", "") if "application/json" not in content_type: + self.state._pathfix_body_bytes = body return body try: @@ -134,6 +137,7 @@ async def body(self) -> bytes: logger.info(f"检测到路径错误: {path_error}") # 我们将错误信息存储在请求中,稍后在路由处理器中检查 self.state.path_validation_error = path_error + self.state._pathfix_body_bytes = body return body except json.JSONDecodeError as e: # JSON格式错误,继续尝试修复 @@ -169,17 +173,30 @@ def fix_path(match): if path_error: logger.info(f"修复后检测到路径错误: {path_error}") self.state.path_validation_error = path_error - return fixed_body_str.encode('utf-8') + fixed_bytes = fixed_body_str.encode('utf-8') + self.state._pathfix_body_bytes = fixed_bytes + try: + self._body = fixed_bytes # type: ignore[attr-defined] + except Exception: + pass + return fixed_bytes else: logger.info(f"修复后路径验证通过") except json.JSONDecodeError as e: logger.warning(f"修复后JSON仍然解析失败: {e}") - return fixed_body_str.encode('utf-8') + fixed_bytes = fixed_body_str.encode('utf-8') + self.state._pathfix_body_bytes = fixed_bytes + try: + self._body = 
fixed_bytes # type: ignore[attr-defined] + except Exception: + pass + return fixed_bytes except Exception as e: # 如果处理失败,返回原始body logger.warning(f"JSON路径修复失败,使用原始请求体: {e}") + self.state._pathfix_body_bytes = body return body @@ -193,12 +210,17 @@ async def custom_route_handler(request: Request) -> any: # 将Request替换为我们的自定义Request custom_request = PathFixRequest(request.scope, request.receive) - # 检查是否有路径验证错误 - if hasattr(custom_request.state, 'path_validation_error'): - raise HTTPException( - status_code=400, - detail=custom_request.state.path_validation_error, - ) + # 仅对 JSON 请求预读 body,以触发路径修复/校验逻辑,并在发现错误时提前返回 400。 + try: + content_type = (custom_request.headers.get("content-type", "") or "").lower() + if "application/json" in content_type: + await custom_request.body() + except Exception: + pass + + path_err = getattr(custom_request.state, "path_validation_error", None) + if path_err: + raise HTTPException(status_code=400, detail=path_err) return await original_route_handler(custom_request) diff --git a/src/wechat_decrypt_tool/request_logging.py b/src/wechat_decrypt_tool/request_logging.py new file mode 100644 index 0000000..50178ee --- /dev/null +++ b/src/wechat_decrypt_tool/request_logging.py @@ -0,0 +1,131 @@ +from __future__ import annotations + +import json +from typing import Any + +from starlette.requests import Request +from starlette.responses import Response + + +def _stringify_detail(detail: Any) -> str: + if detail is None: + return "" + if isinstance(detail, str): + return detail.strip() + try: + return json.dumps(detail, ensure_ascii=False) + except Exception: + return str(detail).strip() + + +def _extract_response_detail(response: Response) -> str: + body = getattr(response, "body", None) + if body is None: + return "" + + try: + raw = body.tobytes() if isinstance(body, memoryview) else body + except Exception: + raw = body + + if isinstance(raw, bytes): + text = raw.decode("utf-8", errors="ignore").strip() + else: + text = str(raw).strip() + if not 
text: + return "" + + content_type = str(response.headers.get("content-type") or "").lower() + if "json" not in content_type: + return "" + + try: + payload = json.loads(text) + except Exception: + return "" + + if not isinstance(payload, dict): + return "" + return _stringify_detail(payload.get("detail")) + + +async def _buffer_response_body(response: Response) -> tuple[Response, bytes]: + body = getattr(response, "body", None) + if body is not None: + try: + raw = body.tobytes() if isinstance(body, memoryview) else body + except Exception: + raw = body + if isinstance(raw, bytes): + return response, raw + if isinstance(raw, str): + return response, raw.encode("utf-8") + return response, bytes(raw) + + chunks: list[bytes] = [] + body_iterator = getattr(response, "body_iterator", None) + if body_iterator is not None: + async for chunk in body_iterator: + if isinstance(chunk, memoryview): + chunks.append(chunk.tobytes()) + elif isinstance(chunk, bytes): + chunks.append(chunk) + else: + chunks.append(str(chunk).encode("utf-8")) + + body_bytes = b"".join(chunks) + rebuilt = Response( + content=body_bytes, + status_code=response.status_code, + headers=dict(response.headers), + media_type=response.media_type, + background=response.background, + ) + return rebuilt, body_bytes + + +def _extract_response_detail_from_body(response: Response, body: bytes) -> str: + if not body: + return "" + + try: + text = body.decode("utf-8", errors="ignore").strip() + except Exception: + return "" + if not text: + return "" + + content_type = str(response.headers.get("content-type") or "").lower() + if "json" not in content_type: + return "" + + try: + payload = json.loads(text) + except Exception: + return "" + + if not isinstance(payload, dict): + return "" + return _stringify_detail(payload.get("detail")) + + +async def log_server_errors_middleware(logger, request: Request, call_next): + method = str(request.method or "").upper() or "GET" + path = str(request.url.path or "").strip() or 
"/" + + try: + response = await call_next(request) + except Exception as exc: + logger.exception("[server-exception] method=%s path=%s error=%s", method, path, exc) + raise + + status = int(getattr(response, "status_code", 0) or 0) + if status >= 500: + response, body = await _buffer_response_body(response) + detail = _extract_response_detail_from_body(response, body) or _extract_response_detail(response) + if detail: + logger.error("[server-5xx] status=%s method=%s path=%s detail=%s", status, method, path, detail) + else: + logger.error("[server-5xx] status=%s method=%s path=%s", status, method, path) + + return response diff --git a/src/wechat_decrypt_tool/routers/admin.py b/src/wechat_decrypt_tool/routers/admin.py new file mode 100644 index 0000000..e947e3c --- /dev/null +++ b/src/wechat_decrypt_tool/routers/admin.py @@ -0,0 +1,283 @@ +from __future__ import annotations + +import asyncio +import ipaddress +import os +import socket +import subprocess +import sys +import time +from pathlib import Path + +import httpx +from fastapi import APIRouter, BackgroundTasks, HTTPException +from starlette.requests import Request + +from ..logging_config import get_log_file_path, get_logger +from ..path_fix import PathFixRoute +from ..runtime_settings import read_effective_backend_port, write_backend_port_env_file, write_backend_port_setting + + +router = APIRouter(route_class=PathFixRoute) +logger = get_logger(__name__) + +DEFAULT_BACKEND_PORT = 10392 +_PORT_CHANGE_IN_PROGRESS = False + + +def _format_host_for_url(host: str) -> str: + h = str(host or "").strip() or "127.0.0.1" + if ":" in h and not (h.startswith("[") and h.endswith("]")): + return f"[{h}]" + return h + + +def _get_backend_bind_host() -> str: + return str(os.environ.get("WECHAT_TOOL_HOST", "127.0.0.1") or "").strip() or "127.0.0.1" + + +def _get_backend_access_host() -> str: + host = _get_backend_bind_host() + if host in {"0.0.0.0", "::"}: + return "127.0.0.1" + return host + + +def 
def _is_loopback_client(request: Request) -> bool:
    """True when the request originates from the local machine.

    Accepts 127.0.0.0/8, ::1, IPv4-mapped loopback, and the literal hostname
    "localhost" (seen when the server is bound to a hostname, not an IP).
    """
    client = request.client
    host = str(getattr(client, "host", "") or "").strip()
    if not host:
        return False
    try:
        ip = ipaddress.ip_address(host)
        if ip.is_loopback:
            return True
        # e.g. "::ffff:127.0.0.1" arriving over a dual-stack socket.
        if isinstance(ip, ipaddress.IPv6Address) and ip.ipv4_mapped and ip.ipv4_mapped.is_loopback:
            return True
    except ValueError:
        if host.lower() == "localhost":
            return True
    return False


def _get_current_log_file_path() -> Path:
    """Return today's log file path, best-effort creating the dir/file."""
    log_file = Path(get_log_file_path())
    try:
        log_file.parent.mkdir(parents=True, exist_ok=True)
    except Exception:
        pass
    if not log_file.exists():
        try:
            log_file.touch(exist_ok=True)
        except Exception:
            pass
    return log_file


def _open_path_with_default_app(path: Path) -> None:
    """Open `path` with the OS default application (Windows/macOS/Linux)."""
    target = str(path)
    if os.name == "nt":
        # os.startfile only exists on Windows; resolve via getattr for linting.
        opener = getattr(os, "startfile", None)
        if opener is None:
            raise RuntimeError("当前系统不支持默认打开文件")
        opener(target)
        return

    if sys.platform == "darwin":
        subprocess.Popen(["open", target])
        return

    subprocess.Popen(["xdg-open", target])


def _is_port_available(port: int, host: str) -> bool:
    """Probe whether `host:port` can be bound (without SO_REUSEADDR)."""
    try:
        addr = (host, int(port))
        family = socket.AF_INET6 if ":" in host else socket.AF_INET
        with socket.socket(family, socket.SOCK_STREAM) as s:
            # Explicitly disable REUSEADDR so a port in TIME_WAIT still fails.
            s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 0)
            s.bind(addr)
            return True
    except Exception:
        return False


async def _wait_for_backend_ready(health_url: str, timeout_s: float = 30.0) -> bool:
    """Poll `health_url` until a non-5xx response arrives or `timeout_s` passes."""
    started = time.time()
    async with httpx.AsyncClient(timeout=1.0) as client:
        while time.time() - started < timeout_s:
            try:
                resp = await client.get(health_url)
                if resp.status_code < 500:
                    return True
            except Exception:
                pass
            await asyncio.sleep(0.3)
    return False


def _spawn_backend_process(next_port: int) -> subprocess.Popen:
    """Launch a sibling backend process configured to listen on `next_port`."""
    env = os.environ.copy()
    env["WECHAT_TOOL_PORT"] = str(int(next_port))
    env.setdefault("WECHAT_TOOL_HOST", _get_backend_bind_host())

    # Keep the same working directory so output paths remain consistent.
    # (When `WECHAT_TOOL_DATA_DIR` is not set, the app uses `Path.cwd()`.)
    cwd = os.getcwd()
    cwd_path = Path(cwd)

    # Ensure local imports work when running from source (repo root + src layout).
    src_dir = cwd_path / "src"
    try:
        existing_pp = str(env.get("PYTHONPATH", "") or "").strip()
        if src_dir.is_dir():
            env["PYTHONPATH"] = str(src_dir) if not existing_pp else f"{src_dir}{os.pathsep}{existing_pp}"
    except Exception:
        pass

    if getattr(sys, "frozen", False):
        # PyInstaller-style bundle: the executable itself is the backend.
        cmd = [sys.executable]
        spawn_cwd = cwd
    else:
        main_py = cwd_path / "main.py"
        if main_py.is_file():
            cmd = [sys.executable, str(main_py)]
            spawn_cwd = cwd
        else:
            cmd = [sys.executable, "-m", "wechat_decrypt_tool.backend_entry"]
            spawn_cwd = cwd

    return subprocess.Popen(cmd, cwd=spawn_cwd, env=env)


async def _exit_process_after(delay_s: float) -> None:
    """Hard-exit this process after `delay_s` seconds (used after handover)."""
    try:
        await asyncio.sleep(max(0.0, float(delay_s)))
    except Exception:
        pass
    # os._exit skips atexit/finally handlers on purpose: the replacement
    # backend is already serving and we must release the old port immediately.
    os._exit(0)  # noqa: S404


@router.get("/api/admin/log-file", summary="获取当前后端日志文件路径")
async def get_backend_log_file() -> dict:
    """Report the active log file path and whether it exists on disk."""
    log_file = _get_current_log_file_path()
    return {"path": str(log_file), "exists": log_file.exists()}


@router.post("/api/admin/log-file/open", summary="打开当前后端日志文件(仅允许本机访问)")
async def open_backend_log_file(request: Request) -> dict:
    """Open the log file with the OS default viewer; loopback clients only."""
    if not _is_loopback_client(request):
        raise HTTPException(status_code=403, detail="仅允许本机访问该接口")

    log_file = _get_current_log_file_path()
    try:
        _open_path_with_default_app(log_file)
    except Exception as e:
        logger.error("open_backend_log_file failed path=%s err=%s", log_file, e)
        raise HTTPException(status_code=500, detail=f"打开日志文件失败:{e}")
    return {"success": True, "path": str(log_file)}


@router.post("/api/admin/log-frontend-server-error", summary="记录前端感知到的服务器错误")
async def log_frontend_server_error(payload: dict) -> dict:
    """Persist a frontend-observed server error into the backend log."""
    data = payload if isinstance(payload, dict) else {}
    try:
        status = int(data.get("status"))
    except Exception:
        status = 0

    method = str(data.get("method") or "").strip().upper() or "GET"
    request_url = str(data.get("request_url") or "").strip()
    message = str(data.get("message") or "").strip()
    backend_detail = str(data.get("backend_detail") or "").strip()
    source = str(data.get("source") or "").strip()
    page_url = str(data.get("page_url") or "").strip()

    logger.error(
        "[frontend-server-error] status=%s method=%s request_url=%s message=%s backend_detail=%s source=%s page_url=%s",
        status,
        method,
        request_url,
        message,
        backend_detail,
        source,
        page_url,
    )
    return {"success": True, "path": str(_get_current_log_file_path())}


@router.get("/api/admin/port", summary="获取后端端口(用于前端设置页)")
async def get_backend_port() -> dict:
    """Return the effective backend port, where it came from, and the default."""
    port, source = read_effective_backend_port(default=DEFAULT_BACKEND_PORT)
    return {"port": port, "source": source, "default_port": DEFAULT_BACKEND_PORT}


@router.post("/api/admin/port", summary="修改后端端口并重启(仅允许本机访问)")
async def set_backend_port(payload: dict, request: Request, background_tasks: BackgroundTasks) -> dict:
    """Change the backend port: spawn a replacement process, verify it is
    healthy, persist the new port, then exit this process.

    Loopback clients only; guarded by a module-level in-progress flag so two
    concurrent switches cannot race.
    """
    if not _is_loopback_client(request):
        raise HTTPException(status_code=403, detail="仅允许本机访问该接口")

    global _PORT_CHANGE_IN_PROGRESS
    if _PORT_CHANGE_IN_PROGRESS:
        raise HTTPException(status_code=409, detail="端口切换中,请稍后重试")

    raw = payload.get("port") if isinstance(payload, dict) else None
    try:
        next_port = int(raw)
    except Exception:
        raise HTTPException(status_code=400, detail="端口无效:请输入 1-65535 的整数")
    if next_port < 1 or next_port > 65535:
        raise HTTPException(status_code=400, detail="端口无效:请输入 1-65535 的整数")

    current_port, _ = read_effective_backend_port(default=DEFAULT_BACKEND_PORT)
    if next_port == int(current_port):
        # No restart needed — just make the current port explicit/persistent.
        write_backend_port_setting(next_port)
        env_file = write_backend_port_env_file(next_port)
        host = _format_host_for_url(_get_backend_access_host())
        return {
            "success": True,
            "changed": False,
            "port": next_port,
            "ui_url": f"http://{host}:{next_port}/",
            "env_file": str(env_file) if env_file else None,
        }

    bind_host = _get_backend_bind_host()
    if not _is_port_available(next_port, bind_host):
        raise HTTPException(status_code=409, detail=f"端口 {next_port} 已被占用,请换一个端口")

    proc = None
    _PORT_CHANGE_IN_PROGRESS = True
    try:
        try:
            proc = _spawn_backend_process(next_port)
        except Exception as e:
            raise HTTPException(status_code=500, detail=f"启动新后端进程失败:{e}")

        access_host = _get_backend_access_host()
        health_url = f"http://{_format_host_for_url(access_host)}:{next_port}/api/health"
        ok = await _wait_for_backend_ready(health_url, timeout_s=30.0)
        if not ok:
            # Best-effort teardown of the half-started replacement.
            try:
                if proc and proc.poll() is None:
                    proc.terminate()
            except Exception:
                pass
            raise HTTPException(status_code=500, detail=f"新端口启动超时:{health_url}")

        # Persist only after the new backend is confirmed ready.
        write_backend_port_setting(next_port)
        env_file = write_backend_port_env_file(next_port)

        # Give this response time to flush before the old process exits.
        background_tasks.add_task(_exit_process_after, 0.2)

        host = _format_host_for_url(access_host)
        return {
            "success": True,
            "changed": True,
            "port": next_port,
            "ui_url": f"http://{host}:{next_port}/",
            "env_file": str(env_file) if env_file else None,
        }
    finally:
        _PORT_CHANGE_IN_PROGRESS = False
_extract_group_preview_sender_username, + _replace_preview_sender_prefix, _lookup_resource_md5, _normalize_xml_url, _parse_app_message, + _parse_location_message, + _parse_system_message_content, _parse_pat_message, - _pick_avatar_url, _pick_display_name, _query_head_image_usernames, _quote_ident, @@ -58,7 +66,10 @@ _split_group_sender_prefix, _to_char_token_text, ) -from ..media_helpers import _try_find_decrypted_resource +from ..media_helpers import _resolve_account_db_storage_dir, _try_find_decrypted_resource +from .. import chat_edit_store +from ..app_paths import get_output_dir +from ..key_store import remove_account_keys_from_store from ..path_fix import PathFixRoute from ..session_last_message import ( build_session_last_message_table, @@ -68,8 +79,14 @@ from ..wcdb_realtime import ( WCDBRealtimeError, WCDB_REALTIME, + exec_query as _wcdb_exec_query, + get_avatar_urls as _wcdb_get_avatar_urls, + get_display_names as _wcdb_get_display_names, + get_group_members as _wcdb_get_group_members, + get_group_nicknames as _wcdb_get_group_nicknames, get_messages as _wcdb_get_messages, get_sessions as _wcdb_get_sessions, + update_message as _wcdb_update_message, ) logger = get_logger(__name__) @@ -83,6 +100,420 @@ _REALTIME_SYNC_ALL_LOCKS: dict[str, threading.Lock] = {} +def _is_hex_md5(value: Any) -> bool: + s = str(value or "").strip().lower() + return len(s) == 32 and all(c in "0123456789abcdef" for c in s) + + +_HEX_RE = re.compile(r"^[0-9a-fA-F]+$") + + +def _hex_to_bytes(value: str) -> Optional[bytes]: + s = str(value or "").strip() + if not s.startswith("0x"): + return None + hex_part = s[2:] + if (not hex_part) or (len(hex_part) % 2 != 0) or (_HEX_RE.match(hex_part) is None): + return None + try: + return bytes.fromhex(hex_part) + except Exception: + return None + + +def _bytes_to_hex(value: bytes) -> str: + return "0x" + value.hex() + + +def _is_mostly_printable_text(s: str) -> bool: + if not s: + return False + sample = s[:600] + if not sample: + return False 
+ printable = sum(1 for ch in sample if ch.isprintable() or ch in {"\n", "\r", "\t"}) + return (printable / len(sample)) >= 0.85 + + +def _jsonify_db_value(key: str, value: Any) -> Any: + """Convert sqlite row values into JSON-friendly values (best-effort).""" + if value is None: + return None + if isinstance(value, memoryview): + value = value.tobytes() + if isinstance(value, (bytes, bytearray)): + b = bytes(value) + k = str(key or "").strip().lower() + if k in {"compress_content", "packed_info_data", "packed_info", "packedinfo", "packedinfodata"} or k.endswith( + "_data" + ): + return _bytes_to_hex(b) + if not b: + return "" + try: + s = b.decode("utf-8") + if _is_mostly_printable_text(s): + return s + except Exception: + pass + return _bytes_to_hex(b) + if isinstance(value, (int, float, bool, str)): + return value + try: + return str(value) + except Exception: + return None + + +def _sql_literal(value: Any) -> str: + """Build a SQLite literal for WCDB exec_query (no parameters supported).""" + if value is None: + return "NULL" + if isinstance(value, bool): + return "1" if value else "0" + if isinstance(value, (int, float)): + try: + return str(int(value)) + except Exception: + return "0" + if isinstance(value, memoryview): + value = value.tobytes() + if isinstance(value, (bytes, bytearray)): + b = bytes(value) + return "X'" + b.hex() + "'" + s = str(value) + return "'" + s.replace("'", "''") + "'" + + +def _normalize_edit_value(col: str, value: Any, *, from_snapshot: bool = False) -> Any: + c = str(col or "").strip().lower() + if value is None: + return None + if isinstance(value, str): + # Allow editing BLOBs via 0x... hex strings (unsafe only, enforced elsewhere). + b = _hex_to_bytes(value) + if b is not None: + return b + + # Some WCDB exec_query snapshots return raw BLOBs as bare hex strings (without 0x prefix). + # When restoring from snapshots (reset), convert them back to bytes so SQLite stores them as BLOB again. 
+ want_blob_hex = ( + c in {"packed_info_data", "packed_info", "packedinfo", "packedinfodata"} + or c.endswith("_data") + or c in {"source"} + or (from_snapshot and c in {"message_content", "compress_content"}) + ) + if want_blob_hex: + s = value.strip() + # Heuristic for message_content: avoid converting legitimate short "hex-like" text messages. + min_len = 0 + if c == "message_content": + s_lower = s.lower() + # zstd frame magic: 28 b5 2f fd + if s_lower.startswith("28b52ffd"): + min_len = 16 + else: + min_len = 64 + if s and (len(s) >= min_len) and (len(s) % 2 == 0) and (_HEX_RE.fullmatch(s) is not None): + try: + return bytes.fromhex(s) + except Exception: + return value + if c in { + "local_id", + "create_time", + "server_id", + "local_type", + "sort_seq", + } or c.startswith("wcdb_ct_"): + s = value.strip() + if s and re.fullmatch(r"-?\d+", s): + try: + return int(s) + except Exception: + return value + return value + + +def _is_safe_edit_column(col: str, *, unsafe: bool) -> bool: + if unsafe: + return True + c = str(col or "").strip().lower() + if not c: + return False + if c == "local_id": + return False + if c.startswith("wcdb_ct_"): + return False + if c in {"compress_content", "packed_info_data", "packed_info"}: + return False + return True + + +def _pb_read_varint(buf: bytes, i: int) -> tuple[int, int]: + """Read a protobuf varint from buf starting at i, returning (value, next_index).""" + x = 0 + shift = 0 + while i < len(buf) and shift < 64: + b = buf[i] + i += 1 + x |= (b & 0x7F) << shift + if (b & 0x80) == 0: + return x, i + shift += 7 + raise ValueError("Invalid varint.") + + +def _pb_write_varint(x: int) -> bytes: + """Write a protobuf varint for a non-negative integer.""" + n = int(x or 0) + if n < 0: + raise ValueError("Negative varint.") + out = bytearray() + while True: + b = n & 0x7F + n >>= 7 + if n: + out.append(b | 0x80) + else: + out.append(b) + break + return bytes(out) + + +def _swap_packed_info_from_to(packed: bytes | bytearray | 
memoryview) -> tuple[bytes, int, int]: + """Swap protobuf field #1 and #2 varint values in packed_info_data. + + Empirically, WeChat uses packed_info_data as a tiny protobuf containing at least: + - field 1: fromId (Name2Id rowid) + - field 2: toId (Name2Id rowid) + + Swapping these flips message direction in the WeChat client. + Returns (new_bytes, old_field1, old_field2). + """ + if isinstance(packed, memoryview): + data = packed.tobytes() + else: + data = bytes(packed) + if not data: + raise ValueError("Empty packed_info_data.") + + # Pass 1: find the first occurrences of field 1/2 varints. + i = 0 + v1: Optional[int] = None + v2: Optional[int] = None + while i < len(data): + key, i = _pb_read_varint(data, i) + field_num = key >> 3 + wire = key & 7 + if wire == 0: + val, i = _pb_read_varint(data, i) + if field_num == 1 and v1 is None: + v1 = int(val) + elif field_num == 2 and v2 is None: + v2 = int(val) + continue + if wire == 1: + i += 8 + continue + if wire == 2: + ln, i = _pb_read_varint(data, i) + i += int(ln) + continue + if wire == 5: + i += 4 + continue + raise ValueError(f"Unsupported wire type: {wire}") + + if v1 is None or v2 is None: + raise ValueError("packed_info_data does not contain field #1 and #2 varints.") + + # Pass 2: rebuild and swap values for all field 1/2 varints. 
+ i = 0 + out = bytearray() + while i < len(data): + key, i2 = _pb_read_varint(data, i) + field_num = key >> 3 + wire = key & 7 + out += _pb_write_varint(key) + i = i2 + + if wire == 0: + val, i = _pb_read_varint(data, i) + if field_num == 1: + val = int(v2) + elif field_num == 2: + val = int(v1) + out += _pb_write_varint(int(val)) + continue + if wire == 1: + out += data[i : i + 8] + i += 8 + continue + if wire == 2: + ln, i = _pb_read_varint(data, i) + out += _pb_write_varint(int(ln)) + out += data[i : i + int(ln)] + i += int(ln) + continue + if wire == 5: + out += data[i : i + 4] + i += 4 + continue + raise ValueError(f"Unsupported wire type: {wire}") + + return bytes(out), int(v1), int(v2) + + +def _avatar_url_unified( + *, + account_dir: Path, + username: str, + local_avatar_usernames: set[str] | None = None, +) -> str: + u = str(username or "").strip() + if not u: + return "" + # Unified avatar entrypoint: backend decides local db vs remote fallback + cache. + return _build_avatar_url(str(account_dir.name or ""), u) + + +def _load_group_nickname_map_from_wcdb( + *, + account_dir: Path, + chatroom_id: str, + sender_usernames: list[str], + rt_conn=None, +) -> dict[str, str]: + chatroom = str(chatroom_id or "").strip() + if not chatroom.endswith("@chatroom"): + return {} + + targets = list(dict.fromkeys([str(x or "").strip() for x in sender_usernames if str(x or "").strip()])) + if not targets: + return {} + + try: + wcdb_conn = rt_conn or WCDB_REALTIME.ensure_connected(account_dir) + except Exception: + return {} + + target_set = set(targets) + out: dict[str, str] = {} + + try: + with wcdb_conn.lock: + nickname_map = _wcdb_get_group_nicknames(wcdb_conn.handle, chatroom) + for username, nickname in (nickname_map or {}).items(): + su = str(username or "").strip() + nn = str(nickname or "").strip() + if su and nn and su in target_set: + out[su] = nn + except Exception: + pass + + unresolved = [u for u in targets if u not in out] + if not unresolved: + return out + 
+ try: + with wcdb_conn.lock: + members = _wcdb_get_group_members(wcdb_conn.handle, chatroom) + except Exception: + return out + + if not members: + return out + + unresolved_set = set(unresolved) + for member in members: + try: + username = str(member.get("username") or "").strip() + except Exception: + username = "" + if (not username) or (username not in unresolved_set): + continue + + nickname = "" + for key in ("nickname", "displayName", "remark", "originalName"): + try: + candidate = str(member.get(key) or "").strip() + except Exception: + candidate = "" + if candidate: + nickname = candidate + break + if nickname: + out[username] = nickname + + return out + + +def _load_group_nickname_map( + *, + account_dir: Path, + contact_db_path: Path, + chatroom_id: str, + sender_usernames: list[str], + rt_conn=None, +) -> dict[str, str]: + """Resolve group member nickname (group card) via WCDB and contact.db ext_buffer (best-effort).""" + + contact_map: dict[str, str] = {} + try: + contact_map = _load_group_nickname_map_from_contact_db( + contact_db_path, + chatroom_id, + sender_usernames, + ) + except Exception: + contact_map = {} + + wcdb_map: dict[str, str] = {} + try: + wcdb_map = _load_group_nickname_map_from_wcdb( + account_dir=account_dir, + chatroom_id=chatroom_id, + sender_usernames=sender_usernames, + rt_conn=rt_conn, + ) + except Exception: + wcdb_map = {} + + if not contact_map and not wcdb_map: + return {} + + # Merge: WCDB wins (newer DLLs may provide higher-quality group nicknames). 
+ merged: dict[str, str] = {} + merged.update(contact_map) + merged.update(wcdb_map) + return merged + + +def _resolve_sender_display_name( + *, + sender_username: str, + sender_contact_rows: dict[str, sqlite3.Row], + wcdb_display_names: dict[str, str], + group_nicknames: Optional[dict[str, str]] = None, +) -> str: + su = str(sender_username or "").strip() + if not su: + return "" + + gn = str((group_nicknames or {}).get(su) or "").strip() + if gn: + return gn + + row = sender_contact_rows.get(su) + display_name = _pick_display_name(row, su) + if display_name == su: + wd = str(wcdb_display_names.get(su) or "").strip() + if wd and wd != su: + display_name = wd + return display_name + + def _realtime_sync_lock(account: str, username: str) -> threading.Lock: key = (str(account or "").strip(), str(username or "").strip()) with _REALTIME_SYNC_MU: @@ -213,6 +644,13 @@ async def stream_chat_realtime_events( if not db_storage_dir.exists() or not db_storage_dir.is_dir(): raise HTTPException(status_code=400, detail="db_storage directory not found for this account.") + logger.info( + "[realtime] SSE stream open account=%s interval_ms=%s db_storage=%s", + account_dir.name, + int(interval_ms), + str(db_storage_dir), + ) + async def gen(): last_mtime_ns = 0 last_heartbeat = 0.0 @@ -226,27 +664,40 @@ async def gen(): } yield f"data: {json.dumps(initial, ensure_ascii=False)}\n\n" - while True: - if await request.is_disconnected(): - break + try: + while True: + if await request.is_disconnected(): + break - mtime_ns = _scan_db_storage_mtime_ns(db_storage_dir) - if mtime_ns and mtime_ns != last_mtime_ns: - last_mtime_ns = mtime_ns - payload = { - "type": "change", - "account": account_dir.name, - "mtimeNs": int(mtime_ns), - "ts": int(time.time() * 1000), - } - yield f"data: {json.dumps(payload, ensure_ascii=False)}\n\n" + # Avoid blocking the event loop on a potentially large directory walk. 
+ scan_t0 = time.perf_counter() + try: + mtime_ns = await asyncio.to_thread(_scan_db_storage_mtime_ns, db_storage_dir) + except Exception: + mtime_ns = 0 + scan_ms = (time.perf_counter() - scan_t0) * 1000.0 + if scan_ms > 1000: + logger.warning("[realtime] SSE scan slow account=%s ms=%.1f", account_dir.name, scan_ms) + + if mtime_ns and mtime_ns != last_mtime_ns: + last_mtime_ns = mtime_ns + payload = { + "type": "change", + "account": account_dir.name, + "mtimeNs": int(mtime_ns), + "ts": int(time.time() * 1000), + } + logger.info("[realtime] SSE change account=%s mtime_ns=%s", account_dir.name, int(mtime_ns)) + yield f"data: {json.dumps(payload, ensure_ascii=False)}\n\n" - now = time.time() - if now - last_heartbeat > 15: - last_heartbeat = now - yield ": ping\n\n" + now = time.time() + if now - last_heartbeat > 15: + last_heartbeat = now + yield ": ping\n\n" - await asyncio.sleep(interval_ms / 1000.0) + await asyncio.sleep(interval_ms / 1000.0) + finally: + logger.info("[realtime] SSE stream closed account=%s", account_dir.name) headers = {"Cache-Control": "no-cache", "Connection": "keep-alive", "X-Accel-Buffering": "no"} return StreamingResponse(gen(), media_type="text/event-stream", headers=headers) @@ -269,6 +720,157 @@ def _resolve_decrypted_message_table(account_dir: Path, username: str) -> Option return None +def _local_month_range_epoch_seconds(*, year: int, month: int) -> tuple[int, int]: + """Return [start, end) range as epoch seconds for local time month boundaries. + + Notes: + - Uses local midnight boundaries (not +86400 * days) to stay DST-safe. + - Returned timestamps are integers (seconds). 
+ """ + + start = datetime(int(year), int(month), 1) + if int(month) == 12: + end = datetime(int(year) + 1, 1, 1) + else: + end = datetime(int(year), int(month) + 1, 1) + return int(start.timestamp()), int(end.timestamp()) + + +def _local_day_range_epoch_seconds(*, date_str: str) -> tuple[int, int, str]: + """Return [start, end) range as epoch seconds for local date boundaries. + + Returns the normalized `YYYY-MM-DD` date string as the 3rd element. + """ + + d0 = datetime.strptime(str(date_str or "").strip(), "%Y-%m-%d") + d1 = d0 + timedelta(days=1) + return int(d0.timestamp()), int(d1.timestamp()), d0.strftime("%Y-%m-%d") + + +def _pick_message_db_for_new_table(account_dir: Path, username: str) -> Optional[Path]: + """Pick a target decrypted sqlite db to place a new Msg_ table. + + Some accounts have both `message_*.db` and `biz_message_*.db`. For normal users we prefer + `message*.db`; for official accounts (`gh_`) we prefer `biz_message*.db`. + """ + + db_paths = _iter_message_db_paths(account_dir) + if not db_paths: + return None + + uname = str(username or "").strip() + want_biz = bool(uname and uname.startswith("gh_")) + + msg_paths: list[Path] = [] + biz_paths: list[Path] = [] + other_paths: list[Path] = [] + for p in db_paths: + ln = p.name.lower() + if re.match(r"^message(_\d+)?\.db$", ln): + msg_paths.append(p) + elif re.match(r"^biz_message(_\d+)?\.db$", ln): + biz_paths.append(p) + else: + other_paths.append(p) + + if want_biz and biz_paths: + return biz_paths[0] + if msg_paths: + return msg_paths[0] + if biz_paths: + return biz_paths[0] + return other_paths[0] if other_paths else db_paths[0] + + +def _ensure_decrypted_message_table(account_dir: Path, username: str) -> tuple[Path, str]: + """Ensure the decrypted sqlite has a Msg_ table for this conversation. + + Why: + - The decrypted snapshot can miss newly created sessions, so WCDB realtime can show messages + while the decrypted message_*.db has no table -> `/api/chat/messages` returns empty. 
+ - Realtime sync should be able to create the missing conversation table and then insert rows. + """ + + uname = str(username or "").strip() + if not uname: + raise HTTPException(status_code=400, detail="Missing username.") + + resolved = _resolve_decrypted_message_table(account_dir, uname) + if resolved: + return resolved + + target_db = _pick_message_db_for_new_table(account_dir, uname) + if target_db is None: + raise HTTPException(status_code=404, detail="No message databases found for this account.") + + # Use the conventional WeChat naming (`Msg_`). Resolution is case-insensitive. + import hashlib + + md5_hex = hashlib.md5(uname.encode("utf-8")).hexdigest() + table_name = f"Msg_{md5_hex}" + quoted_table = _quote_ident(table_name) + + conn = sqlite3.connect(str(target_db)) + try: + conn.execute( + f""" + CREATE TABLE IF NOT EXISTS {quoted_table}( + local_id INTEGER PRIMARY KEY AUTOINCREMENT, + server_id INTEGER, + local_type INTEGER, + sort_seq INTEGER, + real_sender_id INTEGER, + create_time INTEGER, + status INTEGER, + upload_status INTEGER, + download_status INTEGER, + server_seq INTEGER, + origin_source INTEGER, + source TEXT, + message_content TEXT, + compress_content TEXT, + packed_info_data BLOB, + WCDB_CT_message_content INTEGER DEFAULT NULL, + WCDB_CT_source INTEGER DEFAULT NULL + ) + """ + ) + + # Match the common indexes we observe on existing Msg_* tables for query performance. 
+ idx_sender = _quote_ident(f"{table_name}_SENDERID") + idx_server = _quote_ident(f"{table_name}_SERVERID") + idx_sort = _quote_ident(f"{table_name}_SORTSEQ") + idx_type_seq = _quote_ident(f"{table_name}_TYPE_SEQ") + conn.execute(f"CREATE INDEX IF NOT EXISTS {idx_sender} ON {quoted_table}(real_sender_id)") + conn.execute(f"CREATE INDEX IF NOT EXISTS {idx_server} ON {quoted_table}(server_id)") + conn.execute(f"CREATE INDEX IF NOT EXISTS {idx_sort} ON {quoted_table}(sort_seq)") + conn.execute(f"CREATE INDEX IF NOT EXISTS {idx_type_seq} ON {quoted_table}(local_type, sort_seq)") + + conn.commit() + finally: + conn.close() + + return target_db, table_name + + +def _ensure_decrypted_message_tables( + account_dir: Path, usernames: list[str] +) -> dict[str, tuple[Path, str]]: + """Bulk resolver that also creates missing Msg_ tables when needed.""" + + table_map = _resolve_decrypted_message_tables(account_dir, usernames) + for u in usernames: + uname = str(u or "").strip() + if not uname or uname in table_map: + continue + try: + table_map[uname] = _ensure_decrypted_message_table(account_dir, uname) + except Exception: + # Best-effort: if we can't create the table, keep it missing and let callers skip. 
+ continue + return table_map + + def _resolve_decrypted_message_tables( account_dir: Path, usernames: list[str] ) -> dict[str, tuple[Path, str]]: @@ -336,47 +938,299 @@ def _ensure_session_last_message_table(conn: sqlite3.Connection) -> None: ) -@router.post("/api/chat/realtime/sync", summary="实时消息同步到解密库(按会话增量)") -async def sync_chat_realtime_messages( - request: Request, - username: str, - account: Optional[str] = None, - max_scan: int = 600, -): - """ - 设计目的:实时模式只用来“同步增量”到 output/databases 下的解密库,前端始终从解密库读取显示, - 避免 WCDB realtime 返回格式差异(如 compress_content/message_content 的 hex 编码)直接影响渲染。 +def _get_session_table_columns(conn: sqlite3.Connection) -> set[str]: + try: + rows = conn.execute("PRAGMA table_info(SessionTable)").fetchall() + # PRAGMA table_info returns: cid, name, type, notnull, dflt_value, pk + cols = {str(r[1]) for r in rows if r and r[1]} + return cols + except Exception: + return set() - 同步策略:从 WCDB 获取最新消息(从新到旧),直到遇到解密库中已存在的最大 local_id 为止。 + +def _upsert_session_table_rows(conn: sqlite3.Connection, rows: list[dict[str, Any]]) -> None: + """Best-effort upsert of WCDB Session rows into decrypted session.db::SessionTable. + + Why: + - WCDB realtime can observe newly created sessions (e.g., new friends) immediately. + - The decrypted snapshot's session.db can become stale and miss those sessions entirely, causing + the left sidebar list to differ after a refresh (when the UI falls back to decrypted). + + This upsert intentionally avoids depending on message tables; it only keeps SessionTable fresh. """ - if not username: - raise HTTPException(status_code=400, detail="Missing username.") - if max_scan < 50: - max_scan = 50 - if max_scan > 5000: - max_scan = 5000 - account_dir = _resolve_account_dir(account) + if not rows: + return - # Lock per (account, username) to avoid concurrent writes to the same sqlite tables. - with _realtime_sync_lock(account_dir.name, username): + # Ensure SessionTable exists; if not, silently skip (older/partial accounts). 
+ try: + conn.execute("SELECT 1 FROM SessionTable LIMIT 1").fetchone() + except Exception: + return + + cols = _get_session_table_columns(conn) + if "username" not in cols: + return + + uniq_usernames: list[str] = [] + for r in rows: + u = str((r or {}).get("username") or "").strip() + if not u: + continue + uniq_usernames.append(u) + uniq_usernames = list(dict.fromkeys(uniq_usernames)) + if not uniq_usernames: + return + + # Insert missing rows first so UPDATE always has a target. + try: + conn.executemany( + "INSERT OR IGNORE INTO SessionTable(username) VALUES (?)", + [(u,) for u in uniq_usernames], + ) + except Exception: + # If the schema is unusual, don't fail the whole sync. + return + + # Only update columns that exist in this account's schema. + # Keep the order stable so executemany parameters line up. + desired_cols = [ + "unread_count", + "is_hidden", + "summary", + "draft", + "last_timestamp", + "sort_timestamp", + "last_msg_locald_id", + "last_msg_type", + "last_msg_sub_type", + "last_msg_sender", + "last_sender_display_name", + ] + update_cols = [c for c in desired_cols if c in cols] + if not update_cols: + return + + def _int(v: Any) -> int: try: - rt_conn = WCDB_REALTIME.ensure_connected(account_dir) - except WCDBRealtimeError as e: - raise HTTPException(status_code=400, detail=str(e)) + return int(v or 0) + except Exception: + return 0 - resolved = _resolve_decrypted_message_table(account_dir, username) - if not resolved: - raise HTTPException(status_code=404, detail="Conversation table not found in decrypted databases.") - msg_db_path, table_name = resolved + def _text(v: Any) -> str: + try: + return str(v or "") + except Exception: + return "" - msg_conn = sqlite3.connect(str(msg_db_path)) - msg_conn.row_factory = sqlite3.Row + params: list[tuple[Any, ...]] = [] + for r in rows: + u = str((r or {}).get("username") or "").strip() + if not u: + continue + values: list[Any] = [] + for c in update_cols: + if c in { + "unread_count", + "is_hidden", + 
"last_timestamp", + "sort_timestamp", + "last_msg_locald_id", + "last_msg_type", + "last_msg_sub_type", + }: + values.append(_int((r or {}).get(c))) + else: + values.append(_text((r or {}).get(c))) + values.append(u) + params.append(tuple(values)) + + if not params: + return + + set_expr = ", ".join([f"{c} = ?" for c in update_cols]) + conn.executemany(f"UPDATE SessionTable SET {set_expr} WHERE username = ?", params) + + +def _load_session_last_message_times(conn: sqlite3.Connection, usernames: list[str]) -> dict[str, int]: + """Load last synced message create_time per conversation from session.db::session_last_message. + + Note: This is used as the *sync watermark* for realtime -> decrypted, because SessionTable timestamps may be + updated from WCDB session rows for UI consistency. + """ + + uniq = list(dict.fromkeys([str(u or "").strip() for u in usernames if str(u or "").strip()])) + if not uniq: + return {} + + out: dict[str, int] = {} + chunk_size = 900 + for i in range(0, len(uniq), chunk_size): + chunk = uniq[i : i + chunk_size] + placeholders = ",".join(["?"] * len(chunk)) try: - quoted_table = _quote_ident(table_name) - row = msg_conn.execute(f"SELECT MAX(local_id) AS mx FROM {quoted_table}").fetchone() + rows = conn.execute( + f"SELECT username, create_time FROM session_last_message WHERE username IN ({placeholders})", + chunk, + ).fetchall() + except Exception: + continue + for r in rows: + u = str((r["username"] if isinstance(r, sqlite3.Row) else r[0]) or "").strip() + if not u: + continue try: - max_local_id = int((row["mx"] if row is not None else 0) or 0) + ts = int((r["create_time"] if isinstance(r, sqlite3.Row) else r[1]) or 0) + except Exception: + ts = 0 + out[u] = int(ts or 0) + return out + + +def _session_row_get(row: Any, key: str, default: Any = None) -> Any: + try: + if isinstance(row, sqlite3.Row): + return row[key] + except Exception: + return default + try: + return row.get(key, default) + except Exception: + return default + + +def 
_contact_flag_is_top(flag_value: Any) -> bool: + try: + flag_int = int(flag_value) + except Exception: + return False + if flag_int < 0: + flag_int &= (1 << 64) - 1 + return bool((flag_int >> 11) & 1) + + +def _load_contact_top_flags(contact_db_path: Path, usernames: list[str]) -> dict[str, bool]: + uniq = list(dict.fromkeys([str(u or "").strip() for u in usernames if str(u or "").strip()])) + if not uniq: + return {} + if not contact_db_path.exists(): + return {} + + out: dict[str, bool] = {} + conn = sqlite3.connect(str(contact_db_path)) + conn.row_factory = sqlite3.Row + try: + def has_flag_column(table: str) -> bool: + try: + rows = conn.execute(f"PRAGMA table_info({table})").fetchall() + except Exception: + return False + cols: set[str] = set() + for r in rows: + try: + cols.add(str(r["name"] if isinstance(r, sqlite3.Row) else r[1]).strip().lower()) + except Exception: + continue + return ("username" in cols) and ("flag" in cols) + + chunk_size = 900 + for table in ("contact", "stranger"): + if not has_flag_column(table): + continue + + for i in range(0, len(uniq), chunk_size): + chunk = uniq[i : i + chunk_size] + placeholders = ",".join(["?"] * len(chunk)) + try: + rows = conn.execute( + f"SELECT username, flag FROM {table} WHERE username IN ({placeholders})", + chunk, + ).fetchall() + except Exception: + continue + + for r in rows: + username = str(_session_row_get(r, "username", "") or "").strip() + if not username: + continue + is_top = _contact_flag_is_top(_session_row_get(r, "flag", 0)) + if is_top: + out[username] = True + else: + out.setdefault(username, False) + return out + finally: + conn.close() + + +@router.post("/api/chat/realtime/sync", summary="实时消息同步到解密库(按会话增量)") +def sync_chat_realtime_messages( + request: Request, + username: str, + account: Optional[str] = None, + max_scan: int = 600, + backfill_limit: int = 200, +): + """ + 设计目的:实时模式只用来“同步增量”到 output/databases 下的解密库,前端始终从解密库读取显示, + 避免 WCDB realtime 返回格式差异(如 compress_content/message_content 
的 hex 编码)直接影响渲染。 + + 同步策略:从 WCDB 获取最新消息(从新到旧),直到遇到解密库中已存在的最大 local_id 为止。 + + backfill_limit:同步过程中额外“回填”旧消息的 packed_info_data 的最大行数(用于修复旧库缺失字段)。 + - 设为 0 可显著降低每次同步的扫描/写入开销(更适合前端实时轮询/推送触发的高频增量同步)。 + """ + if not username: + raise HTTPException(status_code=400, detail="Missing username.") + if max_scan < 50: + max_scan = 50 + if max_scan > 5000: + max_scan = 5000 + if backfill_limit < 0: + backfill_limit = 0 + if backfill_limit > 5000: + backfill_limit = 5000 + + account_dir = _resolve_account_dir(account) + trace_id = f"rt-sync-{int(time.time() * 1000)}-{threading.get_ident()}" + logger.info( + "[%s] realtime sync start account=%s username=%s max_scan=%s", + trace_id, + account_dir.name, + username, + int(max_scan), + ) + + # Lock per (account, username) to avoid concurrent writes to the same sqlite tables. + logger.info("[%s] acquiring per-session lock account=%s username=%s", trace_id, account_dir.name, username) + with _realtime_sync_lock(account_dir.name, username): + logger.info("[%s] per-session lock acquired account=%s username=%s", trace_id, account_dir.name, username) + try: + logger.info("[%s] ensure wcdb connected account=%s", trace_id, account_dir.name) + rt_conn = WCDB_REALTIME.ensure_connected(account_dir) + logger.info("[%s] wcdb connected account=%s handle=%s", trace_id, account_dir.name, int(rt_conn.handle)) + except WCDBRealtimeError as e: + raise HTTPException(status_code=400, detail=str(e)) + + # Some sessions may not exist in the decrypted snapshot yet; create the missing Msg_ table + # so we can insert the realtime rows and make `/api/chat/messages` work after switching off realtime. 
+ msg_db_path, table_name = _ensure_decrypted_message_table(account_dir, username) + logger.info( + "[%s] resolved decrypted table account=%s username=%s db=%s table=%s", + trace_id, + account_dir.name, + username, + str(msg_db_path), + table_name, + ) + + msg_conn = sqlite3.connect(str(msg_db_path)) + msg_conn.row_factory = sqlite3.Row + try: + quoted_table = _quote_ident(table_name) + row = msg_conn.execute(f"SELECT MAX(local_id) AS mx FROM {quoted_table}").fetchone() + try: + max_local_id = int((row["mx"] if row is not None else 0) or 0) except Exception: max_local_id = 0 @@ -451,14 +1305,39 @@ def normalize(item: dict[str, Any]) -> dict[str, Any]: offset = 0 new_rows: list[dict[str, Any]] = [] backfill_rows: list[dict[str, Any]] = [] - backfill_limit = min(200, int(max_scan)) reached_existing = False stop = False while scanned < int(max_scan): take = min(batch_size, int(max_scan) - scanned) + logger.info( + "[%s] wcdb_get_messages account=%s username=%s take=%s offset=%s", + trace_id, + account_dir.name, + username, + int(take), + int(offset), + ) + wcdb_t0 = time.perf_counter() with rt_conn.lock: raw_rows = _wcdb_get_messages(rt_conn.handle, username, limit=take, offset=offset) + wcdb_ms = (time.perf_counter() - wcdb_t0) * 1000.0 + logger.info( + "[%s] wcdb_get_messages done account=%s username=%s rows=%s ms=%.1f", + trace_id, + account_dir.name, + username, + len(raw_rows or []), + wcdb_ms, + ) + if wcdb_ms > 2000: + logger.warning( + "[%s] wcdb_get_messages slow account=%s username=%s ms=%.1f", + trace_id, + account_dir.name, + username, + wcdb_ms, + ) if not raw_rows: break @@ -477,8 +1356,11 @@ def normalize(item: dict[str, Any]) -> dict[str, Any]: continue reached_existing = True + if int(backfill_limit) <= 0: + stop = True + break backfill_rows.append(norm) - if len(backfill_rows) >= backfill_limit: + if len(backfill_rows) >= int(backfill_limit): stop = True break @@ -526,9 +1408,27 @@ def normalize(item: dict[str, Any]) -> dict[str, Any]: # Insert older 
-> newer to keep sqlite btree locality similar to existing data. values = [tuple(r.get(c) for c in insert_cols) for r in reversed(new_rows)] + insert_t0 = time.perf_counter() msg_conn.executemany(insert_sql, values) msg_conn.commit() + insert_ms = (time.perf_counter() - insert_t0) * 1000.0 inserted = len(new_rows) + logger.info( + "[%s] sqlite insert done account=%s username=%s inserted=%s ms=%.1f", + trace_id, + account_dir.name, + username, + int(inserted), + insert_ms, + ) + if insert_ms > 1000: + logger.warning( + "[%s] sqlite insert slow account=%s username=%s ms=%.1f", + trace_id, + account_dir.name, + username, + insert_ms, + ) if ("packed_info_data" in insert_cols) and backfill_rows: update_values = [] @@ -539,12 +1439,30 @@ def normalize(item: dict[str, Any]) -> dict[str, Any]: update_values.append((pdata, int(r.get("local_id") or 0))) if update_values: before_changes = msg_conn.total_changes + update_t0 = time.perf_counter() msg_conn.executemany( f"UPDATE {quoted_table} SET packed_info_data = ? WHERE local_id = ? AND (packed_info_data IS NULL OR length(packed_info_data) = 0)", update_values, ) msg_conn.commit() + update_ms = (time.perf_counter() - update_t0) * 1000.0 backfilled = int(msg_conn.total_changes - before_changes) + logger.info( + "[%s] sqlite backfill done account=%s username=%s rows=%s ms=%.1f", + trace_id, + account_dir.name, + username, + int(backfilled), + update_ms, + ) + if update_ms > 1000: + logger.warning( + "[%s] sqlite backfill slow account=%s username=%s ms=%.1f", + trace_id, + account_dir.name, + username, + update_ms, + ) # Update session.db so left sidebar ordering/time can follow new messages. 
newest = new_rows[0] if new_rows else None @@ -636,6 +1554,16 @@ def normalize(item: dict[str, Any]) -> dict[str, Any]: finally: sconn.close() + logger.info( + "[%s] realtime sync done account=%s username=%s scanned=%s inserted=%s backfilled=%s maxLocalIdBefore=%s", + trace_id, + account_dir.name, + username, + int(scanned), + int(inserted), + int(backfilled), + int(max_local_id), + ) return { "status": "success", "account": account_dir.name, @@ -658,11 +1586,18 @@ def _sync_chat_realtime_messages_for_table( msg_db_path: Path, table_name: str, max_scan: int, + backfill_limit: int = 200, ) -> dict[str, Any]: if max_scan < 50: max_scan = 50 if max_scan > 5000: max_scan = 5000 + if backfill_limit < 0: + backfill_limit = 0 + if backfill_limit > 5000: + backfill_limit = 5000 + if backfill_limit > max_scan: + backfill_limit = max_scan msg_conn = sqlite3.connect(str(msg_db_path)) msg_conn.row_factory = sqlite3.Row @@ -744,14 +1679,36 @@ def normalize(item: dict[str, Any]) -> dict[str, Any]: offset = 0 new_rows: list[dict[str, Any]] = [] backfill_rows: list[dict[str, Any]] = [] - backfill_limit = min(200, int(max_scan)) reached_existing = False stop = False while scanned < int(max_scan): take = min(batch_size, int(max_scan) - scanned) + logger.debug( + "[realtime] wcdb_get_messages account=%s username=%s take=%s offset=%s", + account_dir.name, + username, + int(take), + int(offset), + ) + wcdb_t0 = time.perf_counter() with rt_conn.lock: raw_rows = _wcdb_get_messages(rt_conn.handle, username, limit=take, offset=offset) + wcdb_ms = (time.perf_counter() - wcdb_t0) * 1000.0 + logger.debug( + "[realtime] wcdb_get_messages done account=%s username=%s rows=%s ms=%.1f", + account_dir.name, + username, + len(raw_rows or []), + wcdb_ms, + ) + if wcdb_ms > 2000: + logger.warning( + "[realtime] wcdb_get_messages slow account=%s username=%s ms=%.1f", + account_dir.name, + username, + wcdb_ms, + ) if not raw_rows: break @@ -770,8 +1727,11 @@ def normalize(item: dict[str, Any]) -> 
dict[str, Any]: continue reached_existing = True + if int(backfill_limit) <= 0: + stop = True + break backfill_rows.append(norm) - if len(backfill_rows) >= backfill_limit: + if len(backfill_rows) >= int(backfill_limit): stop = True break @@ -816,9 +1776,25 @@ def normalize(item: dict[str, Any]) -> dict[str, Any]: continue values = [tuple(r.get(c) for c in insert_cols) for r in reversed(new_rows)] + insert_t0 = time.perf_counter() msg_conn.executemany(insert_sql, values) msg_conn.commit() + insert_ms = (time.perf_counter() - insert_t0) * 1000.0 inserted = len(new_rows) + logger.info( + "[realtime] sqlite insert done account=%s username=%s inserted=%s ms=%.1f", + account_dir.name, + username, + int(inserted), + insert_ms, + ) + if insert_ms > 1000: + logger.warning( + "[realtime] sqlite insert slow account=%s username=%s ms=%.1f", + account_dir.name, + username, + insert_ms, + ) if ("packed_info_data" in insert_cols) and backfill_rows: update_values = [] @@ -829,12 +1805,28 @@ def normalize(item: dict[str, Any]) -> dict[str, Any]: update_values.append((pdata, int(r.get("local_id") or 0))) if update_values: before_changes = msg_conn.total_changes + update_t0 = time.perf_counter() msg_conn.executemany( f"UPDATE {quoted_table} SET packed_info_data = ? WHERE local_id = ? 
AND (packed_info_data IS NULL OR length(packed_info_data) = 0)", update_values, ) msg_conn.commit() + update_ms = (time.perf_counter() - update_t0) * 1000.0 backfilled = int(msg_conn.total_changes - before_changes) + logger.info( + "[realtime] sqlite backfill done account=%s username=%s rows=%s ms=%.1f", + account_dir.name, + username, + int(backfilled), + update_ms, + ) + if update_ms > 1000: + logger.warning( + "[realtime] sqlite backfill slow account=%s username=%s ms=%.1f", + account_dir.name, + username, + update_ms, + ) newest = new_rows[0] if new_rows else None preview = "" @@ -938,7 +1930,7 @@ def normalize(item: dict[str, Any]) -> dict[str, Any]: @router.post("/api/chat/realtime/sync_all", summary="实时消息同步到解密库(全会话增量)") -async def sync_chat_realtime_messages_all( +def sync_chat_realtime_messages_all( request: Request, account: Optional[str] = None, max_scan: int = 200, @@ -946,6 +1938,7 @@ async def sync_chat_realtime_messages_all( priority_max_scan: int = 600, include_hidden: bool = True, include_official: bool = True, + backfill_limit: int = 200, ): """ 全量会话同步(增量):遍历会话列表,对每个会话调用与 /realtime/sync 相同的“遇到已同步 local_id 即停止”逻辑。 @@ -953,6 +1946,16 @@ async def sync_chat_realtime_messages_all( 说明:这是增量同步,不会每次全表扫描;priority_username 会优先同步并可设置更大的 priority_max_scan。 """ account_dir = _resolve_account_dir(account) + trace_id = f"rt-syncall-{int(time.time() * 1000)}-{threading.get_ident()}" + logger.info( + "[%s] realtime sync_all start account=%s max_scan=%s priority=%s include_hidden=%s include_official=%s", + trace_id, + account_dir.name, + int(max_scan), + str(priority_username or "").strip(), + bool(include_hidden), + bool(include_official), + ) if max_scan < 20: max_scan = 20 @@ -962,23 +1965,44 @@ async def sync_chat_realtime_messages_all( priority_max_scan = max_scan if priority_max_scan > 5000: priority_max_scan = 5000 + if backfill_limit < 0: + backfill_limit = 0 + if backfill_limit > 5000: + backfill_limit = 5000 + if backfill_limit > max_scan: + backfill_limit 
= max_scan priority = str(priority_username or "").strip() started = time.time() + logger.info("[%s] acquiring global sync lock account=%s", trace_id, account_dir.name) with _realtime_sync_all_lock(account_dir.name): + logger.info("[%s] global sync lock acquired account=%s", trace_id, account_dir.name) try: + logger.info("[%s] ensure wcdb connected account=%s", trace_id, account_dir.name) rt_conn = WCDB_REALTIME.ensure_connected(account_dir) + logger.info("[%s] wcdb connected account=%s handle=%s", trace_id, account_dir.name, int(rt_conn.handle)) except WCDBRealtimeError as e: raise HTTPException(status_code=400, detail=str(e)) try: + logger.info("[%s] wcdb_get_sessions account=%s", trace_id, account_dir.name) + wcdb_t0 = time.perf_counter() with rt_conn.lock: raw_sessions = _wcdb_get_sessions(rt_conn.handle) + wcdb_ms = (time.perf_counter() - wcdb_t0) * 1000.0 + logger.info( + "[%s] wcdb_get_sessions done account=%s sessions=%s ms=%.1f", + trace_id, + account_dir.name, + len(raw_sessions or []), + wcdb_ms, + ) except Exception: raw_sessions = [] sessions: list[tuple[int, str]] = [] + realtime_rows_by_user: dict[str, dict[str, Any]] = {} for item in raw_sessions: if not isinstance(item, dict): continue @@ -1005,6 +2029,43 @@ async def sync_chat_realtime_messages_all( break sessions.append((ts, uname)) + # Keep a normalized SessionTable row for upserting into decrypted session.db. 
+ norm_row = { + "username": uname, + "unread_count": item.get("unread_count", item.get("unreadCount", 0)), + "is_hidden": item.get("is_hidden", item.get("isHidden", 0)), + "summary": item.get("summary", ""), + "draft": item.get("draft", ""), + "last_timestamp": item.get("last_timestamp", item.get("lastTimestamp", 0)), + "sort_timestamp": item.get( + "sort_timestamp", + item.get("sortTimestamp", item.get("last_timestamp", item.get("lastTimestamp", 0))), + ), + "last_msg_locald_id": item.get( + "last_msg_locald_id", + item.get("lastMsgLocaldId", item.get("lastMsgLocalId", 0)), + ), + "last_msg_type": item.get("last_msg_type", item.get("lastMsgType", 0)), + "last_msg_sub_type": item.get("last_msg_sub_type", item.get("lastMsgSubType", 0)), + "last_msg_sender": item.get("last_msg_sender", item.get("lastMsgSender", "")), + "last_sender_display_name": item.get( + "last_sender_display_name", + item.get("lastSenderDisplayName", ""), + ), + } + # Prefer the row with the newer sort timestamp for the same username. + prev = realtime_rows_by_user.get(uname) + try: + prev_sort = int((prev or {}).get("sort_timestamp") or 0) + except Exception: + prev_sort = 0 + try: + cur_sort = int(norm_row.get("sort_timestamp") or 0) + except Exception: + cur_sort = 0 + if prev is None or cur_sort >= prev_sort: + realtime_rows_by_user[uname] = norm_row + def _dedupe(items: list[tuple[int, str]]) -> list[tuple[int, str]]: seen = set() out: list[tuple[int, str]] = [] @@ -1018,8 +2079,16 @@ def _dedupe(items: list[tuple[int, str]]) -> list[tuple[int, str]]: sessions = _dedupe(sessions) sessions.sort(key=lambda x: int(x[0] or 0), reverse=True) all_usernames = [u for _, u in sessions if u] + logger.info( + "[%s] sessions prepared account=%s raw=%s filtered=%s", + trace_id, + account_dir.name, + len(raw_sessions or []), + len(all_usernames), + ) - # Skip sessions whose decrypted session.db already has a newer/equal sort_timestamp. 
+ # Keep SessionTable fresh for UI consistency, and use session_last_message.create_time as the + # "sync watermark" (instead of SessionTable timestamps) to decide whether a session needs syncing. decrypted_ts_by_user: dict[str, int] = {} if all_usernames: try: @@ -1027,45 +2096,49 @@ def _dedupe(items: list[tuple[int, str]]) -> list[tuple[int, str]]: sconn = sqlite3.connect(str(session_db_path)) sconn.row_factory = sqlite3.Row try: - uniq = list(dict.fromkeys([u for u in all_usernames if u])) - chunk_size = 900 - for i in range(0, len(uniq), chunk_size): - chunk = uniq[i : i + chunk_size] - placeholders = ",".join(["?"] * len(chunk)) + _ensure_session_last_message_table(sconn) + + # If the cache table exists but is empty (older accounts), attempt a one-time build so we + # don't keep treating every session as "needs_sync". + try: + cnt = int(sconn.execute("SELECT COUNT(1) FROM session_last_message").fetchone()[0] or 0) + except Exception: + cnt = 0 + if cnt <= 0: + try: + sconn.close() + except Exception: + pass try: - rows = sconn.execute( - f"SELECT username, sort_timestamp, last_timestamp FROM SessionTable WHERE username IN ({placeholders})", - chunk, - ).fetchall() - for r in rows: - u = str(r["username"] or "").strip() - if not u: - continue - try: - ts = int(r["sort_timestamp"] or 0) - except Exception: - ts = 0 - if ts <= 0: - try: - ts = int(r["last_timestamp"] or 0) - except Exception: - ts = 0 - decrypted_ts_by_user[u] = int(ts or 0) - except sqlite3.OperationalError: - rows = sconn.execute( - f"SELECT username, last_timestamp FROM SessionTable WHERE username IN ({placeholders})", - chunk, - ).fetchall() - for r in rows: - u = str(r["username"] or "").strip() - if not u: - continue - try: - decrypted_ts_by_user[u] = int(r["last_timestamp"] or 0) - except Exception: - decrypted_ts_by_user[u] = 0 + build_session_last_message_table( + account_dir, + rebuild=False, + include_hidden=True, + include_official=True, + ) + except Exception: + pass + sconn = 
sqlite3.connect(str(session_db_path)) + sconn.row_factory = sqlite3.Row + _ensure_session_last_message_table(sconn) + + # Upsert latest WCDB sessions into decrypted SessionTable so the sidebar list remains stable + # after switching off realtime (or refreshing the page). + try: + _upsert_session_table_rows(sconn, list(realtime_rows_by_user.values())) + sconn.commit() + except Exception: + try: + sconn.rollback() + except Exception: + pass + + decrypted_ts_by_user = _load_session_last_message_times(sconn, all_usernames) finally: - sconn.close() + try: + sconn.close() + except Exception: + pass except Exception: decrypted_ts_by_user = {} @@ -1080,10 +2153,25 @@ def _dedupe(items: list[tuple[int, str]]) -> list[tuple[int, str]]: continue sync_usernames.append(u) + logger.info( + "[%s] sessions need_sync account=%s need_sync=%s skipped_up_to_date=%s", + trace_id, + account_dir.name, + len(sync_usernames), + int(skipped_up_to_date), + ) + if priority and priority in sync_usernames: sync_usernames = [priority] + [u for u in sync_usernames if u != priority] - table_map = _resolve_decrypted_message_tables(account_dir, sync_usernames) + table_map = _ensure_decrypted_message_tables(account_dir, sync_usernames) + logger.info( + "[%s] resolved decrypted tables account=%s resolved=%s need_sync=%s", + trace_id, + account_dir.name, + len(table_map), + len(sync_usernames), + ) scanned_total = 0 inserted_total = 0 @@ -1109,6 +2197,7 @@ def _dedupe(items: list[tuple[int, str]]) -> list[tuple[int, str]]: msg_db_path=msg_db_path, table_name=table_name, max_scan=int(cur_scan), + backfill_limit=int(backfill_limit), ) synced += 1 scanned_total += int(result.get("scanned") or 0) @@ -1116,17 +2205,50 @@ def _dedupe(items: list[tuple[int, str]]) -> list[tuple[int, str]]: inserted_total += ins if ins: updated_sessions += 1 + logger.info( + "[%s] synced session account=%s username=%s inserted=%s scanned=%s", + trace_id, + account_dir.name, + uname, + ins, + int(result.get("scanned") or 0), + 
) except HTTPException as e: errors.append(f"{uname}: {str(e.detail or '')}".strip()) + logger.warning( + "[%s] sync session failed account=%s username=%s err=%s", + trace_id, + account_dir.name, + uname, + str(e.detail or "").strip(), + ) continue except Exception as e: errors.append(f"{uname}: {str(e)}".strip()) + logger.exception( + "[%s] sync session crashed account=%s username=%s", + trace_id, + account_dir.name, + uname, + ) continue elapsed_ms = int((time.time() - started) * 1000) if len(errors) > 20: errors = errors[:20] + [f"... and {len(errors) - 20} more"] + logger.info( + "[%s] realtime sync_all done account=%s sessions_total=%s need_sync=%s synced=%s updated=%s inserted_total=%s elapsed_ms=%s errors=%s", + trace_id, + account_dir.name, + len(all_usernames), + len(sync_usernames), + int(synced), + int(updated_sessions), + int(inserted_total), + int(elapsed_ms), + len(errors), + ) return { "status": "success", "account": account_dir.name, @@ -1376,9 +2498,11 @@ async def chat_search_index_senders( continue cnt = int(r["c"] or 0) row = contact_rows.get(su) - avatar_url = _pick_avatar_url(row) - if (not avatar_url) and (su in local_sender_avatars): - avatar_url = _build_avatar_url(account_dir.name, su) + avatar_url = _avatar_url_unified( + account_dir=account_dir, + username=su, + local_avatar_usernames=local_sender_avatars, + ) senders.append( { "username": su, @@ -1503,7 +2627,7 @@ def _append_full_messages_from_rows( if is_group and sender_prefix and (not sender_username): sender_username = sender_prefix - if is_group and (raw_text.startswith("<") or raw_text.startswith('"<')): + if is_group and (not sender_username) and (raw_text.startswith("<") or raw_text.startswith('"<')): xml_sender = _extract_sender_from_group_xml(raw_text) if xml_sender: sender_username = xml_sender @@ -1539,6 +2663,9 @@ def _append_full_messages_from_rows( quote_username = "" quote_title = "" quote_content = "" + quote_thumb_url = "" + link_type = "" + link_style = "" 
quote_server_id = "" quote_type = "" quote_voice_length = "" @@ -1550,14 +2677,14 @@ def _append_full_messages_from_rows( file_md5 = "" transfer_id = "" voip_type = "" + location_lat: Optional[float] = None + location_lng: Optional[float] = None + location_poiname = "" + location_label = "" if local_type == 10000: render_type = "system" - if "revokemsg" in raw_text: - content_text = "撤回了一条消息" - else: - content_text = re.sub(r"]*>", "", raw_text) - content_text = re.sub(r"\s+", " ", content_text).strip() or "[系统消息]" + content_text = _parse_system_message_content(raw_text) elif local_type == 49: parsed = _parse_app_message(raw_text) render_type = str(parsed.get("renderType") or "text") @@ -1569,6 +2696,9 @@ def _append_full_messages_from_rows( record_item = str(parsed.get("recordItem") or "") quote_title = str(parsed.get("quoteTitle") or "") quote_content = str(parsed.get("quoteContent") or "") + quote_thumb_url = str(parsed.get("quoteThumbUrl") or "") + link_type = str(parsed.get("linkType") or "") + link_style = str(parsed.get("linkStyle") or "") quote_username = str(parsed.get("quoteUsername") or "") quote_server_id = str(parsed.get("quoteServerId") or "") quote_type = str(parsed.get("quoteType") or "") @@ -1612,6 +2742,9 @@ def _append_full_messages_from_rows( content_text = str(parsed.get("content") or "[引用消息]") quote_title = str(parsed.get("quoteTitle") or "") quote_content = str(parsed.get("quoteContent") or "") + quote_thumb_url = str(parsed.get("quoteThumbUrl") or "") + link_type = str(parsed.get("linkType") or "") + link_style = str(parsed.get("linkStyle") or "") quote_username = str(parsed.get("quoteUsername") or "") quote_server_id = str(parsed.get("quoteServerId") or "") quote_type = str(parsed.get("quoteType") or "") @@ -1724,6 +2857,20 @@ def _append_full_messages_from_rows( local_id=local_id, create_time=create_time, ) + + # Some WeChat builds store the on-disk thumbnail basename (32-hex) in packed_info_data (protobuf), + # while the message XML only 
carries a long cdnthumburl file_id. Prefer packed_info_data when present. + if not _is_hex_md5(video_thumb_md5): + try: + packed_val = r["packed_info_data"] + except Exception: + try: + packed_val = r.get("packed_info_data") # type: ignore[attr-defined] + except Exception: + packed_val = None + packed_md5 = _extract_md5_from_packed_info(packed_val) + if packed_md5: + video_thumb_md5 = packed_md5 content_text = "[视频]" elif local_type == 47: render_type = "emoji" @@ -1744,6 +2891,14 @@ def _append_full_messages_from_rows( create_time=create_time, ) content_text = "[表情]" + elif local_type == 48: + parsed = _parse_location_message(raw_text) + render_type = str(parsed.get("renderType") or "location") + content_text = str(parsed.get("content") or "[Location]") + location_lat = parsed.get("locationLat") + location_lng = parsed.get("locationLng") + location_poiname = str(parsed.get("locationPoiname") or "") + location_label = str(parsed.get("locationLabel") or "") elif local_type == 50: render_type = "voip" try: @@ -1783,14 +2938,22 @@ def _append_full_messages_from_rows( record_item = str(parsed.get("recordItem") or record_item) quote_title = str(parsed.get("quoteTitle") or quote_title) quote_content = str(parsed.get("quoteContent") or quote_content) + quote_thumb_url = str(parsed.get("quoteThumbUrl") or quote_thumb_url) + link_type = str(parsed.get("linkType") or link_type) + link_style = str(parsed.get("linkStyle") or link_style) amount = str(parsed.get("amount") or amount) cover_url = str(parsed.get("coverUrl") or cover_url) thumb_url = str(parsed.get("thumbUrl") or thumb_url) from_name = str(parsed.get("from") or from_name) + from_username = str(parsed.get("fromUsername") or from_username) file_size = str(parsed.get("size") or file_size) pay_sub_type = str(parsed.get("paySubType") or pay_sub_type) file_md5 = str(parsed.get("fileMd5") or file_md5) transfer_id = str(parsed.get("transferId") or transfer_id) + quote_username = str(parsed.get("quoteUsername") or 
quote_username) + quote_server_id = str(parsed.get("quoteServerId") or quote_server_id) + quote_type = str(parsed.get("quoteType") or quote_type) + quote_voice_length = str(parsed.get("quoteVoiceLength") or quote_voice_length) if render_type == "transfer": # 如果 transferId 仍为空,尝试从原始 XML 提取 @@ -1834,6 +2997,8 @@ def _append_full_messages_from_rows( "content": content_text, "title": title, "url": url, + "linkType": link_type, + "linkStyle": link_style, "from": from_name, "fromUsername": from_username, "recordItem": record_item, @@ -1857,6 +3022,7 @@ def _append_full_messages_from_rows( "quoteVoiceLength": str(quote_voice_length).strip(), "quoteTitle": quote_title, "quoteContent": quote_content, + "quoteThumbUrl": quote_thumb_url, "amount": amount, "coverUrl": cover_url, "fileSize": file_size, @@ -1864,6 +3030,10 @@ def _append_full_messages_from_rows( "paySubType": pay_sub_type, "transferStatus": transfer_status, "transferId": transfer_id, + "locationLat": location_lat, + "locationLng": location_lng, + "locationPoiname": location_poiname, + "locationLabel": location_label, "_rawText": raw_text if local_type == 266287972401 else "", } ) @@ -1875,87 +3045,210 @@ def _append_full_messages_from_rows( pass -def _postprocess_full_messages( - *, - merged: list[dict[str, Any]], - sender_usernames: list[str], - quote_usernames: list[str], - pat_usernames: set[str], - account_dir: Path, - username: str, - base_url: str, - contact_db_path: Path, - head_image_db_path: Path, -) -> None: +def _postprocess_transfer_messages(merged: list[dict[str, Any]]) -> None: # 后处理:关联转账消息的最终状态 # 策略:优先使用 transferId 精确匹配,回退到金额+时间窗口匹配 # paysubtype 含义:1=不明确 3=已收款 4=对方退回给你 8=发起转账 9=被对方退回 10=已过期 + # + # Windows 微信在部分场景会为同一笔转账记录两条消息: + # - paysubtype=1/8:发起/待收款(这里回填为“已被接收”) + # - paysubtype=3:收款确认(展示为“已收款”) + # + # 这两条消息的 isSent 并不能稳定表示“付款方/收款方视角”,因此这里以 transferId 关联结果为准: + # - 将原始转账消息(1/8)回填为“已被接收” + # - 若同一 transferId 同时存在原始消息与 paysubtype=3 消息,则将 paysubtype=3 的那条校正为“已收款” + + def 
_is_transfer_expired_system_message(text: Any) -> bool: + content = str(text or "").strip() + if not content: + return False + if "转账" not in content or "过期" not in content: + return False + if "未接收" in content and ("24小时" in content or "二十四小时" in content): + return True + return "已过期" in content and ("收款方" in content or "转账" in content) + + def _mark_pending_transfers_expired_by_system_messages() -> set[str]: + expired_system_times: list[int] = [] + pending_candidates: list[tuple[int, int]] = [] # (index, createTime) + + for idx, msg in enumerate(merged): + rt = str(msg.get("renderType") or "").strip() + if rt == "system": + if _is_transfer_expired_system_message(msg.get("content")): + try: + ts = int(msg.get("createTime") or 0) + except Exception: + ts = 0 + if ts > 0: + expired_system_times.append(ts) + continue + + if rt != "transfer": + continue + + pst = str(msg.get("paySubType") or "").strip() + if pst not in ("1", "8"): + continue + + try: + ts = int(msg.get("createTime") or 0) + except Exception: + ts = 0 + if ts <= 0: + continue + + pending_candidates.append((idx, ts)) + + if not expired_system_times or not pending_candidates: + return set() + + used_pending_indexes: set[int] = set() + expired_transfer_ids: set[str] = set() + + # 过期系统提示通常出现在转账发起约 24 小时后。 + # 为避免误匹配,要求时间差落在 [22h, 26h] 范围内,并选择最接近 24h 的待收款消息。 + for sys_ts in sorted(expired_system_times): + best_index = -1 + best_distance = 10**9 + + for idx, transfer_ts in pending_candidates: + if idx in used_pending_indexes: + continue + delta = sys_ts - transfer_ts + if delta < 0: + continue + if delta < 22 * 3600 or delta > 26 * 3600: + continue + + distance = abs(delta - 24 * 3600) + if distance < best_distance: + best_distance = distance + best_index = idx + + if best_index < 0: + continue + + used_pending_indexes.add(best_index) + transfer_msg = merged[best_index] + transfer_msg["paySubType"] = "10" + transfer_msg["transferStatus"] = "已过期" + + tid = str(transfer_msg.get("transferId") or "").strip() + 
if tid: + expired_transfer_ids.add(tid) + + return expired_transfer_ids + + expired_transfer_ids = _mark_pending_transfers_expired_by_system_messages() - # 收集已退还和已收款的转账ID和金额 returned_transfer_ids: set[str] = set() # 退还状态的 transferId received_transfer_ids: set[str] = set() # 已收款状态的 transferId returned_amounts_with_time: list[tuple[str, int]] = [] # (金额, 时间戳) 用于退还回退匹配 received_amounts_with_time: list[tuple[str, int]] = [] # (金额, 时间戳) 用于收款回退匹配 + pending_transfer_ids: set[str] = set() # (paysubtype=1/8) 的 transferId,用于识别“收款确认”消息 for m in merged: - if m.get("renderType") == "transfer": - pst = str(m.get("paySubType") or "") - tid = str(m.get("transferId") or "").strip() - amt = str(m.get("amount") or "") - ts = int(m.get("createTime") or 0) - - if pst in ("4", "9"): # 退还状态 - if tid: - returned_transfer_ids.add(tid) - if amt: - returned_amounts_with_time.append((amt, ts)) - elif pst == "3": # 已收款状态 - if tid: - received_transfer_ids.add(tid) - if amt: - received_amounts_with_time.append((amt, ts)) - - # 更新原始转账消息的状态 + if m.get("renderType") != "transfer": + continue + + pst = str(m.get("paySubType") or "") + tid = str(m.get("transferId") or "").strip() + amt = str(m.get("amount") or "") + ts = int(m.get("createTime") or 0) + + if tid and pst in ("1", "8"): + pending_transfer_ids.add(tid) + + if pst in ("4", "9"): # 退还状态 + if tid: + returned_transfer_ids.add(tid) + if amt: + returned_amounts_with_time.append((amt, ts)) + elif pst == "3": # 已收款状态 + if tid: + received_transfer_ids.add(tid) + if amt: + received_amounts_with_time.append((amt, ts)) + + backfilled_message_ids: set[str] = set() + for m in merged: - if m.get("renderType") == "transfer": - pst = str(m.get("paySubType") or "") - # 只更新未确定状态的原始转账消息(paysubtype=1 或 8) - if pst in ("1", "8"): - tid = str(m.get("transferId") or "").strip() - amt = str(m.get("amount") or "") - ts = int(m.get("createTime") or 0) - - # 优先检查退还状态(退还优先于收款) - should_mark_returned = False - should_mark_received = False - - # 策略1:精确 transferId 匹配 - 
if tid: - if tid in returned_transfer_ids: - should_mark_returned = True - elif tid in received_transfer_ids: + if m.get("renderType") != "transfer": + continue + + pst = str(m.get("paySubType") or "") + if pst not in ("1", "8"): + continue + + tid = str(m.get("transferId") or "").strip() + amt = str(m.get("amount") or "") + ts = int(m.get("createTime") or 0) + + should_mark_returned = False + should_mark_received = False + + # 策略1:精确 transferId 匹配 + if tid: + if tid in returned_transfer_ids: + should_mark_returned = True + elif tid in received_transfer_ids: + should_mark_received = True + + # 策略2:回退到金额+时间窗口匹配(24小时内同金额) + if not should_mark_returned and not should_mark_received and amt: + for ret_amt, ret_ts in returned_amounts_with_time: + if ret_amt == amt and abs(ret_ts - ts) <= 86400: + should_mark_returned = True + break + if not should_mark_returned: + for rec_amt, rec_ts in received_amounts_with_time: + if rec_amt == amt and abs(rec_ts - ts) <= 86400: should_mark_received = True + break + + if should_mark_returned: + m["paySubType"] = "9" + m["transferStatus"] = "已被退还" + elif should_mark_received: + m["paySubType"] = "3" + m["transferStatus"] = "已被接收" + mid = str(m.get("id") or "").strip() + if mid: + backfilled_message_ids.add(mid) + + # 修正收款确认消息:当同一 transferId 同时存在原始转账消息(1/8)与收款消息(3)时, + # paysubtype=3 的那条通常是收款确认消息,状态文案应为“已收款”。 + for m in merged: + if m.get("renderType") != "transfer": + continue + pst = str(m.get("paySubType") or "") + if pst != "3": + continue + tid = str(m.get("transferId") or "").strip() + if not tid or tid not in pending_transfer_ids: + continue + if tid in expired_transfer_ids: + continue + mid = str(m.get("id") or "").strip() + if mid and mid in backfilled_message_ids: + continue + m["transferStatus"] = "已收款" - # 策略2:回退到金额+时间窗口匹配(24小时内同金额) - if not should_mark_returned and not should_mark_received and amt: - for ret_amt, ret_ts in returned_amounts_with_time: - if ret_amt == amt and abs(ret_ts - ts) <= 86400: - should_mark_returned = 
True - break - if not should_mark_returned: - for rec_amt, rec_ts in received_amounts_with_time: - if rec_amt == amt and abs(rec_ts - ts) <= 86400: - should_mark_received = True - break - if should_mark_returned: - m["paySubType"] = "9" - m["transferStatus"] = "已被退还" - elif should_mark_received: - m["paySubType"] = "3" - # 根据 isSent 判断:发起方显示"已收款",收款方显示"已被接收" - is_sent = m.get("isSent", False) - m["transferStatus"] = "已收款" if is_sent else "已被接收" +def _postprocess_full_messages( + *, + merged: list[dict[str, Any]], + sender_usernames: list[str], + quote_usernames: list[str], + pat_usernames: set[str], + account_dir: Path, + username: str, + base_url: str, + contact_db_path: Path, + head_image_db_path: Path, +) -> None: + _postprocess_transfer_messages(merged) # Some appmsg payloads provide only `from` (sourcedisplayname) but not `fromUsername` (sourceusername). # Recover `fromUsername` via contact.db so the frontend can render the publisher avatar. @@ -1985,6 +3278,42 @@ def _postprocess_full_messages( sender_contact_rows = _load_contact_rows(contact_db_path, uniq_senders) local_sender_avatars = _query_head_image_usernames(head_image_db_path, uniq_senders) + # contact.db may not include enterprise/openim contacts (or group chatroom records). WCDB has a more complete + # view of display names + avatar URLs, so we use it as a best-effort fallback. 
+ wcdb_display_names: dict[str, str] = {} + wcdb_avatar_urls: dict[str, str] = {} + try: + need_display: list[str] = [] + need_avatar: list[str] = [] + for u in uniq_senders: + if not u: + continue + row = sender_contact_rows.get(u) + if _pick_display_name(row, u) == u: + need_display.append(u) + if u not in local_sender_avatars: + need_avatar.append(u) + + need_display = list(dict.fromkeys(need_display)) + need_avatar = list(dict.fromkeys(need_avatar)) + if need_display or need_avatar: + wcdb_conn = WCDB_REALTIME.ensure_connected(account_dir) + with wcdb_conn.lock: + if need_display: + wcdb_display_names = _wcdb_get_display_names(wcdb_conn.handle, need_display) + if need_avatar: + wcdb_avatar_urls = _wcdb_get_avatar_urls(wcdb_conn.handle, need_avatar) + except Exception: + wcdb_display_names = {} + wcdb_avatar_urls = {} + + group_nicknames = _load_group_nickname_map( + account_dir=account_dir, + contact_db_path=contact_db_path, + chatroom_id=username, + sender_usernames=uniq_senders, + ) + for m in merged: # If appmsg doesn't provide sourcedisplayname, try mapping sourceusername to display name. 
if (not str(m.get("from") or "").strip()) and str(m.get("fromUsername") or "").strip(): @@ -1992,15 +3321,25 @@ def _postprocess_full_messages( frow = sender_contact_rows.get(fu) if frow is not None: m["from"] = _pick_display_name(frow, fu) + else: + wd = str(wcdb_display_names.get(fu) or "").strip() + if wd: + m["from"] = wd su = str(m.get("senderUsername") or "") if not su: continue - row = sender_contact_rows.get(su) - m["senderDisplayName"] = _pick_display_name(row, su) - avatar_url = _pick_avatar_url(row) - if not avatar_url and su in local_sender_avatars: - avatar_url = base_url + _build_avatar_url(account_dir.name, su) + m["senderDisplayName"] = _resolve_sender_display_name( + sender_username=su, + sender_contact_rows=sender_contact_rows, + wcdb_display_names=wcdb_display_names, + group_nicknames=group_nicknames, + ) + avatar_url = base_url + _avatar_url_unified( + account_dir=account_dir, + username=su, + local_avatar_usernames=local_sender_avatars, + ) m["senderAvatar"] = avatar_url qu = str(m.get("quoteUsername") or "").strip() @@ -2016,9 +3355,15 @@ def _postprocess_full_messages( if remark: m["quoteTitle"] = remark elif not qt: - m["quoteTitle"] = _pick_display_name(qrow, qu) + title = _pick_display_name(qrow, qu) + if title == qu: + wd = str(wcdb_display_names.get(qu) or "").strip() + if wd and wd != qu: + title = wd + m["quoteTitle"] = title elif not qt: - m["quoteTitle"] = qu + wd = str(wcdb_display_names.get(qu) or "").strip() + m["quoteTitle"] = wd or qu # Media URL fallback: if CDN URLs missing, use local media endpoints. try: @@ -2097,6 +3442,27 @@ def _postprocess_full_messages( base_url + f"/api/chat/media/video?account={quote(account_dir.name)}&file_id={quote(video_file_id)}&username={quote(username)}" ) + elif rt == "link": + # Some appmsg link cards (notably Bilibili shares) carry a non-HTTP `` payload + # (often an ASN.1-ish hex blob). 
The actual preview image is typically saved as: + # msg/attach/{md5(conv_username)}/.../Img/{local_id}_{create_time}_t.dat + # Expose it via the existing image endpoint using file_id. + thumb_url = str(m.get("thumbUrl") or "").strip() + if thumb_url and (not thumb_url.lower().startswith(("http://", "https://"))): + try: + lid = int(m.get("localId") or 0) + except Exception: + lid = 0 + try: + ct = int(m.get("createTime") or 0) + except Exception: + ct = 0 + if lid > 0 and ct > 0: + file_id = f"{lid}_{ct}" + m["thumbUrl"] = ( + base_url + + f"/api/chat/media/image?account={quote(account_dir.name)}&file_id={quote(file_id)}&username={quote(username)}" + ) elif rt == "voice": if str(m.get("serverId") or ""): sid = int(m.get("serverId") or 0) @@ -2133,15 +3499,94 @@ async def list_chat_accounts(): } -@router.get("/api/chat/sessions", summary="获取会话列表(聊天左侧列表)") -async def list_chat_sessions( - request: Request, - account: Optional[str] = None, - limit: int = 400, - include_hidden: bool = False, - include_official: bool = False, - preview: str = "latest", - source: Optional[str] = None, +@router.get("/api/chat/account_info", summary="获取当前账号信息") +def get_chat_account_info(account: Optional[str] = None): + account_dir = _resolve_account_dir(account) + db_files = sorted([p.name for p in account_dir.glob("*.db") if p.is_file()]) + + session_db = account_dir / "session.db" + session_updated_at = 0 + try: + session_updated_at = int(session_db.stat().st_mtime) + except Exception: + session_updated_at = 0 + + return { + "status": "success", + "account": account_dir.name, + "path": str(account_dir), + "database_count": len(db_files), + "databases": db_files, + "session_updated_at": session_updated_at, + } + + +@router.delete("/api/chat/account", summary="删除当前账号在本项目中的数据") +def delete_chat_account(account: str): + account_name = str(account or "").strip() + if not account_name: + raise HTTPException(status_code=400, detail="Missing account.") + + account_dir = 
_resolve_account_dir(account_name) + + # Best-effort: close realtime connections first, otherwise Windows may keep db files locked. + try: + WCDB_REALTIME.disconnect(account_name) + except Exception: + pass + + with _REALTIME_SYNC_MU: + _REALTIME_SYNC_ALL_LOCKS.pop(account_name, None) + stale_lock_keys = [k for k in _REALTIME_SYNC_LOCKS.keys() if k and k[0] == account_name] + for k in stale_lock_keys: + _REALTIME_SYNC_LOCKS.pop(k, None) + + removed_edit_count = 0 + try: + removed_edit_count = int(chat_edit_store.delete_account_edits(account_name) or 0) + except Exception: + removed_edit_count = 0 + + removed_key_cache = False + try: + removed_key_cache = bool(remove_account_keys_from_store(account_name)) + except Exception: + removed_key_cache = False + + output_dir = get_output_dir() + exports_dir = output_dir / "exports" / account_name + if exports_dir.exists(): + try: + shutil.rmtree(exports_dir) + except Exception: + # Ignore export cleanup failure; account dir removal is the core operation. 
+ pass + + try: + shutil.rmtree(account_dir) + except Exception as e: + raise HTTPException(status_code=500, detail=f"删除账号数据失败:{e}") + + accounts = _list_decrypted_accounts() + return { + "status": "success", + "deleted_account": account_name, + "accounts": accounts, + "default_account": accounts[0] if accounts else None, + "removed_edit_count": removed_edit_count, + "removed_key_cache": removed_key_cache, + } + + +@router.get("/api/chat/sessions", summary="获取会话列表(聊天左侧列表)") +def list_chat_sessions( + request: Request, + account: Optional[str] = None, + limit: int = 400, + include_hidden: bool = False, + include_official: bool = False, + preview: str = "latest", + source: Optional[str] = None, ): """从 session.db + contact.db 读取会话列表,用于前端聊天界面动态渲染联系人""" if limit <= 0: @@ -2155,12 +3600,36 @@ async def list_chat_sessions( head_image_db_path = account_dir / "head_image.db" base_url = str(request.base_url).rstrip("/") + rt_conn = None rows: list[Any] if source_norm == "realtime": + trace_id = f"rt-sessions-{int(time.time() * 1000)}-{threading.get_ident()}" + logger.info( + "[%s] list_sessions realtime start account=%s limit=%s include_hidden=%s include_official=%s preview=%s", + trace_id, + account_dir.name, + int(limit), + bool(include_hidden), + bool(include_official), + str(preview or ""), + ) try: + logger.info("[%s] ensure wcdb connected account=%s", trace_id, account_dir.name) conn = WCDB_REALTIME.ensure_connected(account_dir) + rt_conn = conn + logger.info("[%s] wcdb connected account=%s handle=%s", trace_id, account_dir.name, int(conn.handle)) + logger.info("[%s] wcdb_get_sessions account=%s", trace_id, account_dir.name) + wcdb_t0 = time.perf_counter() with conn.lock: raw = _wcdb_get_sessions(conn.handle) + wcdb_ms = (time.perf_counter() - wcdb_t0) * 1000.0 + logger.info( + "[%s] wcdb_get_sessions done account=%s sessions=%s ms=%.1f", + trace_id, + account_dir.name, + len(raw or []), + wcdb_ms, + ) except WCDBRealtimeError as e: raise 
HTTPException(status_code=400, detail=str(e)) @@ -2182,6 +3651,17 @@ async def list_chat_sessions( "sort_timestamp": item.get("sort_timestamp", item.get("sortTimestamp", item.get("last_timestamp", 0))), "last_msg_type": item.get("last_msg_type", item.get("lastMsgType", 0)), "last_msg_sub_type": item.get("last_msg_sub_type", item.get("lastMsgSubType", 0)), + # Keep these fields so group session previews can render "sender: content" without + # crashing (realtime rows are dicts, not sqlite Rows). + "last_msg_sender": item.get("last_msg_sender", item.get("lastMsgSender", "")), + "last_sender_display_name": item.get( + "last_sender_display_name", + item.get("lastSenderDisplayName", ""), + ), + "last_msg_locald_id": item.get( + "last_msg_locald_id", + item.get("lastMsgLocaldId", item.get("lastMsgLocalId", 0)), + ), } ) @@ -2193,6 +3673,7 @@ def _ts(v: Any) -> int: norm.sort(key=lambda r: _ts(r.get("sort_timestamp")), reverse=True) rows = norm + logger.info("[%s] list_sessions realtime normalized account=%s rows=%s", trace_id, account_dir.name, len(rows)) else: session_db_path = account_dir / "session.db" sconn = sqlite3.connect(str(session_db_path)) @@ -2242,24 +3723,96 @@ def _ts(v: Any) -> int: finally: sconn.close() - filtered: list[sqlite3.Row] = [] - usernames: list[str] = [] + filtered: list[Any] = [] for r in rows: - username = r["username"] or "" + username = _session_row_get(r, "username", "") or "" if not username: continue - if not include_hidden and int(r["is_hidden"] or 0) == 1: + if not include_hidden and int((_session_row_get(r, "is_hidden", 0) or 0)) == 1: continue if not _should_keep_session(username, include_official=include_official): continue filtered.append(r) - usernames.append(username) - if len(filtered) >= int(limit): - break + + raw_usernames = [str(_session_row_get(r, "username", "") or "").strip() for r in filtered] + top_flags = _load_contact_top_flags(contact_db_path, raw_usernames) + + def _to_int(v: Any) -> int: + try: + return int(v or 
0) + except Exception: + return 0 + + def _session_sort_key(row: Any) -> tuple[int, int, int]: + username = str(_session_row_get(row, "username", "") or "").strip() + sort_ts = _to_int(_session_row_get(row, "sort_timestamp", 0)) + last_ts = _to_int(_session_row_get(row, "last_timestamp", 0)) + return ( + 1 if bool(top_flags.get(username, False)) else 0, + sort_ts, + last_ts, + ) + + filtered.sort(key=_session_sort_key, reverse=True) + if len(filtered) > int(limit): + filtered = filtered[: int(limit)] + + usernames: list[str] = [] + for r in filtered: + username = str(_session_row_get(r, "username", "") or "").strip() + if username: + usernames.append(username) contact_rows = _load_contact_rows(contact_db_path, usernames) local_avatar_usernames = _query_head_image_usernames(head_image_db_path, usernames) + # Some sessions (notably enterprise groups / openim-related IDs) may be missing from decrypted contact.db + # (or lack nickname/avatar columns). In that case, fall back to WCDB APIs (same as WeFlow) to resolve + # display names + avatar URLs. + wcdb_display_names: dict[str, str] = {} + wcdb_avatar_urls: dict[str, str] = {} + try: + need_display: list[str] = [] + need_avatar: list[str] = [] + if source_norm == "realtime": + # In realtime mode, always ask WCDB for display names: decrypted contact.db can be stale. + need_display = [str(u or "").strip() for u in usernames if str(u or "").strip()] + for u in usernames: + if not u: + continue + if source_norm != "realtime": + row = contact_rows.get(u) + if _pick_display_name(row, u) == u: + need_display.append(u) + if source_norm == "realtime": + # In realtime mode, prefer WCDB-resolved avatar URLs (contact.db can be stale). 
+ if u not in local_avatar_usernames: + need_avatar.append(u) + else: + if u not in local_avatar_usernames: + need_avatar.append(u) + + need_display = list(dict.fromkeys(need_display)) + need_avatar = list(dict.fromkeys(need_avatar)) + if need_display or need_avatar: + wcdb_conn = rt_conn + if wcdb_conn is None: + status = WCDB_REALTIME.get_status(account_dir) + can_connect = bool(status.get("dll_present")) and bool(status.get("key_present")) and bool( + status.get("session_db_path") + ) + if can_connect: + wcdb_conn = WCDB_REALTIME.ensure_connected(account_dir) + if wcdb_conn is not None: + with wcdb_conn.lock: + if need_display: + wcdb_display_names = _wcdb_get_display_names(wcdb_conn.handle, need_display) + if need_avatar: + wcdb_avatar_urls = _wcdb_get_avatar_urls(wcdb_conn.handle, need_avatar) + except Exception: + wcdb_display_names = {} + wcdb_avatar_urls = {} + preview_mode = str(preview or "").strip().lower() if preview_mode not in {"latest", "index", "session", "db", "none"}: preview_mode = "latest" @@ -2285,23 +3838,69 @@ def _ts(v: Any) -> int: except Exception: last_previews = {} + def _is_generic_location_preview(value: Any) -> bool: + text = re.sub(r"\s+", " ", str(value or "").strip()).strip() + if not text: + return False + lowered = text.lower() + return lowered in {"[location]", "[位置]"} or lowered.endswith(": [location]") or lowered.endswith(": [位置]") + if preview_mode in {"latest", "db"}: - targets = usernames if preview_mode == "db" else [u for u in usernames if u and (u not in last_previews)] + targets = ( + usernames + if preview_mode == "db" + else [u for u in usernames if u and ((u not in last_previews) or _is_generic_location_preview(last_previews.get(u)))] + ) if targets: legacy = _load_latest_message_previews(account_dir, targets) for u, v in legacy.items(): if v: last_previews[u] = v + group_sender_display_names: dict[str, str] = _build_group_sender_display_name_map( + contact_db_path, + last_previews, + ) + unresolved = [] + for 
conv_username, preview_text in last_previews.items(): + if not str(conv_username or "").endswith("@chatroom"): + continue + sender_username = _extract_group_preview_sender_username(preview_text) + if sender_username and sender_username not in group_sender_display_names: + unresolved.append(sender_username) + unresolved = list(dict.fromkeys(unresolved)) + if unresolved: + try: + wcdb_conn = rt_conn or WCDB_REALTIME.ensure_connected(account_dir) + with wcdb_conn.lock: + wcdb_names = _wcdb_get_display_names(wcdb_conn.handle, unresolved) + for sender_username in unresolved: + wcdb_name = str(wcdb_names.get(sender_username) or "").strip() + if wcdb_name and wcdb_name != sender_username: + group_sender_display_names[sender_username] = wcdb_name + except Exception: + pass + sessions: list[dict[str, Any]] = [] for r in filtered: username = r["username"] c_row = contact_rows.get(username) display_name = _pick_display_name(c_row, username) - avatar_url = _pick_avatar_url(c_row) - if not avatar_url and username in local_avatar_usernames: - avatar_url = base_url + _build_avatar_url(account_dir.name, username) + wd = str(wcdb_display_names.get(username) or "").strip() + if source_norm == "realtime" and wd and wd != username: + display_name = wd + elif display_name == username: + if wd and wd != username: + display_name = wd + + # Prefer local head_image avatars when available: decrypted contact.db URLs can be stale + # (or hotlink-protected for browsers). WCDB realtime (when available) is the next best. 
+ avatar_url = base_url + _avatar_url_unified( + account_dir=account_dir, + username=username, + local_avatar_usernames=local_avatar_usernames, + ) last_message = "" if preview_mode == "session": @@ -2346,6 +3945,42 @@ def _ts(v: Any) -> int: last_msg_sub_type = 0 if last_msg_type == 81604378673 or (last_msg_type == 49 and last_msg_sub_type == 19): last_message = "[聊天记录]" + elif last_msg_type == 48: + text = re.sub(r"\s+", " ", str(last_message or "").strip()).strip() + text = re.sub(r"^\[location\]", "", text, flags=re.IGNORECASE).strip() + text = re.sub(r"^\[位置\]", "", text).strip() + last_message = f"[位置]{text}" if text else "[位置]" + + last_message = _normalize_session_preview_text( + last_message, + is_group=bool(str(username or "").endswith("@chatroom")), + sender_display_names=group_sender_display_names, + ) + if str(username or "").endswith("@chatroom") and str(last_message or "") and not str(last_message).startswith("[草稿]"): + # Prefer group card nickname when available. In realtime mode, WCDB session rows can provide + # `last_sender_display_name`, but we may still get a summary that doesn't include "sender:". + # Also guard against URL schemes like "https://..." being mis-parsed as "https: //...". 
+ raw_sender_display = "" + try: + raw_sender_display = r["last_sender_display_name"] + except Exception: + try: + raw_sender_display = r.get("last_sender_display_name", "") + except Exception: + raw_sender_display = "" + sender_display = _decode_sqlite_text(raw_sender_display).strip() + if sender_display: + text = re.sub(r"\s+", " ", str(last_message or "").strip()).strip() + match = re.match(r"^([^:\n]{1,128}):\s*(.+)$", text) + if match: + prefix = str(match.group(1) or "").strip() + body = re.sub(r"\s+", " ", str(match.group(2) or "").strip()).strip() + if prefix.lower() in {"http", "https"} and body.startswith("//"): + last_message = f"{sender_display}: {text}" + else: + last_message = f"{sender_display}: {body}" + else: + last_message = f"{sender_display}: {text}" last_time = _format_session_time(r["sort_timestamp"] or r["last_timestamp"]) @@ -2359,6 +3994,7 @@ def _ts(v: Any) -> int: "lastMessageTime": last_time, "unreadCount": int(r["unread_count"] or 0), "isGroup": bool(username.endswith("@chatroom")), + "isTop": bool(top_flags.get(str(username or "").strip(), False)), } ) @@ -2502,7 +4138,7 @@ def _collect_chat_messages( if is_group and sender_prefix and (not sender_username): sender_username = sender_prefix - if is_group and (raw_text.startswith("<") or raw_text.startswith('"<')): + if is_group and (not sender_username) and (raw_text.startswith("<") or raw_text.startswith('"<')): xml_sender = _extract_sender_from_group_xml(raw_text) if xml_sender: sender_username = xml_sender @@ -2535,6 +4171,9 @@ def _collect_chat_messages( quote_username = "" quote_title = "" quote_content = "" + quote_thumb_url = "" + link_type = "" + link_style = "" quote_server_id = "" quote_type = "" quote_voice_length = "" @@ -2546,16 +4185,14 @@ def _collect_chat_messages( file_md5 = "" transfer_id = "" voip_type = "" + location_lat: Optional[float] = None + location_lng: Optional[float] = None + location_poiname = "" + location_label = "" if local_type == 10000: render_type = 
"system" - if "revokemsg" in raw_text: - content_text = "撤回了一条消息" - else: - import re - - content_text = re.sub(r"]*>", "", raw_text) - content_text = re.sub(r"\s+", " ", content_text).strip() or "[系统消息]" + content_text = _parse_system_message_content(raw_text) elif local_type == 49: parsed = _parse_app_message(raw_text) render_type = str(parsed.get("renderType") or "text") @@ -2567,6 +4204,9 @@ def _collect_chat_messages( record_item = str(parsed.get("recordItem") or "") quote_title = str(parsed.get("quoteTitle") or "") quote_content = str(parsed.get("quoteContent") or "") + quote_thumb_url = str(parsed.get("quoteThumbUrl") or "") + link_type = str(parsed.get("linkType") or "") + link_style = str(parsed.get("linkStyle") or "") quote_username = str(parsed.get("quoteUsername") or "") quote_server_id = str(parsed.get("quoteServerId") or "") quote_type = str(parsed.get("quoteType") or "") @@ -2598,8 +4238,7 @@ def _collect_chat_messages( render_type = "system" template = _extract_xml_tag_text(raw_text, "template") if template: - import re - + # import re pat_usernames.update({m.group(1) for m in re.finditer(r"\$\{([^}]+)\}", template) if m.group(1)}) content_text = "[拍一拍]" else: @@ -2610,6 +4249,9 @@ def _collect_chat_messages( content_text = str(parsed.get("content") or "[引用消息]") quote_title = str(parsed.get("quoteTitle") or "") quote_content = str(parsed.get("quoteContent") or "") + quote_thumb_url = str(parsed.get("quoteThumbUrl") or "") + link_type = str(parsed.get("linkType") or "") + link_style = str(parsed.get("linkStyle") or "") quote_username = str(parsed.get("quoteUsername") or "") quote_server_id = str(parsed.get("quoteServerId") or "") quote_type = str(parsed.get("quoteType") or "") @@ -2709,6 +4351,11 @@ def _collect_chat_messages( local_id=local_id, create_time=create_time, ) + + if not _is_hex_md5(video_thumb_md5): + packed_md5 = _extract_md5_from_packed_info(r["packed_info_data"]) + if packed_md5: + video_thumb_md5 = packed_md5 content_text = "[视频]" 
elif local_type == 47: render_type = "emoji" @@ -2728,11 +4375,18 @@ def _collect_chat_messages( create_time=create_time, ) content_text = "[表情]" + elif local_type == 48: + parsed = _parse_location_message(raw_text) + render_type = str(parsed.get("renderType") or "location") + content_text = str(parsed.get("content") or "[Location]") + location_lat = parsed.get("locationLat") + location_lng = parsed.get("locationLng") + location_poiname = str(parsed.get("locationPoiname") or "") + location_label = str(parsed.get("locationLabel") or "") elif local_type == 50: render_type = "voip" try: - import re - + # import re block = raw_text m_voip = re.search( r"(]*>.*?)", @@ -2767,9 +4421,13 @@ def _collect_chat_messages( title = str(parsed.get("title") or title) url = str(parsed.get("url") or url) from_name = str(parsed.get("from") or from_name) + from_username = str(parsed.get("fromUsername") or from_username) record_item = str(parsed.get("recordItem") or record_item) quote_title = str(parsed.get("quoteTitle") or quote_title) quote_content = str(parsed.get("quoteContent") or quote_content) + quote_thumb_url = str(parsed.get("quoteThumbUrl") or quote_thumb_url) + link_type = str(parsed.get("linkType") or link_type) + link_style = str(parsed.get("linkStyle") or link_style) amount = str(parsed.get("amount") or amount) cover_url = str(parsed.get("coverUrl") or cover_url) thumb_url = str(parsed.get("thumbUrl") or thumb_url) @@ -2777,6 +4435,10 @@ def _collect_chat_messages( pay_sub_type = str(parsed.get("paySubType") or pay_sub_type) file_md5 = str(parsed.get("fileMd5") or file_md5) transfer_id = str(parsed.get("transferId") or transfer_id) + quote_username = str(parsed.get("quoteUsername") or quote_username) + quote_server_id = str(parsed.get("quoteServerId") or quote_server_id) + quote_type = str(parsed.get("quoteType") or quote_type) + quote_voice_length = str(parsed.get("quoteVoiceLength") or quote_voice_length) if render_type == "transfer": # 如果 transferId 仍为空,尝试从原始 XML 提取 
@@ -2827,6 +4489,8 @@ def _collect_chat_messages( "content": content_text, "title": title, "url": url, + "linkType": link_type, + "linkStyle": link_style, "from": from_name, "fromUsername": from_username, "recordItem": record_item, @@ -2850,6 +4514,7 @@ def _collect_chat_messages( "quoteVoiceLength": str(quote_voice_length).strip(), "quoteTitle": quote_title, "quoteContent": quote_content, + "quoteThumbUrl": quote_thumb_url, "amount": amount, "coverUrl": cover_url, "fileSize": file_size, @@ -2857,6 +4522,10 @@ def _collect_chat_messages( "paySubType": pay_sub_type, "transferStatus": transfer_status, "transferId": transfer_id, + "locationLat": location_lat, + "locationLng": location_lng, + "locationPoiname": location_poiname, + "locationLabel": location_label, "_rawText": raw_text if local_type == 266287972401 else "", } ) @@ -2872,8 +4541,184 @@ def _collect_chat_messages( return merged, has_more_any, sender_usernames, quote_usernames, pat_usernames +@router.get("/api/chat/messages/daily_counts", summary="获取某月每日消息数(热力图)") +def get_chat_message_daily_counts( + username: str, + year: int, + month: int, + account: Optional[str] = None, +): + username = str(username or "").strip() + if not username: + raise HTTPException(status_code=400, detail="Missing username.") + try: + y = int(year) + m = int(month) + except Exception: + raise HTTPException(status_code=400, detail="Invalid year or month.") + if m < 1 or m > 12: + raise HTTPException(status_code=400, detail="Invalid month.") + + try: + start_ts, end_ts = _local_month_range_epoch_seconds(year=y, month=m) + except Exception: + raise HTTPException(status_code=400, detail="Invalid year or month.") + + account_dir = _resolve_account_dir(account) + db_paths = _iter_message_db_paths(account_dir) + + counts: dict[str, int] = {} + + for db_path in db_paths: + conn = sqlite3.connect(str(db_path)) + try: + try: + table_name = _resolve_msg_table_name(conn, username) + if not table_name: + continue + quoted_table = 
_quote_ident(table_name) + rows = conn.execute( + "SELECT strftime('%Y-%m-%d', CAST(create_time AS INTEGER), 'unixepoch', 'localtime') AS day, " + "COUNT(*) AS c " + f"FROM {quoted_table} " + "WHERE CAST(create_time AS INTEGER) >= ? AND CAST(create_time AS INTEGER) < ? " + "GROUP BY day", + (int(start_ts), int(end_ts)), + ).fetchall() + for day, c in rows: + k = str(day or "").strip() + if not k: + continue + try: + vv = int(c or 0) + except Exception: + vv = 0 + if vv <= 0: + continue + counts[k] = int(counts.get(k, 0)) + vv + except Exception: + continue + finally: + conn.close() + + total = int(sum(int(v) for v in counts.values())) if counts else 0 + max_count = int(max(counts.values())) if counts else 0 + + return { + "status": "success", + "account": account_dir.name, + "username": username, + "year": int(y), + "month": int(m), + "counts": counts, + "total": total, + "max": max_count, + } + + +@router.get("/api/chat/messages/anchor", summary="获取定位锚点(某日第一条/会话顶部)") +def get_chat_message_anchor( + username: str, + kind: str, + account: Optional[str] = None, + date: Optional[str] = None, +): + username = str(username or "").strip() + if not username: + raise HTTPException(status_code=400, detail="Missing username.") + + kind_norm = str(kind or "").strip().lower() + if kind_norm not in {"day", "first"}: + raise HTTPException(status_code=400, detail="Invalid kind.") + + date_norm: Optional[str] = None + start_ts: Optional[int] = None + end_ts: Optional[int] = None + if kind_norm == "day": + if not date: + raise HTTPException(status_code=400, detail="Missing date.") + try: + start_ts, end_ts, date_norm = _local_day_range_epoch_seconds(date_str=str(date)) + except Exception: + raise HTTPException(status_code=400, detail="Invalid date.") + + account_dir = _resolve_account_dir(account) + db_paths = _iter_message_db_paths(account_dir) + + best_key: Optional[tuple[int, int, int]] = None + best_anchor_id = "" + best_create_time = 0 + + for db_path in db_paths: + conn = 
sqlite3.connect(str(db_path)) + try: + try: + table_name = _resolve_msg_table_name(conn, username) + if not table_name: + continue + quoted_table = _quote_ident(table_name) + + if kind_norm == "first": + row = conn.execute( + "SELECT local_id, CAST(create_time AS INTEGER) AS create_time, " + "COALESCE(CAST(sort_seq AS INTEGER), 0) AS sort_seq " + f"FROM {quoted_table} " + "ORDER BY CAST(create_time AS INTEGER) ASC, COALESCE(CAST(sort_seq AS INTEGER), 0) ASC, local_id ASC " + "LIMIT 1" + ).fetchone() + else: + row = conn.execute( + "SELECT local_id, CAST(create_time AS INTEGER) AS create_time, " + "COALESCE(CAST(sort_seq AS INTEGER), 0) AS sort_seq " + f"FROM {quoted_table} " + "WHERE CAST(create_time AS INTEGER) >= ? AND CAST(create_time AS INTEGER) < ? " + "ORDER BY CAST(create_time AS INTEGER) ASC, COALESCE(CAST(sort_seq AS INTEGER), 0) ASC, local_id ASC " + "LIMIT 1", + (int(start_ts or 0), int(end_ts or 0)), + ).fetchone() + + if not row: + continue + try: + local_id = int(row[0] or 0) + create_time = int(row[1] or 0) + sort_seq = int(row[2] or 0) + except Exception: + continue + if local_id <= 0: + continue + + key = (int(create_time), int(sort_seq), int(local_id)) + if (best_key is None) or (key < best_key): + best_key = key + best_create_time = int(create_time) + best_anchor_id = f"{db_path.stem}:{table_name}:{local_id}" + except Exception: + continue + finally: + conn.close() + + if not best_anchor_id: + return { + "status": "empty", + "anchorId": "", + } + + resp: dict[str, Any] = { + "status": "success", + "account": account_dir.name, + "username": username, + "kind": kind_norm, + "anchorId": best_anchor_id, + "createTime": int(best_create_time), + } + if date_norm is not None: + resp["date"] = date_norm + return resp + + @router.get("/api/chat/messages", summary="获取会话消息列表") -async def list_chat_messages( +def list_chat_messages( request: Request, username: str, account: Optional[str] = None, @@ -3066,6 +4911,63 @@ def pick(*keys: str) -> Any: break 
scan_take = next_take + # Self-heal (default source only): if the decrypted snapshot has no conversation table yet (new session), + # do a one-shot realtime->decrypted sync and re-query once. This avoids "暂无聊天记录" after turning off realtime. + if ( + source_norm != "realtime" + and (source is None or not str(source).strip()) + and (not merged) + and int(offset) == 0 + ): + missing_table = False + try: + missing_table = _resolve_decrypted_message_table(account_dir, username) is None + except Exception: + missing_table = True + + if missing_table: + rt_conn2 = None + try: + rt_conn2 = WCDB_REALTIME.ensure_connected(account_dir) + except WCDBRealtimeError: + rt_conn2 = None + except Exception: + rt_conn2 = None + + if rt_conn2 is not None: + try: + with _realtime_sync_lock(account_dir.name, username): + msg_db_path2, table_name2 = _ensure_decrypted_message_table(account_dir, username) + _sync_chat_realtime_messages_for_table( + account_dir=account_dir, + rt_conn=rt_conn2, + username=username, + msg_db_path=msg_db_path2, + table_name=table_name2, + max_scan=max(200, int(limit) + 50), + backfill_limit=0, + ) + except Exception: + pass + + ( + merged, + has_more_any, + sender_usernames, + quote_usernames, + pat_usernames, + ) = _collect_chat_messages( + username=username, + account_dir=account_dir, + db_paths=db_paths, + resource_conn=resource_conn, + resource_chat_id=resource_chat_id, + take=scan_take, + want_types=want_types, + ) + if want_types is not None: + merged = [m for m in merged if _normalize_render_type_key(m.get("renderType")) in want_types] + r""" take = int(limit) + int(offset) take_probe = take + 1 @@ -3151,7 +5053,7 @@ def pick(*keys: str) -> Any: if is_group and sender_prefix: sender_username = sender_prefix - if is_group and (raw_text.startswith("<") or raw_text.startswith('"<')): + if is_group and (not sender_username) and (raw_text.startswith("<") or raw_text.startswith('"<')): xml_sender = _extract_sender_from_group_xml(raw_text) if xml_sender: 
sender_username = xml_sender @@ -3187,6 +5089,9 @@ def pick(*keys: str) -> Any: quote_username = "" quote_title = "" quote_content = "" + quote_thumb_url = "" + link_type = "" + link_style = "" quote_server_id = "" quote_type = "" quote_voice_length = "" @@ -3201,13 +5106,7 @@ def pick(*keys: str) -> Any: if local_type == 10000: render_type = "system" - if "revokemsg" in raw_text: - content_text = "撤回了一条消息" - else: - import re - - content_text = re.sub(r"]*>", "", raw_text) - content_text = re.sub(r"\s+", " ", content_text).strip() or "[系统消息]" + content_text = _parse_system_message_content(raw_text) elif local_type == 49: parsed = _parse_app_message(raw_text) render_type = str(parsed.get("renderType") or "text") @@ -3219,6 +5118,9 @@ def pick(*keys: str) -> Any: record_item = str(parsed.get("recordItem") or "") quote_title = str(parsed.get("quoteTitle") or "") quote_content = str(parsed.get("quoteContent") or "") + quote_thumb_url = str(parsed.get("quoteThumbUrl") or "") + link_type = str(parsed.get("linkType") or "") + link_style = str(parsed.get("linkStyle") or "") quote_username = str(parsed.get("quoteUsername") or "") quote_server_id = str(parsed.get("quoteServerId") or "") quote_type = str(parsed.get("quoteType") or "") @@ -3250,7 +5152,7 @@ def pick(*keys: str) -> Any: render_type = "system" template = _extract_xml_tag_text(raw_text, "template") if template: - import re + # import re pat_usernames.update({m.group(1) for m in re.finditer(r"\$\{([^}]+)\}", template) if m.group(1)}) content_text = "[拍一拍]" @@ -3262,6 +5164,9 @@ def pick(*keys: str) -> Any: content_text = str(parsed.get("content") or "[引用消息]") quote_title = str(parsed.get("quoteTitle") or "") quote_content = str(parsed.get("quoteContent") or "") + quote_thumb_url = str(parsed.get("quoteThumbUrl") or "") + link_type = str(parsed.get("linkType") or "") + link_style = str(parsed.get("linkStyle") or "") quote_username = str(parsed.get("quoteUsername") or "") quote_server_id = 
str(parsed.get("quoteServerId") or "") quote_type = str(parsed.get("quoteType") or "") @@ -3379,7 +5284,7 @@ def pick(*keys: str) -> Any: elif local_type == 50: render_type = "voip" try: - import re + # import re block = raw_text m_voip = re.search( @@ -3418,6 +5323,9 @@ def pick(*keys: str) -> Any: record_item = str(parsed.get("recordItem") or record_item) quote_title = str(parsed.get("quoteTitle") or quote_title) quote_content = str(parsed.get("quoteContent") or quote_content) + quote_thumb_url = str(parsed.get("quoteThumbUrl") or quote_thumb_url) + link_type = str(parsed.get("linkType") or link_type) + link_style = str(parsed.get("linkStyle") or link_style) amount = str(parsed.get("amount") or amount) cover_url = str(parsed.get("coverUrl") or cover_url) thumb_url = str(parsed.get("thumbUrl") or thumb_url) @@ -3468,6 +5376,8 @@ def pick(*keys: str) -> Any: "content": content_text, "title": title, "url": url, + "linkType": link_type, + "linkStyle": link_style, "from": from_name, "fromUsername": from_username, "recordItem": record_item, @@ -3491,6 +5401,7 @@ def pick(*keys: str) -> Any: "quoteVoiceLength": str(quote_voice_length).strip(), "quoteTitle": quote_title, "quoteContent": quote_content, + "quoteThumbUrl": quote_thumb_url, "amount": amount, "coverUrl": cover_url, "fileSize": file_size, @@ -3527,81 +5438,38 @@ def pick(*keys: str) -> Any: deduped.append(m) merged = deduped - # 后处理:关联转账消息的最终状态 - # 策略:优先使用 transferId 精确匹配,回退到金额+时间窗口匹配 - # paysubtype 含义:1=不明确 3=已收款 4=对方退回给你 8=发起转账 9=被对方退回 10=已过期 + _postprocess_transfer_messages(merged) - # 收集已退还和已收款的转账ID和金额 - returned_transfer_ids: set[str] = set() # 退还状态的 transferId - received_transfer_ids: set[str] = set() # 已收款状态的 transferId - returned_amounts_with_time: list[tuple[str, int]] = [] # (金额, 时间戳) 用于退还回退匹配 - received_amounts_with_time: list[tuple[str, int]] = [] # (金额, 时间戳) 用于收款回退匹配 + def sort_key(m: dict[str, Any]) -> tuple[int, int, int]: + sseq = int(m.get("sortSeq") or 0) + cts = int(m.get("createTime") or 0) 
+ lid = int(m.get("localId") or 0) + return (cts, sseq, lid) - for m in merged: - if m.get("renderType") == "transfer": - pst = str(m.get("paySubType") or "") - tid = str(m.get("transferId") or "").strip() - amt = str(m.get("amount") or "") - ts = int(m.get("createTime") or 0) - - if pst in ("4", "9"): # 退还状态 - if tid: - returned_transfer_ids.add(tid) - if amt: - returned_amounts_with_time.append((amt, ts)) - elif pst == "3": # 已收款状态 - if tid: - received_transfer_ids.add(tid) - if amt: - received_amounts_with_time.append((amt, ts)) - - # 更新原始转账消息的状态 - for m in merged: - if m.get("renderType") == "transfer": - pst = str(m.get("paySubType") or "") - # 只更新未确定状态的原始转账消息(paysubtype=1 或 8) - if pst in ("1", "8"): - tid = str(m.get("transferId") or "").strip() - amt = str(m.get("amount") or "") - ts = int(m.get("createTime") or 0) - - # 优先检查退还状态(退还优先于收款) - should_mark_returned = False - should_mark_received = False - - # 策略1:精确 transferId 匹配 - if tid: - if tid in returned_transfer_ids: - should_mark_returned = True - elif tid in received_transfer_ids: - should_mark_received = True + merged.sort(key=sort_key, reverse=True) + has_more_global = bool(has_more_any or (len(merged) > (int(offset) + int(limit)))) + page = merged[int(offset) : int(offset) + int(limit)] + if want_asc: + page = list(reversed(page)) - # 策略2:回退到金额+时间窗口匹配(24小时内同金额) - if not should_mark_returned and not should_mark_received and amt: - for ret_amt, ret_ts in returned_amounts_with_time: - if ret_amt == amt and abs(ret_ts - ts) <= 86400: - should_mark_returned = True - break - if not should_mark_returned: - for rec_amt, rec_ts in received_amounts_with_time: - if rec_amt == amt and abs(rec_ts - ts) <= 86400: - should_mark_received = True - break + # Hot path optimization: only enrich the page we return. 
+ if not page: + return { + "status": "success", + "account": account_dir.name, + "username": username, + "total": int(offset) + (1 if has_more_global else 0), + "hasMore": bool(has_more_global), + "messages": [], + } - if should_mark_returned: - m["paySubType"] = "9" - m["transferStatus"] = "已被退还" - elif should_mark_received: - m["paySubType"] = "3" - # 根据 isSent 判断:发起方显示"已收款",收款方显示"已被接收" - is_sent = m.get("isSent", False) - m["transferStatus"] = "已收款" if is_sent else "已被接收" + messages_window = page # Some appmsg payloads provide only `from` (sourcedisplayname) but not `fromUsername` (sourceusername). # Recover `fromUsername` via contact.db so the frontend can render the publisher avatar. missing_from_names = [ str(m.get("from") or "").strip() - for m in merged + for m in messages_window if str(m.get("renderType") or "").strip() == "link" and str(m.get("from") or "").strip() and not str(m.get("fromUsername") or "").strip() @@ -3609,7 +5477,7 @@ def pick(*keys: str) -> Any: if missing_from_names: name_to_username = _load_usernames_by_display_names(contact_db_path, missing_from_names) if name_to_username: - for m in merged: + for m in messages_window: if str(m.get("fromUsername") or "").strip(): continue if str(m.get("renderType") or "").strip() != "link": @@ -3618,31 +5486,100 @@ def pick(*keys: str) -> Any: if fn and fn in name_to_username: m["fromUsername"] = name_to_username[fn] - from_usernames = [str(m.get("fromUsername") or "").strip() for m in merged] + pat_usernames_in_page: set[str] = set() + for m in messages_window: + if int(m.get("type") or 0) != 266287972401: + continue + raw = str(m.get("_rawText") or "") + if not raw: + continue + template = _extract_xml_tag_text(raw, "template") + if not template: + continue + pat_usernames_in_page.update({mm.group(1) for mm in re.finditer(r"\$\{([^}]+)\}", template) if mm.group(1)}) + + from_usernames = [str(m.get("fromUsername") or "").strip() for m in messages_window] + sender_usernames_in_page = 
[str(m.get("senderUsername") or "").strip() for m in messages_window] + quote_usernames_in_page = [str(m.get("quoteUsername") or "").strip() for m in messages_window] uniq_senders = list( dict.fromkeys( - [u for u in (sender_usernames + list(pat_usernames) + quote_usernames + from_usernames) if u] + [ + u + for u in ( + sender_usernames_in_page + + list(pat_usernames_in_page) + + quote_usernames_in_page + + from_usernames + ) + if u + ] ) ) sender_contact_rows = _load_contact_rows(contact_db_path, uniq_senders) local_sender_avatars = _query_head_image_usernames(head_image_db_path, uniq_senders) - for m in merged: + # contact.db may not include enterprise/openim contacts (or group chatroom records). WCDB has a more complete + # view of display names + avatar URLs, so we use it as a best-effort fallback. + wcdb_display_names: dict[str, str] = {} + wcdb_avatar_urls: dict[str, str] = {} + try: + need_display: list[str] = [] + need_avatar: list[str] = [] + for u in uniq_senders: + if not u: + continue + row = sender_contact_rows.get(u) + if _pick_display_name(row, u) == u: + need_display.append(u) + if u not in local_sender_avatars: + need_avatar.append(u) + + need_display = list(dict.fromkeys(need_display)) + need_avatar = list(dict.fromkeys(need_avatar)) + if need_display or need_avatar: + wcdb_conn = WCDB_REALTIME.ensure_connected(account_dir) + with wcdb_conn.lock: + if need_display: + wcdb_display_names = _wcdb_get_display_names(wcdb_conn.handle, need_display) + if need_avatar: + wcdb_avatar_urls = _wcdb_get_avatar_urls(wcdb_conn.handle, need_avatar) + except Exception: + wcdb_display_names = {} + wcdb_avatar_urls = {} + + group_nicknames = _load_group_nickname_map( + account_dir=account_dir, + contact_db_path=contact_db_path, + chatroom_id=username, + sender_usernames=uniq_senders, + ) + + for m in messages_window: # If appmsg doesn't provide sourcedisplayname, try mapping sourceusername to display name. 
if (not str(m.get("from") or "").strip()) and str(m.get("fromUsername") or "").strip(): fu = str(m.get("fromUsername") or "").strip() frow = sender_contact_rows.get(fu) if frow is not None: m["from"] = _pick_display_name(frow, fu) + else: + wd = str(wcdb_display_names.get(fu) or "").strip() + if wd: + m["from"] = wd su = str(m.get("senderUsername") or "") if not su: continue - row = sender_contact_rows.get(su) - m["senderDisplayName"] = _pick_display_name(row, su) - avatar_url = _pick_avatar_url(row) - if not avatar_url and su in local_sender_avatars: - avatar_url = base_url + _build_avatar_url(account_dir.name, su) + m["senderDisplayName"] = _resolve_sender_display_name( + sender_username=su, + sender_contact_rows=sender_contact_rows, + wcdb_display_names=wcdb_display_names, + group_nicknames=group_nicknames, + ) + avatar_url = base_url + _avatar_url_unified( + account_dir=account_dir, + username=su, + local_avatar_usernames=local_sender_avatars, + ) m["senderAvatar"] = avatar_url qu = str(m.get("quoteUsername") or "").strip() @@ -3658,9 +5595,15 @@ def pick(*keys: str) -> Any: if remark: m["quoteTitle"] = remark elif not qt: - m["quoteTitle"] = _pick_display_name(qrow, qu) + title = _pick_display_name(qrow, qu) + if title == qu: + wd = str(wcdb_display_names.get(qu) or "").strip() + if wd and wd != qu: + title = wd + m["quoteTitle"] = title elif not qt: - m["quoteTitle"] = qu + wd = str(wcdb_display_names.get(qu) or "").strip() + m["quoteTitle"] = wd or qu # Media URL fallback: if CDN URLs missing, use local media endpoints. 
try: @@ -3690,8 +5633,7 @@ def pick(*keys: str) -> Any: if existing_local: try: - import re - + # import re cur = str(m.get("emojiUrl") or "") if cur and re.match(r"^https?://", cur, flags=re.I) and ("/api/chat/media/emoji" not in cur): m["emojiRemoteUrl"] = cur @@ -3739,6 +5681,23 @@ def pick(*keys: str) -> Any: base_url + f"/api/chat/media/video?account={quote(account_dir.name)}&file_id={quote(video_file_id)}&username={quote(username)}" ) + elif rt == "link": + thumb_url = str(m.get("thumbUrl") or "").strip() + if thumb_url and (not thumb_url.lower().startswith(("http://", "https://"))): + try: + lid = int(m.get("localId") or 0) + except Exception: + lid = 0 + try: + ct = int(m.get("createTime") or 0) + except Exception: + ct = 0 + if lid > 0 and ct > 0: + file_id = f"{lid}_{ct}" + m["thumbUrl"] = ( + base_url + + f"/api/chat/media/image?account={quote(account_dir.name)}&file_id={quote(file_id)}&username={quote(username)}" + ) elif rt == "voice": if str(m.get("serverId") or ""): sid = int(m.get("serverId") or 0) @@ -3755,18 +5714,6 @@ def pick(*keys: str) -> Any: if "_rawText" in m: m.pop("_rawText", None) - def sort_key(m: dict[str, Any]) -> tuple[int, int, int]: - sseq = int(m.get("sortSeq") or 0) - cts = int(m.get("createTime") or 0) - lid = int(m.get("localId") or 0) - return (cts, sseq, lid) - - merged.sort(key=sort_key, reverse=True) - has_more_global = bool(has_more_any or (len(merged) > (int(offset) + int(limit)))) - page = merged[int(offset) : int(offset) + int(limit)] - if want_asc: - page = list(reversed(page)) - return { "status": "success", "account": account_dir.name, @@ -4088,26 +6035,69 @@ async def _search_chat_messages_via_fts( uniq_usernames = list(dict.fromkeys([username] + [str(x.get("senderUsername") or "") for x in hits])) contact_rows = _load_contact_rows(contact_db_path, uniq_usernames) local_avatar_usernames = _query_head_image_usernames(head_image_db_path, uniq_usernames) + + wcdb_display_names: dict[str, str] = {} + wcdb_avatar_urls: 
dict[str, str] = {} + try: + need_display: list[str] = [] + need_avatar: list[str] = [] + for u in uniq_usernames: + uu = str(u or "").strip() + if not uu: + continue + row = contact_rows.get(uu) + if _pick_display_name(row, uu) == uu: + need_display.append(uu) + if uu not in local_avatar_usernames: + need_avatar.append(uu) + + need_display = list(dict.fromkeys(need_display)) + need_avatar = list(dict.fromkeys(need_avatar)) + if need_display or need_avatar: + wcdb_conn = WCDB_REALTIME.ensure_connected(account_dir) + with wcdb_conn.lock: + if need_display: + wcdb_display_names = _wcdb_get_display_names(wcdb_conn.handle, need_display) + if need_avatar: + wcdb_avatar_urls = _wcdb_get_avatar_urls(wcdb_conn.handle, need_avatar) + except Exception: + wcdb_display_names = {} + wcdb_avatar_urls = {} + conv_row = contact_rows.get(username) conv_name = _pick_display_name(conv_row, username) - conv_avatar = _pick_avatar_url(conv_row) - if (not conv_avatar) and (username in local_avatar_usernames): - conv_avatar = base_url + _build_avatar_url(account_dir.name, username) + if conv_name == username: + wd = str(wcdb_display_names.get(username) or "").strip() + if wd and wd != username: + conv_name = wd + conv_avatar = base_url + _avatar_url_unified( + account_dir=account_dir, + username=username, + local_avatar_usernames=local_avatar_usernames, + ) + group_nicknames = _load_group_nickname_map( + account_dir=account_dir, + contact_db_path=contact_db_path, + chatroom_id=username, + sender_usernames=[str(x.get("senderUsername") or "") for x in hits], + ) for h in hits: su = str(h.get("senderUsername") or "").strip() h["conversationName"] = conv_name h["conversationAvatar"] = conv_avatar if su: - row = contact_rows.get(su) - h["senderDisplayName"] = ( - _pick_display_name(row, su) - if row is not None - else (conv_name if su == username else su) + h["senderDisplayName"] = _resolve_sender_display_name( + sender_username=su, + sender_contact_rows=contact_rows, + 
wcdb_display_names=wcdb_display_names, + group_nicknames=group_nicknames, + ) + avatar_url = base_url + _avatar_url_unified( + account_dir=account_dir, + username=su, + local_avatar_usernames=local_avatar_usernames, ) - avatar_url = _pick_avatar_url(row) - if (not avatar_url) and (su in local_avatar_usernames): - avatar_url = base_url + _build_avatar_url(account_dir.name, su) h["senderAvatar"] = avatar_url else: uniq_contacts = list( @@ -4118,24 +6108,79 @@ async def _search_chat_messages_via_fts( contact_rows = _load_contact_rows(contact_db_path, uniq_contacts) local_avatar_usernames = _query_head_image_usernames(head_image_db_path, uniq_contacts) + wcdb_display_names: dict[str, str] = {} + wcdb_avatar_urls: dict[str, str] = {} + try: + need_display: list[str] = [] + need_avatar: list[str] = [] + for u in uniq_contacts: + uu = str(u or "").strip() + if not uu: + continue + row = contact_rows.get(uu) + if _pick_display_name(row, uu) == uu: + need_display.append(uu) + if uu not in local_avatar_usernames: + need_avatar.append(uu) + + need_display = list(dict.fromkeys(need_display)) + need_avatar = list(dict.fromkeys(need_avatar)) + if need_display or need_avatar: + wcdb_conn = WCDB_REALTIME.ensure_connected(account_dir) + with wcdb_conn.lock: + if need_display: + wcdb_display_names = _wcdb_get_display_names(wcdb_conn.handle, need_display) + if need_avatar: + wcdb_avatar_urls = _wcdb_get_avatar_urls(wcdb_conn.handle, need_avatar) + except Exception: + wcdb_display_names = {} + wcdb_avatar_urls = {} + + group_senders_by_room: dict[str, list[str]] = {} + for h in hits: + cu = str(h.get("username") or "").strip() + su = str(h.get("senderUsername") or "").strip() + if (not cu.endswith("@chatroom")) or (not su): + continue + group_senders_by_room.setdefault(cu, []).append(su) + + group_nickname_cache: dict[str, dict[str, str]] = {} + for cu, senders in group_senders_by_room.items(): + group_nickname_cache[cu] = _load_group_nickname_map( + account_dir=account_dir, + 
contact_db_path=contact_db_path, + chatroom_id=cu, + sender_usernames=senders, + ) + for h in hits: cu = str(h.get("username") or "").strip() su = str(h.get("senderUsername") or "").strip() crow = contact_rows.get(cu) conv_name = _pick_display_name(crow, cu) if cu else "" + if cu and (conv_name == cu): + wd = str(wcdb_display_names.get(cu) or "").strip() + if wd and wd != cu: + conv_name = wd h["conversationName"] = conv_name or cu - conv_avatar = _pick_avatar_url(crow) - if (not conv_avatar) and cu and (cu in local_avatar_usernames): - conv_avatar = base_url + _build_avatar_url(account_dir.name, cu) + conv_avatar = base_url + _avatar_url_unified( + account_dir=account_dir, + username=cu, + local_avatar_usernames=local_avatar_usernames, + ) h["conversationAvatar"] = conv_avatar if su: - row = contact_rows.get(su) - h["senderDisplayName"] = ( - _pick_display_name(row, su) if row is not None else (conv_name if su == cu else su) + h["senderDisplayName"] = _resolve_sender_display_name( + sender_username=su, + sender_contact_rows=contact_rows, + wcdb_display_names=wcdb_display_names, + group_nicknames=group_nickname_cache.get(cu, {}), + ) + avatar_url = base_url + _avatar_url_unified( + account_dir=account_dir, + username=su, + local_avatar_usernames=local_avatar_usernames, ) - avatar_url = _pick_avatar_url(row) - if (not avatar_url) and (su in local_avatar_usernames): - avatar_url = base_url + _build_avatar_url(account_dir.name, su) h["senderAvatar"] = avatar_url return { @@ -4393,13 +6438,23 @@ def scan_conversation(conv_username: str, *, per_db_limit: int, max_hits: Option contact_rows = _load_contact_rows(contact_db_path, uniq_usernames) conv_row = contact_rows.get(username) conv_name = _pick_display_name(conv_row, username) + group_nicknames = _load_group_nickname_map( + account_dir=account_dir, + contact_db_path=contact_db_path, + chatroom_id=username, + sender_usernames=[str(x.get("senderUsername") or "") for x in page], + ) for h in page: su = 
str(h.get("senderUsername") or "").strip() h["conversationName"] = conv_name if su: - row = contact_rows.get(su) - h["senderDisplayName"] = _pick_display_name(row, su) if row is not None else (conv_name if su == username else su) + h["senderDisplayName"] = _resolve_sender_display_name( + sender_username=su, + sender_contact_rows=contact_rows, + wcdb_display_names={}, + group_nicknames=group_nicknames, + ) return { "status": "success", @@ -4481,6 +6536,23 @@ def scan_conversation(conv_username: str, *, per_db_limit: int, max_hits: Option ) contact_rows = _load_contact_rows(contact_db_path, uniq_contacts) + group_senders_by_room: dict[str, list[str]] = {} + for h in page: + cu = str(h.get("username") or "").strip() + su = str(h.get("senderUsername") or "").strip() + if (not cu.endswith("@chatroom")) or (not su): + continue + group_senders_by_room.setdefault(cu, []).append(su) + + group_nickname_cache: dict[str, dict[str, str]] = {} + for cu, senders in group_senders_by_room.items(): + group_nickname_cache[cu] = _load_group_nickname_map( + account_dir=account_dir, + contact_db_path=contact_db_path, + chatroom_id=cu, + sender_usernames=senders, + ) + for h in page: cu = str(h.get("username") or "").strip() su = str(h.get("senderUsername") or "").strip() @@ -4488,8 +6560,12 @@ def scan_conversation(conv_username: str, *, per_db_limit: int, max_hits: Option conv_name = _pick_display_name(crow, cu) if cu else "" h["conversationName"] = conv_name or cu if su: - row = contact_rows.get(su) - h["senderDisplayName"] = _pick_display_name(row, su) if row is not None else (conv_name if su == cu else su) + h["senderDisplayName"] = _resolve_sender_display_name( + sender_username=su, + sender_contact_rows=contact_rows, + wcdb_display_names={}, + group_nicknames=group_nickname_cache.get(cu, {}), + ) return { "status": "success", @@ -4522,6 +6598,7 @@ async def get_chat_messages_around( raise HTTPException(status_code=400, detail="Missing username.") if not anchor_id: raise 
HTTPException(status_code=400, detail="Missing anchor_id.") + if before < 0: before = 0 if after < 0: @@ -4534,7 +6611,7 @@ async def get_chat_messages_around( parts = str(anchor_id).split(":", 2) if len(parts) != 3: raise HTTPException(status_code=400, detail="Invalid anchor_id.") - anchor_db_stem, anchor_table_name, anchor_local_id_str = parts + anchor_db_stem, anchor_table_name_in, anchor_local_id_str = parts try: anchor_local_id = int(anchor_local_id_str) except Exception: @@ -4547,14 +6624,15 @@ async def get_chat_messages_around( message_resource_db_path = account_dir / "message_resource.db" base_url = str(request.base_url).rstrip("/") - target_db: Optional[Path] = None + anchor_db_path: Optional[Path] = None for p in db_paths: if p.stem == anchor_db_stem: - target_db = p + anchor_db_path = p break - if target_db is None: + if anchor_db_path is None: raise HTTPException(status_code=404, detail="Anchor database not found.") + # Open resource DB once (optional), and reuse for all message DBs. resource_conn: Optional[sqlite3.Connection] = None resource_chat_id: Optional[int] = None try: @@ -4571,179 +6649,378 @@ async def get_chat_messages_around( resource_conn = None resource_chat_id = None - conn = sqlite3.connect(str(target_db)) - conn.row_factory = sqlite3.Row + # Resolve anchor message tuple from its DB. 
+ anchor_ct = 0 + anchor_ss = 0 + anchor_table_name = str(anchor_table_name_in or "").strip() + anchor_row: Optional[sqlite3.Row] = None + anchor_packed_select = "NULL AS packed_info_data, " try: - table_name = str(anchor_table_name).strip() - if not table_name: - raise HTTPException(status_code=404, detail="Anchor table not found.") - - # Normalize table name casing if needed + conn_a = sqlite3.connect(str(anchor_db_path)) + conn_a.row_factory = sqlite3.Row try: - trows = conn.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall() - lower_to_actual = {str(x[0]).lower(): str(x[0]) for x in trows if x and x[0]} - table_name = lower_to_actual.get(table_name.lower(), table_name) - except Exception: - pass + if not anchor_table_name: + try: + anchor_table_name = _resolve_msg_table_name(conn_a, username) or "" + except Exception: + anchor_table_name = "" + if not anchor_table_name: + raise HTTPException(status_code=404, detail="Anchor table not found.") - my_wxid = account_dir.name - my_rowid = None - try: - r2 = conn.execute( - "SELECT rowid FROM Name2Id WHERE user_name = ? LIMIT 1", - (my_wxid,), - ).fetchone() - if r2 is not None: - my_rowid = int(r2[0]) - except Exception: - my_rowid = None + # Normalize table name casing if needed + try: + trows = conn_a.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall() + lower_to_actual = {str(x[0]).lower(): str(x[0]) for x in trows if x and x[0]} + anchor_table_name = lower_to_actual.get(anchor_table_name.lower(), anchor_table_name) + except Exception: + pass - quoted_table = _quote_ident(table_name) - sql_anchor_with_join = ( - "SELECT " - "m.local_id, m.server_id, m.local_type, m.sort_seq, m.real_sender_id, m.create_time, " - "m.message_content, m.compress_content, n.user_name AS sender_username " - f"FROM {quoted_table} m " - "LEFT JOIN Name2Id n ON m.real_sender_id = n.rowid " - "WHERE m.local_id = ? 
" - "LIMIT 1" - ) - sql_anchor_no_join = ( - "SELECT " - "m.local_id, m.server_id, m.local_type, m.sort_seq, m.real_sender_id, m.create_time, " - "m.message_content, m.compress_content, '' AS sender_username " - f"FROM {quoted_table} m " - "WHERE m.local_id = ? " - "LIMIT 1" - ) + quoted_table_a = _quote_ident(anchor_table_name) + has_packed_info_data = False + try: + cols = conn_a.execute(f"PRAGMA table_info({quoted_table_a})").fetchall() + has_packed_info_data = any(str(c[1] or "").strip().lower() == "packed_info_data" for c in cols) + except Exception: + has_packed_info_data = False + anchor_packed_select = ( + "m.packed_info_data AS packed_info_data, " if has_packed_info_data else "NULL AS packed_info_data, " + ) - conn.text_factory = bytes + sql_anchor_with_join = ( + "SELECT " + "m.local_id, m.server_id, m.local_type, m.sort_seq, m.real_sender_id, m.create_time, " + "m.message_content, m.compress_content, " + + anchor_packed_select + + "n.user_name AS sender_username " + f"FROM {quoted_table_a} m " + "LEFT JOIN Name2Id n ON m.real_sender_id = n.rowid " + "WHERE m.local_id = ? " + "LIMIT 1" + ) + sql_anchor_no_join = ( + "SELECT " + "m.local_id, m.server_id, m.local_type, m.sort_seq, m.real_sender_id, m.create_time, " + "m.message_content, m.compress_content, " + + anchor_packed_select + + "'' AS sender_username " + f"FROM {quoted_table_a} m " + "WHERE m.local_id = ? 
" + "LIMIT 1" + ) + + conn_a.text_factory = bytes + try: + anchor_row = conn_a.execute(sql_anchor_with_join, (anchor_local_id,)).fetchone() + except Exception: + anchor_row = conn_a.execute(sql_anchor_no_join, (anchor_local_id,)).fetchone() + + if anchor_row is None: + raise HTTPException(status_code=404, detail="Anchor message not found.") + + anchor_ct = int(anchor_row["create_time"] or 0) + anchor_ss = int(anchor_row["sort_seq"] or 0) if anchor_row["sort_seq"] is not None else 0 + finally: + conn_a.close() + finally: + pass + + anchor_id_canon = f"{anchor_db_stem}:{anchor_table_name}:{anchor_local_id}" + merged: list[dict[str, Any]] = [] + sender_usernames_all: list[str] = [] + quote_usernames_all: list[str] = [] + pat_usernames_all: set[str] = set() + is_group = bool(username.endswith("@chatroom")) + + for db_path in db_paths: + conn: Optional[sqlite3.Connection] = None try: - anchor_row = conn.execute(sql_anchor_with_join, (anchor_local_id,)).fetchone() - except Exception: - anchor_row = conn.execute(sql_anchor_no_join, (anchor_local_id,)).fetchone() + conn = sqlite3.connect(str(db_path)) + conn.row_factory = sqlite3.Row - if anchor_row is None: - raise HTTPException(status_code=404, detail="Anchor message not found.") + table_name = "" + if db_path.stem == anchor_db_stem: + table_name = anchor_table_name + else: + try: + table_name = _resolve_msg_table_name(conn, username) or "" + except Exception: + table_name = "" + if not table_name: + continue - anchor_ct = int(anchor_row["create_time"] or 0) - anchor_ss = int(anchor_row["sort_seq"] or 0) if anchor_row["sort_seq"] is not None else 0 + my_wxid = account_dir.name + my_rowid = None + try: + r2 = conn.execute( + "SELECT rowid FROM Name2Id WHERE user_name = ? LIMIT 1", + (my_wxid,), + ).fetchone() + if r2 is not None: + my_rowid = int(r2[0]) + except Exception: + my_rowid = None - where_before = ( - "WHERE (" - "m.create_time < ? " - "OR (m.create_time = ? AND COALESCE(m.sort_seq, 0) < ?) 
" - "OR (m.create_time = ? AND COALESCE(m.sort_seq, 0) = ? AND m.local_id <= ?)" - ")" - ) - where_after = ( - "WHERE (" - "m.create_time > ? " - "OR (m.create_time = ? AND COALESCE(m.sort_seq, 0) > ?) " - "OR (m.create_time = ? AND COALESCE(m.sort_seq, 0) = ? AND m.local_id >= ?)" - ")" - ) + quoted_table = _quote_ident(table_name) + has_packed_info_data = False + try: + cols = conn.execute(f"PRAGMA table_info({quoted_table})").fetchall() + has_packed_info_data = any(str(c[1] or "").strip().lower() == "packed_info_data" for c in cols) + except Exception: + has_packed_info_data = False + packed_select = ( + "m.packed_info_data AS packed_info_data, " if has_packed_info_data else "NULL AS packed_info_data, " + ) - sql_before_with_join = ( - "SELECT " - "m.local_id, m.server_id, m.local_type, m.sort_seq, m.real_sender_id, m.create_time, " - "m.message_content, m.compress_content, n.user_name AS sender_username " - f"FROM {quoted_table} m " - "LEFT JOIN Name2Id n ON m.real_sender_id = n.rowid " - f"{where_before} " - "ORDER BY m.create_time DESC, COALESCE(m.sort_seq, 0) DESC, m.local_id DESC " - "LIMIT ?" - ) - sql_before_no_join = ( - "SELECT " - "m.local_id, m.server_id, m.local_type, m.sort_seq, m.real_sender_id, m.create_time, " - "m.message_content, m.compress_content, '' AS sender_username " - f"FROM {quoted_table} m " - f"{where_before} " - "ORDER BY m.create_time DESC, COALESCE(m.sort_seq, 0) DESC, m.local_id DESC " - "LIMIT ?" - ) + # Stable cross-db ordering: (create_time, sort_seq, db_stem, local_id) + stem = db_path.stem + if stem < anchor_db_stem: + tie_before = "1" + tie_before_params: tuple[Any, ...] = () + tie_after = "0" + tie_after_params: tuple[Any, ...] = () + elif stem > anchor_db_stem: + tie_before = "0" + tie_before_params = () + tie_after = "1" + tie_after_params = () + else: + tie_before = "m.local_id < ?" + tie_before_params = (int(anchor_local_id),) + tie_after = "m.local_id > ?" 
+ tie_after_params = (int(anchor_local_id),) + + where_before = ( + "WHERE (" + "m.create_time < ? " + "OR (m.create_time = ? AND COALESCE(m.sort_seq, 0) < ?) " + f"OR (m.create_time = ? AND COALESCE(m.sort_seq, 0) = ? AND {tie_before})" + ")" + ) + where_after = ( + "WHERE (" + "m.create_time > ? " + "OR (m.create_time = ? AND COALESCE(m.sort_seq, 0) > ?) " + f"OR (m.create_time = ? AND COALESCE(m.sort_seq, 0) = ? AND {tie_after})" + ")" + ) - sql_after_with_join = ( - "SELECT " - "m.local_id, m.server_id, m.local_type, m.sort_seq, m.real_sender_id, m.create_time, " - "m.message_content, m.compress_content, n.user_name AS sender_username " - f"FROM {quoted_table} m " - "LEFT JOIN Name2Id n ON m.real_sender_id = n.rowid " - f"{where_after} " - "ORDER BY m.create_time ASC, COALESCE(m.sort_seq, 0) ASC, m.local_id ASC " - "LIMIT ?" - ) - sql_after_no_join = ( - "SELECT " - "m.local_id, m.server_id, m.local_type, m.sort_seq, m.real_sender_id, m.create_time, " - "m.message_content, m.compress_content, '' AS sender_username " - f"FROM {quoted_table} m " - f"{where_after} " - "ORDER BY m.create_time ASC, COALESCE(m.sort_seq, 0) ASC, m.local_id ASC " - "LIMIT ?" - ) + sql_before_with_join = ( + "SELECT " + "m.local_id, m.server_id, m.local_type, m.sort_seq, m.real_sender_id, m.create_time, " + "m.message_content, m.compress_content, " + + packed_select + + "n.user_name AS sender_username " + f"FROM {quoted_table} m " + "LEFT JOIN Name2Id n ON m.real_sender_id = n.rowid " + f"{where_before} " + "ORDER BY m.create_time DESC, COALESCE(m.sort_seq, 0) DESC, m.local_id DESC " + "LIMIT ?" + ) + sql_before_no_join = ( + "SELECT " + "m.local_id, m.server_id, m.local_type, m.sort_seq, m.real_sender_id, m.create_time, " + "m.message_content, m.compress_content, " + + packed_select + + "'' AS sender_username " + f"FROM {quoted_table} m " + f"{where_before} " + "ORDER BY m.create_time DESC, COALESCE(m.sort_seq, 0) DESC, m.local_id DESC " + "LIMIT ?" 
+ ) + + sql_after_with_join = ( + "SELECT " + "m.local_id, m.server_id, m.local_type, m.sort_seq, m.real_sender_id, m.create_time, " + "m.message_content, m.compress_content, " + + packed_select + + "n.user_name AS sender_username " + f"FROM {quoted_table} m " + "LEFT JOIN Name2Id n ON m.real_sender_id = n.rowid " + f"{where_after} " + "ORDER BY m.create_time ASC, COALESCE(m.sort_seq, 0) ASC, m.local_id ASC " + "LIMIT ?" + ) + sql_after_no_join = ( + "SELECT " + "m.local_id, m.server_id, m.local_type, m.sort_seq, m.real_sender_id, m.create_time, " + "m.message_content, m.compress_content, " + + packed_select + + "'' AS sender_username " + f"FROM {quoted_table} m " + f"{where_after} " + "ORDER BY m.create_time ASC, COALESCE(m.sort_seq, 0) ASC, m.local_id ASC " + "LIMIT ?" + ) - params_before = (anchor_ct, anchor_ct, anchor_ss, anchor_ct, anchor_ss, anchor_local_id, int(before) + 1) - params_after = (anchor_ct, anchor_ct, anchor_ss, anchor_ct, anchor_ss, anchor_local_id, int(after) + 1) + # Always fetch anchor row from anchor DB, but don't include anchor itself in before/after queries. 
+ anchor_rows: list[sqlite3.Row] = [] + if db_path.stem == anchor_db_stem: + if anchor_row is None: + raise HTTPException(status_code=404, detail="Anchor message not found.") + anchor_rows = [anchor_row] - try: - before_rows = conn.execute(sql_before_with_join, params_before).fetchall() + conn.text_factory = bytes + + before_rows: list[sqlite3.Row] = [] + if int(before) > 0: + params_before = ( + int(anchor_ct), + int(anchor_ct), + int(anchor_ss), + int(anchor_ct), + int(anchor_ss), + *tie_before_params, + int(before) + 1, + ) + try: + before_rows = conn.execute(sql_before_with_join, params_before).fetchall() + except Exception: + before_rows = conn.execute(sql_before_no_join, params_before).fetchall() + + after_rows: list[sqlite3.Row] = [] + if int(after) > 0: + params_after = ( + int(anchor_ct), + int(anchor_ct), + int(anchor_ss), + int(anchor_ct), + int(anchor_ss), + *tie_after_params, + int(after) + 1, + ) + try: + after_rows = conn.execute(sql_after_with_join, params_after).fetchall() + except Exception: + after_rows = conn.execute(sql_after_no_join, params_after).fetchall() + + # Dedup rows by message id within this DB. 
+ seen_ids: set[str] = set() + combined: list[sqlite3.Row] = [] + for rr in list(before_rows) + list(anchor_rows) + list(after_rows): + lid = int(rr["local_id"] or 0) + mid = f"{db_path.stem}:{table_name}:{lid}" + if mid in seen_ids: + continue + seen_ids.add(mid) + combined.append(rr) + + if not combined: + continue + + _append_full_messages_from_rows( + merged=merged, + sender_usernames=sender_usernames_all, + quote_usernames=quote_usernames_all, + pat_usernames=pat_usernames_all, + rows=combined, + db_path=db_path, + table_name=table_name, + username=username, + account_dir=account_dir, + is_group=is_group, + my_rowid=my_rowid, + resource_conn=resource_conn, + resource_chat_id=resource_chat_id, + ) + except HTTPException: + raise except Exception: - before_rows = conn.execute(sql_before_no_join, params_before).fetchall() + # Skip broken DBs / missing tables gracefully. + continue + finally: + if conn is not None: + try: + conn.close() + except Exception: + pass + if resource_conn is not None: try: - after_rows = conn.execute(sql_after_with_join, params_after).fetchall() + resource_conn.close() except Exception: - after_rows = conn.execute(sql_after_no_join, params_after).fetchall() + pass - seen_ids: set[str] = set() - combined: list[sqlite3.Row] = [] - for rr in list(before_rows) + list(after_rows): - lid = int(rr["local_id"] or 0) - mid = f"{target_db.stem}:{table_name}:{lid}" - if mid in seen_ids: + # Global dedupe + sort. 
+ if merged: + seen_ids2: set[str] = set() + deduped: list[dict[str, Any]] = [] + for m in merged: + mid = str(m.get("id") or "").strip() + if mid and mid in seen_ids2: continue - seen_ids.add(mid) - combined.append(rr) + if mid: + seen_ids2.add(mid) + deduped.append(m) + merged = deduped - merged: list[dict[str, Any]] = [] - sender_usernames: list[str] = [] - quote_usernames: list[str] = [] - pat_usernames: set[str] = set() - is_group = bool(username.endswith("@chatroom")) + def sort_key_global(m: dict[str, Any]) -> tuple[int, int, str, int]: + cts = int(m.get("createTime") or 0) + sseq = int(m.get("sortSeq") or 0) + lid = int(m.get("localId") or 0) + mid = str(m.get("id") or "") + stem2 = "" + try: + stem2 = mid.split(":", 1)[0] if ":" in mid else "" + except Exception: + stem2 = "" + return (cts, sseq, stem2, lid) - _append_full_messages_from_rows( - merged=merged, - sender_usernames=sender_usernames, - quote_usernames=quote_usernames, - pat_usernames=pat_usernames, - rows=combined, - db_path=target_db, - table_name=table_name, - username=username, - account_dir=account_dir, - is_group=is_group, - my_rowid=my_rowid, - resource_conn=resource_conn, - resource_chat_id=resource_chat_id, - ) + merged.sort(key=sort_key_global, reverse=False) - return_messages = merged - finally: - conn.close() - if resource_conn is not None: + anchor_index_all = -1 + for i, m in enumerate(merged): + if str(m.get("id") or "") == str(anchor_id_canon): + anchor_index_all = i + break + if anchor_index_all < 0: + # Fallback: ignore table casing differences when matching anchor. + for i, m in enumerate(merged): + mid = str(m.get("id") or "") + p2 = mid.split(":", 2) + if len(p2) != 3: + continue + if p2[0] != anchor_db_stem: + continue try: - resource_conn.close() + if int(p2[2] or 0) == int(anchor_local_id): + anchor_index_all = i + break except Exception: - pass + continue + + if anchor_index_all < 0: + # Should not happen because we always include the anchor row, but keep defensive. 
+ anchor_index_all = 0 + + start = max(0, int(anchor_index_all) - int(before)) + end = min(len(merged), int(anchor_index_all) + int(after) + 1) + return_messages = merged[start:end] + anchor_index = int(anchor_index_all) - start if 0 <= anchor_index_all < len(merged) else -1 + + # Postprocess only the returned window to keep it fast. + sender_usernames_win = [str(m.get("senderUsername") or "").strip() for m in return_messages if str(m.get("senderUsername") or "").strip()] + quote_usernames_win = [str(m.get("quoteUsername") or "").strip() for m in return_messages if str(m.get("quoteUsername") or "").strip()] + pat_usernames_win: set[str] = set() + try: + for m in return_messages: + if int(m.get("type") or 0) != 266287972401: + continue + raw = str(m.get("_rawText") or "") + if not raw: + continue + template = _extract_xml_tag_text(raw, "template") + if not template: + continue + pat_usernames_win.update({mm.group(1) for mm in re.finditer(r"\$\{([^}]+)\}", template) if mm.group(1)}) + except Exception: + pat_usernames_win = set() _postprocess_full_messages( merged=return_messages, - sender_usernames=sender_usernames, - quote_usernames=quote_usernames, - pat_usernames=pat_usernames, + sender_usernames=sender_usernames_win, + quote_usernames=quote_usernames_win, + pat_usernames=pat_usernames_win, account_dir=account_dir, username=username, base_url=base_url, @@ -4751,24 +7028,1858 @@ async def get_chat_messages_around( head_image_db_path=head_image_db_path, ) - def sort_key(m: dict[str, Any]) -> tuple[int, int, int]: - sseq = int(m.get("sortSeq") or 0) - cts = int(m.get("createTime") or 0) - lid = int(m.get("localId") or 0) - return (cts, sseq, lid) - - return_messages.sort(key=sort_key, reverse=False) - anchor_index = -1 - for i, m in enumerate(return_messages): - if str(m.get("id") or "") == str(anchor_id): - anchor_index = i - break - return { "status": "success", "account": account_dir.name, "username": username, - "anchorId": anchor_id, + "anchorId": 
anchor_id_canon, "anchorIndex": anchor_index, "messages": return_messages, } + + +@router.get("/api/chat/chat_history/resolve", summary="解析嵌套合并转发聊天记录(通过 server_id)") +async def resolve_nested_chat_history( + request: Request, + server_id: int, + account: Optional[str] = None, +): + """Resolve a nested merged-forward chat history item (datatype=17) to its full recordItem XML. + + Some nested records inside a merged-forward recordItem only carry pointers like `fromnewmsgid` (server_id), + while the full recordItem exists in the original app message (local_type=49, appmsg type=19) stored elsewhere. + WeChat can open it by looking up the original message; we do the same here. + """ + if not server_id: + raise HTTPException(status_code=400, detail="Missing server_id.") + + account_dir = _resolve_account_dir(account) + db_paths = _iter_message_db_paths(account_dir) + base_url = str(request.base_url).rstrip("/") + found_appmsg = False + + for db_path in db_paths: + conn: Optional[sqlite3.Connection] = None + try: + conn = sqlite3.connect(str(db_path)) + conn.row_factory = sqlite3.Row + conn.text_factory = bytes + + try: + table_rows = conn.execute( + # Some DBs use `Msg_...` (capital M). Use LOWER() to keep matching even if + # `PRAGMA case_sensitive_like=ON` is set. + "SELECT name FROM sqlite_master WHERE type='table' AND lower(name) LIKE 'msg_%'" + ).fetchall() + except Exception: + table_rows = [] + + # With `conn.text_factory = bytes`, sqlite_master.name comes back as bytes. + # Decode it to the real table name, otherwise we'd end up querying a non-existent + # table like "b'Msg_...'" and never find the message. 
+ table_names = [_decode_sqlite_text(r[0]).strip() for r in table_rows if r and r[0]] + for table_name in table_names: + quoted = _quote_ident(table_name) + try: + row = conn.execute( + f""" + SELECT local_id, server_id, local_type, create_time, message_content, compress_content + FROM {quoted} + -- WeChat v4 can pack appmsg subtype into the high 32 bits of local_type: + -- local_type = base_type + (app_subtype << 32) + -- so a chatHistory appmsg can be 49 + (19<<32), not exactly 49. + WHERE server_id = ? AND (local_type & 4294967295) = 49 + LIMIT 1 + """, + (int(server_id),), + ).fetchone() + except Exception: + row = None + + if row is None: + continue + + found_appmsg = True + raw_text = _decode_message_content(row["compress_content"], row["message_content"]).strip() + if not raw_text: + continue + + # If the stored payload is a zstd frame but we couldn't decode it into XML, it's + # almost always because the optional `zstandard` dependency isn't installed. + try: + blob = row["message_content"] + if isinstance(blob, memoryview): + blob = blob.tobytes() + if isinstance(blob, (bytes, bytearray)) and bytes(blob).startswith(b"\x28\xb5\x2f\xfd"): + lower = raw_text.lower() + if " str: + t = str(table_name or "").strip() + if not t: + return "" + try: + r = conn.execute( + "SELECT name FROM sqlite_master WHERE type='table' AND lower(name)=lower(?) LIMIT 1", + (t,), + ).fetchone() + if r is not None and r[0]: + # With `conn.text_factory = bytes`, sqlite_master.name can be returned as bytes. + # Decode it to avoid querying a non-existent table like "b'Msg_...'". 
+ return _decode_sqlite_text(r[0]).strip() + except Exception: + pass + return t + + +def _table_info_columns(conn: sqlite3.Connection, table_name: str) -> set[str]: + t = str(table_name or "").strip() + if not t: + return set() + quoted = _quote_ident(t) + try: + cols = conn.execute(f"PRAGMA table_info({quoted})").fetchall() + except Exception: + return set() + out: set[str] = set() + for c in cols: + try: + name = _decode_sqlite_text(c[1]).strip() + except Exception: + continue + if name: + out.add(name) + return out + + +def _has_column(conn: sqlite3.Connection, table_name: str, column_name: str) -> bool: + want = str(column_name or "").strip().lower() + if not want: + return False + for c in _table_info_columns(conn, table_name): + if str(c or "").strip().lower() == want: + return True + return False + + +def _lookup_output_my_rowid(conn: sqlite3.Connection, my_wxid: str) -> Optional[int]: + try: + r = conn.execute( + "SELECT rowid FROM Name2Id WHERE user_name = ? LIMIT 1", + (str(my_wxid or "").strip(),), + ).fetchone() + if r is None: + return None + return int(r[0]) + except Exception: + return None + + +def _lookup_output_username_by_rowid(conn: sqlite3.Connection, rowid: int) -> str: + try: + r = conn.execute( + "SELECT user_name FROM Name2Id WHERE rowid = ? 
LIMIT 1", + (int(rowid or 0),), + ).fetchone() + if r is None: + return "" + return _decode_sqlite_text(r[0]).strip() + except Exception: + return "" + + +def _select_output_message_row(conn: sqlite3.Connection, *, table_name: str, local_id: int) -> Optional[sqlite3.Row]: + t = _normalize_table_name_case(conn, table_name) + if not t: + return None + quoted_table = _quote_ident(t) + has_packed_info_data = _has_column(conn, t, "packed_info_data") + packed_select = "m.packed_info_data AS packed_info_data, " if has_packed_info_data else "NULL AS packed_info_data, " + sql_with_join = ( + "SELECT " + "m.local_id, m.server_id, m.local_type, m.sort_seq, m.real_sender_id, m.create_time, " + "m.message_content, m.compress_content, " + + packed_select + + "n.user_name AS sender_username " + f"FROM {quoted_table} m " + "LEFT JOIN Name2Id n ON m.real_sender_id = n.rowid " + "WHERE m.local_id = ? " + "LIMIT 1" + ) + sql_no_join = ( + "SELECT " + "m.local_id, m.server_id, m.local_type, m.sort_seq, m.real_sender_id, m.create_time, " + "m.message_content, m.compress_content, " + + packed_select + + "'' AS sender_username " + f"FROM {quoted_table} m " + "WHERE m.local_id = ? 
" + "LIMIT 1" + ) + try: + return conn.execute(sql_with_join, (int(local_id),)).fetchone() + except Exception: + try: + return conn.execute(sql_no_join, (int(local_id),)).fetchone() + except Exception: + return None + + +def _resolve_db_storage_message_paths(account_dir: Path, db_stem: str) -> tuple[Path, Path]: + db_storage_dir = _resolve_account_db_storage_dir(account_dir) + if db_storage_dir is None: + raise HTTPException(status_code=400, detail="Cannot resolve db_storage directory for this account.") + db_name = str(db_stem or "").strip() + if not db_name: + raise HTTPException(status_code=400, detail="Invalid message_id.") + msg_db_path = db_storage_dir / "message" / f"{db_name}.db" + res_db_path = db_storage_dir / "message" / "message_resource.db" + return msg_db_path, res_db_path + + +def _build_wcdb_update_sql(*, table_name: str, updates: dict[str, Any], where_local_id: int) -> str: + t = str(table_name or "").strip() + if not t: + raise HTTPException(status_code=400, detail="Missing table_name.") + if not updates: + raise HTTPException(status_code=400, detail="Missing edits.") + parts: list[str] = [] + for k, v in updates.items(): + col = str(k or "").strip() + if not col: + continue + parts.append(f"{_quote_ident(col)} = {_sql_literal(v)}") + if not parts: + raise HTTPException(status_code=400, detail="Missing edits.") + return f"UPDATE {_quote_ident(t)} SET " + ", ".join(parts) + f" WHERE local_id = {int(where_local_id)}" + + +def _build_sqlite_update_sql(*, table_name: str, updates: dict[str, Any], where_local_id: int) -> tuple[str, list[Any]]: + t = str(table_name or "").strip() + if not t: + raise HTTPException(status_code=400, detail="Missing table_name.") + if not updates: + raise HTTPException(status_code=400, detail="Missing edits.") + cols: list[str] = [] + params: list[Any] = [] + for k, v in updates.items(): + col = str(k or "").strip() + if not col: + continue + cols.append(f"{_quote_ident(col)} = ?") + params.append(v) + if not cols: + raise 
HTTPException(status_code=400, detail="Missing edits.") + sql = f"UPDATE {_quote_ident(t)} SET " + ", ".join(cols) + " WHERE local_id = ?" + params.append(int(where_local_id)) + return sql, params + + +@router.get("/api/chat/messages/raw", summary="获取单条消息原始字段(output 解密库)") +def get_chat_message_raw( + *, + account: Optional[str] = None, + username: str, + message_id: str, +) -> dict[str, Any]: + if not username: + raise HTTPException(status_code=400, detail="Missing username.") + if not message_id: + raise HTTPException(status_code=400, detail="Missing message_id.") + + account_dir = _resolve_account_dir(account) + try: + db_stem, table_name_in, local_id = chat_edit_store.parse_message_id(message_id) + except Exception: + raise HTTPException(status_code=400, detail="Invalid message_id.") + + db_path = account_dir / f"{db_stem}.db" + if not db_path.exists(): + raise HTTPException(status_code=404, detail="Message database not found.") + + conn: Optional[sqlite3.Connection] = None + try: + conn = sqlite3.connect(str(db_path)) + conn.row_factory = sqlite3.Row + conn.text_factory = bytes + table_name = _normalize_table_name_case(conn, table_name_in) + if not table_name: + raise HTTPException(status_code=404, detail="Message table not found.") + + quoted_table = _quote_ident(table_name) + row = conn.execute(f"SELECT * FROM {quoted_table} WHERE local_id = ? 
LIMIT 1", (int(local_id),)).fetchone() + if row is None: + raise HTTPException(status_code=404, detail="Message not found.") + + out_row: dict[str, Any] = {} + for k in row.keys(): + out_row[str(k)] = _jsonify_db_value(str(k), row[k]) + + return { + "status": "success", + "account": account_dir.name, + "username": username, + "messageId": f"{db_stem}:{table_name}:{int(local_id)}", + "row": out_row, + } + finally: + if conn is not None: + try: + conn.close() + except Exception: + pass + + +@router.post("/api/chat/messages/edit", summary="编辑/修改消息(写入真实库 db_storage 并同步 output)") +async def edit_chat_message(request: Request) -> dict[str, Any]: + payload = await request.json() + if not isinstance(payload, dict): + raise HTTPException(status_code=400, detail="Invalid payload.") + + account = str(payload.get("account") or "").strip() or None + session_id = str(payload.get("session_id") or payload.get("username") or payload.get("sessionId") or "").strip() + message_id_in = str(payload.get("message_id") or payload.get("messageId") or "").strip() + edits_in = payload.get("edits") + unsafe = bool(payload.get("unsafe") or False) + + if not session_id: + raise HTTPException(status_code=400, detail="Missing session_id.") + if not message_id_in: + raise HTTPException(status_code=400, detail="Missing message_id.") + if not isinstance(edits_in, dict) or not edits_in: + raise HTTPException(status_code=400, detail="Missing edits.") + + account_dir = _resolve_account_dir(account) + base_url = str(request.base_url).rstrip("/") + + try: + db_stem, table_name_in, local_id_old = chat_edit_store.parse_message_id(message_id_in) + except Exception: + raise HTTPException(status_code=400, detail="Invalid message_id.") + + msg_db_path_out = account_dir / f"{db_stem}.db" + if not msg_db_path_out.exists(): + raise HTTPException(status_code=404, detail="Message database not found.") + + msg_db_path_real, res_db_path_real = _resolve_db_storage_message_paths(account_dir, db_stem) + if not 
msg_db_path_real.exists(): + raise HTTPException(status_code=404, detail="Real message database not found in db_storage.") + + # Validate edits against output schema and normalize table name casing. + table_name = table_name_in + edits: dict[str, Any] = {} + explicit_keys: set[str] = set() + conn_schema: Optional[sqlite3.Connection] = None + try: + conn_schema = sqlite3.connect(str(msg_db_path_out)) + conn_schema.row_factory = sqlite3.Row + table_name = _normalize_table_name_case(conn_schema, table_name_in) + if not table_name: + raise HTTPException(status_code=404, detail="Message table not found.") + cols = _table_info_columns(conn_schema, table_name) + if not cols: + raise HTTPException(status_code=404, detail="Message table not found.") + + for k, v in edits_in.items(): + col = str(k or "").strip() + if not col: + continue + if col not in cols: + raise HTTPException(status_code=400, detail=f"Unknown column: {col}") + if not _is_safe_edit_column(col, unsafe=unsafe): + raise HTTPException(status_code=400, detail=f"Unsafe column requires unsafe=true: {col}") + explicit_keys.add(col) + edits[col] = _normalize_edit_value(col, v) + if not edits: + raise HTTPException(status_code=400, detail="Missing edits.") + finally: + if conn_schema is not None: + try: + conn_schema.close() + except Exception: + pass + + message_id = f"{db_stem}:{table_name}:{int(local_id_old)}" + + # Decide update strategy for real db_storage. + only_message_content = (set(edits.keys()) == {"message_content"}) and ("compress_content" not in explicit_keys) + + # Default behavior: clear compress_content when message_content changes, unless explicitly provided. 
+ output_edits = dict(edits) + if "message_content" in edits and ("compress_content" not in explicit_keys): + output_edits.setdefault("compress_content", None) + + new_local_id = int(edits.get("local_id") or 0) if "local_id" in edits else int(local_id_old) + if new_local_id <= 0: + new_local_id = int(local_id_old) + + # Resource sync mapping when Msg fields change. + resource_sync_map: dict[str, str] = { + "local_type": "message_local_type", + "create_time": "message_create_time", + "server_id": "message_svr_id", + "origin_source": "message_origin_source", + } + if unsafe: + resource_sync_map["local_id"] = "message_local_id" + + warnings: list[str] = [] + + with _realtime_sync_lock(account_dir.name, session_id): + # Ensure WCDB realtime connection. + try: + wcdb_conn = WCDB_REALTIME.ensure_connected(account_dir) + except WCDBRealtimeError as e: + raise HTTPException(status_code=400, detail=str(e)) + + # Read original row from real db_storage (snapshot). + original_row: Optional[dict[str, Any]] = None + original_create_time = 0 + try: + select_sql = f"SELECT * FROM {_quote_ident(table_name)} WHERE local_id = {int(local_id_old)} LIMIT 1" + with wcdb_conn.lock: + rows = _wcdb_exec_query( + wcdb_conn.handle, + kind="message", + path=str(msg_db_path_real), + sql=select_sql, + ) + if rows and isinstance(rows[0], dict): + original_row = rows[0] + try: + original_create_time = int(original_row.get("create_time") or 0) + except Exception: + original_create_time = 0 + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to read original message row: {e}") + if not original_row: + raise HTTPException(status_code=404, detail="Message not found in real db_storage.") + + # Read original resource row from real db_storage (optional). 
+ original_resource_row: Optional[dict[str, Any]] = None + try: + if res_db_path_real.exists() and original_create_time > 0: + res_sql = ( + "SELECT * FROM MessageResourceInfo " + f"WHERE message_local_id = {int(local_id_old)} AND message_create_time = {int(original_create_time)} " + "ORDER BY message_id DESC " + "LIMIT 1" + ) + with wcdb_conn.lock: + res_rows = _wcdb_exec_query( + wcdb_conn.handle, + kind="message", + path=str(res_db_path_real), + sql=res_sql, + ) + if res_rows and isinstance(res_rows[0], dict): + original_resource_row = res_rows[0] + except Exception: + original_resource_row = None + + # Create snapshot record only if this message hasn't been edited via this tool. + created_record = False + existing_record = chat_edit_store.get_message_edit(account_dir.name, session_id, message_id) + if existing_record is None: + try: + chat_edit_store.upsert_original_once( + account=account_dir.name, + session_id=session_id, + db=db_stem, + table_name=table_name, + local_id=int(local_id_old), + original_msg=original_row, + original_resource=original_resource_row, + now_ms=int(time.time() * 1000), + ) + created_record = True + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to write edit snapshot: {e}") + + # Read current output row (for rollback if real update fails). + out_before: dict[str, Any] = {} + conn_out: Optional[sqlite3.Connection] = None + try: + conn_out = sqlite3.connect(str(msg_db_path_out), timeout=5) + conn_out.row_factory = sqlite3.Row + table_name_out = _normalize_table_name_case(conn_out, table_name) + if not table_name_out: + raise HTTPException(status_code=404, detail="Message table not found.") + quoted = _quote_ident(table_name_out) + row_before = conn_out.execute( + f"SELECT * FROM {quoted} WHERE local_id = ? 
LIMIT 1", + (int(local_id_old),), + ).fetchone() + if row_before is None: + if created_record: + try: + chat_edit_store.delete_message_edit(account_dir.name, session_id, message_id) + except Exception: + pass + raise HTTPException(status_code=404, detail="Message not found in output database.") + for k in row_before.keys(): + out_before[str(k)] = row_before[k] + + # Apply edits to output decrypted db first; if this fails, do not touch the real db_storage. + sql_out, params_out = _build_sqlite_update_sql( + table_name=table_name_out, + updates=output_edits, + where_local_id=int(local_id_old), + ) + cur_out = conn_out.execute(sql_out, params_out) + conn_out.commit() + if int(getattr(cur_out, "rowcount", 0) or 0) <= 0: + if created_record: + try: + chat_edit_store.delete_message_edit(account_dir.name, session_id, message_id) + except Exception: + pass + raise HTTPException(status_code=404, detail="Message not found in output database.") + except HTTPException: + raise + except Exception as e: + if created_record: + try: + chat_edit_store.delete_message_edit(account_dir.name, session_id, message_id) + except Exception: + pass + raise HTTPException(status_code=500, detail=f"Failed to update output database: {e}") + + # Apply edits to real db_storage. If it fails, rollback output changes. 
+ try: + if only_message_content: + new_content = edits.get("message_content") + if isinstance(new_content, (bytes, bytearray, memoryview)): + try: + new_content = bytes(new_content).decode("utf-8", errors="replace") + except Exception: + new_content = "" + _wcdb_update_message( + wcdb_conn.handle, + session_id=session_id, + local_id=int(local_id_old), + create_time=int(original_create_time), + new_content=str(new_content or ""), + ) + else: + real_edits = dict(edits) + if "message_content" in edits and ("compress_content" not in explicit_keys): + real_edits.setdefault("compress_content", None) + sql_real = _build_wcdb_update_sql( + table_name=table_name, + updates=real_edits, + where_local_id=int(local_id_old), + ) + with wcdb_conn.lock: + _wcdb_exec_query( + wcdb_conn.handle, + kind="message", + path=str(msg_db_path_real), + sql=sql_real, + ) + except Exception as e: + # Roll back output changes. + try: + where_lid = int(new_local_id) if ("local_id" in edits) else int(local_id_old) + cols_now = _table_info_columns(conn_out, table_name_out) + rollback_updates = {k: v for k, v in out_before.items() if str(k or "") in cols_now} + sql_rb, params_rb = _build_sqlite_update_sql( + table_name=table_name_out, + updates=rollback_updates, + where_local_id=where_lid, + ) + conn_out.execute(sql_rb, params_rb) + conn_out.commit() + except Exception: + pass + # Remove newly-created snapshot record (real db was not touched successfully). + if created_record: + try: + chat_edit_store.delete_message_edit(account_dir.name, session_id, message_id) + except Exception: + pass + raise HTTPException(status_code=500, detail=f"Failed to update real db_storage: {e}") + finally: + if conn_out is not None: + try: + conn_out.close() + except Exception: + pass + + # Sync message_resource key fields (best-effort). 
+ try: + msg_to_res_updates: dict[str, Any] = {} + for msg_col, res_col in resource_sync_map.items(): + if msg_col in edits: + msg_to_res_updates[res_col] = _normalize_edit_value(res_col, edits[msg_col]) + if msg_to_res_updates: + res_message_id = 0 + if original_resource_row is not None: + try: + res_message_id = int(original_resource_row.get("message_id") or 0) + except Exception: + res_message_id = 0 + if res_message_id > 0: + # real db_storage + if res_db_path_real.exists(): + parts = [f"{_quote_ident(k)} = {_sql_literal(v)}" for k, v in msg_to_res_updates.items()] + sql_res_real = ( + "UPDATE MessageResourceInfo SET " + + ", ".join(parts) + + f" WHERE message_id = {int(res_message_id)}" + ) + with wcdb_conn.lock: + _wcdb_exec_query( + wcdb_conn.handle, + kind="message", + path=str(res_db_path_real), + sql=sql_res_real, + ) + + # output decrypted + out_res_db_path = account_dir / "message_resource.db" + if out_res_db_path.exists(): + conn_res = sqlite3.connect(str(out_res_db_path), timeout=5) + try: + set_cols = ", ".join([f"{_quote_ident(k)} = ?" for k in msg_to_res_updates.keys()]) + params = list(msg_to_res_updates.values()) + [int(res_message_id)] + conn_res.execute( + f"UPDATE MessageResourceInfo SET {set_cols} WHERE message_id = ?", + params, + ) + conn_res.commit() + finally: + conn_res.close() + else: + warnings.append("message_resource row not found; skipped resource sync.") + except Exception as e: + warnings.append(f"Failed to sync message_resource: {e}") + + # If local_id changed (unsafe), move the edit record key so future reset works. 
+ edit_record_local_id = int(local_id_old) + if "local_id" in edits and int(new_local_id) != int(local_id_old): + ok = chat_edit_store.update_message_edit_local_id( + account=account_dir.name, + session_id=session_id, + db=db_stem, + table_name=table_name, + old_local_id=int(local_id_old), + new_local_id=int(new_local_id), + ) + if not ok: + warnings.append("Failed to update edit record key after local_id change.") + else: + edit_record_local_id = int(new_local_id) + + # If this was an already-tracked message, bump edit metadata. + if existing_record is not None: + try: + chat_edit_store.upsert_original_once( + account=account_dir.name, + session_id=session_id, + db=db_stem, + table_name=table_name, + local_id=int(edit_record_local_id), + original_msg={}, + original_resource=None, + now_ms=int(time.time() * 1000), + ) + except Exception: + pass + + # Track which columns were actually modified so reset can restore only those fields. + try: + chat_edit_store.merge_edited_columns( + account=account_dir.name, + session_id=session_id, + db=db_stem, + table_name=table_name, + local_id=int(edit_record_local_id), + columns=list(output_edits.keys()), + ) + except Exception: + pass + + # Build updated message object (best-effort, from output). 
+ updated_message: Optional[dict[str, Any]] = None + try: + conn_msg = sqlite3.connect(str(msg_db_path_out)) + conn_msg.row_factory = sqlite3.Row + conn_msg.text_factory = bytes + row = _select_output_message_row(conn_msg, table_name=table_name, local_id=int(new_local_id)) + if row is not None: + my_rowid = _lookup_output_my_rowid(conn_msg, account_dir.name) + out_res_db_path2 = account_dir / "message_resource.db" + resource_conn: Optional[sqlite3.Connection] = None + resource_chat_id: Optional[int] = None + try: + if out_res_db_path2.exists(): + resource_conn = sqlite3.connect(str(out_res_db_path2)) + resource_conn.row_factory = sqlite3.Row + resource_chat_id = _resource_lookup_chat_id(resource_conn, session_id) + except Exception: + if resource_conn is not None: + try: + resource_conn.close() + except Exception: + pass + resource_conn = None + resource_chat_id = None + + merged: list[dict[str, Any]] = [] + sender_usernames: list[str] = [] + quote_usernames: list[str] = [] + pat_usernames: set[str] = set() + _append_full_messages_from_rows( + merged=merged, + sender_usernames=sender_usernames, + quote_usernames=quote_usernames, + pat_usernames=pat_usernames, + rows=[row], + db_path=msg_db_path_out, + table_name=table_name, + username=session_id, + account_dir=account_dir, + is_group=bool(session_id.endswith("@chatroom")), + my_rowid=my_rowid, + resource_conn=resource_conn, + resource_chat_id=resource_chat_id, + ) + _postprocess_full_messages( + merged=merged, + sender_usernames=sender_usernames, + quote_usernames=quote_usernames, + pat_usernames=pat_usernames, + account_dir=account_dir, + username=session_id, + base_url=base_url, + contact_db_path=account_dir / "contact.db", + head_image_db_path=account_dir / "head_image.db", + ) + if merged: + updated_message = merged[0] + if resource_conn is not None: + try: + resource_conn.close() + except Exception: + pass + conn_msg.close() + except Exception: + updated_message = None + + resp: dict[str, Any] = { + "status": 
"success", + "account": account_dir.name, + "session_id": session_id, + "messageId": f"{db_stem}:{table_name}:{int(new_local_id)}", + } + if warnings: + resp["warnings"] = warnings + if updated_message is not None: + resp["updated_message"] = updated_message + return resp + + +@router.get("/api/chat/edits/sessions", summary="获取有修改记录的会话列表") +def list_chat_edited_sessions(request: Request, account: Optional[str] = None) -> dict[str, Any]: + account_dir = _resolve_account_dir(account) + base_url = str(request.base_url).rstrip("/") + + stats = chat_edit_store.list_sessions(account_dir.name) + session_ids = [str(s.get("session_id") or "").strip() for s in stats if str(s.get("session_id") or "").strip()] + contact_db_path = account_dir / "contact.db" + contact_rows = _load_contact_rows(contact_db_path, session_ids) + + sessions: list[dict[str, Any]] = [] + for s in stats: + uname = str(s.get("session_id") or "").strip() + if not uname: + continue + row = contact_rows.get(uname) + name = _pick_display_name(row, uname) if row is not None else uname + avatar = base_url + _avatar_url_unified(account_dir=account_dir, username=uname) + sessions.append( + { + "username": uname, + "name": name, + "avatar": avatar, + "isGroup": bool(uname.endswith("@chatroom")), + "editedCount": int(s.get("msg_count") or 0), + "lastEditedAt": int(s.get("last_edited_at") or 0), + } + ) + + return { + "status": "success", + "account": account_dir.name, + "sessions": sessions, + } + + +@router.get("/api/chat/edits/messages", summary="获取某会话下所有被修改过的消息(原/现对比)") +def list_chat_edited_messages( + request: Request, + username: str, + account: Optional[str] = None, +) -> dict[str, Any]: + if not username: + raise HTTPException(status_code=400, detail="Missing username.") + account_dir = _resolve_account_dir(account) + base_url = str(request.base_url).rstrip("/") + + edits = chat_edit_store.list_messages(account_dir.name, username) + if not edits: + return {"status": "success", "account": account_dir.name, 
"username": username, "items": []} + + # Open resource DB once (optional). + resource_conn: Optional[sqlite3.Connection] = None + resource_chat_id: Optional[int] = None + out_res_db_path = account_dir / "message_resource.db" + try: + if out_res_db_path.exists(): + resource_conn = sqlite3.connect(str(out_res_db_path)) + resource_conn.row_factory = sqlite3.Row + resource_chat_id = _resource_lookup_chat_id(resource_conn, username) + except Exception: + if resource_conn is not None: + try: + resource_conn.close() + except Exception: + pass + + resource_conn = None + resource_chat_id = None + + is_group = bool(username.endswith("@chatroom")) + + msg_conns: dict[str, sqlite3.Connection] = {} + my_rowids: dict[str, Optional[int]] = {} + + merged_current: list[dict[str, Any]] = [] + sender_usernames_current: list[str] = [] + quote_usernames_current: list[str] = [] + pat_usernames_current: set[str] = set() + + merged_original: list[dict[str, Any]] = [] + sender_usernames_original: list[str] = [] + quote_usernames_original: list[str] = [] + pat_usernames_original: set[str] = set() + + current_raw_by_id: dict[str, dict[str, Any]] = {} + original_raw_by_id: dict[str, Any] = {} + + try: + for rec in edits: + db_stem = str(rec.get("db") or "").strip() + table_name = str(rec.get("table_name") or "").strip() + try: + local_id = int(rec.get("local_id") or 0) + except Exception: + local_id = 0 + if not db_stem or not table_name or local_id <= 0: + continue + + message_id = str(rec.get("message_id") or "").strip() or f"{db_stem}:{table_name}:{int(local_id)}" + + conn_msg = msg_conns.get(db_stem) + if conn_msg is None: + db_path_out = account_dir / f"{db_stem}.db" + if not db_path_out.exists(): + continue + conn_msg = sqlite3.connect(str(db_path_out)) + conn_msg.row_factory = sqlite3.Row + conn_msg.text_factory = bytes + msg_conns[db_stem] = conn_msg + my_rowids[db_stem] = _lookup_output_my_rowid(conn_msg, account_dir.name) + + row_cur = _select_output_message_row(conn_msg, 
table_name=table_name, local_id=local_id) + if row_cur is not None: + _append_full_messages_from_rows( + merged=merged_current, + sender_usernames=sender_usernames_current, + quote_usernames=quote_usernames_current, + pat_usernames=pat_usernames_current, + rows=[row_cur], + db_path=account_dir / f"{db_stem}.db", + table_name=table_name, + username=username, + account_dir=account_dir, + is_group=is_group, + my_rowid=my_rowids.get(db_stem), + resource_conn=resource_conn, + resource_chat_id=resource_chat_id, + ) + cur_raw: dict[str, Any] = {} + for k in row_cur.keys(): + cur_raw[str(k)] = _jsonify_db_value(str(k), row_cur[k]) + current_raw_by_id[message_id] = cur_raw + + # Original raw snapshot (for UI raw display) + try: + original_raw_by_id[message_id] = json.loads(str(rec.get("original_msg_json") or "") or "null") + except Exception: + original_raw_by_id[message_id] = None + + # Original row for rendering + try: + orig_row = chat_edit_store.loads_json_with_blobs(str(rec.get("original_msg_json") or "") or "") + except Exception: + orig_row = None + if isinstance(orig_row, dict): + try: + rsid = int(orig_row.get("real_sender_id") or 0) + except Exception: + rsid = 0 + sender_username = _lookup_output_username_by_rowid(conn_msg, rsid) if rsid > 0 else "" + orig_row["sender_username"] = sender_username + orig_row.setdefault("packed_info_data", None) + _append_full_messages_from_rows( + merged=merged_original, + sender_usernames=sender_usernames_original, + quote_usernames=quote_usernames_original, + pat_usernames=pat_usernames_original, + rows=[orig_row], + db_path=account_dir / f"{db_stem}.db", + table_name=table_name, + username=username, + account_dir=account_dir, + is_group=is_group, + my_rowid=my_rowids.get(db_stem), + resource_conn=resource_conn, + resource_chat_id=resource_chat_id, + ) + + if merged_current: + _postprocess_full_messages( + merged=merged_current, + sender_usernames=sender_usernames_current, + quote_usernames=quote_usernames_current, + 
pat_usernames=pat_usernames_current, + account_dir=account_dir, + username=username, + base_url=base_url, + contact_db_path=account_dir / "contact.db", + head_image_db_path=account_dir / "head_image.db", + ) + if merged_original: + _postprocess_full_messages( + merged=merged_original, + sender_usernames=sender_usernames_original, + quote_usernames=quote_usernames_original, + pat_usernames=pat_usernames_original, + account_dir=account_dir, + username=username, + base_url=base_url, + contact_db_path=account_dir / "contact.db", + head_image_db_path=account_dir / "head_image.db", + ) + + current_by_id = {str(m.get("id") or ""): m for m in merged_current if str(m.get("id") or "").strip()} + original_by_id = {str(m.get("id") or ""): m for m in merged_original if str(m.get("id") or "").strip()} + + items: list[dict[str, Any]] = [] + for rec in edits: + mid = str(rec.get("message_id") or "").strip() + if not mid: + try: + mid = chat_edit_store.format_message_id( + rec.get("db") or "", + rec.get("table_name") or "", + int(rec.get("local_id") or 0), + ) + except Exception: + mid = "" + if not mid: + continue + items.append( + { + "messageId": mid, + "firstEditedAt": int(rec.get("first_edited_at") or 0), + "lastEditedAt": int(rec.get("last_edited_at") or 0), + "editCount": int(rec.get("edit_count") or 0), + "original": original_by_id.get(mid), + "current": current_by_id.get(mid), + "originalRaw": original_raw_by_id.get(mid), + "currentRaw": current_raw_by_id.get(mid), + } + ) + + items.sort(key=lambda x: int(((x.get("current") or x.get("original") or {}) or {}).get("createTime") or 0)) + return { + "status": "success", + "account": account_dir.name, + "username": username, + "items": items, + } + finally: + for c in msg_conns.values(): + try: + c.close() + except Exception: + pass + if resource_conn is not None: + try: + resource_conn.close() + except Exception: + pass + + +@router.get("/api/chat/edits/message_status", summary="某条消息是否被本工具修改过") +def get_chat_edit_status(*, 
account: Optional[str] = None, username: str, message_id: str) -> dict[str, Any]: + if not username: + raise HTTPException(status_code=400, detail="Missing username.") + if not message_id: + raise HTTPException(status_code=400, detail="Missing message_id.") + account_dir = _resolve_account_dir(account) + item = chat_edit_store.get_message_edit(account_dir.name, username, message_id) + if not item: + return {"modified": False} + return { + "modified": True, + "firstEditedAt": int(item.get("first_edited_at") or 0), + "lastEditedAt": int(item.get("last_edited_at") or 0), + "editCount": int(item.get("edit_count") or 0), + } + + +@router.post("/api/chat/messages/repair_sender", summary="修复某条消息的发送者(real_sender_id)") +async def repair_chat_message_sender(request: Request) -> dict[str, Any]: + """Repair message sender for cases where an incorrect reset wrote wrong metadata. + + Currently this supports forcing the message to be treated as "sent by me" by setting + `real_sender_id` to the account's Name2Id rowid, in both db_storage and output DB. 
+ """ + payload = await request.json() + if not isinstance(payload, dict): + raise HTTPException(status_code=400, detail="Invalid payload.") + + account = str(payload.get("account") or "").strip() or None + session_id = str(payload.get("session_id") or payload.get("username") or payload.get("sessionId") or "").strip() + message_id = str(payload.get("message_id") or payload.get("messageId") or "").strip() + mode = str(payload.get("mode") or "me").strip().lower() + + if not session_id: + raise HTTPException(status_code=400, detail="Missing session_id.") + if not message_id: + raise HTTPException(status_code=400, detail="Missing message_id.") + if mode not in {"me"}: + raise HTTPException(status_code=400, detail="Unsupported mode.") + + account_dir = _resolve_account_dir(account) + try: + db_stem, table_name_in, local_id = chat_edit_store.parse_message_id(message_id) + except Exception: + raise HTTPException(status_code=400, detail="Invalid message_id.") + + msg_db_path_out = account_dir / f"{db_stem}.db" + if not msg_db_path_out.exists(): + raise HTTPException(status_code=404, detail="Message database not found.") + + msg_db_path_real, _res_db_path_real = _resolve_db_storage_message_paths(account_dir, db_stem) + if not msg_db_path_real.exists(): + raise HTTPException(status_code=404, detail="Real message database not found in db_storage.") + + # Resolve output table name casing and the "my" rowid for this message DB. + table_name_out = "" + my_rowid_out: Optional[int] = None + conn_probe: Optional[sqlite3.Connection] = None + try: + conn_probe = sqlite3.connect(str(msg_db_path_out), timeout=5) + conn_probe.row_factory = sqlite3.Row + table_name_out = _normalize_table_name_case(conn_probe, table_name_in) or "" + if not table_name_out: + raise HTTPException(status_code=404, detail="Message table not found.") + + r = conn_probe.execute( + "SELECT rowid FROM Name2Id WHERE user_name = ? 
ORDER BY rowid ASC LIMIT 1", + (account_dir.name,), + ).fetchone() + if r is not None: + try: + my_rowid_out = int(r[0]) + except Exception: + my_rowid_out = None + finally: + if conn_probe is not None: + try: + conn_probe.close() + except Exception: + pass + + if my_rowid_out is None or my_rowid_out <= 0: + raise HTTPException(status_code=404, detail="Name2Id row not found for account in output db.") + + with _realtime_sync_lock(account_dir.name, session_id): + try: + wcdb_conn = WCDB_REALTIME.ensure_connected(account_dir) + except WCDBRealtimeError as e: + raise HTTPException(status_code=400, detail=str(e)) + + # Resolve "my" rowid from the live db_storage message DB. + sql_my = ( + "SELECT rowid FROM Name2Id WHERE user_name = " + + _sql_literal(account_dir.name) + + " ORDER BY rowid ASC LIMIT 1" + ) + with wcdb_conn.lock: + rows = _wcdb_exec_query(wcdb_conn.handle, kind="message", path=str(msg_db_path_real), sql=sql_my) + + my_rowid_real = 0 + if rows and isinstance(rows[0], dict): + for k, v in rows[0].items(): + if str(k or "").strip().lower() == "rowid": + try: + my_rowid_real = int(v or 0) + except Exception: + my_rowid_real = 0 + break + + if my_rowid_real <= 0: + raise HTTPException(status_code=404, detail="Name2Id row not found for account in real db_storage.") + + # 1) Update real db_storage (source of truth). + try: + sql_real = _build_wcdb_update_sql( + table_name=table_name_in, + updates={"real_sender_id": int(my_rowid_real)}, + where_local_id=int(local_id), + ) + with wcdb_conn.lock: + _wcdb_exec_query(wcdb_conn.handle, kind="message", path=str(msg_db_path_real), sql=sql_real) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to update real db_storage: {e}") + + # 2) Sync output decrypted DB so UI reflects the change immediately. 
+ try: + conn_out = sqlite3.connect(str(msg_db_path_out), timeout=5) + try: + sql_out, params_out = _build_sqlite_update_sql( + table_name=table_name_out, + updates={"real_sender_id": int(my_rowid_out)}, + where_local_id=int(local_id), + ) + conn_out.execute(sql_out, params_out) + conn_out.commit() + finally: + conn_out.close() + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to update output db: {e}") + + return { + "status": "success", + "account": account_dir.name, + "sessionId": session_id, + "messageId": f"{db_stem}:{table_name_out or table_name_in}:{int(local_id)}", + "mode": mode, + } + + +@router.post("/api/chat/messages/flip_direction", summary="反转某条消息在微信客户端的左右位置(packed_info_data)") +async def flip_chat_message_direction(request: Request) -> dict[str, Any]: + """Flip a message's bubble side in the *WeChat client* by swapping from/to in packed_info_data. + + Note: this intentionally edits `packed_info_data` (a protobuf-like BLOB). It is risky. + A snapshot is recorded so users can undo via `/api/chat/edits/reset_message`. 
+ """ + + payload = await request.json() + if not isinstance(payload, dict): + raise HTTPException(status_code=400, detail="Invalid payload.") + + account = str(payload.get("account") or "").strip() or None + session_id = str(payload.get("session_id") or payload.get("username") or payload.get("sessionId") or "").strip() + message_id_in = str(payload.get("message_id") or payload.get("messageId") or "").strip() + + if not session_id: + raise HTTPException(status_code=400, detail="Missing session_id.") + if not message_id_in: + raise HTTPException(status_code=400, detail="Missing message_id.") + + account_dir = _resolve_account_dir(account) + try: + db_stem, table_name_in, local_id = chat_edit_store.parse_message_id(message_id_in) + except Exception: + raise HTTPException(status_code=400, detail="Invalid message_id.") + + msg_db_path_out = account_dir / f"{db_stem}.db" + if not msg_db_path_out.exists(): + raise HTTPException(status_code=404, detail="Message database not found.") + + msg_db_path_real, _res_db_path_real = _resolve_db_storage_message_paths(account_dir, db_stem) + if not msg_db_path_real.exists(): + raise HTTPException(status_code=404, detail="Real message database not found in db_storage.") + + def _coerce_packed_bytes(value: Any) -> Optional[bytes]: + if value is None: + return None + if isinstance(value, memoryview): + value = value.tobytes() + if isinstance(value, bytearray): + value = bytes(value) + if isinstance(value, bytes): + # If a past bug stored the blob as TEXT hex, sqlite may return ASCII bytes here. 
+ try: + s = value.decode("ascii").strip() + except Exception: + return value + if not s: + return b"" + b = _hex_to_bytes(s) + if b is not None: + return b + if (len(s) % 2 == 0) and (_HEX_RE.fullmatch(s) is not None): + try: + return bytes.fromhex(s) + except Exception: + return value + return value + if isinstance(value, str): + s = value.strip() + if not s: + return b"" + b = _hex_to_bytes(s) + if b is not None: + return b + if (len(s) % 2 == 0) and (_HEX_RE.fullmatch(s) is not None): + try: + return bytes.fromhex(s) + except Exception: + return None + return s.encode("utf-8", errors="replace") + return None + + # Resolve output table name casing and read packed_info_data bytes from output DB. + table_name_out = "" + packed_before: Optional[bytes] = None + conn_out_probe: Optional[sqlite3.Connection] = None + try: + conn_out_probe = sqlite3.connect(str(msg_db_path_out), timeout=5) + conn_out_probe.row_factory = sqlite3.Row + conn_out_probe.text_factory = bytes + table_name_out = _normalize_table_name_case(conn_out_probe, table_name_in) or "" + if not table_name_out: + raise HTTPException(status_code=404, detail="Message table not found.") + cols = _table_info_columns(conn_out_probe, table_name_out) + if not cols or ("packed_info_data" not in cols): + raise HTTPException(status_code=400, detail="packed_info_data column not found.") + quoted = _quote_ident(table_name_out) + row = conn_out_probe.execute( + f"SELECT packed_info_data FROM {quoted} WHERE local_id = ? 
LIMIT 1", + (int(local_id),), + ).fetchone() + if row is None: + raise HTTPException(status_code=404, detail="Message not found in output database.") + packed_before = _coerce_packed_bytes(row["packed_info_data"]) + finally: + if conn_out_probe is not None: + try: + conn_out_probe.close() + except Exception: + pass + + if not packed_before: + raise HTTPException(status_code=400, detail="packed_info_data is empty; cannot flip direction.") + + try: + packed_after, old_from_id, old_to_id = _swap_packed_info_from_to(packed_before) + except Exception as e: + raise HTTPException(status_code=400, detail=f"Cannot flip packed_info_data: {e}") + + # Apply to output DB first, then real db_storage. Record snapshot so users can undo. + message_id = f"{db_stem}:{table_name_out or table_name_in}:{int(local_id)}" + created_record = False + + with _realtime_sync_lock(account_dir.name, session_id): + try: + wcdb_conn = WCDB_REALTIME.ensure_connected(account_dir) + except WCDBRealtimeError as e: + raise HTTPException(status_code=400, detail=str(e)) + + # Snapshot original row from real db_storage (only once). 
+ existing_record = chat_edit_store.get_message_edit(account_dir.name, session_id, message_id) + if existing_record is None: + try: + select_sql = f"SELECT * FROM {_quote_ident(table_name_in)} WHERE local_id = {int(local_id)} LIMIT 1" + with wcdb_conn.lock: + rows = _wcdb_exec_query( + wcdb_conn.handle, + kind="message", + path=str(msg_db_path_real), + sql=select_sql, + ) + if not rows or not isinstance(rows[0], dict): + raise HTTPException(status_code=404, detail="Message not found in real db_storage.") + original_row = rows[0] + + chat_edit_store.upsert_original_once( + account=account_dir.name, + session_id=session_id, + db=db_stem, + table_name=table_name_out or table_name_in, + local_id=int(local_id), + original_msg=original_row, + original_resource=None, + now_ms=int(time.time() * 1000), + ) + created_record = True + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to write edit snapshot: {e}") + + # 1) Update output decrypted DB (so UI can show it in raw view). + try: + conn_out = sqlite3.connect(str(msg_db_path_out), timeout=5) + try: + sql_out, params_out = _build_sqlite_update_sql( + table_name=table_name_out, + updates={"packed_info_data": packed_after}, + where_local_id=int(local_id), + ) + cur = conn_out.execute(sql_out, params_out) + conn_out.commit() + if int(getattr(cur, "rowcount", 0) or 0) <= 0: + raise HTTPException(status_code=404, detail="Message not found in output database.") + finally: + conn_out.close() + except HTTPException: + if created_record: + try: + chat_edit_store.delete_message_edit(account_dir.name, session_id, message_id) + except Exception: + pass + raise + except Exception as e: + if created_record: + try: + chat_edit_store.delete_message_edit(account_dir.name, session_id, message_id) + except Exception: + pass + raise HTTPException(status_code=500, detail=f"Failed to update output database: {e}") + + # 2) Update real db_storage (source of truth). 
Rollback output on failure. + try: + sql_real = _build_wcdb_update_sql( + table_name=table_name_in, + updates={"packed_info_data": packed_after}, + where_local_id=int(local_id), + ) + with wcdb_conn.lock: + _wcdb_exec_query(wcdb_conn.handle, kind="message", path=str(msg_db_path_real), sql=sql_real) + except Exception as e: + # Roll back output changes. + try: + conn_rb = sqlite3.connect(str(msg_db_path_out), timeout=5) + try: + sql_rb, params_rb = _build_sqlite_update_sql( + table_name=table_name_out, + updates={"packed_info_data": packed_before}, + where_local_id=int(local_id), + ) + conn_rb.execute(sql_rb, params_rb) + conn_rb.commit() + finally: + conn_rb.close() + except Exception: + pass + + if created_record: + try: + chat_edit_store.delete_message_edit(account_dir.name, session_id, message_id) + except Exception: + pass + raise HTTPException(status_code=500, detail=f"Failed to update real db_storage: {e}") + + # Track which columns were modified so reset restores only those. + try: + chat_edit_store.merge_edited_columns( + account=account_dir.name, + session_id=session_id, + db=db_stem, + table_name=table_name_out or table_name_in, + local_id=int(local_id), + columns=["packed_info_data"], + ) + except Exception: + pass + + # Bump edit metadata for already-tracked messages. 
+ if existing_record is not None: + try: + chat_edit_store.upsert_original_once( + account=account_dir.name, + session_id=session_id, + db=db_stem, + table_name=table_name_out or table_name_in, + local_id=int(local_id), + original_msg={}, + original_resource=None, + now_ms=int(time.time() * 1000), + ) + except Exception: + pass + + return { + "status": "success", + "account": account_dir.name, + "sessionId": session_id, + "messageId": message_id, + "before": { + "packed_info_data": _bytes_to_hex(packed_before), + "fromId": int(old_from_id), + "toId": int(old_to_id), + }, + "after": { + "packed_info_data": _bytes_to_hex(packed_after), + "fromId": int(old_to_id), + "toId": int(old_from_id), + }, + } + + +def _restore_message_from_snapshot( + *, + account_dir: Path, + session_id: str, + message_id: str, + record: dict[str, Any], + wcdb_conn, +) -> None: + db_stem, table_name, local_id_current = chat_edit_store.parse_message_id(message_id) + msg_db_path_out = account_dir / f"{db_stem}.db" + if not msg_db_path_out.exists(): + raise HTTPException(status_code=404, detail="Message database not found.") + + msg_db_path_real, res_db_path_real = _resolve_db_storage_message_paths(account_dir, db_stem) + if not msg_db_path_real.exists(): + raise HTTPException(status_code=404, detail="Real message database not found in db_storage.") + + original_msg = chat_edit_store.loads_json_with_blobs(str(record.get("original_msg_json") or "") or "") + if not isinstance(original_msg, dict): + raise HTTPException(status_code=500, detail="Invalid original snapshot.") + + original_resource = None + if str(record.get("original_resource_json") or ""): + try: + original_resource = chat_edit_store.loads_json_with_blobs(str(record.get("original_resource_json") or "") or "") + except Exception: + original_resource = None + + edited_cols: set[str] = set() + try: + raw = str(record.get("edited_cols_json") or "").strip() + if raw: + v = json.loads(raw) + if isinstance(v, list): + edited_cols = {str(x or 
"").strip().lower() for x in v if str(x or "").strip()} + except Exception: + edited_cols = set() + + # Backward compatible default: older records didn't track edited columns. + if not edited_cols: + edited_cols = {"message_content", "compress_content"} + + # Editing content implicitly clears compress_content unless explicitly provided. + if "message_content" in edited_cols: + edited_cols.add("compress_content") + + orig_key_map = {str(k or "").strip().lower(): str(k) for k in original_msg.keys()} + + # Read current create_time from real db to call wcdb_update_message reliably. + cur_create_time = 0 + try: + sql_ct = f"SELECT create_time FROM {_quote_ident(table_name)} WHERE local_id = {int(local_id_current)} LIMIT 1" + with wcdb_conn.lock: + rows = _wcdb_exec_query(wcdb_conn.handle, kind="message", path=str(msg_db_path_real), sql=sql_ct) + if rows and isinstance(rows[0], dict): + cur_create_time = int(rows[0].get("create_time") or 0) + except Exception: + cur_create_time = 0 + if cur_create_time <= 0: + raise HTTPException(status_code=404, detail="Message not found in real db_storage.") + + # Restore message_content via wcdb_update_message (best-effort). + # Some builds store message_content as an encrypted/compressed BLOB; WCDB exec_query may return it as bare hex. + # In that case, don't call update_message with the hex string; restoring the raw column bytes below is safer. 
+ if "message_content" in edited_cols and "message_content" in orig_key_map: + try: + content = original_msg.get(orig_key_map["message_content"]) + if isinstance(content, str): + s = content.strip() + if s and (len(s) % 2 == 0) and (_HEX_RE.fullmatch(s) is not None): + s_lower = s.lower() + if (len(s) >= 64) or (s_lower.startswith("28b52ffd") and len(s) >= 16): + content = None + if isinstance(content, (bytes, bytearray, memoryview)): + try: + content = bytes(content).decode("utf-8", errors="replace") + except Exception: + content = "" + if content is not None: + _wcdb_update_message( + wcdb_conn.handle, + session_id=session_id, + local_id=int(local_id_current), + create_time=int(cur_create_time), + new_content=str(content or ""), + ) + except Exception: + pass + + # Restore only columns that were actually edited by the tool. + try: + restore_updates: dict[str, Any] = {} + for col_lc in sorted(edited_cols): + k = orig_key_map.get(col_lc) + if not k: + continue + restore_updates[k] = _normalize_edit_value(k, original_msg.get(k), from_snapshot=True) + + if restore_updates: + sql_real = _build_wcdb_update_sql( + table_name=table_name, + updates=restore_updates, + where_local_id=int(local_id_current), + ) + with wcdb_conn.lock: + _wcdb_exec_query(wcdb_conn.handle, kind="message", path=str(msg_db_path_real), sql=sql_real) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to restore real db_storage: {e}") + + # Restore output decrypted Msg_*. 
+ try: + conn_out = sqlite3.connect(str(msg_db_path_out), timeout=5) + try: + tnorm = _normalize_table_name_case(conn_out, table_name) + if not tnorm: + raise HTTPException(status_code=404, detail="Message table not found.") + cols = _table_info_columns(conn_out, tnorm) + col_map = {str(c or "").strip().lower(): str(c) for c in cols if str(c or "").strip()} + restore_out: dict[str, Any] = {} + for col_lc in sorted(edited_cols): + col = col_map.get(col_lc) + k = orig_key_map.get(col_lc) + if not col or not k: + continue + restore_out[col] = _normalize_edit_value(col, original_msg.get(k), from_snapshot=True) + + if restore_out: + sql_out, params = _build_sqlite_update_sql( + table_name=tnorm, + updates=restore_out, + where_local_id=int(local_id_current), + ) + conn_out.execute(sql_out, params) + conn_out.commit() + finally: + conn_out.close() + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to restore output database: {e}") + + # Restore message_resource key fields (best-effort, by message_id). 
+ need_restore_resource = any( + k in edited_cols for k in {"local_type", "create_time", "server_id", "origin_source", "local_id"} + ) + if need_restore_resource and isinstance(original_resource, dict): + try: + res_message_id = int(original_resource.get("message_id") or 0) + except Exception: + res_message_id = 0 + if res_message_id > 0: + restore_res: dict[str, Any] = {} + msg_to_res = { + "local_type": "message_local_type", + "create_time": "message_create_time", + "server_id": "message_svr_id", + "origin_source": "message_origin_source", + "local_id": "message_local_id", + } + for msg_col, res_col in msg_to_res.items(): + if msg_col not in edited_cols: + continue + if res_col in original_resource: + restore_res[res_col] = _normalize_edit_value(res_col, original_resource.get(res_col), from_snapshot=True) + if restore_res: + try: + parts = [f"{_quote_ident(k)} = {_sql_literal(v)}" for k, v in restore_res.items()] + sql_res_real = ( + "UPDATE MessageResourceInfo SET " + ", ".join(parts) + f" WHERE message_id = {int(res_message_id)}" + ) + if res_db_path_real.exists(): + with wcdb_conn.lock: + _wcdb_exec_query( + wcdb_conn.handle, + kind="message", + path=str(res_db_path_real), + sql=sql_res_real, + ) + except Exception: + pass + + try: + out_res_db_path = account_dir / "message_resource.db" + if out_res_db_path.exists(): + conn_res = sqlite3.connect(str(out_res_db_path), timeout=5) + try: + set_cols = ", ".join([f"{_quote_ident(k)} = ?" 
for k in restore_res.keys()]) + params = list(restore_res.values()) + [int(res_message_id)] + conn_res.execute(f"UPDATE MessageResourceInfo SET {set_cols} WHERE message_id = ?", params) + conn_res.commit() + finally: + conn_res.close() + except Exception: + pass + + +@router.post("/api/chat/edits/reset_message", summary="恢复某条消息到首次快照,并删除修改记录") +async def reset_chat_edited_message(request: Request) -> dict[str, Any]: + payload = await request.json() + if not isinstance(payload, dict): + raise HTTPException(status_code=400, detail="Invalid payload.") + + account = str(payload.get("account") or "").strip() or None + session_id = str(payload.get("session_id") or payload.get("username") or payload.get("sessionId") or "").strip() + message_id = str(payload.get("message_id") or payload.get("messageId") or "").strip() + if not session_id: + raise HTTPException(status_code=400, detail="Missing session_id.") + if not message_id: + raise HTTPException(status_code=400, detail="Missing message_id.") + + account_dir = _resolve_account_dir(account) + record = chat_edit_store.get_message_edit(account_dir.name, session_id, message_id) + if not record: + raise HTTPException(status_code=404, detail="Edit record not found.") + + with _realtime_sync_lock(account_dir.name, session_id): + try: + wcdb_conn = WCDB_REALTIME.ensure_connected(account_dir) + except WCDBRealtimeError as e: + raise HTTPException(status_code=400, detail=str(e)) + + _restore_message_from_snapshot( + account_dir=account_dir, + session_id=session_id, + message_id=message_id, + record=record, + wcdb_conn=wcdb_conn, + ) + + try: + chat_edit_store.delete_message_edit(account_dir.name, session_id, message_id) + except Exception: + pass + + return {"status": "success"} + + +@router.post("/api/chat/edits/reset_session", summary="一键恢复某会话下全部修改记录") +async def reset_chat_edited_session(request: Request) -> dict[str, Any]: + payload = await request.json() + if not isinstance(payload, dict): + raise 
HTTPException(status_code=400, detail="Invalid payload.") + + account = str(payload.get("account") or "").strip() or None + session_id = str(payload.get("session_id") or payload.get("username") or payload.get("sessionId") or "").strip() + if not session_id: + raise HTTPException(status_code=400, detail="Missing session_id.") + + account_dir = _resolve_account_dir(account) + records = chat_edit_store.list_messages(account_dir.name, session_id) + if not records: + return {"status": "success", "restored": 0, "failed": 0, "failures": []} + + restored = 0 + failures: list[dict[str, Any]] = [] + + with _realtime_sync_lock(account_dir.name, session_id): + try: + wcdb_conn = WCDB_REALTIME.ensure_connected(account_dir) + except WCDBRealtimeError as e: + raise HTTPException(status_code=400, detail=str(e)) + + for rec in records: + mid = str(rec.get("message_id") or "").strip() + if not mid: + try: + mid = chat_edit_store.format_message_id( + rec.get("db") or "", + rec.get("table_name") or "", + int(rec.get("local_id") or 0), + ) + except Exception: + mid = "" + if not mid: + continue + try: + _restore_message_from_snapshot( + account_dir=account_dir, + session_id=session_id, + message_id=mid, + record=rec, + wcdb_conn=wcdb_conn, + ) + try: + chat_edit_store.delete_message_edit(account_dir.name, session_id, mid) + except Exception: + pass + restored += 1 + except Exception as e: + failures.append({"messageId": mid, "error": str(e)}) + + return {"status": "success", "restored": int(restored), "failed": int(len(failures)), "failures": failures} diff --git a/src/wechat_decrypt_tool/routers/chat_contacts.py b/src/wechat_decrypt_tool/routers/chat_contacts.py new file mode 100644 index 0000000..e6e86ec --- /dev/null +++ b/src/wechat_decrypt_tool/routers/chat_contacts.py @@ -0,0 +1,836 @@ +import csv +import json +import re +import sqlite3 +from datetime import datetime, timezone +from functools import lru_cache +from pathlib import Path +from typing import Any, Literal, Optional + 
+from fastapi import APIRouter, HTTPException, Request +from pypinyin import Style, lazy_pinyin +from pydantic import BaseModel, Field + +from ..chat_helpers import ( + _build_avatar_url, + _pick_avatar_url, + _pick_display_name, + _resolve_account_dir, + _should_keep_session, +) +from ..path_fix import PathFixRoute + +router = APIRouter(route_class=PathFixRoute) + + +_SYSTEM_USERNAMES = { + "filehelper", + "fmessage", + "floatbottle", + "medianote", + "newsapp", + "qmessage", + "qqmail", + "tmessage", + "brandsessionholder", + "brandservicesessionholder", + "notifymessage", + "opencustomerservicemsg", + "notification_messages", + "userexperience_alarm", +} + +_SOURCE_SCENE_LABELS = { + 1: "通过QQ号添加", + 3: "通过微信号添加", + 6: "通过手机号添加", + 10: "通过名片添加", + 14: "通过群聊添加", + 30: "通过扫一扫添加", +} + +_COUNTRY_LABELS = { + "CN": "中国大陆", +} + + +class ContactTypeFilter(BaseModel): + friends: bool = True + groups: bool = True + officials: bool = True + + +class ContactExportRequest(BaseModel): + account: Optional[str] = Field(None, description="账号目录名(可选,默认使用第一个)") + output_dir: str = Field(..., description="导出目录绝对路径") + format: str = Field("json", description="导出格式,仅支持 json/csv") + include_avatar_link: bool = Field(True, description="是否导出 avatarLink 字段") + contact_types: ContactTypeFilter = Field(default_factory=ContactTypeFilter) + keyword: Optional[str] = Field(None, description="关键词筛选(可选)") + + +def _normalize_text(v: Any) -> str: + if v is None: + return "" + return str(v).strip() + + +def _to_int(v: Any) -> int: + try: + return int(v or 0) + except Exception: + return 0 + + +def _to_optional_int(v: Any) -> Optional[int]: + if v is None: + return None + if isinstance(v, bool): + return int(v) + if isinstance(v, int): + return v + s = _normalize_text(v) + if not s: + return None + try: + return int(s) + except Exception: + return None + + +_PINYIN_CLEAN_RE = re.compile(r"[^a-z0-9]+") +_PINYIN_ALPHA_RE = re.compile(r"[A-Za-z]") + +# 多音字姓氏:pypinyin 对单字默认读音不一定是姓氏读音(例如:曾= ceng / 
zeng)。 +# 这里在“姓名首字”场景优先采用常见姓氏读音,用于联系人列表的分组/排序。 +_SURNAME_PINYIN_OVERRIDES: dict[str, str] = { + "曾": "zeng", + "区": "ou", + "仇": "qiu", + "解": "xie", + "单": "shan", + "查": "zha", + "乐": "yue", + "朴": "piao", + "盖": "ge", + "缪": "miao", +} + + +@lru_cache(maxsize=4096) +def _build_contact_pinyin_key(name: str) -> str: + text = _normalize_text(name) + if not text: + return "" + + # Keep non-CJK segments so English names can be sorted/grouped as expected. + first = text[0] + override = _SURNAME_PINYIN_OVERRIDES.get(first) + if override: + rest = text[1:] + parts = [override] + if rest: + parts.extend(lazy_pinyin(rest, style=Style.NORMAL, errors="default")) + else: + parts = lazy_pinyin(text, style=Style.NORMAL, errors="default") + out: list[str] = [] + for part in parts: + cleaned = _PINYIN_CLEAN_RE.sub("", _normalize_text(part).lower()) + if cleaned: + out.append(cleaned) + return "".join(out) + + +@lru_cache(maxsize=4096) +def _build_contact_pinyin_initial(name: str) -> str: + text = _normalize_text(name).lstrip() + if not text: + return "#" + + first = text[0] + if "A" <= first <= "Z": + return first + if "a" <= first <= "z": + return first.upper() + + override = _SURNAME_PINYIN_OVERRIDES.get(first) + if override: + return override[0].upper() + + # For CJK, try to convert the first character to pinyin initial. + parts = lazy_pinyin(first, style=Style.NORMAL, errors="ignore") + if parts: + m = _PINYIN_ALPHA_RE.search(parts[0]) + if m: + return m.group(0).upper() + + # Emoji / digits / symbols, etc. 
+ return "#" + + +def _decode_varint(raw: bytes, offset: int) -> tuple[Optional[int], int]: + value = 0 + shift = 0 + pos = int(offset) + n = len(raw) + while pos < n: + byte = raw[pos] + pos += 1 + value |= (byte & 0x7F) << shift + if (byte & 0x80) == 0: + return value, pos + shift += 7 + if shift > 63: + return None, n + return None, n + + +def _decode_proto_text(raw: bytes) -> str: + if not raw: + return "" + try: + text = raw.decode("utf-8", errors="ignore") + except Exception: + return "" + return re.sub(r"[\x00-\x08\x0b\x0c\x0e-\x1f]", "", text).strip() + + +def _parse_contact_extra_buffer(extra_buffer: Any) -> dict[str, Any]: + out = { + "gender": 0, + "signature": "", + "country": "", + "province": "", + "city": "", + "source_scene": None, + } + if extra_buffer is None: + return out + + raw: bytes + if isinstance(extra_buffer, memoryview): + raw = extra_buffer.tobytes() + elif isinstance(extra_buffer, (bytes, bytearray)): + raw = bytes(extra_buffer) + else: + return out + + if not raw: + return out + + idx = 0 + n = len(raw) + while idx < n: + tag, idx_next = _decode_varint(raw, idx) + if tag is None: + break + idx = idx_next + field_no = tag >> 3 + wire_type = tag & 0x7 + + if wire_type == 0: + val, idx_next = _decode_varint(raw, idx) + if val is None: + break + idx = idx_next + if field_no == 2: + # 性别: 1=男, 2=女, 0=未知 + out["gender"] = int(val) + if field_no == 8: + out["source_scene"] = int(val) + continue + + if wire_type == 2: + size, idx_next = _decode_varint(raw, idx) + if size is None: + break + idx = idx_next + end = idx + int(size) + if end > n: + break + chunk = raw[idx:end] + idx = end + + if field_no in {4, 5, 6, 7}: + text = _decode_proto_text(chunk) + if field_no == 4: + out["signature"] = text + elif field_no == 5: + out["country"] = text + elif field_no == 6: + out["province"] = text + elif field_no == 7: + out["city"] = text + continue + + if wire_type == 1: + idx += 8 + continue + if wire_type == 5: + idx += 4 + continue + + break + + 
return out + + +def _country_label(country: str) -> str: + c = _normalize_text(country) + if not c: + return "" + return _COUNTRY_LABELS.get(c.upper(), c) + + +def _source_scene_label(source_scene: Optional[int]) -> str: + if source_scene is None: + return "" + if source_scene in _SOURCE_SCENE_LABELS: + return _SOURCE_SCENE_LABELS[source_scene] + return f"场景码 {source_scene}" + + +def _build_region(country: str, province: str, city: str) -> str: + parts: list[str] = [] + country_text = _country_label(country) + province_text = _normalize_text(province) + city_text = _normalize_text(city) + if country_text: + parts.append(country_text) + if province_text: + parts.append(province_text) + if city_text: + parts.append(city_text) + return "·".join(parts) + + +def _safe_export_part(s: str) -> str: + cleaned = re.sub(r"[^0-9A-Za-z._-]+", "_", str(s or "").strip()) + cleaned = cleaned.strip("._-") + return cleaned or "account" + + +def _is_valid_contact_username(username: str) -> bool: + u = _normalize_text(username) + if not u: + return False + if u in _SYSTEM_USERNAMES: + return False + if u.startswith("fake_"): + return False + if not _should_keep_session(u, include_official=True) and not u.startswith("gh_") and u != "weixin": + return False + return True + + +def _get_table_columns(conn: sqlite3.Connection, table: str) -> set[str]: + try: + rows = conn.execute(f"PRAGMA table_info({table})").fetchall() + except Exception: + return set() + + out: set[str] = set() + for row in rows: + try: + name = _normalize_text(row["name"] if "name" in row.keys() else row[1]).lower() + except Exception: + continue + if name: + out.add(name) + return out + + +def _build_contact_select_sql(table: str, columns: set[str]) -> Optional[str]: + if "username" not in columns: + return None + + specs: list[tuple[str, str, str]] = [ + ("username", "username", "''"), + ("remark", "remark", "''"), + ("nick_name", "nick_name", "''"), + ("alias", "alias", "''"), + ("local_type", "local_type", "0"), + 
("verify_flag", "verify_flag", "0"), + ("big_head_url", "big_head_url", "''"), + ("small_head_url", "small_head_url", "''"), + ("extra_buffer", "extra_buffer", "x''"), + ] + + select_parts: list[str] = [] + for key, alias, fallback in specs: + if key in columns: + select_parts.append(key) + else: + select_parts.append(f"{fallback} AS {alias}") + return f"SELECT {', '.join(select_parts)} FROM {table}" + + +def _load_contact_rows_map(contact_db_path: Path) -> dict[str, dict[str, Any]]: + out: dict[str, dict[str, Any]] = {} + if not contact_db_path.exists(): + return out + + conn = sqlite3.connect(str(contact_db_path)) + conn.row_factory = sqlite3.Row + try: + def read_rows(table: str) -> list[sqlite3.Row]: + columns = _get_table_columns(conn, table) + sql = _build_contact_select_sql(table, columns) + if not sql: + return [] + try: + return conn.execute(sql).fetchall() + except Exception: + return [] + return [] + + for table in ("contact", "stranger"): + rows = read_rows(table) + for row in rows: + username = _normalize_text(row["username"] if "username" in row.keys() else "") + if (not username) or (username in out): + continue + + extra_info = _parse_contact_extra_buffer( + row["extra_buffer"] if "extra_buffer" in row.keys() else b"" + ) + out[username] = { + "username": username, + "remark": _normalize_text(row["remark"] if "remark" in row.keys() else ""), + "nick_name": _normalize_text(row["nick_name"] if "nick_name" in row.keys() else ""), + "alias": _normalize_text(row["alias"] if "alias" in row.keys() else ""), + "local_type": _to_int(row["local_type"] if "local_type" in row.keys() else 0), + "verify_flag": _to_int(row["verify_flag"] if "verify_flag" in row.keys() else 0), + "big_head_url": _normalize_text(row["big_head_url"] if "big_head_url" in row.keys() else ""), + "small_head_url": _normalize_text(row["small_head_url"] if "small_head_url" in row.keys() else ""), + "gender": _to_int(extra_info.get("gender")), + "signature": 
_normalize_text(extra_info.get("signature")), + "country": _normalize_text(extra_info.get("country")), + "province": _normalize_text(extra_info.get("province")), + "city": _normalize_text(extra_info.get("city")), + "source_scene": _to_optional_int(extra_info.get("source_scene")), + } + return out + finally: + conn.close() + + +def _load_session_sort_timestamps(session_db_path: Path) -> dict[str, int]: + out: dict[str, int] = {} + if not session_db_path.exists(): + return out + + conn = sqlite3.connect(str(session_db_path)) + conn.row_factory = sqlite3.Row + try: + rows: list[sqlite3.Row] = [] + queries = [ + "SELECT username, COALESCE(sort_timestamp, 0) AS ts FROM SessionTable", + "SELECT username, COALESCE(last_timestamp, 0) AS ts FROM SessionTable", + ] + for sql in queries: + try: + rows = conn.execute(sql).fetchall() + break + except Exception: + continue + + for row in rows: + username = _normalize_text(row["username"] if "username" in row.keys() else "") + if not username: + continue + ts = _to_int(row["ts"] if "ts" in row.keys() else 0) + prev = out.get(username, 0) + if ts > prev: + out[username] = ts + return out + finally: + conn.close() + + +def _load_session_group_usernames(session_db_path: Path) -> set[str]: + out: set[str] = set() + if not session_db_path.exists(): + return out + + conn = sqlite3.connect(str(session_db_path)) + conn.row_factory = sqlite3.Row + try: + queries = [ + "SELECT username FROM SessionTable", + "SELECT username FROM sessiontable", + ] + for sql in queries: + try: + rows = conn.execute(sql).fetchall() + except Exception: + continue + for row in rows: + username = _normalize_text(row["username"] if "username" in row.keys() else "") + if username and ("@chatroom" in username): + out.add(username) + return out + return out + finally: + conn.close() + + +def _infer_contact_type(username: str, row: dict[str, Any]) -> Optional[str]: + if not username: + return None + + if "@chatroom" in username: + return "group" + + verify_flag = 
_to_int(row.get("verify_flag")) + if username.startswith("gh_") or verify_flag != 0: + return "official" + + local_type = _to_int(row.get("local_type")) + if local_type == 1: + return "friend" + + return None + + +def _matches_keyword(contact: dict[str, Any], keyword: str) -> bool: + kw = _normalize_text(keyword).lower() + if not kw: + return True + + fields = [ + contact.get("username", ""), + contact.get("displayName", ""), + contact.get("remark", ""), + contact.get("nickname", ""), + contact.get("alias", ""), + contact.get("region", ""), + contact.get("source", ""), + contact.get("country", ""), + contact.get("province", ""), + contact.get("city", ""), + ] + for field in fields: + if kw in _normalize_text(field).lower(): + return True + return False + + +def _collect_contacts_for_account( + *, + account_dir: Path, + base_url: str, + keyword: Optional[str], + include_friends: bool, + include_groups: bool, + include_officials: bool, +) -> list[dict[str, Any]]: + if not (include_friends or include_groups or include_officials): + return [] + + contact_db_path = account_dir / "contact.db" + session_db_path = account_dir / "session.db" + contact_rows = _load_contact_rows_map(contact_db_path) + session_ts_map = _load_session_sort_timestamps(session_db_path) + session_group_usernames = _load_session_group_usernames(session_db_path) + + contacts: list[dict[str, Any]] = [] + for username, row in contact_rows.items(): + if not _is_valid_contact_username(username): + continue + + contact_type = _infer_contact_type(username, row) + if contact_type is None: + continue + if contact_type == "friend" and not include_friends: + continue + if contact_type == "group" and not include_groups: + continue + if contact_type == "official" and not include_officials: + continue + + display_name = _pick_display_name(row, username) + if not display_name: + display_name = username + + avatar_link = _normalize_text(_pick_avatar_url(row) or "") + avatar = base_url + 
_build_avatar_url(account_dir.name, username) + country = _normalize_text(row.get("country")) + province = _normalize_text(row.get("province")) + city = _normalize_text(row.get("city")) + source_scene = _to_optional_int(row.get("source_scene")) + gender = _to_int(row.get("gender")) + signature = _normalize_text(row.get("signature")) + + item = { + "username": username, + "displayName": display_name, + "remark": _normalize_text(row.get("remark")), + "nickname": _normalize_text(row.get("nick_name")), + "alias": _normalize_text(row.get("alias")), + "gender": gender, + "signature": signature, + "type": contact_type, + "country": country, + "province": province, + "city": city, + "region": _build_region(country, province, city), + "sourceScene": source_scene, + "source": _source_scene_label(source_scene), + "avatar": avatar, + "avatarLink": avatar_link, + "_sortTs": _to_int(session_ts_map.get(username, 0)), + } + + if not _matches_keyword(item, keyword or ""): + continue + contacts.append(item) + + if include_groups: + for username in session_group_usernames: + if username in contact_rows: + continue + if not _is_valid_contact_username(username): + continue + + avatar_link = "" + avatar = base_url + _build_avatar_url(account_dir.name, username) + + item = { + "username": username, + "displayName": username, + "remark": "", + "nickname": "", + "alias": "", + "gender": 0, + "signature": "", + "type": "group", + "country": "", + "province": "", + "city": "", + "region": "", + "sourceScene": None, + "source": "", + "avatar": avatar, + "avatarLink": avatar_link, + "_sortTs": _to_int(session_ts_map.get(username, 0)), + } + + if not _matches_keyword(item, keyword or ""): + continue + contacts.append(item) + + contacts.sort( + key=lambda x: ( + -_to_int(x.get("_sortTs", 0)), + _normalize_text(x.get("displayName", "")).lower(), + _normalize_text(x.get("username", "")).lower(), + ) + ) + for item in contacts: + item.pop("_sortTs", None) + name_for_pinyin = 
_normalize_text(item.get("displayName")) or _normalize_text(item.get("username")) + item["pinyinKey"] = _build_contact_pinyin_key(name_for_pinyin) + item["pinyinInitial"] = _build_contact_pinyin_initial(name_for_pinyin) + return contacts + + +def _build_counts(contacts: list[dict[str, Any]]) -> dict[str, int]: + counts = { + "friends": 0, + "groups": 0, + "officials": 0, + "total": 0, + } + for item in contacts: + t = _normalize_text(item.get("type")) + if t == "friend": + counts["friends"] += 1 + elif t == "group": + counts["groups"] += 1 + elif t == "official": + counts["officials"] += 1 + counts["total"] = len(contacts) + return counts + + +def _build_export_contacts( + contacts: list[dict[str, Any]], + *, + include_avatar_link: bool, +) -> list[dict[str, Any]]: + out: list[dict[str, Any]] = [] + for item in contacts: + row = { + "username": _normalize_text(item.get("username")), + "displayName": _normalize_text(item.get("displayName")), + "remark": _normalize_text(item.get("remark")), + "nickname": _normalize_text(item.get("nickname")), + "alias": _normalize_text(item.get("alias")), + "type": _normalize_text(item.get("type")), + "region": _normalize_text(item.get("region")), + "country": _normalize_text(item.get("country")), + "province": _normalize_text(item.get("province")), + "city": _normalize_text(item.get("city")), + "source": _normalize_text(item.get("source")), + "sourceScene": _to_optional_int(item.get("sourceScene")), + } + if include_avatar_link: + row["avatarLink"] = _normalize_text(item.get("avatarLink")) + out.append(row) + return out + + +def _write_json_export( + output_path: Path, + *, + account: str, + contacts: list[dict[str, Any]], + include_avatar_link: bool, + keyword: str, + contact_types: ContactTypeFilter, +) -> None: + payload = { + "exportedAt": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"), + "account": account, + "count": len(contacts), + "filters": { + "keyword": keyword, + "contactTypes": { + "friends": 
bool(contact_types.friends), + "groups": bool(contact_types.groups), + "officials": bool(contact_types.officials), + }, + "includeAvatarLink": bool(include_avatar_link), + }, + "contacts": contacts, + } + output_path.write_text(json.dumps(payload, ensure_ascii=False, indent=2), encoding="utf-8") + + +def _write_csv_export( + output_path: Path, + *, + contacts: list[dict[str, Any]], + include_avatar_link: bool, +) -> None: + columns: list[tuple[str, str]] = [ + ("username", "用户名"), + ("displayName", "显示名称"), + ("remark", "备注"), + ("nickname", "昵称"), + ("alias", "微信号"), + ("type", "类型"), + ("region", "地区"), + ("country", "国家/地区码"), + ("province", "省份"), + ("city", "城市"), + ("source", "来源"), + ("sourceScene", "来源场景码"), + ] + if include_avatar_link: + columns.append(("avatarLink", "头像链接")) + + with output_path.open("w", encoding="utf-8-sig", newline="") as f: + writer = csv.writer(f) + writer.writerow([label for _, label in columns]) + for item in contacts: + writer.writerow([_normalize_text(item.get(key, "")) for key, _ in columns]) + + +@router.get("/api/chat/contacts", summary="获取联系人列表") +def list_chat_contacts( + request: Request, + account: Optional[str] = None, + keyword: Optional[str] = None, + include_friends: bool = True, + include_groups: bool = True, + include_officials: bool = True, +): + account_dir = _resolve_account_dir(account) + base_url = str(request.base_url).rstrip("/") + + contacts = _collect_contacts_for_account( + account_dir=account_dir, + base_url=base_url, + keyword=keyword, + include_friends=bool(include_friends), + include_groups=bool(include_groups), + include_officials=bool(include_officials), + ) + + return { + "status": "success", + "account": account_dir.name, + "total": len(contacts), + "counts": _build_counts(contacts), + "contacts": contacts, + } + + +@router.post("/api/chat/contacts/export", summary="导出联系人") +def export_chat_contacts(request: Request, req: ContactExportRequest): + account_dir = _resolve_account_dir(req.account) + + 
output_dir_raw = _normalize_text(req.output_dir) + if not output_dir_raw: + raise HTTPException(status_code=400, detail="output_dir is required.") + + output_dir = Path(output_dir_raw).expanduser() + if not output_dir.is_absolute(): + raise HTTPException(status_code=400, detail="output_dir must be an absolute path.") + + try: + output_dir.mkdir(parents=True, exist_ok=True) + except Exception as e: + raise HTTPException(status_code=400, detail=f"Failed to prepare output_dir: {e}") + + base_url = str(request.base_url).rstrip("/") + contacts = _collect_contacts_for_account( + account_dir=account_dir, + base_url=base_url, + keyword=req.keyword, + include_friends=bool(req.contact_types.friends), + include_groups=bool(req.contact_types.groups), + include_officials=bool(req.contact_types.officials), + ) + + export_contacts = _build_export_contacts( + contacts, + include_avatar_link=bool(req.include_avatar_link), + ) + + fmt = _normalize_text(req.format).lower() + if fmt not in {"json", "csv"}: + raise HTTPException(status_code=400, detail="Unsupported format, use 'json' or 'csv'.") + + ts = datetime.now().strftime("%Y%m%d_%H%M%S") + safe_account = _safe_export_part(account_dir.name) + output_path = output_dir / f"contacts_{safe_account}_{ts}.{fmt}" + + try: + if fmt == "json": + _write_json_export( + output_path, + account=account_dir.name, + contacts=export_contacts, + include_avatar_link=bool(req.include_avatar_link), + keyword=_normalize_text(req.keyword), + contact_types=req.contact_types, + ) + else: + _write_csv_export( + output_path, + contacts=export_contacts, + include_avatar_link=bool(req.include_avatar_link), + ) + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to export contacts: {e}") + + return { + "status": "success", + "account": account_dir.name, + "format": fmt, + "outputPath": str(output_path), + "count": len(export_contacts), + } diff --git a/src/wechat_decrypt_tool/routers/chat_export.py 
b/src/wechat_decrypt_tool/routers/chat_export.py index a8cb149..feddffa 100644 --- a/src/wechat_decrypt_tool/routers/chat_export.py +++ b/src/wechat_decrypt_tool/routers/chat_export.py @@ -12,34 +12,57 @@ router = APIRouter(route_class=PathFixRoute) -ExportFormat = Literal["json", "txt"] +ExportFormat = Literal["json", "txt", "html"] ExportScope = Literal["selected", "all", "groups", "singles"] MediaKind = Literal["image", "emoji", "video", "video_thumb", "voice", "file"] -MessageType = Literal["text", "image", "emoji", "video", "voice", "file", "link", "transfer", "redPacket", "system", "quote", "voip"] +MessageType = Literal[ + "text", + "image", + "emoji", + "video", + "voice", + "chatHistory", + "file", + "link", + "transfer", + "redPacket", + "system", + "quote", + "voip", +] class ChatExportCreateRequest(BaseModel): account: Optional[str] = Field(None, description="账号目录名(可选,默认使用第一个)") scope: ExportScope = Field("selected", description="导出范围:selected=指定会话;all=全部;groups=仅群聊;singles=仅单聊") usernames: list[str] = Field(default_factory=list, description="会话 username 列表(scope=selected 时使用)") - format: ExportFormat = Field("json", description="导出格式:json 或 txt(zip 内每个会话一个文件)") + format: ExportFormat = Field("json", description="导出格式:json/txt/html(zip 内每个会话一个文件;html 可离线打开 index.html 查看)") start_time: Optional[int] = Field(None, description="起始时间(Unix 秒,含)") end_time: Optional[int] = Field(None, description="结束时间(Unix 秒,含)") include_hidden: bool = Field(False, description="是否包含隐藏会话(scope!=selected 时)") include_official: bool = Field(False, description="是否包含公众号/官方账号会话(scope!=selected 时)") - include_media: bool = Field(True, description="是否打包离线媒体(图片/表情/视频/语音/文件)") + include_media: bool = Field(True, description="是否允许打包离线媒体(最终仍受 message_types 与 privacy_mode 约束)") media_kinds: list[MediaKind] = Field( default_factory=lambda: ["image", "emoji", "video", "video_thumb", "voice", "file"], - description="打包的媒体类型", + description="允许打包的媒体类型(最终仍受 message_types 勾选约束)", ) 
message_types: list[MessageType] = Field( default_factory=list, - description="导出消息类型(renderType)过滤:为空=导出全部消息;可多选(如仅 voice / 仅 transfer / 仅 redPacket 等)", + description="导出消息类型(renderType)过滤:为空=导出全部类型;不为空时,仅导出勾选类型", ) + output_dir: Optional[str] = Field(None, description="导出目录绝对路径(可选;不填时使用默认目录)") allow_process_key_extract: bool = Field( False, description="预留字段:本项目不从微信进程提取媒体密钥,请使用 wx_key 获取并保存/批量解密", ) + download_remote_media: bool = Field( + False, + description="HTML 导出时允许联网下载链接/引用缩略图等远程媒体(提高离线完整性)", + ) + html_page_size: int = Field( + 1000, + description="HTML 导出分页大小(每页消息数);<=0 表示禁用分页(单文件,打开大聊天可能很卡)", + ) privacy_mode: bool = Field( False, description="隐私模式导出:隐藏会话/用户名/内容,不打包头像与媒体", @@ -61,7 +84,10 @@ async def create_chat_export(req: ChatExportCreateRequest): include_media=req.include_media, media_kinds=req.media_kinds, message_types=req.message_types, + output_dir=req.output_dir, allow_process_key_extract=req.allow_process_key_extract, + download_remote_media=req.download_remote_media, + html_page_size=req.html_page_size, privacy_mode=req.privacy_mode, file_name=req.file_name, ) diff --git a/src/wechat_decrypt_tool/routers/chat_media.py b/src/wechat_decrypt_tool/routers/chat_media.py index 655f0bc..70b7290 100644 --- a/src/wechat_decrypt_tool/routers/chat_media.py +++ b/src/wechat_decrypt_tool/routers/chat_media.py @@ -8,7 +8,7 @@ import sqlite3 import subprocess from pathlib import Path -from typing import Optional +from typing import Any, Optional from urllib.parse import urlparse import requests @@ -16,9 +16,24 @@ from fastapi.responses import FileResponse, Response from pydantic import BaseModel, Field +from ..avatar_cache import ( + AVATAR_CACHE_TTL_SECONDS, + avatar_cache_entry_file_exists, + avatar_cache_entry_is_fresh, + build_avatar_cache_response_headers, + cache_key_for_avatar_user, + cache_key_for_avatar_url, + get_avatar_cache_url_entry, + get_avatar_cache_user_entry, + is_avatar_cache_enabled, + normalize_avatar_source_url, + 
touch_avatar_cache_entry, + upsert_avatar_cache_entry, + write_avatar_cache_payload, +) from ..logging_config import get_logger from ..media_helpers import ( - _convert_silk_to_wav, + _convert_silk_to_browser_audio, _decrypt_emoticon_aes_cbc, _detect_image_extension, _detect_image_media_type, @@ -43,14 +58,56 @@ _try_find_decrypted_resource, _try_strip_media_prefix, ) -from ..chat_helpers import _extract_md5_from_packed_info +from ..chat_helpers import _extract_md5_from_packed_info, _load_contact_rows, _pick_avatar_url from ..path_fix import PathFixRoute +from ..wcdb_realtime import WCDB_REALTIME, get_avatar_urls as _wcdb_get_avatar_urls logger = get_logger(__name__) router = APIRouter(route_class=PathFixRoute) +def _resolve_avatar_remote_url(*, account_dir: Path, username: str) -> str: + u = str(username or "").strip() + if not u: + return "" + + # 1) contact.db first (cheap local lookup) + try: + rows = _load_contact_rows(account_dir / "contact.db", [u]) + row = rows.get(u) + raw = str(_pick_avatar_url(row) or "").strip() + if raw.lower().startswith(("http://", "https://")): + return normalize_avatar_source_url(raw) + except Exception: + pass + + # 2) WCDB fallback (more complete on enterprise/openim IDs) + try: + wcdb_conn = WCDB_REALTIME.ensure_connected(account_dir) + with wcdb_conn.lock: + mp = _wcdb_get_avatar_urls(wcdb_conn.handle, [u]) + wa = str(mp.get(u) or "").strip() + if wa.lower().startswith(("http://", "https://")): + return normalize_avatar_source_url(wa) + except Exception: + pass + + return "" + + +def _parse_304_headers(headers: Any) -> tuple[str, str]: + try: + etag = str((headers or {}).get("ETag") or "").strip() + except Exception: + etag = "" + try: + last_modified = str((headers or {}).get("Last-Modified") or "").strip() + except Exception: + last_modified = "" + return etag, last_modified + + @lru_cache(maxsize=4096) def _fast_probe_image_path_in_chat_attach( *, @@ -267,27 +324,309 @@ async def get_chat_avatar(username: str, account: 
Optional[str] = None): if not username: raise HTTPException(status_code=400, detail="Missing username.") account_dir = _resolve_account_dir(account) + account_name = str(account_dir.name or "").strip() + user_key = str(username or "").strip() + + # 1) Try on-disk cache first (fast path) + user_entry = None + cached_file = None + if is_avatar_cache_enabled() and account_name and user_key: + try: + user_entry = get_avatar_cache_user_entry(account_name, user_key) + cached_file = avatar_cache_entry_file_exists(account_name, user_entry) + if cached_file is not None: + logger.info(f"[avatar_cache_hit] kind=user account={account_name} username={user_key}") + except Exception as e: + logger.warning(f"[avatar_cache_error] read user cache failed account={account_name} username={user_key} err={e}") + head_image_db_path = account_dir / "head_image.db" if not head_image_db_path.exists(): + # No local head_image.db: allow fallback from cached/remote URL path. + if cached_file is not None and user_entry: + headers = build_avatar_cache_response_headers(user_entry) + return FileResponse( + str(cached_file), + media_type=str(user_entry.get("media_type") or "application/octet-stream"), + headers=headers, + ) raise HTTPException(status_code=404, detail="head_image.db not found.") conn = sqlite3.connect(str(head_image_db_path)) try: - row = conn.execute( - "SELECT image_buffer FROM head_image WHERE username = ? ORDER BY update_time DESC LIMIT 1", + meta = conn.execute( + "SELECT md5, update_time FROM head_image WHERE username = ? ORDER BY update_time DESC LIMIT 1", (username,), ).fetchone() + if meta and meta[0] is not None: + db_md5 = str(meta[0] or "").strip().lower() + try: + db_update_time = int(meta[1] or 0) + except Exception: + db_update_time = 0 + + # Cache still valid against head_image metadata. 
+ if cached_file is not None and user_entry: + cached_md5 = str(user_entry.get("source_md5") or "").strip().lower() + try: + cached_update = int(user_entry.get("source_update_time") or 0) + except Exception: + cached_update = 0 + if cached_md5 == db_md5 and cached_update == db_update_time: + touch_avatar_cache_entry(account_name, str(user_entry.get("cache_key") or "")) + headers = build_avatar_cache_response_headers(user_entry) + return FileResponse( + str(cached_file), + media_type=str(user_entry.get("media_type") or "application/octet-stream"), + headers=headers, + ) + + # Refresh from blob (changed or first-load) + row = conn.execute( + "SELECT image_buffer FROM head_image WHERE username = ? ORDER BY update_time DESC LIMIT 1", + (username,), + ).fetchone() + if row and row[0] is not None: + data = bytes(row[0]) if isinstance(row[0], (memoryview, bytearray)) else row[0] + if not isinstance(data, (bytes, bytearray)): + data = bytes(data) + if data: + media_type = _detect_image_media_type(data) + media_type = media_type if media_type.startswith("image/") else "application/octet-stream" + entry, out_path = write_avatar_cache_payload( + account_name, + source_kind="user", + username=user_key, + payload=bytes(data), + media_type=media_type, + source_md5=db_md5, + source_update_time=db_update_time, + ttl_seconds=AVATAR_CACHE_TTL_SECONDS, + ) + if entry and out_path: + logger.info( + f"[avatar_cache_download] kind=user account={account_name} username={user_key} src=head_image" + ) + headers = build_avatar_cache_response_headers(entry) + return FileResponse(str(out_path), media_type=media_type, headers=headers) + + # cache write failed: fallback to response bytes + logger.warning( + f"[avatar_cache_error] kind=user account={account_name} username={user_key} action=write_fallback" + ) + return Response(content=bytes(data), media_type=media_type) + + # meta not found (no local avatar blob) + row = None finally: conn.close() - if not row or row[0] is None: - raise 
HTTPException(status_code=404, detail="Avatar not found.") + # 2) Fallback: remote avatar URL (contact/WCDB), cache by URL. + remote_url = _resolve_avatar_remote_url(account_dir=account_dir, username=user_key) + if remote_url and is_avatar_cache_enabled(): + url_entry = get_avatar_cache_url_entry(account_name, remote_url) + url_file = avatar_cache_entry_file_exists(account_name, url_entry) + if url_entry and url_file and avatar_cache_entry_is_fresh(url_entry): + logger.info(f"[avatar_cache_hit] kind=url account={account_name} username={user_key}") + touch_avatar_cache_entry(account_name, str(url_entry.get("cache_key") or "")) + # Keep user-key mapping aligned, so next user lookup is direct. + try: + upsert_avatar_cache_entry( + account_name, + cache_key=cache_key_for_avatar_user(user_key), + source_kind="user", + username=user_key, + source_url=remote_url, + source_md5=str(url_entry.get("source_md5") or ""), + source_update_time=int(url_entry.get("source_update_time") or 0), + rel_path=str(url_entry.get("rel_path") or ""), + media_type=str(url_entry.get("media_type") or "application/octet-stream"), + size_bytes=int(url_entry.get("size_bytes") or 0), + etag=str(url_entry.get("etag") or ""), + last_modified=str(url_entry.get("last_modified") or ""), + fetched_at=int(url_entry.get("fetched_at") or 0), + checked_at=int(url_entry.get("checked_at") or 0), + expires_at=int(url_entry.get("expires_at") or 0), + ) + except Exception: + pass + headers = build_avatar_cache_response_headers(url_entry) + return FileResponse( + str(url_file), + media_type=str(url_entry.get("media_type") or "application/octet-stream"), + headers=headers, + ) - data = bytes(row[0]) if isinstance(row[0], (memoryview, bytearray)) else row[0] - if not isinstance(data, (bytes, bytearray)): - data = bytes(data) - media_type = _detect_image_media_type(data) - return Response(content=data, media_type=media_type) + # Revalidate / download remote avatar + def _download_remote_avatar( + source_url: str, + *, 
+ etag: str, + last_modified: str, + ) -> tuple[bytes, str, str, str, bool]: + base_headers = { + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120 Safari/537.36", + "Accept": "image/avif,image/webp,image/apng,image/*,*/*;q=0.8", + } + + header_variants = [ + { + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36 MicroMessenger/7.0.20.1781(0x6700143B) WindowsWechat(0x63090719) XWEB/8351", + "Accept": "image/avif,image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8", + "Accept-Language": "zh-CN,zh;q=0.9", + "Referer": "https://servicewechat.com/", + "Origin": "https://servicewechat.com", + "Range": "bytes=0-", + }, + {"Referer": "https://wx.qq.com/", "Origin": "https://wx.qq.com"}, + {"Referer": "https://mp.weixin.qq.com/", "Origin": "https://mp.weixin.qq.com"}, + {"Referer": "https://www.baidu.com/", "Origin": "https://www.baidu.com"}, + {}, + ] + + last_err: Exception | None = None + for extra in header_variants: + headers = dict(base_headers) + headers.update(extra) + if etag: + headers["If-None-Match"] = etag + if last_modified: + headers["If-Modified-Since"] = last_modified + + r = requests.get(source_url, headers=headers, timeout=20, stream=True) + try: + if r.status_code == 304: + e2, lm2 = _parse_304_headers(r.headers) + return b"", "", (e2 or etag), (lm2 or last_modified), True + r.raise_for_status() + content_type = str(r.headers.get("Content-Type") or "").strip() + e2, lm2 = _parse_304_headers(r.headers) + max_bytes = 10 * 1024 * 1024 + chunks: list[bytes] = [] + total = 0 + for ch in r.iter_content(chunk_size=64 * 1024): + if not ch: + continue + chunks.append(ch) + total += len(ch) + if total > max_bytes: + raise HTTPException(status_code=400, detail="Avatar too large (>10MB).") + return b"".join(chunks), content_type, e2, lm2, False + except HTTPException: + raise + except Exception as e: + last_err = e + finally: + try: 
+ r.close() + except Exception: + pass + + raise last_err or RuntimeError("avatar remote download failed") + + etag0 = str((url_entry or {}).get("etag") or "").strip() + lm0 = str((url_entry or {}).get("last_modified") or "").strip() + try: + payload, ct, etag_new, lm_new, not_modified = await asyncio.to_thread( + _download_remote_avatar, + remote_url, + etag=etag0, + last_modified=lm0, + ) + except Exception as e: + logger.warning(f"[avatar_cache_error] kind=url account={account_name} username={user_key} err={e}") + if url_entry and url_file: + headers = build_avatar_cache_response_headers(url_entry) + return FileResponse( + str(url_file), + media_type=str(url_entry.get("media_type") or "application/octet-stream"), + headers=headers, + ) + raise HTTPException(status_code=404, detail="Avatar not found.") + + if not_modified and url_entry and url_file: + touch_avatar_cache_entry(account_name, cache_key_for_avatar_url(remote_url)) + if etag_new or lm_new: + try: + upsert_avatar_cache_entry( + account_name, + cache_key=cache_key_for_avatar_url(remote_url), + source_kind="url", + username=user_key, + source_url=remote_url, + source_md5=str(url_entry.get("source_md5") or ""), + source_update_time=int(url_entry.get("source_update_time") or 0), + rel_path=str(url_entry.get("rel_path") or ""), + media_type=str(url_entry.get("media_type") or "application/octet-stream"), + size_bytes=int(url_entry.get("size_bytes") or 0), + etag=etag_new or etag0, + last_modified=lm_new or lm0, + ) + except Exception: + pass + logger.info(f"[avatar_cache_revalidate] kind=url account={account_name} username={user_key} status=304") + headers = build_avatar_cache_response_headers(url_entry) + return FileResponse( + str(url_file), + media_type=str(url_entry.get("media_type") or "application/octet-stream"), + headers=headers, + ) + + if payload: + payload2, media_type, _ext = _detect_media_type_and_ext(payload) + if media_type == "application/octet-stream" and ct: + try: + mt = 
ct.split(";")[0].strip() + if mt.startswith("image/"): + media_type = mt + except Exception: + pass + if str(media_type or "").startswith("image/"): + entry, out_path = write_avatar_cache_payload( + account_name, + source_kind="url", + username=user_key, + source_url=remote_url, + payload=payload2, + media_type=media_type, + etag=etag_new, + last_modified=lm_new, + ttl_seconds=AVATAR_CACHE_TTL_SECONDS, + ) + if entry and out_path: + # bind user-key record to same file for quicker next access + try: + upsert_avatar_cache_entry( + account_name, + cache_key=cache_key_for_avatar_user(user_key), + source_kind="user", + username=user_key, + source_url=remote_url, + source_md5=str(entry.get("source_md5") or ""), + source_update_time=int(entry.get("source_update_time") or 0), + rel_path=str(entry.get("rel_path") or ""), + media_type=str(entry.get("media_type") or "application/octet-stream"), + size_bytes=int(entry.get("size_bytes") or 0), + etag=str(entry.get("etag") or ""), + last_modified=str(entry.get("last_modified") or ""), + fetched_at=int(entry.get("fetched_at") or 0), + checked_at=int(entry.get("checked_at") or 0), + expires_at=int(entry.get("expires_at") or 0), + ) + except Exception: + pass + logger.info(f"[avatar_cache_download] kind=url account={account_name} username={user_key}") + headers = build_avatar_cache_response_headers(entry) + return FileResponse(str(out_path), media_type=media_type, headers=headers) + + if cached_file is not None and user_entry: + headers = build_avatar_cache_response_headers(user_entry) + return FileResponse( + str(cached_file), + media_type=str(user_entry.get("media_type") or "application/octet-stream"), + headers=headers, + ) + + raise HTTPException(status_code=404, detail="Avatar not found.") class EmojiDownloadRequest(BaseModel): @@ -349,6 +688,83 @@ def _lookup_resource_md5_by_server_id(account_dir_str: str, server_id: int, want pass +@lru_cache(maxsize=4096) +def _lookup_image_md5_by_server_id_from_messages(account_dir_str: 
str, server_id: int, username: str) -> str: + account_dir_str = str(account_dir_str or "").strip() + username = str(username or "").strip() + if not account_dir_str or not username: + return "" + + try: + sid = int(server_id or 0) + except Exception: + sid = 0 + if not sid: + return "" + + try: + chat_hash = hashlib.md5(username.encode()).hexdigest() + except Exception: + return "" + if not chat_hash: + return "" + + table_name = f"Msg_{chat_hash}" + account_dir = Path(account_dir_str) + + db_paths: list[Path] = [] + try: + for p in account_dir.glob("message_*.db"): + try: + if p.is_file(): + db_paths.append(p) + except Exception: + continue + except Exception: + db_paths = [] + + if not db_paths: + return "" + db_paths.sort(key=lambda p: p.name) + + for db_path in db_paths: + try: + conn = sqlite3.connect(str(db_path)) + except Exception: + continue + + try: + row = conn.execute( + f"SELECT local_type, packed_info_data FROM {table_name} " + "WHERE server_id = ? ORDER BY create_time DESC LIMIT 1", + (sid,), + ).fetchone() + except Exception: + row = None + finally: + try: + conn.close() + except Exception: + pass + + if not row: + continue + + try: + local_type = int(row[0] or 0) + except Exception: + local_type = 0 + if local_type != 3: + continue + + md5 = _extract_md5_from_packed_info(row[1]) + md5_norm = str(md5 or "").strip().lower() + if _is_valid_md5(md5_norm): + return md5_norm + + return "" + + def _is_safe_http_url(url: str) -> bool: u = str(url or "").strip() if not u: @@ -414,7 +830,7 @@ def _is_allowed_proxy_image_host(host: str) -> bool: if not h: return False # WeChat public account/article thumbnails and avatars commonly live on these CDNs. 
- return h.endswith(".qpic.cn") or h.endswith(".qlogo.cn") + return h.endswith(".qpic.cn") or h.endswith(".qlogo.cn") or h.endswith(".tc.qq.com") @router.get("/api/chat/media/proxy_image", summary="代理获取远程图片(解决微信公众号图片防盗链)") @@ -434,43 +850,137 @@ async def proxy_image(url: str): if not _is_allowed_proxy_image_host(host): raise HTTPException(status_code=400, detail="Unsupported url host for proxy_image.") - def _download_bytes() -> tuple[bytes, str]: - headers = { + source_url = normalize_avatar_source_url(u) + proxy_account = "_proxy" + cache_entry = get_avatar_cache_url_entry(proxy_account, source_url) if is_avatar_cache_enabled() else None + cache_file = avatar_cache_entry_file_exists(proxy_account, cache_entry) + if cache_entry and cache_file and avatar_cache_entry_is_fresh(cache_entry): + logger.info(f"[avatar_cache_hit] kind=proxy_url account={proxy_account}") + touch_avatar_cache_entry(proxy_account, cache_key_for_avatar_url(source_url)) + headers = build_avatar_cache_response_headers(cache_entry) + return FileResponse( + str(cache_file), + media_type=str(cache_entry.get("media_type") or "application/octet-stream"), + headers=headers, + ) + + def _download_bytes( + *, + if_none_match: str = "", + if_modified_since: str = "", + ) -> tuple[bytes, str, str, str, bool]: + base_headers = { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120 Safari/537.36", "Accept": "image/avif,image/webp,image/apng,image/*,*/*;q=0.8", - # qpic/qlogo often require a mp.weixin.qq.com referer (anti-hotlink) - "Referer": "https://mp.weixin.qq.com/", - "Origin": "https://mp.weixin.qq.com", } - r = requests.get(u, headers=headers, timeout=20, stream=True) - try: - r.raise_for_status() - content_type = str(r.headers.get("Content-Type") or "").strip() - max_bytes = 10 * 1024 * 1024 - chunks: list[bytes] = [] - total = 0 - for ch in r.iter_content(chunk_size=64 * 1024): - if not ch: - continue - chunks.append(ch) - total += len(ch) - 
if total > max_bytes: - raise HTTPException(status_code=400, detail="Proxy image too large (>10MB).") - return b"".join(chunks), content_type - finally: + + # Different Tencent CDNs enforce different anti-hotlink rules. + # Try a couple of safe referers so Moments(qpic) and MP(qpic) both work. + header_variants = [ + # WeFlow/Electron uses a MicroMessenger UA + servicewechat.com referer to pass some + # WeChat CDN anti-hotlink checks (qlogo/qpic). Browsers can't set these headers for , + # but our backend proxy can. + { + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36 MicroMessenger/7.0.20.1781(0x6700143B) WindowsWechat(0x63090719) XWEB/8351", + "Accept": "image/avif,image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8", + "Accept-Language": "zh-CN,zh;q=0.9", + "Referer": "https://servicewechat.com/", + "Origin": "https://servicewechat.com", + "Range": "bytes=0-", + }, + {"Referer": "https://wx.qq.com/", "Origin": "https://wx.qq.com"}, + {"Referer": "https://mp.weixin.qq.com/", "Origin": "https://mp.weixin.qq.com"}, + {"Referer": "https://www.baidu.com/", "Origin": "https://www.baidu.com"}, + {}, + ] + + last_err: Exception | None = None + for extra in header_variants: + headers = dict(base_headers) + headers.update(extra) + if if_none_match: + headers["If-None-Match"] = if_none_match + if if_modified_since: + headers["If-Modified-Since"] = if_modified_since + r = requests.get(u, headers=headers, timeout=20, stream=True) try: - r.close() - except Exception: - pass + if r.status_code == 304: + etag0 = str(r.headers.get("ETag") or "").strip() + lm0 = str(r.headers.get("Last-Modified") or "").strip() + return b"", "", etag0, lm0, True + r.raise_for_status() + content_type = str(r.headers.get("Content-Type") or "").strip() + etag0 = str(r.headers.get("ETag") or "").strip() + lm0 = str(r.headers.get("Last-Modified") or "").strip() + max_bytes = 10 * 1024 * 1024 + chunks: list[bytes] = [] + 
total = 0 + for ch in r.iter_content(chunk_size=64 * 1024): + if not ch: + continue + chunks.append(ch) + total += len(ch) + if total > max_bytes: + raise HTTPException(status_code=400, detail="Proxy image too large (>10MB).") + return b"".join(chunks), content_type, etag0, lm0, False + except HTTPException: + # Hard failure, don't retry with another referer. + raise + except Exception as e: + last_err = e + finally: + try: + r.close() + except Exception: + pass + # All variants failed. + raise last_err or RuntimeError("proxy_image download failed") + + etag0 = str((cache_entry or {}).get("etag") or "").strip() + lm0 = str((cache_entry or {}).get("last_modified") or "").strip() try: - data, ct = await asyncio.to_thread(_download_bytes) + data, ct, etag_new, lm_new, not_modified = await asyncio.to_thread( + _download_bytes, + if_none_match=etag0, + if_modified_since=lm0, + ) except HTTPException: raise except Exception as e: logger.warning(f"proxy_image failed: url={u} err={e}") + if cache_entry and cache_file: + headers = build_avatar_cache_response_headers(cache_entry) + return FileResponse( + str(cache_file), + media_type=str(cache_entry.get("media_type") or "application/octet-stream"), + headers=headers, + ) raise HTTPException(status_code=502, detail=f"Proxy image failed: {e}") + if not_modified and cache_entry and cache_file: + logger.info(f"[avatar_cache_revalidate] kind=proxy_url account={proxy_account} status=304") + upsert_avatar_cache_entry( + proxy_account, + cache_key=cache_key_for_avatar_url(source_url), + source_kind="url", + source_url=source_url, + username="", + source_md5=str(cache_entry.get("source_md5") or ""), + source_update_time=int(cache_entry.get("source_update_time") or 0), + rel_path=str(cache_entry.get("rel_path") or ""), + media_type=str(cache_entry.get("media_type") or "application/octet-stream"), + size_bytes=int(cache_entry.get("size_bytes") or 0), + etag=etag_new or etag0, + last_modified=lm_new or lm0, + ) + headers = 
build_avatar_cache_response_headers(cache_entry) + return FileResponse( + str(cache_file), + media_type=str(cache_entry.get("media_type") or "application/octet-stream"), + headers=headers, + ) + if not data: raise HTTPException(status_code=502, detail="Proxy returned empty body.") @@ -488,11 +998,192 @@ def _download_bytes() -> tuple[bytes, str]: if not str(media_type or "").startswith("image/"): raise HTTPException(status_code=502, detail="Proxy did not return an image.") + if is_avatar_cache_enabled(): + entry, out_path = write_avatar_cache_payload( + proxy_account, + source_kind="url", + source_url=source_url, + payload=payload, + media_type=media_type, + etag=etag_new, + last_modified=lm_new, + ttl_seconds=AVATAR_CACHE_TTL_SECONDS, + ) + if entry and out_path: + logger.info(f"[avatar_cache_download] kind=proxy_url account={proxy_account}") + headers = build_avatar_cache_response_headers(entry) + return FileResponse(str(out_path), media_type=media_type, headers=headers) + resp = Response(content=payload, media_type=media_type) - resp.headers["Cache-Control"] = "public, max-age=86400" + resp.headers["Cache-Control"] = f"public, max-age={AVATAR_CACHE_TTL_SECONDS}" return resp +def _origin_favicon_url(page_url: str) -> str: + """Best-effort favicon URL for a given page URL (origin + /favicon.ico).""" + u = str(page_url or "").strip() + if not u: + return "" + try: + p = urlparse(u) + except Exception: + return "" + if not p.scheme or not p.netloc: + return "" + return f"{p.scheme}://{p.netloc}/favicon.ico" + + +def _resolve_final_url_for_favicon(page_url: str) -> str: + """Resolve final URL for redirects (used for favicon host inference).""" + u = str(page_url or "").strip() + if not u: + return "" + + headers = { + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120 Safari/537.36", + "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", + } + + # Prefer HEAD (no body). 
Some hosts reject HEAD; fall back to GET+stream. + try: + r = requests.head(u, headers=headers, timeout=10, allow_redirects=True) + try: + final = str(getattr(r, "url", "") or "").strip() + return final or u + finally: + try: + r.close() + except Exception: + pass + except Exception: + pass + + try: + r = requests.get(u, headers=headers, timeout=10, allow_redirects=True, stream=True) + try: + final = str(getattr(r, "url", "") or "").strip() + return final or u + finally: + try: + r.close() + except Exception: + pass + except Exception: + return u + + +@router.get("/api/chat/media/favicon", summary="获取网站 favicon(用于链接卡片来源头像)") +async def get_favicon(url: str): + page_url = html.unescape(str(url or "")).strip() + if not page_url: + raise HTTPException(status_code=400, detail="Missing url.") + if not _is_safe_http_url(page_url): + raise HTTPException(status_code=400, detail="Invalid url (only public http/https allowed).") + + # Resolve redirects first (e.g. b23.tv -> www.bilibili.com), so cached favicons are hit early. + final_url = _resolve_final_url_for_favicon(page_url) + candidates: list[str] = [] + for u in (final_url, page_url): + fav = _origin_favicon_url(u) + if fav and fav not in candidates: + candidates.append(fav) + + proxy_account = "_favicon" + max_bytes = 512 * 1024 # favicons should be small; protect against huge downloads. 
+ + for cand in candidates: + if not _is_safe_http_url(cand): + continue + source_url = normalize_avatar_source_url(cand) + + cache_entry = get_avatar_cache_url_entry(proxy_account, source_url) if is_avatar_cache_enabled() else None + cache_file = avatar_cache_entry_file_exists(proxy_account, cache_entry) + if cache_entry and cache_file and avatar_cache_entry_is_fresh(cache_entry): + logger.info(f"[avatar_cache_hit] kind=favicon account={proxy_account} url={source_url}") + touch_avatar_cache_entry(proxy_account, cache_key_for_avatar_url(source_url)) + headers = build_avatar_cache_response_headers(cache_entry) + return FileResponse( + str(cache_file), + media_type=str(cache_entry.get("media_type") or "application/octet-stream"), + headers=headers, + ) + + # Download favicon bytes (best-effort) + headers = { + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120 Safari/537.36", + "Accept": "image/avif,image/webp,image/apng,image/*,*/*;q=0.8", + } + r = None + try: + r = requests.get(source_url, headers=headers, timeout=20, stream=True, allow_redirects=True) + if int(getattr(r, "status_code", 0) or 0) != 200: + continue + + ct = str((getattr(r, "headers", {}) or {}).get("Content-Type") or "").strip() + try: + cl = int((getattr(r, "headers", {}) or {}).get("content-length") or 0) + except Exception: + cl = 0 + if cl and cl > max_bytes: + raise HTTPException(status_code=413, detail="Remote favicon too large.") + + chunks: list[bytes] = [] + total = 0 + for chunk in r.iter_content(chunk_size=64 * 1024): + if not chunk: + continue + chunks.append(chunk) + total += len(chunk) + if total > max_bytes: + raise HTTPException(status_code=413, detail="Remote favicon too large.") + data = b"".join(chunks) + except HTTPException: + raise + except Exception: + continue + finally: + if r is not None: + try: + r.close() + except Exception: + pass + + if not data: + continue + + payload, media_type, _ext = 
_detect_media_type_and_ext(data) + if media_type == "application/octet-stream" and ct: + try: + mt = ct.split(";")[0].strip() + if mt.startswith("image/"): + media_type = mt + except Exception: + pass + + if not str(media_type or "").startswith("image/"): + continue + + if is_avatar_cache_enabled(): + entry, out_path = write_avatar_cache_payload( + proxy_account, + source_kind="url", + source_url=source_url, + payload=payload, + media_type=media_type, + ttl_seconds=AVATAR_CACHE_TTL_SECONDS, + ) + if entry and out_path: + logger.info(f"[avatar_cache_download] kind=favicon account={proxy_account} url={source_url}") + headers = build_avatar_cache_response_headers(entry) + return FileResponse(str(out_path), media_type=media_type, headers=headers) + + resp = Response(content=payload, media_type=media_type) + resp.headers["Cache-Control"] = f"public, max-age={AVATAR_CACHE_TTL_SECONDS}" + return resp + + raise HTTPException(status_code=404, detail="favicon not found.") + + @router.post("/api/chat/media/emoji/download", summary="下载表情消息资源到本地 resource") async def download_chat_emoji(req: EmojiDownloadRequest): md5 = str(req.md5 or "").strip().lower() @@ -613,6 +1304,12 @@ async def get_chat_image( resource_md5 = _lookup_resource_md5_by_server_id(str(account_dir), int(server_id), want_local_type=3) if resource_md5: md5 = resource_md5 + elif username: + md5_from_msg = _lookup_image_md5_by_server_id_from_messages( + str(account_dir), int(server_id), str(username) + ) + if md5_from_msg: + md5 = md5_from_msg # md5 模式:优先从解密资源目录读取(更快) if md5: @@ -1065,12 +1762,12 @@ async def get_chat_voice(server_id: int, account: Optional[str] = None): if not isinstance(data, (bytes, bytearray)): data = bytes(data) - # Try to convert SILK to WAV for browser playback - wav_data = _convert_silk_to_wav(data) - if wav_data != data: + payload, ext, media_type = _convert_silk_to_browser_audio(data, preferred_format="mp3") + if payload and ext != "silk": return Response( - content=wav_data, - 
media_type="audio/wav", + content=payload, + media_type=media_type, + headers={"Content-Disposition": f"inline; filename=voice_{int(server_id)}.{ext}"}, ) # Fallback to raw SILK if conversion fails @@ -1124,11 +1821,16 @@ async def open_chat_media_folder( if not isinstance(data, (bytes, bytearray)): data = bytes(data) + payload, ext, _media_type = _convert_silk_to_browser_audio(data, preferred_format="mp3") + if not payload: + payload = data + ext = "silk" + export_dir = account_dir / "_exports" export_dir.mkdir(parents=True, exist_ok=True) - p = export_dir / f"voice_{int(server_id)}.silk" + p = export_dir / f"voice_{int(server_id)}.{ext}" try: - p.write_bytes(data) + p.write_bytes(payload) except Exception as e: raise HTTPException(status_code=500, detail=f"Failed to export voice: {e}") else: diff --git a/src/wechat_decrypt_tool/routers/decrypt.py b/src/wechat_decrypt_tool/routers/decrypt.py index 077f346..bbf7b46 100644 --- a/src/wechat_decrypt_tool/routers/decrypt.py +++ b/src/wechat_decrypt_tool/routers/decrypt.py @@ -1,10 +1,20 @@ -from fastapi import APIRouter, HTTPException +from __future__ import annotations + +import asyncio +import json +import os +import time +from pathlib import Path + +from fastapi import APIRouter, HTTPException, Request from pydantic import BaseModel, Field +from starlette.responses import StreamingResponse +from ..app_paths import get_output_databases_dir from ..logging_config import get_logger from ..path_fix import PathFixRoute from ..key_store import upsert_account_keys_in_store -from ..wechat_decrypt import decrypt_wechat_databases +from ..wechat_decrypt import WeChatDatabaseDecryptor, decrypt_wechat_databases logger = get_logger(__name__) @@ -72,3 +82,273 @@ async def decrypt_databases(request: DecryptRequest): except Exception as e: logger.error(f"解密API异常: {str(e)}") raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/api/decrypt_stream", summary="解密微信数据库(SSE实时进度)") +async def decrypt_databases_stream( + 
request: Request, + key: str | None = None, + db_storage_path: str | None = None, +): + """通过SSE实时推送数据库解密进度。 + + 注意:EventSource 只支持 GET,因此参数通过 querystring 传递。 + """ + + def _sse(payload: dict) -> str: + return f"data: {json.dumps(payload, ensure_ascii=False)}\n\n" + + async def generate_progress(): + # 1) Basic validation (keep 200 + SSE error event, avoid 422 breaking EventSource). + k = str(key or "").strip() + p = str(db_storage_path or "").strip() + + if not k or len(k) != 64: + yield _sse({"type": "error", "message": "密钥格式无效,必须是64位十六进制字符串"}) + return + + try: + bytes.fromhex(k) + except Exception: + yield _sse({"type": "error", "message": "密钥必须是有效的十六进制字符串"}) + return + + if not p: + yield _sse({"type": "error", "message": "请提供 db_storage_path 参数"}) + return + + storage_path = Path(p) + if not storage_path.exists(): + yield _sse({"type": "error", "message": f"指定的数据库路径不存在: {p}"}) + return + + # 2) Scan databases. + yield _sse({"type": "scanning", "message": "正在扫描数据库文件..."}) + await asyncio.sleep(0) + + account_name = "unknown_account" + path_parts = storage_path.parts + account_patterns = ["wxid_"] + for part in path_parts: + for pattern in account_patterns: + if part.startswith(pattern): + parts = part.split("_") + if len(parts) >= 3: + account_name = "_".join(parts[:-1]) + else: + account_name = part + break + if account_name != "unknown_account": + break + + if account_name == "unknown_account": + for part in reversed(path_parts): + if part != "db_storage" and len(part) > 3: + account_name = part + break + + databases: list[dict] = [] + for root, _dirs, files in os.walk(storage_path): + if "db_storage" not in str(root): + continue + for file_name in files: + if not file_name.endswith(".db"): + continue + if file_name in ["key_info.db"]: + continue + db_path = os.path.join(root, file_name) + databases.append({"path": db_path, "name": file_name, "account": account_name}) + + if not databases: + yield _sse({"type": "error", "message": "未找到微信数据库文件!请检查 
db_storage_path 是否正确"}) + return + + account_databases = {account_name: databases} + total_databases = sum(len(dbs) for dbs in account_databases.values()) + + yield _sse({"type": "start", "total": total_databases, "message": f"开始解密 {total_databases} 个数据库"}) + await asyncio.sleep(0) + + # 3) Init output dir & decryptor. + base_output_dir = get_output_databases_dir() + base_output_dir.mkdir(parents=True, exist_ok=True) + + try: + decryptor = WeChatDatabaseDecryptor(k) + except ValueError as e: + yield _sse({"type": "error", "message": f"密钥错误: {e}"}) + return + + # 4) Decrypt per account, stream progress. + success_count = 0 + fail_count = 0 + processed_files: list[str] = [] + failed_files: list[str] = [] + account_results: dict = {} + overall_current = 0 + + for account, dbs in account_databases.items(): + account_output_dir = base_output_dir / account + account_output_dir.mkdir(parents=True, exist_ok=True) + + # Save a hint for later UI (same as non-stream endpoint). + try: + source_db_storage_path = p + wxid_dir = "" + if storage_path.name.lower() == "db_storage": + wxid_dir = str(storage_path.parent) + else: + wxid_dir = str(storage_path) + (account_output_dir / "_source.json").write_text( + json.dumps({"db_storage_path": source_db_storage_path, "wxid_dir": wxid_dir}, ensure_ascii=False, indent=2), + encoding="utf-8", + ) + except Exception: + pass + + account_success = 0 + account_processed: list[str] = [] + account_failed: list[str] = [] + + for db_info in dbs: + if await request.is_disconnected(): + return + + overall_current += 1 + db_path = str(db_info.get("path") or "") + db_name = str(db_info.get("name") or "") + current_file = f"{account}/{db_name}" if account else db_name + + # Emit a "processing" event so UI updates immediately for large db files. 
+ yield _sse( + { + "type": "progress", + "current": overall_current, + "total": total_databases, + "success_count": success_count, + "fail_count": fail_count, + "current_file": current_file, + "status": "processing", + "message": "解密中...", + } + ) + + output_path = account_output_dir / db_name + task = asyncio.create_task(asyncio.to_thread(decryptor.decrypt_database, db_path, str(output_path))) + + # Wait with heartbeat (can't yield while awaiting the thread directly). + last_heartbeat = time.time() + while not task.done(): + if await request.is_disconnected(): + return + now = time.time() + if now - last_heartbeat > 15: + last_heartbeat = now + # SSE comment heartbeat; browsers ignore but keeps proxies alive. + yield ": ping\n\n" + await asyncio.sleep(0.6) + try: + ok = bool(task.result()) + except Exception: + ok = False + + if ok: + account_success += 1 + success_count += 1 + account_processed.append(str(output_path)) + processed_files.append(str(output_path)) + status = "success" + msg = "解密成功" + else: + account_failed.append(db_path) + failed_files.append(db_path) + fail_count += 1 + status = "fail" + msg = "解密失败" + + yield _sse( + { + "type": "progress", + "current": overall_current, + "total": total_databases, + "success_count": success_count, + "fail_count": fail_count, + "current_file": current_file, + "status": status, + "message": msg, + } + ) + + if overall_current % 5 == 0: + await asyncio.sleep(0) + + account_results[account] = { + "total": len(dbs), + "success": account_success, + "failed": len(dbs) - account_success, + "output_dir": str(account_output_dir), + "processed_files": account_processed, + "failed_files": account_failed, + } + + # Build cache table (keep behavior consistent with the POST endpoint). 
+ if os.environ.get("WECHAT_TOOL_BUILD_SESSION_LAST_MESSAGE", "1") != "0": + yield _sse( + { + "type": "phase", + "phase": "session_last_message", + "account": account, + "message": "正在构建会话缓存(最后一条消息)...", + } + ) + await asyncio.sleep(0) + + try: + from ..session_last_message import build_session_last_message_table + + task = asyncio.create_task( + asyncio.to_thread( + build_session_last_message_table, + account_output_dir, + rebuild=True, + include_hidden=True, + include_official=True, + ) + ) + last_heartbeat = time.time() + while not task.done(): + if await request.is_disconnected(): + return + now = time.time() + if now - last_heartbeat > 15: + last_heartbeat = now + yield ": ping\n\n" + await asyncio.sleep(0.6) + account_results[account]["session_last_message"] = task.result() + except Exception as e: + account_results[account]["session_last_message"] = {"status": "error", "message": str(e)} + + status = "completed" if success_count > 0 else "failed" + result = { + "status": status, + "total_databases": total_databases, + "success_count": success_count, + "failure_count": total_databases - success_count, + "output_directory": str(base_output_dir.absolute()), + "message": f"解密完成: 成功 {success_count}/{total_databases}", + "processed_files": processed_files, + "failed_files": failed_files, + "account_results": account_results, + } + + # Save db key for frontend autofill. 
+ try: + for account in (account_results or {}).keys(): + upsert_account_keys_in_store(str(account), db_key=k) + except Exception: + pass + + yield _sse({"type": "complete", **result}) + + headers = {"Cache-Control": "no-cache", "Connection": "keep-alive", "X-Accel-Buffering": "no"} + return StreamingResponse(generate_progress(), media_type="text/event-stream", headers=headers) diff --git a/src/wechat_decrypt_tool/routers/keys.py b/src/wechat_decrypt_tool/routers/keys.py index 20344e4..2a4da9a 100644 --- a/src/wechat_decrypt_tool/routers/keys.py +++ b/src/wechat_decrypt_tool/routers/keys.py @@ -3,6 +3,7 @@ from fastapi import APIRouter from ..key_store import get_account_keys_from_store +from ..key_service import get_db_key_workflow, fetch_and_save_remote_keys from ..media_helpers import _load_media_keys, _resolve_account_dir from ..path_fix import PathFixRoute @@ -51,3 +52,74 @@ async def get_saved_keys(account: Optional[str] = None): "keys": result, } + +@router.get("/api/get_keys", summary="自动获取微信数据库与图片密钥") +async def get_wechat_db_key(): + """ + 自动流程: + 1. 结束微信进程 + 2. 启动微信 + 3. 根据版本注入双 Hook + 4. 抓取 DB 与 图片密钥(AES + XOR)并返回 + """ + try: + keys_data = get_db_key_workflow() + + return { + "status": 0, + "errmsg": "ok", + "data": keys_data # 现在完美包含了 db_key, aes_key, xor_key + } + + except TimeoutError: + return { + "status": -1, + "errmsg": "获取超时,请确保微信没有开启自动登录并且在弹窗中完成了登录", + "data": {} + } + except Exception as e: + return { + "status": -1, + "errmsg": f"获取失败: {str(e)}", + "data": {} + } + + + +@router.get("/api/get_image_key", summary="获取并保存微信图片密钥") +async def get_image_key(account: Optional[str] = None): + """ + 通过模拟 Next.js Server Action 协议,利用本地微信配置文件换取 AES/XOR 密钥。 + + 1. 读取 [wx_dir]/all_users/config/global_config (Blob 1) + 2. 读 同上目录下的global_config.crc + 3. 构造 Multipart 包发送至远程服务器 + 4. 
解析返回流,自动存入本地数据库 + """ + try: + result = await fetch_and_save_remote_keys(account) + + return { + "status": 0, + "errmsg": "ok", + "data": { + "xor_key": result["xor_key"], + "aes_key": result["aes_key"], + "nick_name": result.get("nick_name"), + "account": result["wxid"] + } + } + except FileNotFoundError as e: + return { + "status": -1, + "errmsg": f"文件缺失: {str(e)}", + "data": {} + } + except Exception as e: + import traceback + traceback.print_exc() + return { + "status": -1, + "errmsg": f"获取失败: {str(e)}", + "data": {} + } diff --git a/src/wechat_decrypt_tool/routers/sns.py b/src/wechat_decrypt_tool/routers/sns.py new file mode 100644 index 0000000..e1343a6 --- /dev/null +++ b/src/wechat_decrypt_tool/routers/sns.py @@ -0,0 +1,3235 @@ +from bisect import bisect_left, bisect_right +from functools import lru_cache +from pathlib import Path +import os +import base64 +import hashlib +import json +import re +import httpx +import html # 修复&转义的问题!!! +import sqlite3 +import subprocess +import threading +import time +import xml.etree.ElementTree as ET +from typing import Any, Optional +from urllib.parse import urlparse + +from starlette.background import BackgroundTask + +from fastapi import APIRouter, HTTPException +from fastapi.responses import Response, FileResponse # 返回视频文件 +from pydantic import BaseModel, Field + +from ..chat_helpers import _load_contact_rows, _pick_display_name, _resolve_account_dir +from ..logging_config import get_logger +from ..media_helpers import _read_and_maybe_decrypt_media, _resolve_account_wxid_dir +from ..path_fix import PathFixRoute +from .. 
import sns_media as _sns_media +from ..wcdb_realtime import ( + WCDBRealtimeError, + WCDB_REALTIME, + decrypt_sns_image as _wcdb_decrypt_sns_image, + exec_query as _wcdb_exec_query, + get_sns_timeline as _wcdb_get_sns_timeline, +) + +try: + import zstandard as zstd # type: ignore +except Exception: + zstd = None + +logger = get_logger(__name__) + +router = APIRouter(route_class=PathFixRoute) + +SNS_MEDIA_PICKS_FILE = "_sns_media_picks.json" + +_SNS_VIDEO_KEY_RE = re.compile(r']*>([\s\S]*?)", flags=re.IGNORECASE) +_SNS_XML_CDATA_BLOCK_RE = re.compile(r"", flags=re.IGNORECASE) +_SNS_XML_BARE_AMP_RE = re.compile(r"&(?!(?:[a-zA-Z]+|#\d+|#x[0-9a-fA-F]+);)") +_SNS_XML_INVALID_CHARS_RE = re.compile(r"[\x00-\x08\x0b\x0c\x0e-\x1f]") + +_SNS_REALTIME_SYNC_STATE_FILE = "_sns_realtime_sync_state.json" +_SNS_DECRYPTED_DB_LOCKS: dict[str, threading.Lock] = {} +_SNS_DECRYPTED_DB_LOCKS_MU = threading.Lock() + +_SNS_TIMELINE_AUTO_CACHE_TTL_SECONDS = 60 +# Key: (account_dir.name, sorted(usernames), keyword) -> (expires_at_ts, force_sqlite) +_SNS_TIMELINE_AUTO_CACHE: dict[tuple[str, tuple[str, ...], str], tuple[float, bool]] = {} +_SNS_TIMELINE_AUTO_CACHE_MU = threading.Lock() + + +def _sns_timeline_auto_cache_key(account_dir: Path, users: list[str], kw: str) -> tuple[str, tuple[str, ...], str]: + # Normalize so different param orders map to the same key. 
+ a = str(Path(account_dir).name) + u = tuple(sorted([str(x or "").strip() for x in (users or []) if str(x or "").strip()])) + k = str(kw or "").strip() + return (a, u, k) + + +def _sns_timeline_auto_cache_get(key: tuple[str, tuple[str, ...], str]) -> Optional[bool]: + now = time.time() + with _SNS_TIMELINE_AUTO_CACHE_MU: + rec = _SNS_TIMELINE_AUTO_CACHE.get(key) + if not rec: + return None + exp_ts, val = rec + if exp_ts <= now: + try: + del _SNS_TIMELINE_AUTO_CACHE[key] + except Exception: + pass + return None + return bool(val) + + +def _sns_timeline_auto_cache_set( + key: tuple[str, tuple[str, ...], str], + val: bool, + *, + ttl_seconds: int = _SNS_TIMELINE_AUTO_CACHE_TTL_SECONDS, +) -> None: + ttl = int(ttl_seconds or _SNS_TIMELINE_AUTO_CACHE_TTL_SECONDS) + if ttl <= 0: + ttl = _SNS_TIMELINE_AUTO_CACHE_TTL_SECONDS + exp_ts = time.time() + float(ttl) + with _SNS_TIMELINE_AUTO_CACHE_MU: + _SNS_TIMELINE_AUTO_CACHE[key] = (exp_ts, bool(val)) + + +def _sns_decrypted_db_lock(account: str) -> threading.Lock: + key = str(account or "").strip() + if not key: + key = "_" + with _SNS_DECRYPTED_DB_LOCKS_MU: + lock = _SNS_DECRYPTED_DB_LOCKS.get(key) + if lock is None: + lock = threading.Lock() + _SNS_DECRYPTED_DB_LOCKS[key] = lock + return lock + + +def _parse_csv_list(raw: Optional[str]) -> list[str]: + if raw is None: + return [] + s = str(raw or "").strip() + if not s: + return [] + # Best-effort: allow comma-separated list in one query param. 
+ return [p.strip() for p in s.split(",") if p.strip()] + + +def _safe_int(v: Any) -> int: + try: + return int(v) + except Exception: + return 0 + + +def _count_sns_timeline_rows_in_decrypted_sqlite( + sns_db_path: Path, + *, + users: list[str], + kw: str, +) -> int: + """Count rows in decrypted `sns.db` for a given query (raw rows, not timeline-filtered).""" + sns_db_path = Path(sns_db_path) + try: + if (not sns_db_path.exists()) or (not sns_db_path.is_file()): + return 0 + except Exception: + return 0 + + filters: list[str] = [] + params: list[Any] = [] + + if users: + placeholders = ",".join(["?"] * len(users)) + filters.append(f"user_name IN ({placeholders})") + params.extend(users) + + if kw: + filters.append("content LIKE ?") + params.append(f"%{kw}%") + + where_sql = f"WHERE {' AND '.join(filters)}" if filters else "" + sql = f"SELECT COUNT(*) AS c FROM SnsTimeLine {where_sql}" + + try: + conn = sqlite3.connect(str(sns_db_path), timeout=2.0) + try: + conn.execute("PRAGMA busy_timeout=2000") + row = conn.execute(sql, params).fetchone() + return int((row[0] if row else 0) or 0) + finally: + try: + conn.close() + except Exception: + pass + except Exception: + return 0 + + +def _count_sns_timeline_posts_in_decrypted_sqlite( + sns_db_path: Path, + *, + users: list[str], + kw: str, +) -> int: + """Count visible-post rows in decrypted `sns.db` for a given query. + + This matches `/api/sns/users`'s `postCount` definition: + - content not null/empty + - exclude cover rows: `7` + """ + sns_db_path = Path(sns_db_path) + try: + if (not sns_db_path.exists()) or (not sns_db_path.is_file()): + return 0 + except Exception: + return 0 + + filters: list[str] = [] + params: list[Any] = [] + + # Base filter: align with list_sns_users() postCount. 
+ filters.append("content IS NOT NULL") + filters.append("content != ?") + params.append("") + filters.append("content NOT LIKE ?") + params.append("%7%") + + if users: + placeholders = ",".join(["?"] * len(users)) + filters.append(f"user_name IN ({placeholders})") + params.extend(users) + + if kw: + filters.append("content LIKE ?") + params.append(f"%{kw}%") + + where_sql = f"WHERE {' AND '.join(filters)}" if filters else "" + sql = f"SELECT COUNT(*) AS c FROM SnsTimeLine {where_sql}" + + try: + conn = sqlite3.connect(str(sns_db_path), timeout=2.0) + try: + conn.execute("PRAGMA busy_timeout=2000") + row = conn.execute(sql, params).fetchone() + return int((row[0] if row else 0) or 0) + finally: + try: + conn.close() + except Exception: + pass + except Exception: + return 0 + + +def _to_signed_i64(v: int) -> int: + x = int(v) & 0xFFFFFFFFFFFFFFFF + if x >= 0x8000000000000000: + x -= 0x10000000000000000 + return int(x) + +def _to_unsigned_i64_str(v: Any) -> str: + """Return unsigned decimal string for a signed/unsigned 64-bit integer-ish value. + + Moments `tid/id` is often an unsigned u64 stored as signed i64 (negative) in sqlite/WCDB. + Frontend cache-key formulas expect the *unsigned* decimal string. 
+ """ + try: + x = int(v) + except Exception: + return str(v or "").strip() + return str(x & 0xFFFFFFFFFFFFFFFF) + + +def _read_sns_realtime_sync_state(account_dir: Path) -> dict[str, Any]: + p = Path(account_dir) / _SNS_REALTIME_SYNC_STATE_FILE + try: + if not p.exists() or (not p.is_file()): + return {} + except Exception: + return {} + + try: + data = json.loads(p.read_text(encoding="utf-8")) + except Exception: + return {} + + return data if isinstance(data, dict) else {} + + +def _write_sns_realtime_sync_state(account_dir: Path, data: dict[str, Any]) -> None: + p = Path(account_dir) / _SNS_REALTIME_SYNC_STATE_FILE + try: + p.write_text(json.dumps(data, ensure_ascii=False, indent=2), encoding="utf-8") + except Exception: + pass + + +def _ensure_decrypted_sns_db(account_dir: Path) -> Path: + """Ensure `{account}/sns.db` exists with at least a minimal `SnsTimeLine` table. + + We keep it minimal (tid/user_name/content) so it stays compatible with older schema + while enabling incremental cache/writeback from WCDB realtime. + """ + account_dir = Path(account_dir) + sns_db_path = account_dir / "sns.db" + + # If something weird exists at that path, bail out. + try: + if sns_db_path.exists() and (not sns_db_path.is_file()): + raise RuntimeError("sns.db path is not a file") + except Exception as e: + raise RuntimeError(f"Invalid sns.db path: {e}") from e + + conn = sqlite3.connect(str(sns_db_path)) + try: + conn.execute( + """ + CREATE TABLE IF NOT EXISTS SnsTimeLine( + tid INTEGER PRIMARY KEY, + user_name TEXT, + content TEXT + ) + """ + ) + conn.commit() + finally: + try: + conn.close() + except Exception: + pass + + return sns_db_path + + +def _upsert_sns_timeline_rows_to_decrypted_db( + account_dir: Path, + rows: list[tuple[int, str, str, Optional[str]]], + *, + source: str, +) -> int: + """Upsert rows into decrypted `{account}/sns.db` to avoid local missing data. 
+ + rows: [(tid_signed, user_name, content_xml, pack_info_buf_or_none)] + """ + if not rows: + return 0 + + sns_db_path = _ensure_decrypted_sns_db(account_dir) + + # Serialize writes per-account to avoid sqlite "database is locked" errors under concurrency. + with _sns_decrypted_db_lock(Path(account_dir).name): + conn = sqlite3.connect(str(sns_db_path), timeout=2.0) + try: + conn.execute("PRAGMA busy_timeout=2000") + cols: set[str] = set() + try: + info_rows = conn.execute("PRAGMA table_info(SnsTimeLine)").fetchall() + for r in info_rows or []: + try: + cols.add(str(r[1] or "").strip()) + except Exception: + continue + except Exception: + cols = set() + + has_pack = "pack_info_buf" in cols + + if has_pack: + sql = """ + INSERT INTO SnsTimeLine (tid, user_name, content, pack_info_buf) + VALUES (?, ?, ?, ?) + ON CONFLICT(tid) DO UPDATE SET + user_name=excluded.user_name, + content=COALESCE(NULLIF(excluded.content, ''), SnsTimeLine.content), + pack_info_buf=COALESCE(excluded.pack_info_buf, SnsTimeLine.pack_info_buf) + """ + data = [(int(tid), str(u or "").strip(), str(c or ""), p) for tid, u, c, p in rows] + else: + sql = """ + INSERT INTO SnsTimeLine (tid, user_name, content) + VALUES (?, ?, ?) 
+ ON CONFLICT(tid) DO UPDATE SET + user_name=excluded.user_name, + content=COALESCE(NULLIF(excluded.content, ''), SnsTimeLine.content) + """ + data = [(int(tid), str(u or "").strip(), str(c or "")) for tid, u, c, _p in rows] + + conn.executemany(sql, data) + conn.commit() + return len(rows) + except Exception as e: + logger.debug("[sns] decrypted sns.db upsert failed source=%s err=%s", source, e) + try: + conn.rollback() + except Exception: + pass + return 0 + finally: + try: + conn.close() + except Exception: + pass + +def _extract_mp_biz_from_url(url: str) -> str: + """Extract `__biz` from mp.weixin.qq.com URLs (best-effort).""" + u = html.unescape(str(url or "")).replace("&", "&").strip() + if not u: + return "" + m = _MP_BIZ_RE.search(u) + if not m: + return "" + return str(m.group(1) or "").strip() + + +@lru_cache(maxsize=16) +def _build_biz_to_official_index(contact_db_path: str, mtime_ns: int, size: int) -> dict[str, dict[str, Any]]: + """Build mapping: __biz -> { username, serviceType } from contact.db.biz_info.""" + out: dict[str, dict[str, Any]] = {} + if not contact_db_path: + return out + + conn = sqlite3.connect(str(contact_db_path)) + conn.row_factory = sqlite3.Row + try: + try: + rows = conn.execute( + "SELECT username, brand_info, external_info, home_url FROM biz_info" + ).fetchall() + except Exception: + rows = [] + + for r in rows: + try: + uname = str(r["username"] or "").strip() + except Exception: + uname = "" + if not uname: + continue + + try: + brand_info = str(r["brand_info"] or "") + except Exception: + brand_info = "" + try: + external_info = str(r["external_info"] or "") + except Exception: + external_info = "" + try: + home_url = str(r["home_url"] or "") + except Exception: + home_url = "" + + service_type: Optional[int] = None + if external_info: + try: + j = json.loads(external_info) + st = j.get("ServiceType") + if st is not None: + service_type = int(st) + except Exception: + service_type = None + + blob = " ".join([brand_info, 
external_info, home_url]) + for biz in _MP_BIZ_RE.findall(blob): + b = str(biz or "").strip() + if not b: + continue + prev = out.get(b) + if prev is None: + out[b] = {"username": uname, "serviceType": service_type} + else: + if prev.get("serviceType") is None and service_type is not None: + prev["serviceType"] = service_type + finally: + conn.close() + + return out + + +def _get_biz_to_official_index(contact_db_path: Path) -> dict[str, dict[str, Any]]: + if not contact_db_path.exists(): + return {} + st = contact_db_path.stat() + mtime_ns = int(getattr(st, "st_mtime_ns", int(st.st_mtime * 1e9))) + return _build_biz_to_official_index(str(contact_db_path), mtime_ns, int(st.st_size)) + + +def _extract_sns_video_key(raw_xml: Any) -> str: + """Extract Isaac64 video key from raw XML, e.g. ``.""" + text = _decode_sns_text_blob(raw_xml) + m = _SNS_VIDEO_KEY_RE.search(text or "") + return str(m.group(1) or "").strip() if m else "" + + +def _looks_like_xml_text(s: str) -> bool: + if not s: + return False + t = str(s).lstrip() + if t.startswith('"') and t.endswith('"'): + t = t.strip('"').lstrip() + return t.startswith("<") + + +def _sanitize_wechat_xml_for_et(xml_text: str) -> str: + """Best-effort sanitize for ElementTree parsing. + + WeChat Moments "XML" is sometimes not well-formed XML (commonly: raw `&` inside URLs), + which breaks `xml.etree.ElementTree.fromstring`. 
We keep CDATA blocks intact and: + - strip invalid control chars + - escape bare `&` outside CDATA blocks + """ + + s = str(xml_text or "") + if not s: + return "" + + s = _SNS_XML_INVALID_CHARS_RE.sub("", s) + + parts: list[str] = [] + last = 0 + for m in _SNS_XML_CDATA_BLOCK_RE.finditer(s): + head = s[last : m.start()] + if head: + parts.append(_SNS_XML_BARE_AMP_RE.sub("&", head)) + parts.append(m.group(0)) + last = m.end() + + tail = s[last:] + if tail: + parts.append(_SNS_XML_BARE_AMP_RE.sub("&", tail)) + + return "".join(parts) + + +def _decode_sns_text_blob(value: Any) -> str: + """Decode text/blob values that may be hex/base64 encoded and/or zstd-compressed. + + WeChat WCDB realtime can return TEXT/BLOB fields as: + - plain XML string + - hex string (often a zstd frame starting with 28b52ffd...) + - base64 string (same) + """ + + if value is None: + return "" + + if isinstance(value, memoryview): + raw = bytes(value) + if raw and zstd is not None and raw.startswith(_ZSTD_MAGIC): + try: + raw = zstd.decompress(raw) + except Exception: + pass + try: + s = raw.decode("utf-8", errors="ignore") + except Exception: + s = "" + s = html.unescape(str(s or "").strip()) + return s if _looks_like_xml_text(s) else (str(s or "").strip()) + + if isinstance(value, (bytes, bytearray)): + raw = bytes(value) + if raw and zstd is not None and raw.startswith(_ZSTD_MAGIC): + try: + raw = zstd.decompress(raw) + except Exception: + pass + try: + s = raw.decode("utf-8", errors="ignore") + except Exception: + s = "" + s = html.unescape(str(s or "").strip()) + return s if _looks_like_xml_text(s) else (str(s or "").strip()) + + try: + text = str(value or "") + except Exception: + return "" + + text = html.unescape(text.strip()) + if not text: + return "" + + if _looks_like_xml_text(text): + return text + + def _accept_xml(decoded: str) -> str: + s2 = html.unescape(str(decoded or "").strip()) + return s2 if _looks_like_xml_text(s2) else "" + + # Hex string (optionally prefixed with 0x) 
+    t_hex = text[2:] if text.lower().startswith("0x") else text +    if len(t_hex) >= 16 and len(t_hex) % 2 == 0 and re.fullmatch(r"[0-9a-fA-F]+", t_hex): +        try: +            raw = bytes.fromhex(t_hex) +            if raw and zstd is not None and raw.startswith(_ZSTD_MAGIC): +                try: +                    raw = zstd.decompress(raw) +                except Exception: +                    raw = b"" +            if raw: +                s2 = _accept_xml(raw.decode("utf-8", errors="ignore")) +                if s2: +                    return s2 +        except Exception: +            pass + +    # Base64 string +    if len(text) >= 24 and len(text) % 4 == 0 and re.fullmatch(r"[A-Za-z0-9+/=]+", text): +        try: +            raw = base64.b64decode(text) +            if raw and zstd is not None and raw.startswith(_ZSTD_MAGIC): +                try: +                    raw = zstd.decompress(raw) +                except Exception: +                    raw = b"" +            if raw: +                s2 = _accept_xml(raw.decode("utf-8", errors="ignore")) +                if s2: +                    return s2 +        except Exception: +            pass + +    return text + + +def _extract_sns_source_name(raw_xml: Any) -> str: +    text = _decode_sns_text_blob(raw_xml) +    if not text: +        return "" +    m = _SNS_APP_NAME_RE.search(text) +    if not m: +        return "" +    v = str(m.group(1) or "") +    v = v.replace("<![CDATA[", "").replace("]]>", "") +    v = re.sub(r"<[^>]+>", "", v) +    return html.unescape(v.strip()) + + +def _build_location_text(node: Optional[ET.Element]) -> str: +    if node is None: +        return "" + +    def _get(key: str) -> str: +        return str(node.get(key) or node.findtext(key) or "").strip() + +    def _clean(v: str) -> str: +        # Some WeChat XML uses special whitespace (NBSP / thin spaces) inside the location string. +        return ( +            str(v or "") +            .replace("\u00a0", " ") +            .replace("\u2006", " ") +            .strip() +        ) + +    city = _clean(_get("city")) +    poi = _clean(_get("poiName") or _get("poi") or _get("label")) +    address = _clean(_get("address") or _get("poiAddress")) + +    # Avoid duplicated city prefix like: "广安市·广安市·xxx". +    if city and poi and poi.startswith(city): +        rest = poi[len(city):].lstrip(" ·") +        if rest: +            poi = rest + +    # WeChat UI typically renders `city·poi/address`. 
+ if city and (poi or address): + return f"{city}·{poi or address}".strip() + + for cand in (poi, address, city): + if cand: + return cand + return "" + + +def _parse_timeline_xml(xml_text: str, fallback_username: str) -> dict[str, Any]: + out: dict[str, Any] = { + "username": fallback_username, + "createTime": 0, + "contentDesc": "", + "location": "", + "sourceName": "", + "media": [], + "likes": [], + "comments": [], + "type": 1, # 默认类型 + "title": "", + "contentUrl": "", + "finderFeed": {} + } + + xml_str = _decode_sns_text_blob(xml_text) + if not xml_str: + return out + + + try: + root = ET.fromstring(_sanitize_wechat_xml_for_et(xml_str)) + except Exception: + return out + + # External share source label (e.g. QQ音乐 / 哔哩哔哩) is usually stored in `...`. + try: + for el in root.iter(): + try: + tag = str(el.tag or "").lower() + except Exception: + continue + if tag in {"appname", "sourcename"}: + v = str(el.text or "").strip() + if v: + out["sourceName"] = html.unescape(v).strip() + break + try: + attrs = el.attrib or {} + except Exception: + attrs = {} + for k, v in attrs.items(): + if str(k or "").lower() in {"appname", "sourcename"}: + vv = str(v or "").strip() + if vv: + out["sourceName"] = html.unescape(vv).strip() + break + if out["sourceName"]: + break + except Exception: + pass + + def _find_text(*paths: str) -> str: + for p in paths: + try: + v = root.findtext(p) + except Exception: + v = None + if isinstance(v, str) and v.strip(): + return v.strip() + return "" + # &转义!! 
+    def _clean_url(u: str) -> str: +        if not u: +            return "" + +        cleaned = html.unescape(u) +        cleaned = cleaned.replace("&amp;", "&") +        return cleaned.strip() + +    out["username"] = _find_text(".//TimelineObject/username", ".//TimelineObject/user_name", +                                 ".//username") or fallback_username +    out["createTime"] = _safe_int(_find_text(".//TimelineObject/createTime", ".//createTime")) +    out["contentDesc"] = _find_text(".//TimelineObject/contentDesc", ".//contentDesc") +    out["location"] = _build_location_text(root.find(".//location")) + +    # --- 提取内容类型 --- +    post_type = _safe_int(_find_text(".//ContentObject/type", ".//type")) +    out["type"] = post_type + +    # --- 如果是公众号文章 (Type 3) --- +    if post_type == 3: +        out["title"] = _find_text(".//ContentObject/title") +        out["contentUrl"] = _clean_url(_find_text(".//ContentObject/contentUrl")) + +    # --- 如果是外部分享链接 (Type 5) --- +    if post_type == 5: +        out["title"] = _find_text( +            ".//ContentObject/title", +            ".//ContentObject/linkTitle", +            ".//ContentObject/name", +            ".//ContentObject/desc", +            ".//ContentObject/description", +        ) +        out["contentUrl"] = _clean_url( +            _find_text( +                ".//ContentObject/contentUrl", +                ".//ContentObject/linkUrl", +                ".//ContentObject/url", +                ".//ContentObject/jumpUrl", +            ) +        ) + +    # --- 如果是音乐分享/链接卡片 (Type 42) --- +    if post_type == 42: +        # WeChat sometimes stores link/music share metadata under ContentObject fields. 
+ out["title"] = _find_text( + ".//ContentObject/title", + ".//ContentObject/linkTitle", + ".//ContentObject/name", + ".//ContentObject/desc", + ) + out["contentUrl"] = _clean_url( + _find_text( + ".//ContentObject/contentUrl", + ".//ContentObject/linkUrl", + ".//ContentObject/url", + ".//ContentObject/jumpUrl", + ) + ) + + # --- 如果是视频号 (Type 28) --- + if post_type == 28: + out["title"] = _find_text(".//ContentObject/title") + out["contentUrl"] = _clean_url(_find_text(".//ContentObject/contentUrl")) + out["finderFeed"] = { + "nickname": _find_text(".//finderFeed/nickname"), + "desc": _find_text(".//finderFeed/desc"), + "thumbUrl": _clean_url( + _find_text(".//finderFeed/mediaList/media/thumbUrl", ".//finderFeed/mediaList/media/coverUrl")), + "url": _clean_url(_find_text(".//finderFeed/mediaList/media/url")) + } + + media: list[dict[str, Any]] = [] + try: + for m in root.findall(".//mediaList//media"): + mt = _safe_int(m.findtext("type")) + url_el = m.find("url") if m.find("url") is not None else m.find("urlV") + thumb_el = m.find("thumb") if m.find("thumb") is not None else m.find("thumbV") + + url = _clean_url(url_el.text if url_el is not None else "") + thumb = _clean_url(thumb_el.text if thumb_el is not None else "") + + url_attrs = dict(url_el.attrib) if url_el is not None and url_el.attrib else {} + thumb_attrs = dict(thumb_el.attrib) if thumb_el is not None and thumb_el.attrib else {} + media_id = str(m.findtext("id") or "").strip() + size_el = m.find("size") + size = dict(size_el.attrib) if size_el is not None and size_el.attrib else {} + + if not url and not thumb: + continue + + media.append({ + "type": mt, + "id": media_id, + "url": url, + "thumb": thumb, + "urlAttrs": url_attrs, + "thumbAttrs": thumb_attrs, + "size": size, + }) + except Exception: + pass + out["media"] = media + + # Fallback: some type=42 shares only expose the jump URL via media[0].url. 
+ if post_type in (5, 42): + if (not str(out.get("contentUrl") or "").strip()) and media: + m0 = media[0] if isinstance(media[0], dict) else {} + u0 = str(m0.get("url") or "").strip() + if u0: + out["contentUrl"] = u0 + + likes: list[str] = [] + try: + for u in root.findall(".//likeList//like//username"): + if u is None or not u.text: + continue + v = str(u.text).strip() + if v: + likes.append(v) + except Exception: + likes = [] + out["likes"] = likes + + comments: list[dict[str, Any]] = [] + try: + for c in root.findall(".//commentList//comment"): + content = str(c.findtext("content") or "").strip() + if not content: + continue + comments.append( + { + "username": str(c.findtext("username") or "").strip(), + "nickname": str(c.findtext("nickName") or "").strip(), + "content": content, + "refUsername": str(c.findtext("refUserName") or "").strip(), + "refNickname": str(c.findtext("refNickName") or "").strip(), + } + ) + except Exception: + comments = [] + out["comments"] = comments + + return out + + +def _image_size_from_bytes(data: bytes, media_type: str) -> tuple[int, int]: + mt = str(media_type or "").lower() + if mt == "image/png": + # PNG IHDR width/height are stored at byte offsets 16..24 + if len(data) >= 24 and data.startswith(b"\x89PNG\r\n\x1a\n"): + try: + w = int.from_bytes(data[16:20], "big") + h = int.from_bytes(data[20:24], "big") + return w, h + except Exception: + return 0, 0 + return 0, 0 + if mt in {"image/jpeg", "image/jpg"}: + # Minimal JPEG SOF parser. + if len(data) < 4 or (not data.startswith(b"\xFF\xD8")): + return 0, 0 + i = 2 + while i + 3 < len(data): + if data[i] != 0xFF: + i += 1 + continue + # Skip padding 0xFF bytes. + while i < len(data) and data[i] == 0xFF: + i += 1 + if i >= len(data): + break + marker = data[i] + i += 1 + # Markers without a segment length. + if marker in (0xD8, 0xD9): + continue + if marker == 0xDA: # Start of scan. 
+ break + if i + 1 >= len(data): + break + seg_len = (data[i] << 8) + data[i + 1] + i += 2 + if seg_len < 2: + break + # SOF markers which contain width/height. + if marker in { + 0xC0, + 0xC1, + 0xC2, + 0xC3, + 0xC5, + 0xC6, + 0xC7, + 0xC9, + 0xCA, + 0xCB, + 0xCD, + 0xCE, + 0xCF, + }: + # segment: [precision(1), height(2), width(2), ...] + if i + 4 < len(data): + try: + h = (data[i + 1] << 8) + data[i + 2] + w = (data[i + 3] << 8) + data[i + 4] + return w, h + except Exception: + return 0, 0 + i += seg_len - 2 + return 0, 0 + return 0, 0 + + +@lru_cache(maxsize=16) +def _sns_img_time_index(wxid_dir_str: str) -> tuple[list[float], list[str]]: + """Build a (mtime_sorted, path_sorted) index for local Moments cache images. + + WeChat stores encrypted SNS cache images under: + `{wxid_dir}/cache/YYYY-MM/Sns/Img/<2hex>/<30hex>` + """ + wxid_dir = Path(str(wxid_dir_str or "").strip()) + out: list[tuple[float, str]] = [] + + cache_root = wxid_dir / "cache" + try: + month_dirs = [p for p in cache_root.iterdir() if p.is_dir()] + except Exception: + month_dirs = [] + + for mdir in month_dirs: + img_root = mdir / "Sns" / "Img" + try: + if not (img_root.exists() and img_root.is_dir()): + continue + except Exception: + continue + # The Img dir uses a 2-level layout; keep this tight (no global rglob). + try: + for sub in img_root.iterdir(): + if not sub.is_dir(): + continue + for f in sub.iterdir(): + try: + if not f.is_file(): + continue + st = f.stat() + out.append((float(st.st_mtime), str(f))) + except Exception: + continue + except Exception: + continue + + out.sort(key=lambda x: x[0]) + mtimes = [m for m, _p in out] + paths = [_p for _m, _p in out] + return mtimes, paths + + +def _normalize_hex32(value: Optional[str]) -> str: + """Return the first 32 hex chars from value, or '' if not present.""" + s = str(value or "").strip().lower() + if not s: + return "" + # Keep only hex chars. Some attrs may contain separators or be wrapped. 
+ s = re.sub(r"[^0-9a-f]", "", s) + if len(s) < 32: + return "" + return s[:32] + + +def _sns_media_picks_path(account_dir: Path) -> Path: + return account_dir / SNS_MEDIA_PICKS_FILE + + +def _sns_post_id_from_media_key(media_key: str) -> str: + # Frontend stores picks under `${postId}:${idx}`. + s = str(media_key or "").strip() + if not s: + return "" + return s.split(":", 1)[0].strip() + + +@lru_cache(maxsize=32) +def _load_sns_media_picks_cached(path_str: str, mtime: float) -> dict[str, str]: + p = Path(str(path_str or "").strip()) + try: + raw = p.read_text(encoding="utf-8") + except Exception: + return {} + + try: + obj = json.loads(raw) + except Exception: + return {} + + picks_obj = obj.get("picks") if isinstance(obj, dict) else None + if not isinstance(picks_obj, dict): + return {} + + out: dict[str, str] = {} + for k, v in picks_obj.items(): + mk = str(k or "").strip() + if not mk: + continue + ck = _normalize_hex32(str(v or "")) + if not ck: + continue + out[mk] = ck + return out + + +def _load_sns_media_picks(account_dir: Path) -> dict[str, str]: + p = _sns_media_picks_path(account_dir) + try: + st = p.stat() + mtime = float(st.st_mtime) + except Exception: + mtime = 0.0 + return _load_sns_media_picks_cached(str(p), mtime) + + +def _save_sns_media_picks(account_dir: Path, picks: dict[str, str]) -> int: + # Normalize + keep it stable for easier diff/debugging. 
+ out: dict[str, str] = {} + for k, v in (picks or {}).items(): + mk = str(k or "").strip() + if not mk: + continue + ck = _normalize_hex32(str(v or "")) + if not ck: + continue + out[mk] = ck + + try: + payload = {"updated_at": int(time.time()), "picks": dict(sorted(out.items(), key=lambda x: x[0]))} + _sns_media_picks_path(account_dir).write_text( + json.dumps(payload, ensure_ascii=False, indent=2), + encoding="utf-8", + ) + except Exception: + pass + + try: + _load_sns_media_picks_cached.cache_clear() + except Exception: + pass + + return len(out) + + +@lru_cache(maxsize=16) +def _sns_img_roots(wxid_dir_str: str) -> tuple[str, ...]: + """List all month cache roots that contain `Sns/Img`.""" + wxid_dir = Path(str(wxid_dir_str or "").strip()) + cache_root = wxid_dir / "cache" + try: + month_dirs = [p for p in cache_root.iterdir() if p.is_dir()] + except Exception: + month_dirs = [] + + roots: list[str] = [] + for mdir in month_dirs: + img_root = mdir / "Sns" / "Img" + try: + if img_root.exists() and img_root.is_dir(): + roots.append(str(img_root)) + except Exception: + continue + # Keep it stable (helps debugging and caching predictability). 
+ roots.sort() + return tuple(roots) + +@lru_cache(maxsize=16) +def _sns_video_roots(wxid_dir_str: str) -> tuple[str, ...]: + """List all month cache roots that contain `Sns/Video`.""" + wxid_dir = Path(str(wxid_dir_str or "").strip()) + cache_root = wxid_dir / "cache" + try: + month_dirs = [p for p in cache_root.iterdir() if p.is_dir()] + except Exception: + month_dirs = [] + + roots: list[str] = [] + for mdir in month_dirs: + video_root = mdir / "Sns" / "Video" + try: + if video_root.exists() and video_root.is_dir(): + roots.append(str(video_root)) + except Exception: + continue + roots.sort() + return tuple(roots) + +def _resolve_sns_cached_video_path( + wxid_dir: Path, + post_id: str, + media_id: str +) -> Optional[str]: + """基于逆向出的固定盐值 3,解析朋友圈视频的本地缓存路径""" + if not post_id or not media_id: + return None + + raw_key = f"{post_id}_{media_id}_3" # 暂时硬编码,大概率是对的 + try: + key32 = hashlib.md5(raw_key.encode("utf-8")).hexdigest() + except Exception: + return None + + sub = key32[:2] + rest = key32[2:] + + roots = _sns_video_roots(str(wxid_dir)) + for root_str in roots: + try: + base_path = Path(root_str) / sub / rest + for ext in [".mp4", ".tmp"]: + p = base_path.with_suffix(ext) + if p.exists() and p.is_file(): + return str(p) + except Exception: + continue + + return None + + +def _resolve_sns_cached_image_path_by_md5( + *, + wxid_dir: Path, + md5: str, + create_time: int, +) -> Optional[str]: + """Try to resolve SNS cache image by md5-based cache path layout.""" + md5_32 = _normalize_hex32(md5) + if not md5_32: + return None + + sub = md5_32[:2] + rest = md5_32[2:] + roots = _sns_img_roots(str(wxid_dir)) + if not roots: + return None + + best: tuple[float, str] | None = None + for root_str in roots: + try: + p = Path(root_str) / sub / rest + if not (p.exists() and p.is_file()): + continue + # Prefer the cache file closest to the post create_time (if provided), + # otherwise pick the newest one. 
+ st = p.stat() + if create_time > 0: + score = abs(float(st.st_mtime) - float(create_time)) + else: + score = -float(st.st_mtime) + if best is None or score < best[0]: + best = (score, str(p)) + except Exception: + continue + return best[1] if best else None + + +def _sns_cache_key_from_path(p: Path) -> str: + """Return the 32-hex cache key for a SNS cache file path, or ''.""" + try: + # cache/.../Sns/Img/<2hex>/<30hex> + key = f"{p.parent.name}{p.name}" + except Exception: + return "" + return _normalize_hex32(key) + + +def _generate_sns_cache_key(tid: str, media_id: str, media_type: int = 2) -> str: + """ + 公式: md5(tid_mediaId_type) + Example: 14852422213384352392_14852422213963625090_2 -> 6d479249ca5a090fab5c42c79bc56b89 + """ + if not tid or not media_id: + return "" + + raw_key = f"{tid}_{media_id}_{media_type}" + + try: + return hashlib.md5(raw_key.encode("utf-8")).hexdigest() + except Exception: + return "" + +def _resolve_sns_cached_image_path_by_cache_key( + *, + wxid_dir: Path, + cache_key: str, + create_time: int, +) -> Optional[str]: + """Resolve SNS cache image by `<2hex>/<30hex>` cache key.""" + key32 = _normalize_hex32(cache_key) + if not key32: + return None + + sub = key32[:2] + rest = key32[2:] + roots = _sns_img_roots(str(wxid_dir)) + if not roots: + return None + + best: tuple[float, str] | None = None + for root_str in roots: + try: + p = Path(root_str) / sub / rest + if not (p.exists() and p.is_file()): + continue + st = p.stat() + if create_time > 0: + score = abs(float(st.st_mtime) - float(create_time)) + else: + score = -float(st.st_mtime) + if best is None or score < best[0]: + best = (score, str(p)) + except Exception: + continue + return best[1] if best else None + + +@lru_cache(maxsize=4096) +def _resolve_sns_cached_image_path( + *, + account_dir_str: str, + create_time: int, + width: int, + height: int, + idx: int, + total_size: int = 0, +) -> Optional[str]: + """Best-effort resolve a local cached SNS image for a post+media meta.""" + 
total_size_i = int(total_size or 0) + must_match_size = width > 0 and height > 0 + # Without size/total_size, time-only matching is too error-prone and can easily mix images. + if (not must_match_size) and total_size_i <= 0: + return None + + account_dir = Path(str(account_dir_str or "").strip()) + if not account_dir.exists(): + return None + + wxid_dir = _resolve_account_wxid_dir(account_dir) + if not wxid_dir: + return None + + mtimes, paths = _sns_img_time_index(str(wxid_dir)) + if not mtimes: + return None + + create_time_i = int(create_time or 0) + if create_time_i > 0: + # We don't know when the image was cached (could be close to create_time, could be hours later). + # Use a generous window but keep it bounded for performance. + window = 72 * 3600 # 72h + lo = create_time_i - window + hi = create_time_i + window + + l = bisect_left(mtimes, lo) + r = bisect_right(mtimes, hi) + if l >= r: + # Fallback: search the newest N files if time window has no hits. + l = max(0, len(mtimes) - 800) + r = len(mtimes) + else: + # Missing createTime: only probe the newest cache entries. + l = max(0, len(mtimes) - 800) + r = len(mtimes) + + # Rank by time proximity to create_time (or by recency when createTime is missing). + candidates: list[tuple[float, str]] = [] + for j in range(l, r): + try: + if create_time_i > 0: + candidates.append((abs(mtimes[j] - float(create_time_i)), paths[j])) + else: + candidates.append((-mtimes[j], paths[j])) + except Exception: + continue + candidates.sort(key=lambda x: x[0]) + + matched: list[tuple[int, float, str]] = [] + # Limit the work per request. 
+ max_probe = 2000 if (r - l) <= 2000 else 2000 + for _diff, pstr in candidates[:max_probe]: + try: + p = Path(pstr) + payload, media_type = _read_and_maybe_decrypt_media(p, account_dir) + if not payload or not str(media_type or "").startswith("image/"): + continue + if must_match_size: + w0, h0 = _image_size_from_bytes(payload, str(media_type or "")) + if (w0, h0) != (width, height): + continue + + size_diff = abs(len(payload) - total_size_i) if total_size_i > 0 else 0 + # When totalSize is available, it tends to be a stronger discriminator than mtime. + matched.append((int(size_diff), float(_diff), pstr)) + except Exception: + continue + + if not matched: + return None + if must_match_size: + matched.sort(key=lambda x: (x[0], x[1], x[2])) + # If we have totalSize, treat it as a strong discriminator and always take the best match. + if total_size_i > 0: + return matched[0][2] + idx0 = max(0, int(idx or 0)) + return matched[idx0][2] if idx0 < len(matched) else None + # No size: only return a best-effort match when totalSize is available. + if total_size_i > 0: + matched.sort(key=lambda x: (x[0], x[1], x[2])) + return matched[0][2] + return None + + +@lru_cache(maxsize=2048) +def _list_sns_cached_image_candidate_keys( + *, + account_dir_str: str, + create_time: int, + width: int, + height: int, +) -> tuple[str, ...]: + """List local SNS cache candidates (as 32-hex cache keys) for a media item. + + The ordering matches `_resolve_sns_cached_image_path()`'s scan order, so `idx` + is stable within the same (account, create_time, width, height) input. 
+ """ + if create_time <= 0 or width <= 0 or height <= 0: + return tuple() + + account_dir = Path(str(account_dir_str or "").strip()) + if not account_dir.exists(): + return tuple() + + wxid_dir = _resolve_account_wxid_dir(account_dir) + if not wxid_dir: + return tuple() + + mtimes, paths = _sns_img_time_index(str(wxid_dir)) + if not mtimes: + return tuple() + + window = 72 * 3600 # 72h + lo = create_time - window + hi = create_time + window + + l = bisect_left(mtimes, lo) + r = bisect_right(mtimes, hi) + if l >= r: + l = max(0, len(mtimes) - 800) + r = len(mtimes) + + candidates: list[tuple[float, str]] = [] + for j in range(l, r): + try: + candidates.append((abs(mtimes[j] - float(create_time)), paths[j])) + except Exception: + continue + candidates.sort(key=lambda x: x[0]) + + max_probe = 2000 if (r - l) <= 2000 else 2000 + out: list[str] = [] + seen: set[str] = set() + for _diff, pstr in candidates[:max_probe]: + try: + p = Path(pstr) + payload, media_type = _read_and_maybe_decrypt_media(p, account_dir) + if not payload or not str(media_type or "").startswith("image/"): + continue + w0, h0 = _image_size_from_bytes(payload, str(media_type or "")) + if (w0, h0) != (width, height): + continue + key = _sns_cache_key_from_path(p) + if not key or key in seen: + continue + seen.add(key) + out.append(key) + except Exception: + continue + + return tuple(out) + +def _get_sns_covers(account_dir: Path, target_wxid: str, limit: int = 20) -> list[dict[str, Any]]: + """无论多古老,强行揪出用户的朋友圈封面历史 (type=7)。 + + 返回倒序(最新在前)的列表,包含 createTime 便于前端叠加显示。 + """ + wxid = str(target_wxid or "").strip() + if not wxid: + return [] + + try: + lim = int(limit or 20) + except Exception: + lim = 20 + if lim <= 0: + lim = 1 + # Keep payload bounded; cover history isn't worth huge queries. 
+    if lim > 50: +        lim = 50 + +    wxid_esc = wxid.replace("'", "''") +    cover_sql = ( +        "SELECT tid, content FROM SnsTimeLine " +        f"WHERE user_name = '{wxid_esc}' AND content LIKE '%<type>7</type>%' " +        "ORDER BY tid DESC " +        f"LIMIT {lim}" +    ) + +    rows: list[dict[str, Any]] = [] + +    # 1) Prefer real-time WCDB if available (reads db_storage/sns/sns.db). +    try: +        if WCDB_REALTIME.is_connected(account_dir.name): +            conn = WCDB_REALTIME.ensure_connected(account_dir) +            with conn.lock: +                sns_db_path = conn.db_storage_dir / "sns" / "sns.db" +                if not sns_db_path.exists(): +                    sns_db_path = conn.db_storage_dir / "sns.db" +                # 利用 exec_query 强行查 +                rows = _wcdb_exec_query(conn.handle, kind="media", path=str(sns_db_path), sql=cover_sql) or [] +    except Exception as e: +        logger.warning("[sns] WCDB cover fetch failed: %s", e) + +    # 2) Fallback to local decrypted snapshot sns.db. +    if not rows: +        sns_db_path = account_dir / "sns.db" +        if sns_db_path.exists(): +            try: +                # 只读模式防止锁死 +                conn_sq = sqlite3.connect(f"file:{sns_db_path}?mode=ro", uri=True) +                conn_sq.row_factory = sqlite3.Row +                rows_sq = conn_sq.execute(cover_sql).fetchall() +                conn_sq.close() +                rows = [{"tid": r["tid"], "content": r["content"]} for r in (rows_sq or [])] +            except Exception as e: +                logger.warning("[sns] SQLite cover fetch failed: %s", e) + +    out: list[dict[str, Any]] = [] +    seen: set[str] = set() +    for rr in rows: +        if not isinstance(rr, dict): +            continue +        cover_xml = rr.get("content") +        if not cover_xml: +            continue + +        try: +            cover_tid = int(rr.get("tid") or 0) +        except Exception: +            cover_tid = 0 + +        parsed = _parse_timeline_xml(str(cover_xml or ""), wxid) +        media = parsed.get("media") or [] +        if not isinstance(media, list) or not media: +            continue + +        cid = _to_unsigned_i64_str(cover_tid or "") +        if cid in seen: +            continue +        seen.add(cid) + +        out.append( +            { +                "id": cid, +                "tid": cover_tid, +                "username": wxid, +                "createTime": int(parsed.get("createTime") or 0), +                "media": media, +                "type": 7, +            } +        ) +    return out + + +def 
_get_sns_cover(account_dir: Path, target_wxid: str) -> Optional[dict[str, Any]]: + """兼容旧逻辑:返回最近的一张朋友圈封面 (type=7)""" + covers = _get_sns_covers(account_dir, target_wxid, limit=1) + return covers[0] if covers else None + + + + +@router.get("/api/sns/self_info", summary="获取个人信息(wxid和nickname)") +def api_sns_self_info(account: Optional[str] = None): + + account_dir = _resolve_account_dir(account) + wxid = account_dir.name + + logger.info(f"[self_info] 开始获取账号信息, 预设 wxid: {wxid}") + + nickname = wxid + source = "wxid_dir" + + try: + status = WCDB_REALTIME.get_status(account_dir) + if status.get("dll_present") and status.get("key_present"): + rt_conn = WCDB_REALTIME.ensure_connected(account_dir) + with rt_conn.lock: + + names_map = _wcdb_get_display_names(rt_conn.handle, [wxid]) + if names_map and names_map.get(wxid): + nickname = names_map[wxid] + source = "wcdb_realtime" + logger.info(f"[self_info] 从 WCDB 实时连接获取成功: {nickname}") + return {"wxid": wxid, "nickname": nickname, "source": source} + except Exception as e: + logger.debug(f"[self_info] WCDB 路径跳过或失败: {e}") + + contact_db_path = account_dir / "contact.db" + if contact_db_path.exists(): + conn = None + try: + db_uri = f"file:{contact_db_path}?mode=ro" + conn = sqlite3.connect(db_uri, uri=True, timeout=5) + conn.row_factory = sqlite3.Row + + cursor = conn.execute("PRAGMA table_info(contact)") + cols = {row["name"].lower() for row in cursor.fetchall()} + logger.debug(f"[self_info] contact 表现有字段: {cols}") + + target_nick_col = "nick_name" if "nick_name" in cols else ("nickname" if "nickname" in cols else None) + + if target_nick_col: + sql = f"SELECT remark, {target_nick_col} as nickname_val, alias FROM contact WHERE username = ? 
LIMIT 1" + row = conn.execute(sql, (wxid,)).fetchone() + + + if row: + raw_remark = str(row["remark"] or "").strip() if "remark" in row.keys() else "" + raw_nick = str(row["nickname_val"] or "").strip() + raw_alias = str(row["alias"] or "").strip() if "alias" in row.keys() else "" + + if raw_remark: + nickname = raw_remark + source = "contact_db_remark" + elif raw_nick: + nickname = raw_nick + source = "contact_db_nickname" + elif raw_alias: + nickname = raw_alias + source = "contact_db_alias" + + logger.info(f"[self_info] 从数据库提取成功: {nickname} (src: {source})") + else: + logger.warning("[self_info] contact 表中找不到任何昵称相关字段") + + except sqlite3.OperationalError as e: + logger.error(f"[self_info] 数据库繁忙或锁定: {e}") + except Exception as e: + logger.exception(f"[self_info] 查询异常: {e}") + finally: + if conn: conn.close() + else: + logger.warning(f"[self_info] 找不到 contact.db: {contact_db_path}") + + return { + "wxid": wxid, + "nickname": nickname, + "source": source + } + + +@router.post("/api/sns/realtime/sync_latest", summary="实时朋友圈同步到解密库(增量)") +def sync_sns_realtime_timeline_latest( + account: Optional[str] = None, + max_scan: int = 200, + force: int = 0, +): + """Sync latest visible Moments from WCDB realtime into decrypted `{account}/sns.db`. + + This is best-effort and intentionally **append-only**: we never delete rows from the decrypted snapshot + even if the post is deleted/hidden later, so users can still browse/export historical cached content. + """ + try: + lim = int(max_scan or 200) + except Exception: + lim = 200 + if lim <= 0: + lim = 200 + if lim > 2000: + lim = 2000 + + try: + force_flag = bool(int(force or 0)) + except Exception: + force_flag = False + + account_dir = _resolve_account_dir(account) + + # If there is no local decrypted sns.db yet, force a first-time materialization. 
+ try: + if not (account_dir / "sns.db").exists(): + force_flag = True + except Exception: + force_flag = True + + info = WCDB_REALTIME.get_status(account_dir) + available = bool(info.get("dll_present") and info.get("key_present") and info.get("db_storage_dir")) + if not available: + raise HTTPException(status_code=404, detail="WCDB realtime not available.") + + st = _read_sns_realtime_sync_state(account_dir) + last_max_id_u = 0 + try: + last_max_id_u = int(str(st.get("maxId") or st.get("max_id") or "0").strip() or "0") + except Exception: + last_max_id_u = 0 + + conn = WCDB_REALTIME.ensure_connected(account_dir) + + t0 = time.perf_counter() + rows: list[dict[str, Any]] = [] + max_id_u = 0 + upsert_rows: list[tuple[int, str, str, Optional[str]]] = [] + + with conn.lock: + rows = _wcdb_get_sns_timeline( + conn.handle, + limit=lim, + offset=0, + usernames=[], + keyword="", + ) + + if not rows: + return { + "status": "ok", + "scanned": 0, + "upserted": 0, + "maxId": str(last_max_id_u or 0), + "elapsedMs": int((time.perf_counter() - t0) * 1000.0), + } + + # Compute the newest unsigned tid/id from WCDB rows. + for r in rows: + if not isinstance(r, dict): + continue + try: + tid_u = int(r.get("id") or 0) + except Exception: + continue + if tid_u > max_id_u: + max_id_u = tid_u + + if (not force_flag) and max_id_u and (max_id_u <= last_max_id_u): + # No new top item; skip heavy exec_query + sqlite writes. 
+ return { + "status": "noop", + "scanned": len(rows), + "upserted": 0, + "maxId": str(max_id_u), + "lastMaxId": str(last_max_id_u), + "elapsedMs": int((time.perf_counter() - t0) * 1000.0), + } + + username_by_tid: dict[int, str] = {} + rawxml_by_tid: dict[int, str] = {} + tids: list[int] = [] + for r in rows: + if not isinstance(r, dict): + continue + uname = str(r.get("username") or "").strip() + try: + tid_u = int(r.get("id") or 0) + except Exception: + continue + tid_s = _to_signed_i64(tid_u) + tids.append(tid_s) + if uname: + username_by_tid[tid_s] = uname + raw_xml = str(r.get("rawXml") or "") + if raw_xml: + rawxml_by_tid[tid_s] = raw_xml + + tids = [t for t in list(dict.fromkeys(tids)) if isinstance(t, int)] + + sql_rows: list[dict[str, Any]] = [] + try: + sns_db_path = conn.db_storage_dir / "sns" / "sns.db" + if not sns_db_path.exists(): + sns_db_path = conn.db_storage_dir / "sns.db" + + if tids and sns_db_path.exists(): + in_sql = ",".join([str(x) for x in tids]) + # Newer schema may have pack_info_buf; try it first, then fall back. 
+ sql = f"SELECT tid, user_name, content, pack_info_buf FROM SnsTimeLine WHERE tid IN ({in_sql})" + try: + sql_rows = _wcdb_exec_query(conn.handle, kind="media", path=str(sns_db_path), sql=sql) + except Exception: + sql = f"SELECT tid, user_name, content FROM SnsTimeLine WHERE tid IN ({in_sql})" + sql_rows = _wcdb_exec_query(conn.handle, kind="media", path=str(sns_db_path), sql=sql) + except Exception: + sql_rows = [] + + if sql_rows: + for rr in sql_rows: + if not isinstance(rr, dict): + continue + try: + tid_val = int(rr.get("tid") or 0) + except Exception: + continue + content_xml = _decode_sns_text_blob(rr.get("content")) + if not content_xml: + continue + uname = str(rr.get("user_name") or rr.get("username") or "").strip() + if not uname: + uname = username_by_tid.get(tid_val, "") + if not uname: + continue + pack = rr.get("pack_info_buf") + pack_text = None if pack is None else str(pack) + upsert_rows.append((tid_val, uname, content_xml, pack_text)) + else: + # Fallback: store rawXml from WCDB rows (may be enough for parsing/export). 
+ for tid_val, uname in username_by_tid.items(): + raw_xml = rawxml_by_tid.get(tid_val) or "" + if not raw_xml: + continue + upsert_rows.append((int(tid_val), str(uname), str(raw_xml), None)) + + upserted = _upsert_sns_timeline_rows_to_decrypted_db( + account_dir, + upsert_rows, + source="realtime-sync-latest", + ) + + if max_id_u: + st2 = dict(st) + st2["maxId"] = str(max_id_u) + st2["updatedAt"] = int(time.time()) + _write_sns_realtime_sync_state(account_dir, st2) + + return { + "status": "ok", + "scanned": len(rows), + "upserted": int(upserted), + "maxId": str(max_id_u or 0), + "lastMaxId": str(last_max_id_u or 0), + "elapsedMs": int((time.perf_counter() - t0) * 1000.0), + } + + +@router.get("/api/sns/timeline", summary="获取朋友圈时间线") +def list_sns_timeline( + account: Optional[str] = None, + limit: int = 20, + offset: int = 0, + usernames: Optional[str] = None, + keyword: Optional[str] = None, +): + if limit <= 0: + raise HTTPException(status_code=400, detail="Invalid limit.") + if limit > 200: + limit = 200 + if offset < 0: + offset = 0 + + account_dir = _resolve_account_dir(account) + contact_db_path = account_dir / "contact.db" + + users = _parse_csv_list(usernames) + kw = str(keyword or "").strip() + + cover_data = None + covers_data: list[dict[str, Any]] = [] + if offset == 0: + target_wxid = users[0] if users else account_dir.name + covers_data = _get_sns_covers(account_dir, target_wxid, limit=20) + cover_data = covers_data[0] if covers_data else None + + def _list_from_decrypted_sqlite() -> dict[str, Any]: + """Legacy path: query the decrypted sns.db under output/databases/{account}. + + Note: This path may contain historical timeline items that are no longer + visible in WeChat due to privacy settings (e.g. "only last 3 days"). 
+ """ + sns_db_path = account_dir / "sns.db" + if not sns_db_path.exists(): + raise HTTPException(status_code=404, detail="sns.db not found for this account.") + + filters: list[str] = [] + params: list[Any] = [] + + if users: + placeholders = ",".join(["?"] * len(users)) + filters.append(f"user_name IN ({placeholders})") + params.extend(users) + + if kw: + filters.append("content LIKE ?") + params.append(f"%{kw}%") + + where_sql = f"WHERE {' AND '.join(filters)}" if filters else "" + + sql = f""" + SELECT tid, user_name, content + FROM SnsTimeLine + {where_sql} + ORDER BY tid DESC + LIMIT ? OFFSET ? + """ + # Fetch 1 extra row to determine hasMore. + params_with_page = params + [limit + 1, offset] + + conn2 = sqlite3.connect(str(sns_db_path)) + conn2.row_factory = sqlite3.Row + try: + rows2 = conn2.execute(sql, params_with_page).fetchall() + except sqlite3.OperationalError as e: + logger.warning("[sns] query failed: %s", e) + raise HTTPException(status_code=500, detail=f"sns.db query failed: {e}") + finally: + conn2.close() + + has_more2 = len(rows2) > limit + rows2 = rows2[:limit] + + post_usernames2 = [str(r["user_name"] or "").strip() for r in rows2 if str(r["user_name"] or "").strip()] + contact_rows2 = _load_contact_rows(contact_db_path, post_usernames2) if contact_db_path.exists() else {} + biz_index2 = _get_biz_to_official_index(contact_db_path) if contact_db_path.exists() else {} + official_usernames2: set[str] = set() + + timeline2: list[dict[str, Any]] = [] + for r in rows2: + try: + tid2 = r["tid"] + except Exception: + tid2 = None + uname2 = str(r["user_name"] or "").strip() + + content_xml = str(r["content"] or "") + parsed2 = _parse_timeline_xml(content_xml, uname2) + + # Best-effort: attach ISAAC64 video key for SNS videos/live-photos (WeFlow compatible). 
+ video_key2 = _extract_sns_video_key(content_xml) + if video_key2: + pmedia2 = parsed2.get("media") + if isinstance(pmedia2, list): + for m0 in pmedia2: + if not isinstance(m0, dict): + continue + if "videoKey" not in m0: + m0["videoKey"] = video_key2 + lp = m0.get("livePhoto") + if isinstance(lp, dict): + if not str(lp.get("key") or "").strip(): + lp["key"] = video_key2 + + display2 = _pick_display_name(contact_rows2.get(uname2), uname2) if uname2 else uname2 + post_type2 = int(parsed2.get("type", 1) or 1) + + official2: dict[str, Any] = {} + if post_type2 == 3: + content_url2 = str(parsed2.get("contentUrl") or "") + biz2 = _extract_mp_biz_from_url(content_url2) + info2 = biz_index2.get(biz2) if biz2 else None + off_username2 = str(info2.get("username") or "").strip() if isinstance(info2, dict) else "" + off_service_type2 = info2.get("serviceType") if isinstance(info2, dict) else None + official2 = { + "biz": biz2, + "username": off_username2, + "serviceType": off_service_type2, + "displayName": "", + } + if off_username2: + official_usernames2.add(off_username2) + + timeline2.append( + { + "id": _to_unsigned_i64_str(tid2) if tid2 is not None else (str(parsed2.get("createTime") or "") or uname2), + "tid": tid2, + "username": uname2 or parsed2.get("username") or "", + "displayName": display2, + "createTime": int(parsed2.get("createTime") or 0), + "contentDesc": str(parsed2.get("contentDesc") or ""), + "location": str(parsed2.get("location") or ""), + "sourceName": str(parsed2.get("sourceName") or ""), + "media": parsed2.get("media") or [], + "likes": parsed2.get("likes") or [], + "comments": parsed2.get("comments") or [], + "type": post_type2, + "title": parsed2.get("title", ""), + "contentUrl": parsed2.get("contentUrl", ""), + "finderFeed": parsed2.get("finderFeed", {}), + "official": official2, + } + ) + + if official_usernames2 and contact_db_path.exists(): + official_rows2 = _load_contact_rows(contact_db_path, list(official_usernames2)) + for item in 
timeline2: + off2 = item.get("official") + if not isinstance(off2, dict): + continue + u0_2 = str(off2.get("username") or "").strip() + if not u0_2: + continue + row2 = official_rows2.get(u0_2) + if row2 is None: + continue + off2["displayName"] = str(_pick_display_name(row2, u0_2)).strip() + + return { + "timeline": timeline2, + "hasMore": has_more2, + "limit": limit, + "offset": offset, + "source": "sqlite", + "cover": cover_data, + "covers": covers_data, + } + + auto_cache_key = _sns_timeline_auto_cache_key(account_dir, users, kw) if users else None + # If we previously detected that WCDB only returns a visible subset for this contact (less than + # the local decrypted snapshot), skip WCDB for subsequent pages to keep pagination flowing. + if auto_cache_key is not None and offset > 0: + try: + if _sns_timeline_auto_cache_get(auto_cache_key): + out = _list_from_decrypted_sqlite() + out["source"] = "sqlite-auto" + return out + except Exception: + pass + + def _list_from_wcdb_snstimeline_table(wcdb_conn: Any) -> Optional[dict[str, Any]]: + """Query encrypted `SnsTimeLine` table directly (bypass timeline API filtering). + + In some cases (commonly: contact sets "only show last 3 days"), the WCDB timeline API returns + an empty list even though the encrypted `sns.db` still contains cached historical rows. + """ + if not users: + return None + + def _q(v: str) -> str: + return "'" + str(v or "").replace("'", "''") + "'" + + try: + sns_db_path = wcdb_conn.db_storage_dir / "sns" / "sns.db" + if not sns_db_path.exists(): + sns_db_path = wcdb_conn.db_storage_dir / "sns.db" + except Exception: + return None + + if not (sns_db_path.exists() and sns_db_path.is_file()): + return None + + filters: list[str] = [ + "content IS NOT NULL", + "content != ''", + # Cover rows are returned separately via `cover`, do not mix into timeline. 
+ "content NOT LIKE '%7%'", + ] + + ulist = [str(u or "").strip() for u in users if str(u or "").strip()] + if ulist: + filters.append(f"user_name IN ({','.join([_q(u) for u in ulist])})") + + if kw: + kw_esc = str(kw).replace("'", "''") + filters.append(f"content LIKE '%{kw_esc}%'") + + where_sql = f"WHERE {' AND '.join(filters)}" if filters else "" + # Fetch 1 extra row to determine hasMore. + sql = f""" + SELECT tid, user_name, content, pack_info_buf + FROM SnsTimeLine + {where_sql} + ORDER BY tid DESC + LIMIT {int(limit) + 1} OFFSET {int(offset)} + """ + + sql_rows: list[dict[str, Any]] = [] + with wcdb_conn.lock: + try: + sql_rows = _wcdb_exec_query(wcdb_conn.handle, kind="media", path=str(sns_db_path), sql=sql) + except Exception: + # Older schema without pack_info_buf. + sql = f""" + SELECT tid, user_name, content + FROM SnsTimeLine + {where_sql} + ORDER BY tid DESC + LIMIT {int(limit) + 1} OFFSET {int(offset)} + """ + sql_rows = _wcdb_exec_query(wcdb_conn.handle, kind="media", path=str(sns_db_path), sql=sql) + + if not sql_rows: + return None + + has_more3 = len(sql_rows) > int(limit) + sql_rows = sql_rows[: int(limit)] + + post_usernames3: list[str] = [] + upsert_rows3: list[tuple[int, str, str, Optional[str]]] = [] + + # Prepare contact/biz mapping (same as other code paths). 
+ for rr in sql_rows: + if not isinstance(rr, dict): + continue + uname3 = str(rr.get("user_name") or rr.get("username") or "").strip() + if uname3: + post_usernames3.append(uname3) + + contact_rows3 = _load_contact_rows(contact_db_path, post_usernames3) if contact_db_path.exists() else {} + biz_index3 = _get_biz_to_official_index(contact_db_path) if contact_db_path.exists() else {} + official_usernames3: set[str] = set() + + timeline3: list[dict[str, Any]] = [] + for rr in sql_rows: + if not isinstance(rr, dict): + continue + + try: + tid3 = int(rr.get("tid") or 0) + except Exception: + continue + + uname3 = str(rr.get("user_name") or rr.get("username") or "").strip() + if not uname3: + continue + + content_xml3 = _decode_sns_text_blob(rr.get("content")) + if not content_xml3: + continue + + parsed3 = _parse_timeline_xml(content_xml3, uname3) + + # Attach ISAAC64 key for SNS video/live-photo. + video_key3 = _extract_sns_video_key(content_xml3) + if video_key3: + pmedia3 = parsed3.get("media") + if isinstance(pmedia3, list): + for m0 in pmedia3: + if not isinstance(m0, dict): + continue + if "videoKey" not in m0: + m0["videoKey"] = video_key3 + lp = m0.get("livePhoto") + if isinstance(lp, dict): + if not str(lp.get("key") or "").strip(): + lp["key"] = video_key3 + + post_type3 = int(parsed3.get("type", 1) or 1) + if post_type3 == 7: + continue + + display3 = _pick_display_name(contact_rows3.get(uname3), uname3) if uname3 else uname3 + + official3: dict[str, Any] = {} + if post_type3 == 3: + content_url3 = str(parsed3.get("contentUrl") or "") + biz3 = _extract_mp_biz_from_url(content_url3) + info3 = biz_index3.get(biz3) if biz3 else None + off_username3 = str(info3.get("username") or "").strip() if isinstance(info3, dict) else "" + off_service_type3 = info3.get("serviceType") if isinstance(info3, dict) else None + official3 = { + "biz": biz3, + "username": off_username3, + "serviceType": off_service_type3, + "displayName": "", + } + if off_username3: + 
official_usernames3.add(off_username3) + + timeline3.append( + { + "id": _to_unsigned_i64_str(tid3), + "tid": _to_unsigned_i64_str(tid3), + "username": uname3, + "displayName": str(display3 or "").replace("\xa0", " ").strip() or uname3, + "createTime": int(parsed3.get("createTime") or 0), + "contentDesc": str(parsed3.get("contentDesc") or ""), + "location": str(parsed3.get("location") or ""), + "sourceName": str(parsed3.get("sourceName") or ""), + "media": parsed3.get("media") or [], + "likes": parsed3.get("likes") or [], + "comments": parsed3.get("comments") or [], + "type": post_type3, + "title": parsed3.get("title", ""), + "contentUrl": parsed3.get("contentUrl", ""), + "finderFeed": parsed3.get("finderFeed", {}), + "official": official3, + } + ) + + pack3 = rr.get("pack_info_buf") + upsert_rows3.append((int(tid3), uname3, content_xml3, None if pack3 is None else str(pack3))) + + if official_usernames3 and contact_db_path.exists(): + official_rows3 = _load_contact_rows(contact_db_path, list(official_usernames3)) + for item in timeline3: + off3 = item.get("official") + if not isinstance(off3, dict): + continue + u0_3 = str(off3.get("username") or "").strip() + if not u0_3: + continue + row3 = official_rows3.get(u0_3) + if row3 is None: + continue + off3["displayName"] = str(_pick_display_name(row3, u0_3) or "").replace("\xa0", " ").strip() + + # Incremental writeback: cache what we just fetched into decrypted snapshot. + if upsert_rows3: + _upsert_sns_timeline_rows_to_decrypted_db(account_dir, upsert_rows3, source="timeline-wcdb-direct") + + if not timeline3: + return None + + return { + "timeline": timeline3, + "hasMore": has_more3, + "limit": limit, + "offset": offset, + "source": "wcdb-direct", + "cover": cover_data, + "covers": covers_data, + } + + # Prefer real-time WCDB access (reads the latest encrypted db_storage/sns/sns.db). + # Fallback to the decrypted sqlite copy in output/{account}/sns.db. 
+ try: + conn = WCDB_REALTIME.ensure_connected(account_dir) + writeback_rows: list[tuple[int, str, str, Optional[str]]] = [] + + cached_posts_total = 0 + if users: + # Used to decide whether to auto-switch to the decrypted sqlite snapshot when WCDB only + # returns a small visible subset (privacy settings, etc.). + try: + with _sns_decrypted_db_lock(Path(account_dir).name): + cached_posts_total = _count_sns_timeline_posts_in_decrypted_sqlite( + account_dir / "sns.db", + users=users, + kw=kw, + ) + except Exception: + cached_posts_total = 0 + + def _clean_name(v: Any) -> str: + return str(v or "").replace("\xa0", " ").strip() + + # Base timeline (includes likes/comments) from WCDB API. + with conn.lock: + wcdb_fetch_limit = limit + 1 + wcdb_probe_total: Optional[int] = None + + # Probe WCDB total when we already have a small (<=200) local cache. + # This lets us switch to sqlite on the *first page* without requiring the user + # to scroll to the end of WCDB's (possibly smaller) visible subset. + if users and offset == 0 and cached_posts_total > int(limit) and cached_posts_total <= 200: + wcdb_fetch_limit = 201 # 200 + 1 sentinel + + rows = _wcdb_get_sns_timeline( + conn.handle, + limit=wcdb_fetch_limit, + offset=offset, + usernames=users, + keyword=kw, + ) + + if wcdb_fetch_limit == 201: + try: + wcdb_probe_total = len(rows) if isinstance(rows, list) else 0 + except Exception: + wcdb_probe_total = None + + # If WCDB ends within 200 and is smaller than the local snapshot, serve snapshot immediately. 
+ if ( + users + and offset == 0 + and isinstance(wcdb_probe_total, int) + and wcdb_probe_total >= 0 + and wcdb_probe_total <= 200 + and cached_posts_total > wcdb_probe_total + ): + try: + if auto_cache_key is None: + auto_cache_key = _sns_timeline_auto_cache_key(account_dir, users, kw) + _sns_timeline_auto_cache_set(auto_cache_key, True) + except Exception: + pass + out = _list_from_decrypted_sqlite() + out["source"] = "sqlite-auto" + return out + + # Best-effort: enrich posts with XML-only fields (location + media attrs/size) + # by querying SnsTimeLine.content from the encrypted sns.db. + username_by_tid: dict[int, str] = {} + content_by_tid: dict[int, str] = {} + try: + sns_db_path = conn.db_storage_dir / "sns" / "sns.db" + if not sns_db_path.exists(): + sns_db_path = conn.db_storage_dir / "sns.db" + + tids: list[int] = [] + for r in (rows or [])[: int(limit)]: + if not isinstance(r, dict): + continue + uname0 = str(r.get("username") or "").strip() + try: + tid_u = int(r.get("id") or 0) + except Exception: + continue + tid_s = _to_signed_i64(tid_u) + tids.append(tid_s) + if uname0: + username_by_tid[tid_s] = uname0 + + tids = list(dict.fromkeys(tids)) + if tids and sns_db_path.exists(): + in_sql = ",".join([str(x) for x in tids]) + sql = f"SELECT tid, user_name, content, pack_info_buf FROM SnsTimeLine WHERE tid IN ({in_sql})" + try: + sql_rows = _wcdb_exec_query(conn.handle, kind="media", path=str(sns_db_path), sql=sql) + except Exception: + sql = f"SELECT tid, user_name, content FROM SnsTimeLine WHERE tid IN ({in_sql})" + sql_rows = _wcdb_exec_query(conn.handle, kind="media", path=str(sns_db_path), sql=sql) + for rr in sql_rows: + try: + tid_val = int(rr.get("tid")) + except Exception: + continue + content_xml = _decode_sns_text_blob(rr.get("content")) + if content_xml: + content_by_tid[tid_val] = content_xml + uname1 = str(rr.get("user_name") or rr.get("username") or "").strip() + if not uname1: + uname1 = username_by_tid.get(tid_val, "") + if uname1 and 
content_xml: + pack = rr.get("pack_info_buf") + writeback_rows.append((tid_val, uname1, content_xml, None if pack is None else str(pack))) + except Exception: + content_by_tid = {} + writeback_rows = [] + + has_more = len(rows) > limit + rows = rows[:limit] + + # Incremental writeback: cache what we just saw into the decrypted snapshot, + # so later "not visible" (e.g. only last 3 days) still has local data. + if writeback_rows: + _upsert_sns_timeline_rows_to_decrypted_db( + account_dir, + writeback_rows, + source="timeline-wcdb", + ) + + post_usernames = [str((r or {}).get("username") or "").strip() for r in rows if isinstance(r, dict)] + post_usernames = [u for u in post_usernames if u] + contact_rows = _load_contact_rows(contact_db_path, post_usernames) if contact_db_path.exists() else {} + biz_index = _get_biz_to_official_index(contact_db_path) if contact_db_path.exists() else {} + official_usernames: set[str] = set() + + timeline: list[dict[str, Any]] = [] + for r in rows: + if not isinstance(r, dict): + continue + + uname = str(r.get("username") or "").strip() + nickname = _clean_name(r.get("nickname")) + display = nickname or (_pick_display_name(contact_rows.get(uname), uname) if uname else uname) + + create_time = _safe_int(r.get("createTime")) + content_desc = str(r.get("contentDesc") or "") + media = r.get("media") if isinstance(r.get("media"), list) else [] + likes = r.get("likes") if isinstance(r.get("likes"), list) else [] + likes = [_clean_name(x) for x in likes if _clean_name(x)] + comments = r.get("comments") if isinstance(r.get("comments"), list) else [] + + # WeFlow: live photo / SNS video decryption key comes from `` in raw XML. + # Keep it local to avoid sending huge rawXml to the frontend. 
+ video_key = _extract_sns_video_key(r.get("rawXml")) + if video_key and isinstance(media, list): + for m0 in media: + if not isinstance(m0, dict): + continue + if "videoKey" not in m0: + m0["videoKey"] = video_key + lp = m0.get("livePhoto") + if isinstance(lp, dict): + if not str(lp.get("key") or "").strip(): + lp["key"] = video_key + + # Enrich with parsed XML when available. + location = str(r.get("location") or "") + source_name = _extract_sns_source_name(r.get("rawXml")) + + post_type = 1 + title = "" + content_url = "" + finder_feed = {} + try: + tid_u = int(r.get("id") or 0) + tid_s = (tid_u & 0xFFFFFFFFFFFFFFFF) + if tid_s >= 0x8000000000000000: + tid_s -= 0x10000000000000000 + xml = content_by_tid.get(int(tid_s)) + if xml: + parsed = _parse_timeline_xml(xml, uname) + if parsed.get("location"): + location = str(parsed.get("location") or "") + sn0 = str(parsed.get("sourceName") or "").strip() + if sn0: + source_name = sn0 + + post_type = parsed.get("type", 1) + + if post_type == 7: # 朋友圈封面 + continue + + title = parsed.get("title", "") + content_url = parsed.get("contentUrl", "") + finder_feed = parsed.get("finderFeed", {}) + + pmedia = parsed.get("media") or [] + if isinstance(pmedia, list) and isinstance(media, list) and pmedia: + # Merge by index (best-effort). + merged: list[dict[str, Any]] = [] + for i, m0 in enumerate(media): + mp = pmedia[i] if i < len(pmedia) else None + if not isinstance(mp, dict): + merged.append(m0 if isinstance(m0, dict) else {}) + continue + mm = dict(mp) + if isinstance(m0, dict): + for k in ("url", "thumb"): + v = m0.get(k) + if v: + mm[k] = v + for k, v in m0.items(): + if k not in mm: + mm[k] = v + merged.append(mm) + media = merged + + # If rawXml didn't contain ``, try extracting from the content XML. + # Some WCDB timeline APIs omit rawXml, but the encrypted sns.db content still has the key. 
+ if isinstance(media, list) and (not video_key): + video_key_xml = _extract_sns_video_key(xml) + if video_key_xml: + for m0 in media: + if not isinstance(m0, dict): + continue + if "videoKey" not in m0: + m0["videoKey"] = video_key_xml + lp = m0.get("livePhoto") + if isinstance(lp, dict): + if not str(lp.get("key") or "").strip(): + lp["key"] = video_key_xml + except Exception: + pass + + official: dict[str, Any] = {} + if post_type == 3: + biz = _extract_mp_biz_from_url(content_url) + info = biz_index.get(biz) if biz else None + off_username = str(info.get("username") or "").strip() if isinstance(info, dict) else "" + off_service_type = info.get("serviceType") if isinstance(info, dict) else None + official = { + "biz": biz, + "username": off_username, + "serviceType": off_service_type, + "displayName": "", + } + if off_username: + official_usernames.add(off_username) + + pid = str(r.get("id") or "") or str(create_time or "") or uname + timeline.append( + { + "id": pid, + "tid": r.get("id"), + "username": uname, + "displayName": _clean_name(display) or uname, + "createTime": create_time, + "contentDesc": content_desc, + "location": str(location or ""), + "sourceName": str(source_name or ""), + "media": media, + "likes": likes, + "comments": comments, + "type": post_type, + "title": title, + "contentUrl": content_url, + "finderFeed": finder_feed, + "official": official, + } + ) + + if official_usernames and contact_db_path.exists(): + official_rows = _load_contact_rows(contact_db_path, list(official_usernames)) + for item in timeline: + off = item.get("official") + if not isinstance(off, dict): + continue + u0 = str(off.get("username") or "").strip() + if not u0: + continue + row = official_rows.get(u0) + if row is None: + continue + off["displayName"] = _clean_name(_pick_display_name(row, u0)) + + wcdb_resp = { + "timeline": timeline, + "hasMore": has_more, + "limit": limit, + "offset": offset, + "source": "wcdb", + "cover": cover_data, + "covers": covers_data, + 
} + + # Some contacts may have Moments cached in the decrypted sqlite, while the WCDB + # real-time API returns empty (commonly caused by privacy settings like + # "only show last 3 days"). In that case, fall back to the decrypted sqlite + # so the UI doesn't show an empty timeline when data exists locally. + if (not timeline) and users: + # 1) Try querying encrypted `SnsTimeLine` table directly (can bypass API filtering). + try: + direct = _list_from_wcdb_snstimeline_table(conn) + except Exception: + direct = None + if isinstance(direct, dict) and direct.get("timeline"): + return direct + + # 2) Fallback to decrypted sqlite snapshot (historical cached content). + try: + legacy = _list_from_decrypted_sqlite() + except HTTPException: + legacy = None + except Exception: + legacy = None + if isinstance(legacy, dict) and legacy.get("timeline"): + return legacy + + # Auto-fallback: if WCDB timeline ends but the local decrypted snapshot has more rows for this + # contact query, switch to the snapshot so the frontend can keep paging. 
+ if users and timeline and (not has_more): + try: + with _sns_decrypted_db_lock(Path(account_dir).name): + cached_total = _count_sns_timeline_posts_in_decrypted_sqlite( + account_dir / "sns.db", + users=users, + kw=kw, + ) + wcdb_total = int(offset) + int(len(timeline)) + if cached_total > wcdb_total: + if auto_cache_key is None: + auto_cache_key = _sns_timeline_auto_cache_key(account_dir, users, kw) + _sns_timeline_auto_cache_set(auto_cache_key, True) + out = _list_from_decrypted_sqlite() + out["source"] = "sqlite-auto" + return out + except Exception: + pass + + return wcdb_resp + except WCDBRealtimeError as e: + logger.info("[sns] wcdb realtime unavailable: %s", e) + except Exception as e: + logger.warning("[sns] wcdb realtime failed: %s", e) + + return _list_from_decrypted_sqlite() + + +@router.get("/api/sns/users", summary="列出朋友圈联系人(按发圈数统计)") +def list_sns_users( + account: Optional[str] = None, + keyword: Optional[str] = None, + limit: int = 5000, +): + account_dir = _resolve_account_dir(account) + sns_db_path = account_dir / "sns.db" + if not sns_db_path.exists(): + raise HTTPException(status_code=404, detail="sns.db not found for this account.") + + contact_db_path = account_dir / "contact.db" + + try: + lim = int(limit or 5000) + except Exception: + lim = 5000 + if lim <= 0: + lim = 5000 + if lim > 5000: + lim = 5000 + + kw = str(keyword or "").strip().lower() + + conn = sqlite3.connect(str(sns_db_path)) + conn.row_factory = sqlite3.Row + try: + cur = conn.cursor() + cur.execute( + """ + SELECT + user_name AS username, + SUM( + CASE + WHEN content IS NOT NULL AND content != '' AND content NOT LIKE '%7%' + THEN 1 ELSE 0 + END + ) AS postCount, + COUNT(*) AS totalCount + FROM SnsTimeLine + GROUP BY user_name + ORDER BY postCount DESC, totalCount DESC + """ + ) + rows = cur.fetchall() or [] + finally: + try: + conn.close() + except Exception: + pass + + usernames = [str(r["username"] or "").strip() for r in rows if r is not None] + usernames = [u for u in 
usernames if u] + contact_rows = _load_contact_rows(contact_db_path, usernames) if contact_db_path.exists() else {} + + items: list[dict[str, Any]] = [] + + def _clean_name(v: Any) -> str: + return str(v or "").replace("\xa0", " ").strip() + + for r in rows: + try: + uname = str(r["username"] or "").strip() + except Exception: + uname = "" + if not uname: + continue + + try: + post_count = int(r["postCount"] or 0) + except Exception: + post_count = 0 + + row = contact_rows.get(uname) + display = _clean_name(_pick_display_name(row, uname)) or uname + + if kw: + if kw not in uname.lower() and kw not in display.lower(): + continue + + items.append({"username": uname, "displayName": display, "postCount": post_count}) + if len(items) >= lim: + break + + return {"items": items, "count": len(items), "limit": lim} + + +class SnsMediaPicksSaveRequest(BaseModel): + account: Optional[str] = Field(None, description="账号目录名(可选,默认使用第一个)") + picks: dict[str, str] = Field(default_factory=dict, description="手动匹配表:`${postId}:${idx}` -> 32hex cacheKey") + + +@router.post("/api/sns/media_picks", summary="保存朋友圈图片手动匹配结果(本机)") +async def save_sns_media_picks(request: SnsMediaPicksSaveRequest): + account_dir = _resolve_account_dir(request.account) + count = _save_sns_media_picks(account_dir, request.picks or {}) + return {"status": "success", "count": int(count)} + + +@router.get("/api/sns/media_candidates", summary="获取朋友圈图片本地缓存候选") +def list_sns_media_candidates( + account: Optional[str] = None, + create_time: int = 0, + width: int = 0, + height: int = 0, + limit: int = 24, + offset: int = 0, +): + if limit <= 0: + raise HTTPException(status_code=400, detail="Invalid limit.") + if limit > 200: + limit = 200 + if offset < 0: + offset = 0 + + account_dir = _resolve_account_dir(account) + keys = _list_sns_cached_image_candidate_keys( + account_dir_str=str(account_dir), + create_time=int(create_time or 0), + width=int(width or 0), + height=int(height or 0), + ) + total = len(keys) + end = 
min(total, offset + limit) + items = [{"idx": i, "key": keys[i]} for i in range(offset, end)] + return {"count": total, "items": items, "hasMore": end < total, "limit": limit, "offset": offset} + + +def _is_allowed_sns_media_host(host: str) -> bool: + return _sns_media.is_allowed_sns_media_host(host) + + +def _fix_sns_cdn_url(url: str, *, token: str = "", is_video: bool = False) -> str: + return _sns_media.fix_sns_cdn_url(url, token=token, is_video=is_video) + + +def _detect_mp4_ftyp(head: bytes) -> bool: + return bool(head) and len(head) >= 8 and head[4:8] == b"ftyp" + + +@lru_cache(maxsize=1) +def _weflow_wxisaac64_script_path() -> str: + """Locate the Node helper that wraps WeFlow's wasm_video_decode.* assets.""" + repo_root = Path(__file__).resolve().parents[3] + script = repo_root / "tools" / "weflow_wasm_keystream.js" + if script.exists() and script.is_file(): + return str(script) + return "" + + +@lru_cache(maxsize=64) +def _weflow_wxisaac64_keystream(key: str, size: int) -> bytes: + return _sns_media.weflow_wxisaac64_keystream(key, size) + + +_SNS_REMOTE_VIDEO_CACHE_EXTS = [ + ".mp4", + ".bin", # legacy/unknown +] + + +def _sns_remote_video_cache_dir_and_stem(account_dir: Path, *, url: str, key: str) -> tuple[Path, str]: + digest = hashlib.md5(f"video|{url}|{key}".encode("utf-8", errors="ignore")).hexdigest() + cache_dir = account_dir / "sns_remote_video_cache" / digest[:2] + return cache_dir, digest + + +def _sns_remote_video_cache_existing_path(cache_dir: Path, stem: str) -> Optional[Path]: + for ext in _SNS_REMOTE_VIDEO_CACHE_EXTS: + p = cache_dir / f"{stem}{ext}" + try: + if p.exists() and p.is_file(): + return p + except Exception: + continue + return None + + +async def _download_sns_remote_to_file(url: str, dest_path: Path, *, max_bytes: int) -> tuple[str, str]: + """Download SNS media to file (streaming) from Tencent CDN. 
+ + Returns: (content_type, x_enc) + """ + u = str(url or "").strip() + if not u: + return "", "" + + # Safety: only allow Tencent CDN hosts. + try: + p = urlparse(u) + host = str(p.hostname or "").lower() + if not _is_allowed_sns_media_host(host): + raise HTTPException(status_code=400, detail="SNS media host not allowed.") + except HTTPException: + raise + except Exception: + raise HTTPException(status_code=400, detail="Invalid SNS media URL.") + + base_headers = { + "User-Agent": "MicroMessenger Client", + "Accept": "*/*", + # Do not request compression for video streams. + "Connection": "keep-alive", + } + + header_variants = [ + {}, + { + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36 MicroMessenger/7.0.20.1781(0x6700143B) WindowsWechat(0x63090719) XWEB/8351", + "Referer": "https://servicewechat.com/", + "Origin": "https://servicewechat.com", + }, + {"Referer": "https://wx.qq.com/", "Origin": "https://wx.qq.com"}, + {"Referer": "https://mp.weixin.qq.com/", "Origin": "https://mp.weixin.qq.com"}, + ] + + last_err: Exception | None = None + async with httpx.AsyncClient(timeout=30.0, follow_redirects=True) as client: + for extra in header_variants: + headers = dict(base_headers) + headers.update(extra) + try: + if dest_path.exists(): + try: + dest_path.unlink(missing_ok=True) + except Exception: + pass + + total = 0 + async with client.stream("GET", u, headers=headers) as resp: + resp.raise_for_status() + content_type = str(resp.headers.get("Content-Type") or "").strip() + x_enc = str(resp.headers.get("x-enc") or "").strip() + dest_path.parent.mkdir(parents=True, exist_ok=True) + with dest_path.open("wb") as f: + async for chunk in resp.aiter_bytes(): + if not chunk: + continue + total += len(chunk) + if total > max_bytes: + raise HTTPException(status_code=400, detail="SNS video too large.") + f.write(chunk) + return content_type, x_enc + except HTTPException: + raise + except Exception 
as e: + last_err = e + continue + + raise last_err or RuntimeError("sns remote download failed") + + +def _maybe_decrypt_sns_video_file(path: Path, key: str) -> bool: + return _sns_media.maybe_decrypt_sns_video_file(path, key) + + +async def _materialize_sns_remote_video( + *, + account_dir: Path, + url: str, + key: str, + token: str, + use_cache: bool, +) -> Optional[Path]: + return await _sns_media.materialize_sns_remote_video( + account_dir=account_dir, + url=url, + key=key, + token=token, + use_cache=use_cache, + ) + + +def _best_effort_unlink(path: str) -> None: + _sns_media.best_effort_unlink(path) + + +def _detect_image_mime(data: bytes) -> str: + return _sns_media.detect_image_mime(data) + + +_SNS_REMOTE_CACHE_EXTS = [ + ".jpg", + ".jpeg", + ".png", + ".gif", + ".webp", + ".bmp", + ".avif", + ".heic", + ".heif", + ".bin", # legacy/unknown +] + + +def _mime_to_ext(mt: str) -> str: + m = str(mt or "").split(";", 1)[0].strip().lower() + return { + "image/jpeg": ".jpg", + "image/jpg": ".jpg", + "image/png": ".png", + "image/gif": ".gif", + "image/webp": ".webp", + "image/bmp": ".bmp", + "image/avif": ".avif", + "image/heic": ".heic", + "image/heif": ".heif", + }.get(m, ".bin") + + +def _ext_to_mime(ext: str) -> str: + e = str(ext or "").strip().lower().lstrip(".") + return { + "jpg": "image/jpeg", + "jpeg": "image/jpeg", + "png": "image/png", + "gif": "image/gif", + "webp": "image/webp", + "bmp": "image/bmp", + "avif": "image/avif", + "heic": "image/heic", + "heif": "image/heif", + }.get(e, "") + + +def _sns_remote_cache_dir_and_stem(account_dir: Path, *, url: str, key: str) -> tuple[Path, str]: + digest = hashlib.md5(f"{url}|{key}".encode("utf-8", errors="ignore")).hexdigest() + cache_dir = account_dir / "sns_remote_cache" / digest[:2] + return cache_dir, digest + + +def _sns_remote_cache_existing_path(cache_dir: Path, stem: str) -> Optional[Path]: + for ext in _SNS_REMOTE_CACHE_EXTS: + p = cache_dir / f"{stem}{ext}" + try: + if p.exists() and p.is_file(): + 
return p + except Exception: + continue + return None + + +def _sniff_image_mime_from_file(path: Path) -> str: + try: + with path.open("rb") as f: + head = f.read(64) + return _detect_image_mime(head) + except Exception: + return "" + + +async def _download_sns_remote_bytes(url: str) -> tuple[bytes, str, str]: + """Download SNS media bytes from Tencent CDN with a few safe header variants.""" + u = str(url or "").strip() + if not u: + return b"", "", "" + + max_bytes = 25 * 1024 * 1024 + + base_headers = { + "User-Agent": "MicroMessenger Client", + "Accept": "*/*", + "Accept-Language": "zh-CN,zh;q=0.9", + # Avoid brotli dependency issues; images are already compressed anyway. + "Accept-Encoding": "identity", + "Connection": "keep-alive", + } + + # Some CDN endpoints return a small placeholder image for certain UA/Referer + # combinations but still respond 200. Try the simplest (base headers only) + # first to maximize the chance of getting the real media in one request. + header_variants = [ + {}, + # WeFlow/Electron: MicroMessenger UA + servicewechat.com referer passes some CDN anti-hotlink checks. 
+ { + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36 MicroMessenger/7.0.20.1781(0x6700143B) WindowsWechat(0x63090719) XWEB/8351", + "Referer": "https://servicewechat.com/", + "Origin": "https://servicewechat.com", + }, + {"Referer": "https://wx.qq.com/", "Origin": "https://wx.qq.com"}, + {"Referer": "https://mp.weixin.qq.com/", "Origin": "https://mp.weixin.qq.com"}, + ] + + last_err: Exception | None = None + async with httpx.AsyncClient(timeout=20.0, follow_redirects=True) as client: + for extra in header_variants: + headers = dict(base_headers) + headers.update(extra) + try: + resp = await client.get(u, headers=headers) + resp.raise_for_status() + payload = bytes(resp.content or b"") + if len(payload) > max_bytes: + raise HTTPException(status_code=400, detail="SNS media too large (>25MB).") + content_type = str(resp.headers.get("Content-Type") or "").strip() + x_enc = str(resp.headers.get("x-enc") or "").strip() + return payload, content_type, x_enc + except HTTPException: + raise + except Exception as e: + last_err = e + continue + + raise last_err or RuntimeError("sns remote download failed") + + +async def _try_fetch_and_decrypt_sns_remote( + *, + account_dir: Path, + url: str, + key: str, + token: str, + use_cache: bool, +) -> Optional[Response]: + """Try remote download+decrypt first (accurate when keys are present). + + Returns a Response on success, or None on failure so caller can fall back to local cache matching. 
+ """ + res = await _sns_media.try_fetch_and_decrypt_sns_image_remote( + account_dir=account_dir, + url=str(url or ""), + key=str(key or ""), + token=str(token or ""), + use_cache=bool(use_cache), + ) + if res is None: + return None + + resp = Response(content=res.payload, media_type=res.media_type) + resp.headers["Cache-Control"] = "public, max-age=86400" if use_cache else "no-store" + resp.headers["X-SNS-Source"] = str(res.source or "remote") + if res.x_enc: + resp.headers["X-SNS-X-Enc"] = str(res.x_enc) + return resp + + +@router.get("/api/sns/media", summary="获取朋友圈图片(下载解密优先)") +async def get_sns_media( + account: Optional[str] = None, + create_time: int = 0, + width: int = 0, + height: int = 0, + total_size: int = 0, + idx: int = 0, + avoid_picked: int = 0, + post_id: Optional[str] = None, + media_id: Optional[str] = None, + post_type: int = 1, + media_type: int = 2, + pick: Optional[str] = None, + md5: Optional[str] = None, + token: Optional[str] = None, + key: Optional[str] = None, + use_cache: int = 1, + url: Optional[str] = None, +): + account_dir = _resolve_account_dir(account) + wxid_dir = _resolve_account_wxid_dir(account_dir) + + try: + use_cache_flag = bool(int(use_cache or 1)) + except Exception: + use_cache_flag = True + + # 0) Prefer WeFlow-style remote download + decrypt (accurate, avoids local cache mismatch). + remote_resp = await _try_fetch_and_decrypt_sns_remote( + account_dir=account_dir, + url=str(url or ""), + key=str(key or ""), + token=str(token or ""), + use_cache=use_cache_flag, + ) + if remote_resp is not None: + return remote_resp + + # Cache disabled: do not fall back to local cache heuristics. 
+ if not use_cache_flag: + raise HTTPException(status_code=404, detail="SNS media not found (cache disabled).") + + if wxid_dir and post_id and media_id: + if int(post_type) == 7: + raw_key = f"{post_id}_{media_id}_4" # 硬编码 + + md5_str = hashlib.md5(raw_key.encode("utf-8")).hexdigest() + bkg_path = wxid_dir / "business" / "sns" / "bkg" / md5_str[:2] / md5_str + + if bkg_path.exists() and bkg_path.is_file(): + print(f"===== Hit Bkg Cover ======= {bkg_path}") + + return FileResponse(bkg_path, media_type="image/jpeg", + headers={"Cache-Control": "public, max-age=31536000", "X-SNS-Source": "bkg-cover"}) + exact_match_path = None + hit_type = "" + + # 尝试 1: 使用 post_type 计算 MD5 + key_post = _generate_sns_cache_key(post_id, media_id, post_type) + exact_match_path = _resolve_sns_cached_image_path_by_cache_key( + wxid_dir=wxid_dir, + cache_key=key_post, + create_time=0 + ) + if exact_match_path: + hit_type = "post_type" + + # 尝试 2: 如果没找到,并且 media_type 和 post_type 不一样,再试一次 + if not exact_match_path and post_type != media_type: + key_media = _generate_sns_cache_key(post_id, media_id, media_type) + exact_match_path = _resolve_sns_cached_image_path_by_cache_key( + wxid_dir=wxid_dir, + cache_key=key_media, + create_time=0 + ) + if exact_match_path: + hit_type = "media_type" + + # 如果通过这两种精确定位找到了文件,直接返回 + if exact_match_path: + print(f"=====exact_match_path======={exact_match_path}============= (Hit: {hit_type})") + try: + payload, mtype = _read_and_maybe_decrypt_media(Path(exact_match_path), account_dir) + if payload and str(mtype or "").startswith("image/"): + resp = Response(content=payload, media_type=str(mtype or "image/jpeg")) + resp.headers["Cache-Control"] = "public, max-age=31536000" + resp.headers["X-SNS-Source"] = "deterministic-hash" + # 在 Header 里塞入到底是哪个 type 命中的,方便 F12 调试 + resp.headers["X-SNS-Hit-Type"] = hit_type + return resp + except Exception: + pass + + print("no exact match path, falling back...") + + # 0) User-picked cache key override (stable across 
candidate ordering). + pick_key = _normalize_hex32(pick) + if pick_key: + wxid_dir = _resolve_account_wxid_dir(account_dir) + if wxid_dir: + local = _resolve_sns_cached_image_path_by_cache_key( + wxid_dir=wxid_dir, + cache_key=pick_key, + create_time=int(create_time or 0), + ) + if local: + try: + payload, media_type = _read_and_maybe_decrypt_media(Path(local), account_dir) + if payload and str(media_type or "").startswith("image/"): + resp = Response(content=payload, media_type=str(media_type or "image/jpeg")) + resp.headers["Cache-Control"] = "public, max-age=86400" + resp.headers["X-SNS-Source"] = "manual-pick" + return resp + except Exception: + pass + + # Optional: avoid using a cache image that was manually pinned to another post. + # Only applies when frontend enables this setting and the current media has no explicit `pick`. + try: + avoid_flag = bool(int(avoid_picked or 0)) + except Exception: + avoid_flag = False + cur_post_id = str(post_id or "").strip() + reserved_other: set[str] = set() + if avoid_flag and (not pick_key) and cur_post_id: + picks_map = _load_sns_media_picks(account_dir) + for mk, ck in (picks_map or {}).items(): + pid = _sns_post_id_from_media_key(mk) + if not pid or pid == cur_post_id: + continue + if ck: + reserved_other.add(str(ck)) + + # 1) Try local decrypted cache first (works for old posts where CDN URLs return placeholders). 
+ local = _resolve_sns_cached_image_path( + account_dir_str=str(account_dir), + create_time=int(create_time or 0), + width=int(width or 0), + height=int(height or 0), + idx=max(0, int(idx or 0)), + total_size=int(total_size or 0), + ) + if local and reserved_other: + try: + ck0 = _sns_cache_key_from_path(Path(local)) + if ck0 and ck0 in reserved_other: + local = None + except Exception: + pass + if local: + try: + payload, media_type = _read_and_maybe_decrypt_media(Path(local), account_dir) + if payload and str(media_type or "").startswith("image/"): + resp = Response(content=payload, media_type=str(media_type or "image/jpeg")) + resp.headers["Cache-Control"] = "public, max-age=86400" + resp.headers["X-SNS-Source"] = "local-heuristic" + return resp + except Exception: + pass + + # 1.5) If enabled, and the default match was skipped (or not found), pick the next candidate + # that is not reserved by a manual pick on another post. + if reserved_other and int(create_time or 0) > 0 and int(width or 0) > 0 and int(height or 0) > 0: + wxid_dir = _resolve_account_wxid_dir(account_dir) + if wxid_dir: + keys = _list_sns_cached_image_candidate_keys( + account_dir_str=str(account_dir), + create_time=int(create_time or 0), + width=int(width or 0), + height=int(height or 0), + ) + base_idx = max(0, int(idx or 0)) + for ck in keys[base_idx:]: + if not ck or ck in reserved_other: + continue + local2 = _resolve_sns_cached_image_path_by_cache_key( + wxid_dir=wxid_dir, + cache_key=str(ck), + create_time=int(create_time or 0), + ) + if not local2: + continue + try: + payload, media_type = _read_and_maybe_decrypt_media(Path(local2), account_dir) + if payload and str(media_type or "").startswith("image/"): + resp = Response(content=payload, media_type=str(media_type or "image/jpeg")) + resp.headers["Cache-Control"] = "public, max-age=86400" + resp.headers["X-SNS-Source"] = "local-heuristic-next" + return resp + except Exception: + continue + + # 2) Fallback to the remote URL (may still 
return a Tencent placeholder image). + u = str(url or "").strip() + if not u: + raise HTTPException(status_code=404, detail="SNS media not found.") + + # Delay-import to avoid pulling requests machinery during normal timeline listing. + from .chat_media import proxy_image # pylint: disable=import-outside-toplevel + + try: + resp0 = await proxy_image(u) + try: + resp0.headers["X-SNS-Source"] = "proxy" + except Exception: + pass + return resp0 + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=502, detail=f"Fetch sns media failed: {e}") + + +@router.get("/api/sns/article_thumb", summary="提取公众号文章封面图") +async def proxy_article_thumb(url: str): + u = str(url or "").strip() + if not u.startswith("http"): + raise HTTPException(status_code=400, detail="Invalid URL") + + try: + async with httpx.AsyncClient(timeout=10.0) as client: + headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"} + resp = await client.get(u, headers=headers) + resp.raise_for_status() + html_text = resp.text + + match = re.search(r'["\'](https?://[^"\']*?mmbiz_[a-zA-Z]+[^"\']*?)["\']', html_text) + + if not match: + raise HTTPException(status_code=404, detail="未在 HTML 中找到图片 URL") + + img_url = match.group(1) + img_url = html.unescape(img_url).replace("&", "&") + + img_resp = await client.get(img_url, headers=headers) + img_resp.raise_for_status() + + return Response( + content=img_resp.content, + media_type=img_resp.headers.get("Content-Type", "image/jpeg") + ) + + except Exception as e: + logger.warning(f"[sns] 提取公众号封面失败 url={u[:50]}... 
: {e}") + raise HTTPException(status_code=404, detail="无法获取文章封面") + + +@router.get("/api/sns/video_remote", summary="获取朋友圈远程视频/实况(下载解密优先)") +async def get_sns_video_remote( + account: Optional[str] = None, + url: Optional[str] = None, + token: Optional[str] = None, + key: Optional[str] = None, + use_cache: int = 1, +): + account_dir = _resolve_account_dir(account) + + try: + use_cache_flag = bool(int(use_cache or 1)) + except Exception: + use_cache_flag = True + + path = await _materialize_sns_remote_video( + account_dir=account_dir, + url=str(url or ""), + key=str(key or ""), + token=str(token or ""), + use_cache=use_cache_flag, + ) + if path is None: + raise HTTPException(status_code=404, detail="SNS remote video not found.") + + headers = {"X-SNS-Source": "remote-video-cache" if use_cache_flag else "remote-video"} + headers["Cache-Control"] = "public, max-age=86400" if use_cache_flag else "no-store" + + if use_cache_flag: + return FileResponse(str(path), media_type="video/mp4", headers=headers) + + # Cache disabled: delete the temp file after response. 
+ return FileResponse( + str(path), + media_type="video/mp4", + headers=headers, + background=BackgroundTask(_best_effort_unlink, str(path)), + ) + + +@router.get("/api/sns/video", summary="获取朋友圈本地缓存视频") +async def get_sns_video( + account: Optional[str] = None, + post_id: Optional[str] = None, + media_id: Optional[str] = None, +): + if not post_id or not media_id: + raise HTTPException(status_code=400, detail="Missing post_id or media_id") + + account_dir = _resolve_account_dir(account) + wxid_dir = _resolve_account_wxid_dir(account_dir) + + if not wxid_dir: + raise HTTPException(status_code=404, detail="WXID dir not found") + + video_path = _resolve_sns_cached_video_path(wxid_dir, post_id, media_id) + + if not video_path: + raise HTTPException(status_code=404, detail="Local video cache not found") + + return FileResponse(video_path, media_type="video/mp4") diff --git a/src/wechat_decrypt_tool/routers/sns_export.py b/src/wechat_decrypt_tool/routers/sns_export.py new file mode 100644 index 0000000..c6b4f7d --- /dev/null +++ b/src/wechat_decrypt_tool/routers/sns_export.py @@ -0,0 +1,114 @@ +import asyncio +import json +import time +from typing import Literal, Optional + +from fastapi import APIRouter, HTTPException, Request +from fastapi.responses import FileResponse, StreamingResponse +from pydantic import BaseModel, Field + +from ..path_fix import PathFixRoute +from ..sns_export_service import SNS_EXPORT_MANAGER + +router = APIRouter(route_class=PathFixRoute) + +ExportScope = Literal["selected", "all"] + + +class SnsExportCreateRequest(BaseModel): + account: Optional[str] = Field(None, description="账号目录名(可选,默认使用第一个)") + scope: ExportScope = Field("selected", description="导出范围:selected=指定联系人;all=全部联系人") + usernames: list[str] = Field(default_factory=list, description="朋友圈 username 列表(scope=selected 时使用)") + use_cache: bool = Field(True, description="是否复用导出过程中的本地缓存(默认开启)") + output_dir: Optional[str] = Field(None, description="导出目录绝对路径(可选;不填时使用默认目录)") + file_name: 
Optional[str] = Field(None, description="导出 zip 文件名(可选,不含/含 .zip 都可)") + + +@router.post("/api/sns/exports", summary="创建朋友圈导出任务(离线 HTML zip)") +async def create_sns_export(req: SnsExportCreateRequest): + job = SNS_EXPORT_MANAGER.create_job( + account=req.account, + scope=req.scope, + usernames=req.usernames, + use_cache=bool(req.use_cache), + output_dir=req.output_dir, + file_name=req.file_name, + ) + return {"status": "success", "job": job.to_public_dict()} + + +@router.get("/api/sns/exports", summary="列出导出任务(内存)") +async def list_sns_exports(): + jobs = [j.to_public_dict() for j in SNS_EXPORT_MANAGER.list_jobs()] + jobs.sort(key=lambda x: int(x.get("createdAt") or 0), reverse=True) + return {"status": "success", "jobs": jobs} + + +@router.get("/api/sns/exports/{export_id}", summary="获取导出任务状态") +async def get_sns_export(export_id: str): + job = SNS_EXPORT_MANAGER.get_job(str(export_id or "").strip()) + if not job: + raise HTTPException(status_code=404, detail="Export not found.") + return {"status": "success", "job": job.to_public_dict()} + + +@router.get("/api/sns/exports/{export_id}/download", summary="下载导出 zip") +async def download_sns_export(export_id: str): + job = SNS_EXPORT_MANAGER.get_job(str(export_id or "").strip()) + if not job: + raise HTTPException(status_code=404, detail="Export not found.") + if not job.zip_path or (not job.zip_path.exists()): + raise HTTPException(status_code=409, detail="Export not ready.") + return FileResponse( + str(job.zip_path), + media_type="application/zip", + filename=job.zip_path.name, + ) + + +@router.get("/api/sns/exports/{export_id}/events", summary="导出任务进度 SSE") +async def stream_sns_export_events(export_id: str, request: Request): + export_id = str(export_id or "").strip() + job0 = SNS_EXPORT_MANAGER.get_job(export_id) + if not job0: + raise HTTPException(status_code=404, detail="Export not found.") + + async def gen(): + last_payload = "" + last_heartbeat = 0.0 + + while True: + if await request.is_disconnected(): + 
break + + job = SNS_EXPORT_MANAGER.get_job(export_id) + if not job: + yield "event: error\ndata: " + json.dumps({"error": "Export not found."}, ensure_ascii=False) + "\n\n" + break + + payload = json.dumps(job.to_public_dict(), ensure_ascii=False) + if payload != last_payload: + last_payload = payload + yield f"data: {payload}\n\n" + + now = time.time() + if now - last_heartbeat > 15: + last_heartbeat = now + yield ": ping\n\n" + + if job.status in {"done", "error", "cancelled"}: + break + + await asyncio.sleep(0.6) + + headers = {"Cache-Control": "no-cache", "X-Accel-Buffering": "no"} + return StreamingResponse(gen(), media_type="text/event-stream", headers=headers) + + +@router.delete("/api/sns/exports/{export_id}", summary="取消导出任务") +async def cancel_sns_export(export_id: str): + ok = SNS_EXPORT_MANAGER.cancel_job(str(export_id or "").strip()) + if not ok: + raise HTTPException(status_code=404, detail="Export not found.") + return {"status": "success"} + diff --git a/src/wechat_decrypt_tool/routers/wechat_detection.py b/src/wechat_decrypt_tool/routers/wechat_detection.py index ae0077d..b2fe1c5 100644 --- a/src/wechat_decrypt_tool/routers/wechat_detection.py +++ b/src/wechat_decrypt_tool/routers/wechat_detection.py @@ -1,5 +1,5 @@ from typing import Optional - +import psutil from fastapi import APIRouter from ..logging_config import get_logger @@ -71,3 +71,49 @@ async def detect_current_account(data_root_path: Optional[str] = None): 'error': str(e), 'data': None, } + + +@router.get("/api/wechat/status", summary="检查微信运行状态") +async def check_wechat_status(): + """ + 检查系统中是否有 Weixin.exe 或 WeChat.exe 进程在运行 + 返回: status=0 成功, wx_status={is_running: bool, pid: int, ...} + """ + process_name_targets = ["Weixin.exe", "WeChat.exe"] + + wx_status = { + "is_running": False, + "pid": None, + "exe_path": None, + "memory_usage_mb": 0.0 + } + + try: + for proc in psutil.process_iter(['pid', 'name', 'exe', 'memory_info']): + try: + if proc.info['name'] and proc.info['name'] in 
process_name_targets: + wx_status["is_running"] = True + wx_status["pid"] = proc.info['pid'] + wx_status["exe_path"] = proc.info['exe'] + + mem = proc.info['memory_info'] + if mem: + wx_status["memory_usage_mb"] = round(mem.rss / (1024 * 1024), 2) + + break + except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess): + continue + + return { + "status": 0, + "errmsg": "ok", + "wx_status": wx_status + } + + except Exception as e: + # 即使出错也返回 JSON,但 status 非 0 + return { + "status": -1, + "errmsg": f"检查进程失败: {str(e)}", + "wx_status": wx_status + } diff --git a/src/wechat_decrypt_tool/routers/wrapped.py b/src/wechat_decrypt_tool/routers/wrapped.py new file mode 100644 index 0000000..e95f339 --- /dev/null +++ b/src/wechat_decrypt_tool/routers/wrapped.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +import asyncio +from typing import Optional + +from fastapi import APIRouter, HTTPException, Path, Query + +from ..path_fix import PathFixRoute +from ..wrapped.service import build_wrapped_annual_card, build_wrapped_annual_meta, build_wrapped_annual_response + +router = APIRouter(route_class=PathFixRoute) + + +@router.get("/api/wrapped/annual", summary="微信聊天年度总结(WeChat Wrapped)- 后端数据") +async def wrapped_annual( + year: Optional[int] = Query(None, description="年份(例如 2026)。默认当前年份。"), + account: Optional[str] = Query(None, description="解密后的账号目录名。默认取第一个可用账号。"), + refresh: bool = Query(False, description="是否强制重新计算(忽略缓存)。"), +): + """返回年度总结完整数据(一次性包含全部卡片,可能较慢)。""" + + # This endpoint performs blocking sqlite/file IO, so run it in a worker thread. 
+ return await asyncio.to_thread(build_wrapped_annual_response, account=account, year=year, refresh=refresh) + + +@router.get("/api/wrapped/annual/meta", summary="微信聊天年度总结(WeChat Wrapped)- 目录(轻量)") +async def wrapped_annual_meta( + year: Optional[int] = Query(None, description="年份(例如 2026)。默认当前年份。"), + account: Optional[str] = Query(None, description="解密后的账号目录名。默认取第一个可用账号。"), + refresh: bool = Query(False, description="是否强制重新计算(忽略缓存)。"), +): + """返回年度总结的目录/元信息,用于前端懒加载每一页。""" + + return await asyncio.to_thread(build_wrapped_annual_meta, account=account, year=year, refresh=refresh) + + +@router.get("/api/wrapped/annual/cards/{card_id}", summary="微信聊天年度总结(WeChat Wrapped)- 单张卡片(按页加载)") +async def wrapped_annual_card( + card_id: int = Path(..., description="卡片ID(与前端页面一一对应)", ge=0), + year: Optional[int] = Query(None, description="年份(例如 2026)。默认当前年份。"), + account: Optional[str] = Query(None, description="解密后的账号目录名。默认取第一个可用账号。"), + refresh: bool = Query(False, description="是否强制重新计算(忽略缓存)。"), +): + """按卡片 ID 返回单页数据(避免首屏一次性计算全部卡片)。""" + + try: + return await asyncio.to_thread( + build_wrapped_annual_card, + account=account, + year=year, + card_id=card_id, + refresh=refresh, + ) + except ValueError as e: + raise HTTPException(status_code=404, detail=str(e)) from e diff --git a/src/wechat_decrypt_tool/runtime_settings.py b/src/wechat_decrypt_tool/runtime_settings.py new file mode 100644 index 0000000..d071d53 --- /dev/null +++ b/src/wechat_decrypt_tool/runtime_settings.py @@ -0,0 +1,175 @@ +from __future__ import annotations + +import json +import os +import re +from pathlib import Path + + +RUNTIME_SETTINGS_FILENAME = "runtime_settings.json" +BACKEND_PORT_KEY = "backend_port" +ENV_PORT_KEY = "WECHAT_TOOL_PORT" +ENV_FILE_KEY = "WECHAT_TOOL_ENV_FILE" +DEFAULT_ENV_FILENAME = ".env" + + +def _parse_port(value: object) -> int | None: + if value is None: + return None + try: + raw = str(value).strip() + except Exception: + return None + if not raw: + return None + try: + port = 
int(raw, 10) + except Exception: + return None + if port < 1 or port > 65535: + return None + return port + + +def get_runtime_settings_path() -> Path: + from .app_paths import get_output_dir + + return get_output_dir() / RUNTIME_SETTINGS_FILENAME + + +def read_backend_port_setting() -> int | None: + path = get_runtime_settings_path() + try: + if not path.is_file(): + return None + data = json.loads(path.read_text(encoding="utf-8") or "{}") + if not isinstance(data, dict): + return None + return _parse_port(data.get(BACKEND_PORT_KEY)) + except Exception: + return None + + +def write_backend_port_setting(port: int | None) -> None: + path = get_runtime_settings_path() + safe_port = _parse_port(port) + try: + path.parent.mkdir(parents=True, exist_ok=True) + except Exception: + return + + try: + data: dict = {} + if path.is_file(): + try: + existing = json.loads(path.read_text(encoding="utf-8") or "{}") + if isinstance(existing, dict): + data = existing + except Exception: + data = {} + + if safe_port is None: + data.pop(BACKEND_PORT_KEY, None) + else: + data[BACKEND_PORT_KEY] = safe_port + + # Keep the file small and stable; remove if empty. 
+ if not data: + try: + path.unlink(missing_ok=True) + except Exception: + pass + return + + path.write_text(json.dumps(data, ensure_ascii=False, indent=2), encoding="utf-8") + except Exception: + return + + +def read_effective_backend_port(default: int) -> tuple[int, str]: + """Return (port, source) where source is one of: env | settings | default.""" + + env_raw = str(os.environ.get("WECHAT_TOOL_PORT", "") or "").strip() + env_port = _parse_port(env_raw) + if env_port is not None: + return env_port, "env" + + settings_port = read_backend_port_setting() + if settings_port is not None: + return settings_port, "settings" + + return int(default), "default" + + +def get_env_file_path() -> Path | None: + """Best-effort env file path for `uv run` (defaults to repo root `.env`).""" + + v = str(os.environ.get(ENV_FILE_KEY, "") or "").strip() + if v: + try: + return Path(v) + except Exception: + return None + + cwd = Path.cwd() + # Heuristic: only write `.env` in a project root (avoid polluting random dirs). 
+ try: + if (cwd / "pyproject.toml").is_file(): + return cwd / DEFAULT_ENV_FILENAME + except Exception: + return None + + return None + + +def _set_env_var_in_file(env_file: Path, key: str, value: str | None) -> bool: + try: + env_file.parent.mkdir(parents=True, exist_ok=True) + except Exception: + return False + + pattern = re.compile(rf"^\s*(?:export\s+)?{re.escape(key)}\s*=") + try: + raw = env_file.read_text(encoding="utf-8") if env_file.is_file() else "" + except Exception: + raw = "" + + lines = raw.splitlines(keepends=True) if raw else [] + out: list[str] = [] + replaced = False + for line in lines: + if pattern.match(line): + if value is None: + continue + if not replaced: + out.append(f"{key}={value}\n") + replaced = True + continue + out.append(line) + + if value is not None and not replaced: + if out and not out[-1].endswith("\n"): + out[-1] = out[-1] + "\n" + out.append(f"{key}={value}\n") + + try: + env_file.write_text("".join(out), encoding="utf-8") + return True + except Exception: + return False + + +def write_backend_port_env_file(port: int | None) -> Path | None: + """Write `WECHAT_TOOL_PORT` into a `.env` file so `uv run main.py` picks it up on restart. + + Note: `uv` doesn't override already-set env vars; `.env` only applies when the variable is not + present in the current shell/session. 
+ """ + + env_file = get_env_file_path() + if not env_file: + return None + + safe_port = _parse_port(port) + ok = _set_env_var_in_file(env_file, ENV_PORT_KEY, str(safe_port) if safe_port is not None else None) + return env_file if ok else None diff --git a/src/wechat_decrypt_tool/sns_export_service.py b/src/wechat_decrypt_tool/sns_export_service.py new file mode 100644 index 0000000..98dc579 --- /dev/null +++ b/src/wechat_decrypt_tool/sns_export_service.py @@ -0,0 +1,1606 @@ +from __future__ import annotations + +"""SNS (Moments) HTML export service (offline ZIP).""" + +import asyncio +from dataclasses import dataclass, field +from datetime import datetime +import hashlib +import html +import json +import os +import re +import sqlite3 +import threading +import time +import uuid +import zipfile +from pathlib import Path +from typing import Any, Literal, Optional + +from .chat_helpers import _load_contact_rows, _pick_display_name, _resolve_account_dir +from .logging_config import get_logger +from .media_helpers import _detect_image_media_type, _read_and_maybe_decrypt_media, _resolve_account_wxid_dir + +# Reuse UI CSS + wxemoji mapping from chat export to keep styling consistent. +from .chat_export_service import ( # pylint: disable=protected-access + _load_ui_css_bundle, + _load_wechat_emoji_regex, + _load_wechat_emoji_table, + _resolve_ui_public_dir, + _zip_write_tree, +) + +# Reuse SNS timeline/local cache helpers. +from .routers.sns import ( # pylint: disable=protected-access + _generate_sns_cache_key, + _resolve_sns_cached_image_path, + _resolve_sns_cached_image_path_by_cache_key, + _resolve_sns_cached_image_path_by_md5, + _resolve_sns_cached_video_path, + list_sns_timeline, +) + +# SNS remote download+decrypt helpers (shared with API endpoints). 
+from .sns_media import ( # pylint: disable=protected-access + fix_sns_cdn_url as _fix_sns_cdn_url, + materialize_sns_remote_video as _materialize_sns_remote_video, + try_fetch_and_decrypt_sns_image_remote as _try_fetch_and_decrypt_sns_image_remote, +) + +logger = get_logger(__name__) + +ExportStatus = Literal["queued", "running", "done", "error", "cancelled"] +ExportScope = Literal["selected", "all"] + +_INVALID_PATH_CHARS = re.compile(r'[<>:"/\\|?*\x00-\x1f]') + + +def _safe_name(s: str, max_len: int = 80) -> str: + t = str(s or "").strip() + if not t: + return "" + t = _INVALID_PATH_CHARS.sub("_", t) + t = re.sub(r"\s+", " ", t).strip() + if len(t) > max_len: + t = t[:max_len].rstrip() + return t + + +def _resolve_export_output_dir(account_dir: Path, output_dir_raw: Any) -> Path: + text = str(output_dir_raw or "").strip() + if not text: + default_dir = account_dir.parents[1] / "exports" / account_dir.name + default_dir.mkdir(parents=True, exist_ok=True) + return default_dir + + out_dir = Path(text).expanduser() + if not out_dir.is_absolute(): + raise ValueError("output_dir must be an absolute path.") + + try: + out_dir.mkdir(parents=True, exist_ok=True) + except Exception as e: + raise ValueError(f"Failed to prepare output_dir: {e}") from e + + return out_dir.resolve() + + +def _mime_to_ext(mt: str) -> str: + m = str(mt or "").split(";", 1)[0].strip().lower() + return { + "image/jpeg": ".jpg", + "image/jpg": ".jpg", + "image/png": ".png", + "image/gif": ".gif", + "image/webp": ".webp", + "image/bmp": ".bmp", + "image/avif": ".avif", + "image/heic": ".heic", + "image/heif": ".heif", + }.get(m, ".bin") + + +def _format_dt(ts_seconds: Any) -> str: + try: + t = int(ts_seconds or 0) + except Exception: + t = 0 + if t <= 0: + return "" + try: + return datetime.fromtimestamp(t).strftime("%Y-%m-%d %H:%M:%S") + except Exception: + return str(t) + + +def _clean_name(v: Any) -> str: + return str(v or "").replace("\xa0", " ").strip() + + +def _esc_text(v: Any) -> str: + 
return html.escape(str(v or ""), quote=False) + + +def _esc_attr(v: Any) -> str: + return html.escape(str(v or ""), quote=True) + + +_SNS_EXPORT_CSS_PATCH = """ +/* Moments export tweaks (keep consistent with frontend `sns.vue`). */ +body { background-color: #EDEDED; } +.wse-live-photo video { display: none; } +.wse-live-photo:hover video { display: block; } +.wse-live-photo:hover img { display: none; } +""" + + +def _load_sns_users(account_dir: Path, *, usernames: Optional[list[str]] = None) -> list[dict[str, Any]]: + """Return [{username, displayName, postCount}] sorted by postCount desc.""" + sns_db_path = account_dir / "sns.db" + if not sns_db_path.exists(): + raise FileNotFoundError("sns.db not found for this account.") + + wanted = {str(u or "").strip() for u in (usernames or []) if str(u or "").strip()} + conn = sqlite3.connect(str(sns_db_path)) + conn.row_factory = sqlite3.Row + try: + rows = conn.execute( + """ + SELECT user_name AS username, COUNT(*) AS postCount + FROM SnsTimeLine + WHERE content IS NOT NULL + AND content != '' + AND content NOT LIKE '%7%' + GROUP BY user_name + ORDER BY postCount DESC + """ + ).fetchall() + finally: + try: + conn.close() + except Exception: + pass + + names = [str(r["username"] or "").strip() for r in (rows or []) if r is not None] + names = [u for u in names if u] + if wanted: + names = [u for u in names if u in wanted] + + contact_db_path = account_dir / "contact.db" + contact_rows = _load_contact_rows(contact_db_path, names) if contact_db_path.exists() else {} + + items: list[dict[str, Any]] = [] + for r in rows or []: + try: + uname = str(r["username"] or "").strip() + except Exception: + uname = "" + if not uname: + continue + if wanted and uname not in wanted: + continue + try: + post_count = int(r["postCount"] or 0) + except Exception: + post_count = 0 + display = _clean_name(_pick_display_name(contact_rows.get(uname), uname)) or uname + items.append({"username": uname, "displayName": display, "postCount": 
post_count}) + return items + + +@dataclass +class ExportProgress: + users_total: int = 0 + users_done: int = 0 + current_username: str = "" + current_display_name: str = "" + posts_exported: int = 0 + media_copied: int = 0 + media_missing: int = 0 + + +@dataclass +class ExportJob: + export_id: str + account: str + status: ExportStatus = "queued" + created_at: float = field(default_factory=time.time) + started_at: Optional[float] = None + finished_at: Optional[float] = None + error: str = "" + zip_path: Optional[Path] = None + options: dict[str, Any] = field(default_factory=dict) + progress: ExportProgress = field(default_factory=ExportProgress) + cancel_requested: bool = False + + def to_public_dict(self) -> dict[str, Any]: + return { + "exportId": self.export_id, + "account": self.account, + "status": self.status, + "createdAt": int(self.created_at), + "startedAt": int(self.started_at) if self.started_at else None, + "finishedAt": int(self.finished_at) if self.finished_at else None, + "error": self.error or "", + "zipPath": str(self.zip_path) if self.zip_path else "", + "zipReady": bool(self.zip_path and self.zip_path.exists()), + "options": self.options, + "progress": { + "usersTotal": self.progress.users_total, + "usersDone": self.progress.users_done, + "currentUsername": self.progress.current_username, + "currentDisplayName": self.progress.current_display_name, + "postsExported": self.progress.posts_exported, + "mediaCopied": self.progress.media_copied, + "mediaMissing": self.progress.media_missing, + }, + } + + +class _JobCancelled(Exception): + pass + + +class SnsExportManager: + def __init__(self) -> None: + self._lock = threading.Lock() + self._jobs: dict[str, ExportJob] = {} + + def list_jobs(self) -> list[ExportJob]: + with self._lock: + return list(self._jobs.values()) + + def get_job(self, export_id: str) -> Optional[ExportJob]: + with self._lock: + return self._jobs.get(export_id) + + def cancel_job(self, export_id: str) -> bool: + with self._lock: + 
job = self._jobs.get(export_id) + if not job: + return False + job.cancel_requested = True + if job.status in {"queued"}: + job.status = "cancelled" + job.finished_at = time.time() + return True + + def create_job( + self, + *, + account: Optional[str], + scope: ExportScope, + usernames: list[str], + use_cache: bool, + output_dir: Optional[str], + file_name: Optional[str], + ) -> ExportJob: + account_dir = _resolve_account_dir(account) + export_id = uuid.uuid4().hex[:12] + + job = ExportJob( + export_id=export_id, + account=account_dir.name, + status="queued", + options={ + "scope": str(scope or "selected"), + "usernames": [str(u or "").strip() for u in (usernames or []) if str(u or "").strip()], + "useCache": bool(use_cache), + "outputDir": str(output_dir or "").strip(), + "fileName": str(file_name or "").strip(), + }, + ) + + with self._lock: + self._jobs[export_id] = job + + t = threading.Thread( + target=self._run_job_safe, + args=(job, account_dir), + name=f"sns-export-{export_id}", + daemon=True, + ) + t.start() + return job + + def _should_cancel(self, job: ExportJob) -> bool: + with self._lock: + return bool(job.cancel_requested) + + def _run_job_safe(self, job: ExportJob, account_dir: Path) -> None: + tmp_zip: Optional[Path] = None + try: + tmp_zip = self._run_job(job, account_dir) + except _JobCancelled: + logger.info("sns export cancelled: %s", job.export_id) + with self._lock: + job.status = "cancelled" + job.finished_at = time.time() + if tmp_zip is not None: + try: + tmp_zip.unlink(missing_ok=True) + except Exception: + pass + except Exception as e: + logger.exception("sns export job failed: %s: %s", job.export_id, e) + with self._lock: + job.status = "error" + job.error = str(e) + job.finished_at = time.time() + if tmp_zip is not None: + try: + tmp_zip.unlink(missing_ok=True) + except Exception: + pass + + def _run_job(self, job: ExportJob, account_dir: Path) -> Path: + with self._lock: + if job.status == "cancelled": + raise _JobCancelled() + 
job.status = "running" + job.started_at = time.time() + job.error = "" + + opts = dict(job.options or {}) + scope_raw = str(opts.get("scope") or "selected").strip() or "selected" + scope: ExportScope = "all" if scope_raw == "all" else "selected" # type: ignore[assignment] + target_usernames = [str(u or "").strip() for u in (opts.get("usernames") or []) if str(u or "").strip()] + if scope == "selected" and not target_usernames: + raise ValueError("No target usernames to export.") + + use_cache = bool(opts.get("useCache")) + exports_root = _resolve_export_output_dir(account_dir, opts.get("outputDir")) + ts = datetime.now().strftime("%Y%m%d_%H%M%S") + + base_name = str(opts.get("fileName") or "").strip() + if not base_name: + if scope == "all": + base_name = f"wechat_sns_export_{account_dir.name}_{ts}_{job.export_id}.zip" + else: + hint = _safe_name(target_usernames[0], max_len=40) or "selected" + base_name = f"wechat_sns_export_{account_dir.name}_{hint}_{ts}_{job.export_id}.zip" + if not base_name.lower().endswith(".zip"): + base_name += ".zip" + base_name = _safe_name(base_name, max_len=120) or f"wechat_sns_export_{account_dir.name}_{ts}_{job.export_id}.zip" + + final_zip = (exports_root / base_name).resolve() + tmp_zip = (exports_root / f".{base_name}.{job.export_id}.part").resolve() + try: + tmp_zip.unlink(missing_ok=True) + except Exception: + pass + + report: dict[str, Any] = {"errors": []} + ui_public_dir = _resolve_ui_public_dir() + + emoji_table = _load_wechat_emoji_table() + emoji_regex = _load_wechat_emoji_regex() + + def render_text_with_emojis(v: Any) -> str: + text = str(v or "") + if not text: + return "" + if not emoji_table or emoji_regex is None: + return _esc_text(text) + + parts: list[str] = [] + last = 0 + for match in emoji_regex.finditer(text): + start = match.start() + end = match.end() + if start > last: + parts.append(_esc_text(text[last:start])) + + key = match.group(0) + value = str(emoji_table.get(key) or "") + if value: + src = 
f"wxemoji/{value}" + parts.append( + '' + ) + else: + parts.append(_esc_text(key)) + last = end + + if last < len(text): + parts.append(_esc_text(text[last:])) + return "".join(parts) + + def should_cancel() -> None: + if self._should_cancel(job): + raise _JobCancelled() + + written: set[str] = set() + media_written: dict[str, str] = {} + avatar_written: dict[str, str] = {} + + wxid_dir = _resolve_account_wxid_dir(account_dir) + + avatar_conn: Optional[sqlite3.Connection] = None + head_image_db_path = account_dir / "head_image.db" + if head_image_db_path.exists(): + try: + avatar_conn = sqlite3.connect(str(head_image_db_path)) + avatar_conn.row_factory = sqlite3.Row + except Exception: + avatar_conn = None + + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + def run_async(coro): + return loop.run_until_complete(coro) + + _HEX_ONLY_RE = re.compile(r"[^0-9a-fA-F]+") + + def _pick_str(*vals: Any) -> str: + for v in vals: + try: + s = str(v or "").strip() + except Exception: + s = "" + if s: + return s + return "" + + def _normalize_hex32(v: Any) -> str: + raw = str(v or "").strip() + if not raw: + return "" + hex_only = _HEX_ONLY_RE.sub("", raw).lower() + return hex_only[:32] if len(hex_only) >= 32 else "" + + def _sns_media_token(m: dict[str, Any]) -> str: + url_attrs = m.get("urlAttrs") if isinstance(m.get("urlAttrs"), dict) else {} + thumb_attrs = m.get("thumbAttrs") if isinstance(m.get("thumbAttrs"), dict) else {} + return _pick_str(m.get("token"), url_attrs.get("token"), thumb_attrs.get("token")) + + def _sns_media_key(m: dict[str, Any]) -> str: + url_attrs = m.get("urlAttrs") if isinstance(m.get("urlAttrs"), dict) else {} + thumb_attrs = m.get("thumbAttrs") if isinstance(m.get("thumbAttrs"), dict) else {} + return _pick_str(m.get("key"), url_attrs.get("key"), thumb_attrs.get("key")) + + def _sns_media_md5(m: dict[str, Any], raw_url: str) -> str: + url_attrs = m.get("urlAttrs") if isinstance(m.get("urlAttrs"), dict) else {} + thumb_attrs = 
m.get("thumbAttrs") if isinstance(m.get("thumbAttrs"), dict) else {} + md5_raw = _pick_str(url_attrs.get("md5"), thumb_attrs.get("md5"), url_attrs.get("MD5"), thumb_attrs.get("MD5")) + if not md5_raw: + match = re.search(r"[?&]md5=([0-9a-fA-F]{16,32})", str(raw_url or "")) + if match and match.group(1): + md5_raw = match.group(1) + return _normalize_hex32(md5_raw) + + def _sns_media_size(m: dict[str, Any]) -> tuple[int, int, int]: + size = m.get("size") if isinstance(m.get("size"), dict) else {} + try: + w0 = int(size.get("width") or size.get("w") or 0) + except Exception: + w0 = 0 + try: + h0 = int(size.get("height") or size.get("h") or 0) + except Exception: + h0 = 0 + ts0 = size.get("totalSize") + if ts0 is None: + ts0 = size.get("total_size") + if ts0 is None: + ts0 = size.get("total") + try: + t0 = int(ts0 or 0) + except Exception: + t0 = 0 + return w0, h0, t0 + + def _response_bytes(resp: Any) -> tuple[bytes, str]: + if resp is None: + return b"", "" + mt = str(getattr(resp, "media_type", "") or "") + path = getattr(resp, "path", None) + if path: + try: + data = Path(str(path)).read_bytes() + except Exception: + data = b"" + if not mt and data: + mt = _detect_image_media_type(data[:32]) + return bytes(data), mt + try: + data = bytes(getattr(resp, "body", b"") or b"") + except Exception: + data = b"" + if not mt and data: + mt = _detect_image_media_type(data[:32]) + return data, mt + + def _write_image_payload( + *, + zf: zipfile.ZipFile, + payload: bytes, + media_type: str, + cache_key: str, + subdir: str, + ) -> str: + if not payload: + return "" + mt = str(media_type or "").split(";", 1)[0].strip() + if (not mt) or mt == "application/octet-stream": + mt = _detect_image_media_type(payload[:32]) + if not mt: + return "" + ext = _mime_to_ext(mt) + arc = f"media/{str(subdir or 'images').strip().strip('/')}/{cache_key}{ext}".replace("\\", "/") + try: + if arc not in written: + zf.writestr(arc, payload) + written.add(arc) + with self._lock: + 
job.progress.media_copied += 1 + return arc + except Exception: + return "" + + def export_avatar_to_zip(*, zf: zipfile.ZipFile, username: str, display_name: str) -> str: + uname0 = str(username or "").strip() + if not uname0: + return "" + if uname0 in avatar_written: + return avatar_written[uname0] + + payload = b"" + mt = "" + + if avatar_conn is not None: + try: + row = avatar_conn.execute( + "SELECT image_buffer FROM head_image WHERE username = ? ORDER BY update_time DESC LIMIT 1", + (uname0,), + ).fetchone() + if row is not None and row[0] is not None: + buf = row[0] + if isinstance(buf, (bytes, bytearray)): + payload = bytes(buf) + elif isinstance(buf, memoryview): + payload = buf.tobytes() + else: + payload = bytes(buf) + except Exception: + payload = b"" + + # Fallback: reuse the backend avatar endpoint (supports remote URL cache). + if not payload: + try: + from .routers.chat_media import get_chat_avatar # pylint: disable=import-outside-toplevel + + resp = run_async(get_chat_avatar(username=uname0, account=account_dir.name)) + payload2, mt2 = _response_bytes(resp) + if payload2: + payload = payload2 + mt = mt2 + except Exception: + payload = b"" + + if not payload: + avatar_written[uname0] = "" + return "" + + if not mt: + mt = _detect_image_media_type(payload[:32]) + + cache_key = hashlib.md5(f"avatar|{uname0}".encode("utf-8", errors="ignore")).hexdigest() + arc = _write_image_payload(zf=zf, payload=payload, media_type=mt, cache_key=cache_key, subdir="avatars") + avatar_written[uname0] = arc + return arc + + def export_image_to_zip( + *, + zf: zipfile.ZipFile, + post: dict[str, Any], + media: dict[str, Any], + idx: int, + prefer_thumb: bool = False, + ) -> str: + m = media if isinstance(media, dict) else {} + raw_url = str(m.get("thumb") or m.get("url") or "").strip() if prefer_thumb else str(m.get("url") or m.get("thumb") or "").strip() + if not raw_url: + return "" + + token = _sns_media_token(m) + key = _sns_media_key(m) + fixed = 
_fix_sns_cdn_url(raw_url, token=token, is_video=False) + + post_id = str(post.get("id") or post.get("tid") or "").strip() + media_id = str(m.get("id") or "").strip() + kind = "thumb" if prefer_thumb else "url" + if post_id and media_id: + ident = f"snsimg|{kind}|{post_id}|{media_id}" + else: + ident = f"snsimg|{kind}|{fixed or raw_url}|{key}" + cache_key = hashlib.md5(ident.encode("utf-8", errors="ignore")).hexdigest() + + if cache_key in media_written: + return media_written[cache_key] + + payload = b"" + mt = "" + + # 0) Prefer WeFlow-style remote download+decrypt (accurate when keys are present). + if fixed: + should_cancel() + res = run_async( + _try_fetch_and_decrypt_sns_image_remote( + account_dir=account_dir, + url=fixed, + key=str(key or ""), + token=str(token or ""), + use_cache=use_cache, + ) + ) + if res is not None: + payload = bytes(res.payload or b"") + mt = str(res.media_type or "") + + # 1) Local cache fallback (only when cache is enabled; mirrors `/api/sns/media` semantics). + if (not payload) and use_cache: + try: + post_type = int(post.get("type") or 1) + except Exception: + post_type = 1 + try: + media_type = int(m.get("type") or 2) + except Exception: + media_type = 2 + + try: + create_time = int(post.get("createTime") or 0) + except Exception: + create_time = 0 + + w0, h0, total_size = _sns_media_size(m) + md5_32 = _sns_media_md5(m, fixed or raw_url) + + local_path = "" + + # Special case: Moments cover background (type=7) may live in `business/sns/bkg`. 
+ if wxid_dir and post_id and media_id and post_type == 7: + try: + raw_key = f"{post_id}_{media_id}_4" + bkg_md5 = hashlib.md5(raw_key.encode("utf-8", errors="ignore")).hexdigest() + bkg_path = wxid_dir / "business" / "sns" / "bkg" / bkg_md5[:2] / bkg_md5 + if bkg_path.exists() and bkg_path.is_file(): + payload = bkg_path.read_bytes() + mt = "image/jpeg" + except Exception: + payload = b"" + mt = "" + + # Deterministic cache key match: md5(tid_mediaId_type) + if (not payload) and wxid_dir and post_id and media_id: + try: + key_post = _generate_sns_cache_key(post_id, media_id, post_type) + local_path = _resolve_sns_cached_image_path_by_cache_key( + wxid_dir=wxid_dir, cache_key=key_post, create_time=0 + ) or "" + except Exception: + local_path = "" + + if (not local_path) and post_type != media_type: + try: + key_media = _generate_sns_cache_key(post_id, media_id, media_type) + local_path = _resolve_sns_cached_image_path_by_cache_key( + wxid_dir=wxid_dir, cache_key=key_media, create_time=0 + ) or "" + except Exception: + local_path = "" + + # Md5-based SNS cache layout fallback (when available). + if (not payload) and (not local_path) and wxid_dir and md5_32: + try: + local_path = _resolve_sns_cached_image_path_by_md5( + wxid_dir=wxid_dir, + md5=md5_32, + create_time=create_time, + ) or "" + except Exception: + local_path = "" + + # Heuristic match by (create_time, width, height, idx, total_size). 
+ if (not payload) and (not local_path): + try: + local_path = _resolve_sns_cached_image_path( + account_dir_str=str(account_dir), + create_time=create_time, + width=int(w0 or 0), + height=int(h0 or 0), + idx=max(0, int(idx or 0)), + total_size=int(total_size or 0), + ) or "" + except Exception: + local_path = "" + + if (not payload) and local_path: + try: + payload2, mt2 = _read_and_maybe_decrypt_media(Path(local_path), account_dir) + if payload2 and str(mt2 or "").startswith("image/"): + payload = payload2 + mt = str(mt2 or "") + except Exception: + payload = b"" + mt = "" + + # 2) Last resort: proxy the raw URL (may return a Tencent placeholder image). + if (not payload) and str(raw_url or "").startswith("http"): + try: + from .routers.chat_media import proxy_image # pylint: disable=import-outside-toplevel + + should_cancel() + resp = run_async(proxy_image(url=str(raw_url))) + payload2, mt2 = _response_bytes(resp) + if payload2: + payload = payload2 + mt = mt2 + except Exception: + payload = b"" + mt = "" + + if not payload: + with self._lock: + job.progress.media_missing += 1 + media_written[cache_key] = "" + return "" + + arc = _write_image_payload(zf=zf, payload=payload, media_type=mt, cache_key=cache_key, subdir="images") + if not arc: + with self._lock: + job.progress.media_missing += 1 + media_written[cache_key] = "" + return "" + + media_written[cache_key] = arc + return arc + + def export_video_to_zip( + *, + zf: zipfile.ZipFile, + post_id: str, + media_id: str, + url: str, + key: str, + token: str, + ) -> str: + fixed = _fix_sns_cdn_url(str(url or ""), token=str(token or ""), is_video=True) + if not fixed: + return "" + + ident = f"snsvid|{str(post_id or '').strip()}|{str(media_id or '').strip()}|{fixed}|{key}" + cache_key = hashlib.md5(ident.encode("utf-8", errors="ignore")).hexdigest() + + if cache_key in media_written: + return media_written[cache_key] + + # Prefer local cached video when possible (fast, offline-friendly). 
+ if use_cache and wxid_dir and str(post_id or "").strip() and str(media_id or "").strip(): + try: + local = _resolve_sns_cached_video_path(wxid_dir, str(post_id), str(media_id)) + except Exception: + local = None + if local: + arc = f"media/videos/{cache_key}.mp4" + if arc not in written: + try: + zf.write(str(local), arcname=arc) + written.add(arc) + with self._lock: + job.progress.media_copied += 1 + except Exception: + arc = "" + if arc: + media_written[cache_key] = arc + return arc + + should_cancel() + path = run_async( + _materialize_sns_remote_video( + account_dir=account_dir, + url=fixed, + key=str(key or ""), + token=str(token or ""), + use_cache=use_cache, + ) + ) + if path is None: + with self._lock: + job.progress.media_missing += 1 + media_written[cache_key] = "" + return "" + + arc = f"media/videos/{cache_key}.mp4" + if arc not in written: + try: + zf.write(str(path), arcname=arc) + written.add(arc) + with self._lock: + job.progress.media_copied += 1 + # When cache is disabled, `_materialize_sns_remote_video` returns a temp file path. + # Clean it up after the zip entry is written to avoid leaving `.tmp` files behind. 
+ if not use_cache: + try: + Path(str(path)).unlink(missing_ok=True) + except Exception: + pass + except Exception: + with self._lock: + job.progress.media_missing += 1 + media_written[cache_key] = "" + return "" + + media_written[cache_key] = arc + return arc + + def render_media_block(*, zf: zipfile.ZipFile, post: dict[str, Any]) -> str: + media = post.get("media") if isinstance(post.get("media"), list) else [] + if not media: + return "" + + def is_live_photo(m: dict[str, Any]) -> bool: + lp = m.get("livePhoto") + return isinstance(lp, dict) and bool(str(lp.get("url") or "").strip()) + + def media_size_key(m: dict[str, Any]) -> str: + try: + t0 = str(m.get("type") or "").strip() + except Exception: + t0 = "" + w0, h0, _ts0 = _sns_media_size(m) + if w0 <= 0 or h0 <= 0: + return "" + return f"{t0}:{w0}x{h0}" + + def media_size_group_index(idx0: int) -> int: + i0 = int(idx0 or 0) + if i0 <= 0 or i0 >= len(media): + return max(0, i0) + m0 = media[i0] if isinstance(media[i0], dict) else {} + key0 = media_size_key(m0) + if not key0: + return max(0, i0) + count = 0 + for j in range(i0): + mj = media[j] if isinstance(media[j], dict) else {} + if media_size_key(mj) == key0: + count += 1 + return count + + if len(media) == 1: + m0 = media[0] if isinstance(media[0], dict) else {} + mtype = int(m0.get("type") or 0) + idx_group = 0 + post_id = str(post.get("id") or "").strip() + media_id = str(m0.get("id") or "").strip() + + if mtype == 6: + vid_arc = export_video_to_zip( + zf=zf, + post_id=post_id, + media_id=media_id, + url=str(m0.get("url") or ""), + key=str(m0.get("videoKey") or ""), + token=_sns_media_token(m0), + ) + poster_arc = export_image_to_zip( + zf=zf, + post=post, + media=m0, + idx=idx_group, + prefer_thumb=True, + ) + if not vid_arc: + return "" + poster_attr = f' poster="{_esc_attr(poster_arc)}"' if poster_arc else "" + return ( + '
' + '
' + f'' + '
' + '
' + '' + "
" + "
" + ) + + img_arc = export_image_to_zip( + zf=zf, + post=post, + media=m0, + idx=idx_group, + prefer_thumb=True, + ) + if not img_arc: + return "" + + if is_live_photo(m0): + lp = m0.get("livePhoto") if isinstance(m0.get("livePhoto"), dict) else {} + vid_arc = export_video_to_zip( + zf=zf, + url=str(lp.get("url") or ""), + key=str(lp.get("key") or m0.get("videoKey") or ""), + token=_pick_str(lp.get("token"), _sns_media_token(m0)), + post_id="", + media_id="", + ) + video_html = "" + if vid_arc: + video_html = ( + f'" + ) + return ( + '
' + '
' + f'' + f'' + "" + f"{video_html}" + '
实况
' + "
" + ) + + return ( + '" + ) + + cells: list[str] = [] + for idx0, m_raw in enumerate(media[:9]): + m = m_raw if isinstance(m_raw, dict) else {} + mtype = int(m.get("type") or 0) + idx_group = media_size_group_index(idx0) + post_id = str(post.get("id") or "").strip() + media_id = str(m.get("id") or "").strip() + + if mtype == 6: + vid_arc = export_video_to_zip( + zf=zf, + post_id=post_id, + media_id=media_id, + url=str(m.get("url") or ""), + key=str(m.get("videoKey") or ""), + token=_sns_media_token(m), + ) + poster_arc = export_image_to_zip( + zf=zf, + post=post, + media=m, + idx=idx_group, + prefer_thumb=True, + ) + if not vid_arc: + continue + poster_attr = f' poster="{_esc_attr(poster_arc)}"' if poster_arc else "" + cells.append( + '
' + f'' + '
' + '
' + '' + "
" + "
" + ) + continue + + img_arc = export_image_to_zip( + zf=zf, + post=post, + media=m, + idx=idx_group, + prefer_thumb=True, + ) + if not img_arc: + continue + + if is_live_photo(m): + lp = m.get("livePhoto") if isinstance(m.get("livePhoto"), dict) else {} + vid_arc = export_video_to_zip( + zf=zf, + url=str(lp.get("url") or ""), + key=str(lp.get("key") or m.get("videoKey") or ""), + token=_pick_str(lp.get("token"), _sns_media_token(m)), + post_id="", + media_id="", + ) + video_html = "" + if vid_arc: + video_html = ( + f'' + ) + cells.append( + '
' + f'' + f'' + "" + f"{video_html}" + '
实况
' + "
" + ) + else: + cells.append( + '" + ) + + if not cells: + return "" + return '
' + "".join(cells) + "
" + + def render_post_html(*, zf: zipfile.ZipFile, post: dict[str, Any]) -> str: + pid = str(post.get("id") or "").strip() + uname = str(post.get("username") or "").strip() + display = _clean_name(post.get("displayName")) or uname + ts = _format_dt(post.get("createTime")) + content_desc = str(post.get("contentDesc") or "") + location = str(post.get("location") or "").strip() + likes = post.get("likes") if isinstance(post.get("likes"), list) else [] + comments = post.get("comments") if isinstance(post.get("comments"), list) else [] + + def guess_official_name_from_title(title: str) -> str: + t0 = str(title or "").strip() + if not t0: + return "" + m = re.search(r"[《「【](.+?)[》」】]", t0) + return str(m.group(1) or "").strip() if m and m.group(1) else "" + + def format_moment_type_label(p: dict[str, Any]) -> str: + try: + t = int(p.get("type") or 0) + except Exception: + t = 0 + if t == 3: + off = p.get("official") if isinstance(p.get("official"), dict) else {} + st0 = off.get("serviceType") if isinstance(off, dict) else None + try: + st = int(st0) if st0 not in (None, "") else None + except Exception: + st = None + prefix = "服务号" if st == 1 else "公众号" + name = str(off.get("displayName") or "").strip() if isinstance(off, dict) else "" + if not name: + name = guess_official_name_from_title(str(p.get("title") or "")) + return f"{prefix}·{name}" if name else prefix + if t == 28: + ff = p.get("finderFeed") if isinstance(p.get("finderFeed"), dict) else {} + name = str(ff.get("nickname") or "").strip() if isinstance(ff, dict) else "" + return f"视频号·{name}" if name else "视频号" + if t in (5, 42): + name0 = str(p.get("sourceName") or "").strip() + if name0: + return name0 + url0 = str(p.get("contentUrl") or "").strip() + if not url0: + ml0 = p.get("media") if isinstance(p.get("media"), list) else [] + m0 = ml0[0] if (ml0 and isinstance(ml0[0], dict)) else {} + url0 = str(m0.get("url") or "").strip() + if url0: + # host+path (no query) as a readable fallback label. 
+ s = re.sub(r"^https?://", "", url0.strip(), flags=re.I) + s = s.split("#", 1)[0].split("?", 1)[0].rstrip("/") + return s or ("音乐" if t == 42 else "外部分享") + return "音乐" if t == 42 else "外部分享" + return "" + + def format_finder_feed_card_text(p: dict[str, Any]) -> str: + title0 = str(p.get("title") or "").strip() + if title0: + return title0 + ff = p.get("finderFeed") if isinstance(p.get("finderFeed"), dict) else {} + desc0 = str(ff.get("desc") or "").strip() if isinstance(ff, dict) else "" + if desc0: + return re.sub(r"\\s+", " ", desc0) + fallback0 = str(p.get("contentDesc") or "").strip() + return re.sub(r"\\s+", " ", fallback0) if fallback0 else "视频号" + + def export_external_thumb(url: str, *, kind: str) -> str: + u0 = str(url or "").strip() + if not u0 or (not u0.lower().startswith("http")): + return "" + ident = f"extimg|{kind}|{u0}" + ck = hashlib.md5(ident.encode("utf-8", errors="ignore")).hexdigest() + if ck in media_written: + return media_written[ck] + try: + from .routers.chat_media import proxy_image # pylint: disable=import-outside-toplevel + + should_cancel() + resp = run_async(proxy_image(url=u0)) + payload, mt = _response_bytes(resp) + except Exception: + payload, mt = b"", "" + if not payload: + media_written[ck] = "" + return "" + arc0 = _write_image_payload(zf=zf, payload=payload, media_type=mt, cache_key=ck, subdir="images") + media_written[ck] = arc0 + return arc0 + + avatar_arc = export_avatar_to_zip(zf=zf, username=uname, display_name=display) + if avatar_arc: + avatar_html = ( + '
' + f'{_esc_attr(display or uname)}' + "
" + ) + else: + fallback = _esc_text((display or uname or "友")[:1] or "友") + avatar_html = ( + '
' + '
{fallback}
' + ) + + moment_label = format_moment_type_label(post) + try: + post_type = int(post.get("type") or 1) + except Exception: + post_type = 1 + + out: list[str] = [] + out.append(f'
') + out.append('
') + out.append(avatar_html) + out.append('
') + out.append(f'
{_esc_text(display)}
') + + if content_desc: + out.append( + '
' + + render_text_with_emojis(content_desc) + + "
" + ) + + if post_type == 3: + # Official account article card (matches `sns.vue` layout). + content_url = str(post.get("contentUrl") or "").strip() + title0 = str(post.get("title") or "").strip() + media_list = post.get("media") if isinstance(post.get("media"), list) else [] + m0 = media_list[0] if (media_list and isinstance(media_list[0], dict)) else {} + thumb_arc = export_image_to_zip(zf=zf, post=post, media=m0, idx=0, prefer_thumb=True) if m0 else "" + + # Best-effort: extract thumb from mp.weixin.qq.com HTML when SNS media is missing. + if (not thumb_arc) and content_url.lower().startswith("http"): + try: + from .routers.sns import proxy_article_thumb # pylint: disable=import-outside-toplevel + + should_cancel() + resp = run_async(proxy_article_thumb(url=content_url)) + payload, mt = _response_bytes(resp) + if payload: + ck = hashlib.md5(f"articlethumb|{content_url}".encode("utf-8", errors="ignore")).hexdigest() + if ck in media_written: + thumb_arc = media_written[ck] + else: + thumb_arc = _write_image_payload( + zf=zf, payload=payload, media_type=mt, cache_key=ck, subdir="images" + ) + media_written[ck] = thumb_arc + except Exception: + pass + + out.append('") + elif post_type in (5, 42): + # External share card (WeChat-like, clickable). 
+ content_url = str(post.get("contentUrl") or "").strip() + title0 = str(post.get("title") or "").strip() + media_list = post.get("media") if isinstance(post.get("media"), list) else [] + m0 = media_list[0] if (media_list and isinstance(media_list[0], dict)) else {} + if not content_url and m0: + content_url = str(m0.get("url") or "").strip() + + if not title0: + title0 = content_url or ("音乐分享" if post_type == 42 else "外部分享") + + thumb_arc = export_image_to_zip(zf=zf, post=post, media=m0, idx=0, prefer_thumb=True) if m0 else "" + + placeholder = "音乐" if post_type == 42 else "链接" + out.append('") + elif post_type == 28 and isinstance(post.get("finderFeed"), dict) and post.get("finderFeed"): + ff = post.get("finderFeed") if isinstance(post.get("finderFeed"), dict) else {} + thumb_url = str(ff.get("thumbUrl") or "").strip() if isinstance(ff, dict) else "" + thumb_arc = export_external_thumb(thumb_url, kind="finder") if thumb_url else "" + out.append('
') + out.append('
') + if thumb_arc: + out.append( + f'' + ) + else: + out.append( + '
' + f'{_esc_text(format_finder_feed_card_text(post))}' + "
" + ) + out.append('
') + out.append('
') + out.append( + '' + ) + out.append("
") + else: + out.append(render_media_block(zf=zf, post=post)) + + if location: + out.append(f'
{_esc_text(location)}
') + + out.append('
') + if ts: + out.append(f'{_esc_text(ts)}') + if moment_label: + out.append( + f'{_esc_text(moment_label)}' + ) + out.append("
") + + if (likes and len(likes) > 0) or (comments and len(comments) > 0): + out.append('
') + if likes and len(likes) > 0: + like_names = "、".join([_clean_name(x) for x in likes if _clean_name(x)]) + out.append('
') + out.append( + '' + '' + "" + ) + out.append(f'
{_esc_text(like_names)}
') + out.append("
") + + if likes and len(likes) > 0 and comments and len(comments) > 0: + out.append('
') + + if comments and len(comments) > 0: + out.append('
') + for c0 in comments: + c = c0 if isinstance(c0, dict) else {} + cn = _clean_name(c.get("nickname") or c.get("displayName") or c.get("username") or "") or "未知" + refn = _clean_name(c.get("refNickname") or c.get("refUsername") or c.get("refUserName") or "") + text = str(c.get("content") or "").strip() + out.append('
') + out.append(f'{_esc_text(cn)}') + if refn: + out.append('回复') + out.append(f'{_esc_text(refn)}') + out.append(': ') + out.append(render_text_with_emojis(text)) + out.append("
") + out.append("
") + out.append("
") + + out.append("
") + return "".join(out) + + def render_cover_header_html( + *, + zf: zipfile.ZipFile, + username: str, + display_name: str, + cover_data: Optional[dict[str, Any]], + ) -> str: + cover = cover_data if isinstance(cover_data, dict) else {} + media_list = cover.get("media") if isinstance(cover.get("media"), list) else [] + m0 = media_list[0] if (media_list and isinstance(media_list[0], dict)) else {} + + cover_post: dict[str, Any] = {} + try: + cover_post = dict(cover) + except Exception: + cover_post = {} + cover_post.setdefault("type", 7) + cover_post.setdefault("id", str(cover.get("id") or "").strip()) + + cover_arc = export_image_to_zip(zf=zf, post=cover_post, media=m0, idx=0, prefer_thumb=False) if m0 else "" + avatar_arc = export_avatar_to_zip(zf=zf, username=username, display_name=display_name) + + out: list[str] = [] + out.append('
') + out.append('
') + if cover_arc: + out.append( + f'朋友圈封面' + ) + out.append("
") + + out.append('
') + out.append( + f'
{_esc_text(display_name or username)}
' + ) + + out.append('
') + if avatar_arc: + out.append( + f'' + ) + else: + fallback = _esc_text(((display_name or username or "友")[:1]) or "友") + out.append( + '
{fallback}
' + ) + out.append("
") + out.append("
") + return "".join(out) + + try: + with zipfile.ZipFile(str(tmp_zip), mode="w", compression=zipfile.ZIP_DEFLATED) as zf: + css_payload = _load_ui_css_bundle(ui_public_dir=ui_public_dir, report=report) + "\n\n" + _SNS_EXPORT_CSS_PATCH + zf.writestr("assets/wechat-sns-export.css", css_payload) + written.add("assets/wechat-sns-export.css") + + repo_root = Path(__file__).resolve().parents[2] + wxemoji_src: Optional[Path] = None + if ui_public_dir is not None: + cand = Path(ui_public_dir) / "wxemoji" + if cand.is_dir(): + wxemoji_src = cand + if wxemoji_src is None: + cand = repo_root / "frontend" / "public" / "wxemoji" + if cand.is_dir(): + wxemoji_src = cand + if wxemoji_src is not None: + _zip_write_tree(zf=zf, src_dir=wxemoji_src, dest_prefix="wxemoji", written=written) + + if scope == "all": + users = _load_sns_users(account_dir) + else: + users = _load_sns_users(account_dir, usernames=target_usernames) + order = {u: i for i, u in enumerate(target_usernames)} + users.sort(key=lambda x: order.get(str(x.get("username") or ""), 10**9)) + + with self._lock: + job.progress.users_total = len(users) + + user_pages: list[dict[str, Any]] = [] + css_href = "assets/wechat-sns-export.css" + + for i, u in enumerate(users): + should_cancel() + uname = str(u.get("username") or "").strip() + display = _clean_name(u.get("displayName")) or uname + with self._lock: + job.progress.current_username = uname + job.progress.current_display_name = display + + posts_all: list[dict[str, Any]] = [] + cover_data: Optional[dict[str, Any]] = None + off = 0 + while True: + should_cancel() + resp = list_sns_timeline( + account=account_dir.name, + limit=200, + offset=off, + usernames=uname, + keyword=None, + ) + if off == 0 and cover_data is None and isinstance(resp, dict) and isinstance(resp.get("cover"), dict): + cover_data = resp.get("cover") + items = resp.get("timeline") if isinstance(resp, dict) else None + items = items if isinstance(items, list) else [] + if not items: + break + 
posts_all.extend([p for p in items if isinstance(p, dict)]) + off += len(items) + if not bool(resp.get("hasMore")): + break + + post_parts: list[str] = [] + for p in posts_all: + should_cancel() + post_parts.append(render_post_html(zf=zf, post=p)) + with self._lock: + job.progress.posts_exported += 1 + + safe_uname = _safe_name(uname, max_len=80) or hashlib.md5(uname.encode("utf-8", errors="ignore")).hexdigest()[:12] + page_name = f"sns_{safe_uname}.html" + title = f"朋友圈导出 - {display}" + back_link = ( + '← 返回' + if scope == "all" + else "" + ) + cover_html = render_cover_header_html(zf=zf, username=uname, display_name=display, cover_data=cover_data) + page_html = "\n".join( + [ + "", + "", + "", + '', + '', + f"{_esc_text(title)}", + f'', + "", + '', + '
', + '
', + cover_html, + ('
' + back_link + (f'
{_esc_text(uname)}
' if uname else "") + "
") if back_link else "", + "".join(post_parts), + "
", + "
", + "", + "", + "", + ] + ) + zf.writestr(page_name, page_html) + written.add(page_name) + + user_pages.append( + { + "username": uname, + "displayName": display, + "postCount": int(u.get("postCount") or 0), + "page": page_name, + } + ) + + with self._lock: + job.progress.users_done = i + 1 + + if scope == "all": + rows: list[str] = [] + for u in user_pages: + uname = str(u.get("username") or "").strip() + display = _clean_name(u.get("displayName")) or uname + pc = int(u.get("postCount") or 0) + href = str(u.get("page") or "").strip() + avatar_arc = export_avatar_to_zip(zf=zf, username=uname, display_name=display) + if avatar_arc: + avatar_html = ( + '
' + f'' + "
" + ) + else: + fallback = _esc_text((display or uname or "友")[:1] or "友") + avatar_html = ( + '
' + '
{fallback}
' + ) + rows.append( + '' + f"{avatar_html}" + '
' + f'
{_esc_text(display)}
' + f'
{_esc_text(uname)} · {pc} 条
' + "
" + ) + + index_html = "\n".join( + [ + "", + "", + "", + '', + '', + "朋友圈导出", + f'', + "", + '', + '
', + '
', + '
', + '
朋友圈联系人
', + f'
{len(user_pages)} 人
', + "
", + '
', + "".join(rows), + "
", + "
", + "
", + "", + "", + "", + ] + ) + zf.writestr("index.html", index_html) + written.add("index.html") + else: + only_page = user_pages[0]["page"] if user_pages else "" + if only_page: + index_html = ( + "" + '' + f'' + "" + ) + zf.writestr("index.html", index_html) + written.add("index.html") + + try: + zf.writestr("export_report.json", json.dumps(report, ensure_ascii=False, indent=2)) + except Exception: + pass + finally: + try: + if avatar_conn is not None: + avatar_conn.close() + except Exception: + pass + try: + loop.close() + except Exception: + pass + + try: + os.replace(str(tmp_zip), str(final_zip)) + final_out = final_zip + except Exception: + final_out = tmp_zip + + with self._lock: + job.zip_path = final_out + if job.status != "cancelled": + job.status = "done" + job.finished_at = time.time() + + return tmp_zip + + +SNS_EXPORT_MANAGER = SnsExportManager() diff --git a/src/wechat_decrypt_tool/sns_media.py b/src/wechat_decrypt_tool/sns_media.py new file mode 100644 index 0000000..8cf0bcd --- /dev/null +++ b/src/wechat_decrypt_tool/sns_media.py @@ -0,0 +1,710 @@ +from __future__ import annotations + +"""SNS (Moments) remote media download + decryption helpers. + +This module centralizes the "remote URL -> download -> decrypt -> validate -> cache" pipeline +so it can be reused by: +- FastAPI endpoints (`routers/sns.py`) +- Offline export (`sns_export_service.py`) + +Important notes (empirical, matches current repo behavior): +- SNS images: prefer `wcdb_api.dll` export `wcdb_decrypt_sns_image` (black-box). Pure ISAAC64 + keystream XOR is NOT reliable for images across versions. +- SNS videos: encrypted only for the first 128KB; decrypt via WeFlow's WxIsaac64 (WASM keystream) + and XOR in-place. 
+""" + +from dataclasses import dataclass +from functools import lru_cache +from pathlib import Path +from typing import Optional +from urllib.parse import urlparse +import base64 +import hashlib +import html +import os +import re +import subprocess +import time + +import httpx +from fastapi import HTTPException + +from .logging_config import get_logger +from .wcdb_realtime import decrypt_sns_image as _wcdb_decrypt_sns_image + +logger = get_logger(__name__) + + +def is_allowed_sns_media_host(host: str) -> bool: + h = str(host or "").strip().lower() + if not h: + return False + # Images: qpic/qlogo. Thumbs: *.tc.qq.com. Videos/live photos: *.video.qq.com. + return h.endswith(".qpic.cn") or h.endswith(".qlogo.cn") or h.endswith(".tc.qq.com") or h.endswith(".video.qq.com") + + +def fix_sns_cdn_url(url: str, *, token: str = "", is_video: bool = False) -> str: + """WeFlow-compatible SNS CDN URL normalization. + + - Force https for Tencent CDNs. + - For images, replace `/150` with `/0` to request the original. + - If token is provided and url doesn't contain it, append `token=&idx=1`. + """ + u = html.unescape(str(url or "")).strip() + if not u: + return "" + + # Only touch Tencent CDNs; keep other URLs intact. + try: + p = urlparse(u) + host = str(p.hostname or "").lower() + if not is_allowed_sns_media_host(host): + return u + except Exception: + return u + + # http -> https + u = re.sub(r"^http://", "https://", u, flags=re.I) + + # /150 -> /0 (image only) + if not is_video: + u = re.sub(r"/150(?=($|\\?))", "/0", u) + + tok = str(token or "").strip() + if tok and ("token=" not in u): + if is_video: + # Match WeFlow: place `token&idx=1` in front of existing query params. + base, sep, qs = u.partition("?") + if sep: + qs = qs.lstrip("&") + u = f"{base}?token={tok}&idx=1" + if qs: + u = f"{u}&{qs}" + else: + u = f"{u}?token={tok}&idx=1" + else: + connector = "&" if "?" in u else "?" 
+ u = f"{u}{connector}token={tok}&idx=1" + + return u + + +def _detect_mp4_ftyp(head: bytes) -> bool: + return bool(head) and len(head) >= 8 and head[4:8] == b"ftyp" + + +@lru_cache(maxsize=1) +def _weflow_wxisaac64_script_path() -> str: + """Locate the Node helper that wraps WeFlow's wasm_video_decode.* assets.""" + repo_root = Path(__file__).resolve().parents[2] + script = repo_root / "tools" / "weflow_wasm_keystream.js" + if script.exists() and script.is_file(): + return str(script) + return "" + + +@lru_cache(maxsize=64) +def weflow_wxisaac64_keystream(key: str, size: int) -> bytes: + """Generate keystream via WeFlow's WASM (preferred; matches real video decryption).""" + key_text = str(key or "").strip() + if not key_text or size <= 0: + return b"" + + # WeFlow is the source-of-truth; use its WASM first, then fall back to our pure-python ISAAC64. + script = _weflow_wxisaac64_script_path() + if script: + try: + # The JS helper prints ONLY base64 bytes to stdout; keep stderr for debugging. + proc = subprocess.run( + ["node", script, key_text, str(int(size))], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + timeout=30, + check=False, + ) + if proc.returncode == 0: + out_b64 = (proc.stdout or b"").strip() + if out_b64: + return base64.b64decode(out_b64, validate=False) + except Exception: + pass + + # Fallback: pure python ISAAC64 (best-effort; may not match WxIsaac64 for all versions). + from .isaac64 import Isaac64 # pylint: disable=import-outside-toplevel + + want = int(size) + # ISAAC64 generates 8-byte words; generate enough and slice. 
+ size8 = ((want + 7) // 8) * 8 + return Isaac64(key_text).generate_keystream(size8)[:want] + + +_SNS_REMOTE_VIDEO_CACHE_EXTS = [ + ".mp4", + ".bin", # legacy/unknown +] + + +def _sns_remote_video_cache_dir_and_stem(account_dir: Path, *, url: str, key: str) -> tuple[Path, str]: + digest = hashlib.md5(f"video|{url}|{key}".encode("utf-8", errors="ignore")).hexdigest() + cache_dir = account_dir / "sns_remote_video_cache" / digest[:2] + return cache_dir, digest + + +def _sns_remote_video_cache_existing_path(cache_dir: Path, stem: str) -> Optional[Path]: + for ext in _SNS_REMOTE_VIDEO_CACHE_EXTS: + p = cache_dir / f"{stem}{ext}" + try: + if p.exists() and p.is_file(): + return p + except Exception: + continue + return None + + +async def _download_sns_remote_to_file(url: str, dest_path: Path, *, max_bytes: int) -> tuple[str, str]: + """Download SNS media to file (streaming) from Tencent CDN. + + Returns: (content_type, x_enc) + """ + u = str(url or "").strip() + if not u: + return "", "" + + # Safety: only allow Tencent CDN hosts. + try: + p = urlparse(u) + host = str(p.hostname or "").lower() + if not is_allowed_sns_media_host(host): + raise HTTPException(status_code=400, detail="SNS media host not allowed.") + except HTTPException: + raise + except Exception: + raise HTTPException(status_code=400, detail="Invalid SNS media URL.") + + base_headers = { + "User-Agent": "MicroMessenger Client", + "Accept": "*/*", + # Do not request compression for video streams. + "Connection": "keep-alive", + } + + header_variants = [ + {}, + # WeFlow/Electron: MicroMessenger UA + servicewechat.com referer passes some CDN anti-hotlink checks. 
+ { + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36 MicroMessenger/7.0.20.1781(0x6700143B) WindowsWechat(0x63090719) XWEB/8351", + "Referer": "https://servicewechat.com/", + "Origin": "https://servicewechat.com", + }, + {"Referer": "https://wx.qq.com/", "Origin": "https://wx.qq.com"}, + {"Referer": "https://mp.weixin.qq.com/", "Origin": "https://mp.weixin.qq.com"}, + ] + + last_err: Exception | None = None + async with httpx.AsyncClient(timeout=30.0, follow_redirects=True) as client: + for extra in header_variants: + headers = dict(base_headers) + headers.update(extra) + try: + if dest_path.exists(): + try: + dest_path.unlink(missing_ok=True) + except Exception: + pass + + total = 0 + async with client.stream("GET", u, headers=headers) as resp: + resp.raise_for_status() + content_type = str(resp.headers.get("Content-Type") or "").strip() + x_enc = str(resp.headers.get("x-enc") or "").strip() + dest_path.parent.mkdir(parents=True, exist_ok=True) + with dest_path.open("wb") as f: + async for chunk in resp.aiter_bytes(): + if not chunk: + continue + total += len(chunk) + if total > max_bytes: + raise HTTPException(status_code=400, detail="SNS video too large.") + f.write(chunk) + return content_type, x_enc + except HTTPException: + raise + except Exception as e: + last_err = e + continue + + raise last_err or RuntimeError("sns remote download failed") + + +def maybe_decrypt_sns_video_file(path: Path, key: str) -> bool: + """Decrypt the first 128KB of an encrypted mp4 file in-place (WeFlow/Isaac64). + + Returns True if decryption was performed, False otherwise. 
+ """ + key_text = str(key or "").strip() + if not key_text: + return False + + try: + size = int(path.stat().st_size) + except Exception: + return False + + if size <= 8: + return False + + decrypt_size = min(131072, size) + if decrypt_size <= 0: + return False + + try: + with path.open("r+b") as f: + head = f.read(8) + if _detect_mp4_ftyp(head): + return False + + f.seek(0) + buf = bytearray(f.read(decrypt_size)) + if not buf: + return False + + ks = weflow_wxisaac64_keystream(key_text, decrypt_size) + n = min(len(buf), len(ks)) + for i in range(n): + buf[i] ^= ks[i] + + f.seek(0) + f.write(buf) + f.flush() + + f.seek(0) + head2 = f.read(8) + if _detect_mp4_ftyp(head2): + return True + # Still return True to indicate we mutated bytes; caller may treat as failure if desired. + return True + except Exception: + return False + + +async def materialize_sns_remote_video( + *, + account_dir: Path, + url: str, + key: str, + token: str, + use_cache: bool, +) -> Optional[Path]: + """Download SNS video from CDN, decrypt (if needed), and return a local mp4 path.""" + fixed_url = fix_sns_cdn_url(str(url or ""), token=str(token or ""), is_video=True) + if not fixed_url: + return None + + cache_dir, cache_stem = _sns_remote_video_cache_dir_and_stem(account_dir, url=fixed_url, key=str(key or "")) + + if use_cache: + existing = _sns_remote_video_cache_existing_path(cache_dir, cache_stem) + if existing is not None: + # Best-effort migrate legacy `.bin` -> `.mp4` when it's already decrypted. + try: + if existing.suffix.lower() == ".bin": + with existing.open("rb") as f: + head = f.read(8) + if _detect_mp4_ftyp(head): + target = cache_dir / f"{cache_stem}.mp4" + cache_dir.mkdir(parents=True, exist_ok=True) + os.replace(str(existing), str(target)) + existing = target + except Exception: + pass + return existing + + # Download to a temp file first. 
+ cache_dir.mkdir(parents=True, exist_ok=True) + tmp_path = cache_dir / f"{cache_stem}.mp4.{time.time_ns()}.tmp" + try: + await _download_sns_remote_to_file(fixed_url, tmp_path, max_bytes=200 * 1024 * 1024) + except Exception: + try: + tmp_path.unlink(missing_ok=True) + except Exception: + pass + return None + + # Decrypt in-place if the file isn't already a mp4. + maybe_decrypt_sns_video_file(tmp_path, str(key or "")) + + # Validate: mp4 must have `ftyp` at offset 4. + ok_mp4 = False + try: + with tmp_path.open("rb") as f: + head = f.read(8) + ok_mp4 = _detect_mp4_ftyp(head) + except Exception: + ok_mp4 = False + + if not ok_mp4: + try: + tmp_path.unlink(missing_ok=True) + except Exception: + pass + return None + + if use_cache: + final_path = cache_dir / f"{cache_stem}.mp4" + try: + os.replace(str(tmp_path), str(final_path)) + except Exception: + # If rename fails, keep tmp_path as fallback. + final_path = tmp_path + + # Remove other extensions for the same cache key. + for other_ext in _SNS_REMOTE_VIDEO_CACHE_EXTS: + if other_ext.lower() == ".mp4": + continue + other = cache_dir / f"{cache_stem}{other_ext}" + try: + if other.exists() and other.is_file(): + other.unlink(missing_ok=True) + except Exception: + continue + + return final_path + + # Cache disabled: keep the decrypted tmp_path (caller should delete it). + return tmp_path + + +def best_effort_unlink(path: str) -> None: + try: + Path(path).unlink(missing_ok=True) + except Exception: + pass + + +def detect_image_mime(data: bytes) -> str: + """Sniff image mime type by magic bytes. + + IMPORTANT: Do NOT trust HTTP Content-Type as a fallback here. We use this for + validating decrypted bytes. If we blindly trust `image/*`, a failed decrypt + would poison the disk cache and the frontend would keep showing broken images. 
+ """ + if not data: + return "" + + if data.startswith(b"\xFF\xD8\xFF"): + return "image/jpeg" + if data.startswith(b"\x89PNG\r\n\x1a\n"): + return "image/png" + if len(data) >= 6 and data[:6] in (b"GIF87a", b"GIF89a"): + return "image/gif" + if len(data) >= 12 and data[:4] == b"RIFF" and data[8:12] == b"WEBP": + return "image/webp" + if len(data) >= 12 and data[4:8] == b"ftyp": + # ISO BMFF based image formats (HEIF/HEIC/AVIF). + brand = data[8:12] + if brand == b"avif": + return "image/avif" + if brand in (b"heic", b"heix", b"hevc", b"hevx"): + return "image/heic" + if brand in (b"heif", b"mif1", b"msf1"): + return "image/heif" + if data.startswith(b"BM"): + return "image/bmp" + + return "" + + +_SNS_REMOTE_CACHE_EXTS = [ + ".jpg", + ".jpeg", + ".png", + ".gif", + ".webp", + ".bmp", + ".avif", + ".heic", + ".heif", + ".bin", # legacy/unknown +] + + +def _mime_to_ext(mt: str) -> str: + m = str(mt or "").split(";", 1)[0].strip().lower() + return { + "image/jpeg": ".jpg", + "image/jpg": ".jpg", + "image/png": ".png", + "image/gif": ".gif", + "image/webp": ".webp", + "image/bmp": ".bmp", + "image/avif": ".avif", + "image/heic": ".heic", + "image/heif": ".heif", + }.get(m, ".bin") + + +def _ext_to_mime(ext: str) -> str: + e = str(ext or "").strip().lower().lstrip(".") + return { + "jpg": "image/jpeg", + "jpeg": "image/jpeg", + "png": "image/png", + "gif": "image/gif", + "webp": "image/webp", + "bmp": "image/bmp", + "avif": "image/avif", + "heic": "image/heic", + "heif": "image/heif", + }.get(e, "") + + +def _sns_remote_cache_dir_and_stem(account_dir: Path, *, url: str, key: str) -> tuple[Path, str]: + digest = hashlib.md5(f"{url}|{key}".encode("utf-8", errors="ignore")).hexdigest() + cache_dir = account_dir / "sns_remote_cache" / digest[:2] + return cache_dir, digest + + +def _sns_remote_cache_existing_path(cache_dir: Path, stem: str) -> Optional[Path]: + for ext in _SNS_REMOTE_CACHE_EXTS: + p = cache_dir / f"{stem}{ext}" + try: + if p.exists() and p.is_file(): + 
return p + except Exception: + continue + return None + + +def _sniff_image_mime_from_file(path: Path) -> str: + try: + with path.open("rb") as f: + head = f.read(64) + return detect_image_mime(head) + except Exception: + return "" + + +async def _download_sns_remote_bytes(url: str) -> tuple[bytes, str, str]: + """Download SNS media bytes from Tencent CDN with a few safe header variants.""" + u = str(url or "").strip() + if not u: + return b"", "", "" + + max_bytes = 25 * 1024 * 1024 + + base_headers = { + "User-Agent": "MicroMessenger Client", + "Accept": "*/*", + "Accept-Language": "zh-CN,zh;q=0.9", + # Avoid brotli dependency issues; images are already compressed anyway. + "Accept-Encoding": "identity", + "Connection": "keep-alive", + } + + # Some CDN endpoints return a small placeholder image for certain UA/Referer + # combinations but still respond 200. Try the simplest (base headers only) + # first to maximize the chance of getting the real media in one request. + header_variants = [ + {}, + # WeFlow/Electron: MicroMessenger UA + servicewechat.com referer passes some CDN anti-hotlink checks. 
+ { + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36 MicroMessenger/7.0.20.1781(0x6700143B) WindowsWechat(0x63090719) XWEB/8351", + "Referer": "https://servicewechat.com/", + "Origin": "https://servicewechat.com", + }, + {"Referer": "https://wx.qq.com/", "Origin": "https://wx.qq.com"}, + {"Referer": "https://mp.weixin.qq.com/", "Origin": "https://mp.weixin.qq.com"}, + ] + + last_err: Exception | None = None + async with httpx.AsyncClient(timeout=20.0, follow_redirects=True) as client: + for extra in header_variants: + headers = dict(base_headers) + headers.update(extra) + try: + resp = await client.get(u, headers=headers) + resp.raise_for_status() + payload = bytes(resp.content or b"") + if len(payload) > max_bytes: + raise HTTPException(status_code=400, detail="SNS media too large (>25MB).") + content_type = str(resp.headers.get("Content-Type") or "").strip() + x_enc = str(resp.headers.get("x-enc") or "").strip() + return payload, content_type, x_enc + except HTTPException: + raise + except Exception as e: + last_err = e + continue + + raise last_err or RuntimeError("sns remote download failed") + + +@dataclass(frozen=True) +class SnsRemoteImageResult: + payload: bytes + media_type: str + source: str + x_enc: str = "" + cache_path: Optional[Path] = None + + +async def try_fetch_and_decrypt_sns_image_remote( + *, + account_dir: Path, + url: str, + key: str, + token: str, + use_cache: bool, +) -> Optional[SnsRemoteImageResult]: + """Try WeFlow-style: download from CDN -> decrypt via wcdb_decrypt_sns_image -> return bytes. + + Returns a SnsRemoteImageResult on success, or None on failure so caller can fall back to + local cache matching logic. 
+ """ + u_fixed = fix_sns_cdn_url(url, token=token, is_video=False) + if not u_fixed: + return None + + try: + p = urlparse(u_fixed) + host = str(p.hostname or "").strip().lower() + except Exception: + return None + if not is_allowed_sns_media_host(host): + return None + + cache_dir, cache_stem = _sns_remote_cache_dir_and_stem(account_dir, url=u_fixed, key=str(key or "")) + + cache_path: Optional[Path] = None + if use_cache: + try: + existing = _sns_remote_cache_existing_path(cache_dir, cache_stem) + if existing is not None: + mt = _ext_to_mime(existing.suffix) + + # Upgrade legacy `.bin` cache to a proper image extension once. + if (existing.suffix or "").lower() == ".bin" or (not mt): + mt2 = _sniff_image_mime_from_file(existing) + if not mt2: + try: + existing.unlink(missing_ok=True) + except Exception: + pass + existing = None + else: + ext2 = _mime_to_ext(mt2) + if ext2 != ".bin": + try: + cache_dir.mkdir(parents=True, exist_ok=True) + desired = cache_dir / f"{cache_stem}{ext2}" + if desired.exists(): + # Another process/version already wrote the real file; drop legacy bin. + existing.unlink(missing_ok=True) + existing = desired + else: + os.replace(str(existing), str(desired)) + existing = desired + except Exception: + pass + mt = mt2 + + if existing is not None and mt: + try: + payload = existing.read_bytes() + except Exception: + payload = b"" + if payload: + return SnsRemoteImageResult( + payload=payload, + media_type=mt, + source="remote-cache", + x_enc="", + cache_path=existing, + ) + except Exception: + pass + + try: + raw, _content_type, x_enc = await _download_sns_remote_bytes(u_fixed) + except Exception as e: + logger.info("[sns_media] remote download failed: %s", e) + return None + + if not raw: + return None + + # First, validate whether the CDN already returned a real image. 
+ mt_raw = detect_image_mime(raw) + + decoded = raw + mt = mt_raw + decrypted = False + k = str(key or "").strip() + + # Only attempt decryption when bytes do NOT look like an image, or when CDN explicitly + # signals encryption (x-enc). Some endpoints return already-decoded PNG/JPEG even when + # urlAttrs.enc_idx == 1, and decrypting those would corrupt the bytes. + need_decrypt = bool(k) and (not mt_raw) and bool(raw) + if k and x_enc and str(x_enc).strip() not in ("0", "false", "False"): + need_decrypt = True + + if need_decrypt: + try: + decoded2 = _wcdb_decrypt_sns_image(raw, k) + mt2 = detect_image_mime(decoded2) + if mt2: + decoded = decoded2 + mt = mt2 + decrypted = decoded2 != raw + else: + # Decrypt failed; if raw is a real image, keep it. Otherwise treat as failure. + if mt_raw: + decoded = raw + mt = mt_raw + decrypted = False + else: + return None + except Exception as e: + logger.info("[sns_media] remote decrypt failed: %s", e) + if not mt_raw: + return None + decoded = raw + mt = mt_raw + decrypted = False + + if not mt: + return None + + if use_cache: + try: + ext = _mime_to_ext(mt) + cache_dir.mkdir(parents=True, exist_ok=True) + cache_path = cache_dir / f"{cache_stem}{ext}" + + tmp = cache_path.with_suffix(cache_path.suffix + f".{time.time_ns()}.tmp") + tmp.write_bytes(decoded) + os.replace(str(tmp), str(cache_path)) + + # Remove other extensions for the same cache key to avoid stale duplicates. 
+ for other_ext in _SNS_REMOTE_CACHE_EXTS: + if other_ext.lower() == ext.lower(): + continue + other = cache_dir / f"{cache_stem}{other_ext}" + try: + if other.exists() and other.is_file(): + other.unlink(missing_ok=True) + except Exception: + continue + except Exception: + cache_path = None + + return SnsRemoteImageResult( + payload=decoded, + media_type=mt, + source="remote-decrypt" if decrypted else "remote", + x_enc=str(x_enc or "").strip(), + cache_path=cache_path, + ) + diff --git a/src/wechat_decrypt_tool/sns_realtime_autosync.py b/src/wechat_decrypt_tool/sns_realtime_autosync.py new file mode 100644 index 0000000..478edd1 --- /dev/null +++ b/src/wechat_decrypt_tool/sns_realtime_autosync.py @@ -0,0 +1,274 @@ +"""SNS (Moments) realtime -> decrypted sqlite incremental sync. + +Why: +- We can read the latest Moments via WCDB realtime, but the decrypted snapshot (`output/databases/{account}/sns.db`) + can lag behind or miss data (e.g. you viewed it when it was visible, then it became "only last 3 days"). +- For export/offline browsing, we want to keep a local append-only cache of Moments that were visible at some point. + +This module runs a lightweight background poller that watches db_storage/sns*.db mtime changes and triggers a cheap +incremental sync of the latest N Moments into the decrypted snapshot. 
+""" + +from __future__ import annotations + +import os +import threading +import time +from dataclasses import dataclass +from pathlib import Path +from typing import Any, Optional + +from fastapi import HTTPException + +from .chat_helpers import _list_decrypted_accounts, _resolve_account_dir +from .logging_config import get_logger +from .wcdb_realtime import WCDB_REALTIME + +logger = get_logger(__name__) + + +def _env_bool(name: str, default: bool) -> bool: + raw = str(os.environ.get(name, "") or "").strip().lower() + if not raw: + return default + return raw not in {"0", "false", "no", "off"} + + +def _env_int(name: str, default: int, *, min_v: int, max_v: int) -> int: + raw = str(os.environ.get(name, "") or "").strip() + try: + v = int(raw) + except Exception: + v = int(default) + if v < min_v: + v = min_v + if v > max_v: + v = max_v + return v + + +def _mtime_ns(path: Path) -> int: + try: + st = path.stat() + m_ns = int(getattr(st, "st_mtime_ns", 0) or 0) + if m_ns <= 0: + m_ns = int(float(getattr(st, "st_mtime", 0.0) or 0.0) * 1_000_000_000) + return int(m_ns) + except Exception: + return 0 + + +def _scan_sns_db_mtime_ns(db_storage_dir: Path) -> int: + """Best-effort "latest mtime" signal for sns.db buckets.""" + base = Path(db_storage_dir) + candidates: list[Path] = [ + base / "sns" / "sns.db", + base / "sns" / "sns.db-wal", + base / "sns" / "sns.db-shm", + base / "sns.db", + base / "sns.db-wal", + base / "sns.db-shm", + ] + max_ns = 0 + for p in candidates: + v = _mtime_ns(p) + if v > max_ns: + max_ns = v + return int(max_ns) + + +@dataclass +class _AccountState: + last_mtime_ns: int = 0 + due_at: float = 0.0 + last_sync_end_at: float = 0.0 + thread: Optional[threading.Thread] = None + + +class SnsRealtimeAutoSyncService: + def __init__(self) -> None: + self._enabled = _env_bool("WECHAT_TOOL_SNS_AUTOSYNC", True) + self._interval_ms = _env_int("WECHAT_TOOL_SNS_AUTOSYNC_INTERVAL_MS", 2000, min_v=500, max_v=60_000) + self._debounce_ms = 
_env_int("WECHAT_TOOL_SNS_AUTOSYNC_DEBOUNCE_MS", 800, min_v=0, max_v=60_000) + self._min_sync_interval_ms = _env_int( + "WECHAT_TOOL_SNS_AUTOSYNC_MIN_SYNC_INTERVAL_MS", 5000, min_v=0, max_v=300_000 + ) + self._workers = _env_int("WECHAT_TOOL_SNS_AUTOSYNC_WORKERS", 1, min_v=1, max_v=4) + self._max_scan = _env_int("WECHAT_TOOL_SNS_AUTOSYNC_MAX_SCAN", 200, min_v=20, max_v=2000) + + self._mu = threading.Lock() + self._states: dict[str, _AccountState] = {} + self._stop = threading.Event() + self._thread: Optional[threading.Thread] = None + + def start(self) -> None: + if not self._enabled: + logger.info("[sns-autosync] disabled by env WECHAT_TOOL_SNS_AUTOSYNC=0") + return + with self._mu: + if self._thread is not None and self._thread.is_alive(): + return + self._stop.clear() + th = threading.Thread(target=self._run, name="sns-realtime-autosync", daemon=True) + self._thread = th + th.start() + logger.info( + "[sns-autosync] started interval_ms=%s debounce_ms=%s min_sync_interval_ms=%s max_scan=%s workers=%s", + int(self._interval_ms), + int(self._debounce_ms), + int(self._min_sync_interval_ms), + int(self._max_scan), + int(self._workers), + ) + + def stop(self) -> None: + self._stop.set() + with self._mu: + self._thread = None + + def _run(self) -> None: + while not self._stop.is_set(): + tick_t0 = time.perf_counter() + try: + self._tick() + except Exception: + logger.exception("[sns-autosync] tick failed") + + elapsed_ms = (time.perf_counter() - tick_t0) * 1000.0 + sleep_ms = max(200.0, float(self._interval_ms) - elapsed_ms) + self._stop.wait(timeout=sleep_ms / 1000.0) + + def _tick(self) -> None: + accounts = _list_decrypted_accounts() + now = time.time() + if not accounts: + return + + for acc in accounts: + if self._stop.is_set(): + break + try: + account_dir = _resolve_account_dir(acc) + except HTTPException: + continue + except Exception: + continue + + info = WCDB_REALTIME.get_status(account_dir) + available = bool(info.get("dll_present") and 
info.get("key_present") and info.get("db_storage_dir")) + if not available: + continue + + db_storage_dir = Path(str(info.get("db_storage_dir") or "").strip()) + if not db_storage_dir.exists() or not db_storage_dir.is_dir(): + continue + + mtime_ns = _scan_sns_db_mtime_ns(db_storage_dir) + with self._mu: + st = self._states.setdefault(acc, _AccountState()) + if mtime_ns and mtime_ns != st.last_mtime_ns: + st.last_mtime_ns = int(mtime_ns) + st.due_at = now + (float(self._debounce_ms) / 1000.0) + + # Schedule daemon threads. + to_start: list[threading.Thread] = [] + with self._mu: + keep = set(accounts) + for acc in list(self._states.keys()): + if acc not in keep: + self._states.pop(acc, None) + + running = 0 + for st in self._states.values(): + th = st.thread + if th is not None and th.is_alive(): + running += 1 + elif th is not None and (not th.is_alive()): + st.thread = None + + for acc, st in self._states.items(): + if running >= int(self._workers): + break + if st.due_at <= 0 or st.due_at > now: + continue + if st.thread is not None and st.thread.is_alive(): + continue + + since = now - float(st.last_sync_end_at or 0.0) + min_interval = float(self._min_sync_interval_ms) / 1000.0 + if min_interval > 0 and since < min_interval: + st.due_at = now + (min_interval - since) + continue + + st.due_at = 0.0 + th = threading.Thread( + target=self._sync_account_runner, + args=(acc,), + name=f"sns-autosync-{acc}", + daemon=True, + ) + st.thread = th + to_start.append(th) + running += 1 + + for th in to_start: + if self._stop.is_set(): + break + try: + th.start() + except Exception: + with self._mu: + for acc, st in self._states.items(): + if st.thread is th: + st.thread = None + break + + def _sync_account_runner(self, account: str) -> None: + account = str(account or "").strip() + try: + if self._stop.is_set() or (not account): + return + res = self._sync_account(account) + upserted = int((res or {}).get("upserted") or 0) + logger.info("[sns-autosync] sync done account=%s 
upserted=%s", account, upserted) + except Exception: + logger.exception("[sns-autosync] sync failed account=%s", account) + finally: + with self._mu: + st = self._states.get(account) + if st is not None: + st.thread = None + st.last_sync_end_at = time.time() + + def _sync_account(self, account: str) -> dict[str, Any]: + account = str(account or "").strip() + if not account: + return {"status": "skipped", "reason": "missing account"} + + try: + account_dir = _resolve_account_dir(account) + except Exception as e: + return {"status": "skipped", "reason": f"resolve account failed: {e}"} + + info = WCDB_REALTIME.get_status(account_dir) + available = bool(info.get("dll_present") and info.get("key_present") and info.get("db_storage_dir")) + if not available: + return {"status": "skipped", "reason": "realtime not available"} + + # Import lazily to avoid startup import ordering issues. + from .routers.sns import sync_sns_realtime_timeline_latest + + try: + return sync_sns_realtime_timeline_latest( + account=account, + max_scan=int(self._max_scan), + force=0, + ) + except HTTPException as e: + return {"status": "error", "error": str(e.detail or "")} + except Exception as e: + return {"status": "error", "error": str(e)} + + +SNS_REALTIME_AUTOSYNC = SnsRealtimeAutoSyncService() + diff --git a/src/wechat_decrypt_tool/sns_stage_timing.py b/src/wechat_decrypt_tool/sns_stage_timing.py new file mode 100644 index 0000000..f7221be --- /dev/null +++ b/src/wechat_decrypt_tool/sns_stage_timing.py @@ -0,0 +1,63 @@ +import re +from collections.abc import MutableMapping + + +def add_sns_stage_timing_headers( + headers: MutableMapping[str, str], + *, + source: str, + hit_type: str = "", + x_enc: str = "", +) -> None: + """Inject `Server-Timing` + `Timing-Allow-Origin` for SNS media stage inspection. + + The frontend can't read `` response headers, but browsers expose `Server-Timing` metrics + via `performance.getEntriesByName(...).serverTiming` when `Timing-Allow-Origin` allows it. 
+ + This helper is intentionally side-effect free beyond mutating `headers`. + """ + + src = str(source or "").strip() + if not src: + return + + ht = str(hit_type or "").strip() + xe = str(x_enc or "").strip() + + if "Timing-Allow-Origin" not in headers: + headers["Timing-Allow-Origin"] = "*" + + def _esc(v: str) -> str: + return v.replace("\\", "\\\\").replace('"', '\\"') + + def _token(v: str) -> str: + raw = str(v or "").strip() + if not raw: + return "" + raw = raw.replace(" ", "_") + safe = re.sub(r"[^0-9A-Za-z_.-]+", "_", raw).strip("_") + if not safe: + return "" + return safe[:64] + + parts: list[str] = [] + src_tok = _token(src) or "unknown" + parts.append(f'sns_source_{src_tok};dur=0;desc="{_esc(src)}"') + if ht: + ht_tok = _token(ht) + if ht_tok: + parts.append(f'sns_hit_{ht_tok};dur=0;desc="{_esc(ht)}"') + if xe: + xe_tok = _token(xe) + if xe_tok: + parts.append(f'sns_xenc_{xe_tok};dur=0;desc="{_esc(xe)}"') + + existing = str(headers.get("Server-Timing") or "").strip() + # Some responses may already have upstream `Server-Timing` metrics. Always append ours so + # the frontend can consistently read `sns_source_*` via ResourceTiming.serverTiming. 
+ if existing and re.search(r"(^|,\\s*)sns_source(_|;)", existing): + return + + combined = ", ".join(parts) + headers["Server-Timing"] = f"{existing}, {combined}" if existing else combined + diff --git a/src/wechat_decrypt_tool/wcdb_realtime.py b/src/wechat_decrypt_tool/wcdb_realtime.py index a65790c..be3b881 100644 --- a/src/wechat_decrypt_tool/wcdb_realtime.py +++ b/src/wechat_decrypt_tool/wcdb_realtime.py @@ -1,6 +1,8 @@ import ctypes +import binascii import json import os +import re import sys import threading import time @@ -20,7 +22,51 @@ class WCDBRealtimeError(RuntimeError): _NATIVE_DIR = Path(__file__).resolve().parent / "native" -_WCDB_API_DLL = _NATIVE_DIR / "wcdb_api.dll" +_DEFAULT_WCDB_API_DLL = _NATIVE_DIR / "wcdb_api.dll" +_WCDB_API_DLL_SELECTED: Optional[Path] = None + + +def _candidate_wcdb_api_dll_paths() -> list[Path]: + """Return possible locations for wcdb_api.dll (prefer WeFlow's newer build when present).""" + cands: list[Path] = [] + + env = str(os.environ.get("WECHAT_TOOL_WCDB_API_DLL_PATH", "") or "").strip() + if env: + cands.append(Path(env)) + + # Repo checkout convenience: reuse bundled WeFlow / echotrace DLLs when available. + try: + repo_root = Path(__file__).resolve().parents[2] + except Exception: + repo_root = Path.cwd() + + for p in [ + repo_root / "WeFlow" / "resources" / "wcdb_api.dll", + repo_root / "echotrace" / "assets" / "dll" / "wcdb_api.dll", + _DEFAULT_WCDB_API_DLL, + ]: + if p not in cands: + cands.append(p) + + return cands + + +def _resolve_wcdb_api_dll_path() -> Path: + global _WCDB_API_DLL_SELECTED + if _WCDB_API_DLL_SELECTED is not None: + return _WCDB_API_DLL_SELECTED + + for p in _candidate_wcdb_api_dll_paths(): + try: + if p.exists() and p.is_file(): + _WCDB_API_DLL_SELECTED = p + return p + except Exception: + continue + + # Fall back to the default path even if it doesn't exist; caller will raise a clear error. 
+ _WCDB_API_DLL_SELECTED = _DEFAULT_WCDB_API_DLL + return _WCDB_API_DLL_SELECTED _lib_lock = threading.Lock() _lib: Optional[ctypes.CDLL] = None @@ -40,16 +86,18 @@ def _load_wcdb_lib() -> ctypes.CDLL: if not _is_windows(): raise WCDBRealtimeError("WCDB realtime mode is only supported on Windows.") - if not _WCDB_API_DLL.exists(): - raise WCDBRealtimeError(f"Missing wcdb_api.dll at: {_WCDB_API_DLL}") + wcdb_api_dll = _resolve_wcdb_api_dll_path() + if not wcdb_api_dll.exists(): + raise WCDBRealtimeError(f"Missing wcdb_api.dll at: {wcdb_api_dll}") # Ensure dependent DLLs (e.g. WCDB.dll) can be found. try: - os.add_dll_directory(str(_NATIVE_DIR)) + os.add_dll_directory(str(wcdb_api_dll.parent)) except Exception: pass - lib = ctypes.CDLL(str(_WCDB_API_DLL)) + lib = ctypes.CDLL(str(wcdb_api_dll)) + logger.info("[wcdb] using wcdb_api.dll: %s", wcdb_api_dll) # Signatures lib.wcdb_init.argtypes = [] @@ -68,6 +116,13 @@ def _load_wcdb_lib() -> ctypes.CDLL: lib.wcdb_close_account.argtypes = [ctypes.c_int64] lib.wcdb_close_account.restype = ctypes.c_int + # Optional: wcdb_set_my_wxid(handle, wxid) + try: + lib.wcdb_set_my_wxid.argtypes = [ctypes.c_int64, ctypes.c_char_p] + lib.wcdb_set_my_wxid.restype = ctypes.c_int + except Exception: + pass + lib.wcdb_get_sessions.argtypes = [ctypes.c_int64, ctypes.POINTER(ctypes.c_char_p)] lib.wcdb_get_sessions.restype = ctypes.c_int @@ -95,6 +150,91 @@ def _load_wcdb_lib() -> ctypes.CDLL: lib.wcdb_get_group_members.argtypes = [ctypes.c_int64, ctypes.c_char_p, ctypes.POINTER(ctypes.c_char_p)] lib.wcdb_get_group_members.restype = ctypes.c_int + # Optional (newer DLLs): wcdb_get_group_nicknames(handle, chatroom_id, out_json) + try: + lib.wcdb_get_group_nicknames.argtypes = [ + ctypes.c_int64, + ctypes.c_char_p, + ctypes.POINTER(ctypes.c_char_p), + ] + lib.wcdb_get_group_nicknames.restype = ctypes.c_int + except Exception: + pass + + # Optional: execute arbitrary SQL on a selected database kind/path. 
+ # Signature: wcdb_exec_query(handle, kind, path, sql, out_json) + try: + lib.wcdb_exec_query.argtypes = [ + ctypes.c_int64, + ctypes.c_char_p, + ctypes.c_char_p, + ctypes.c_char_p, + ctypes.POINTER(ctypes.c_char_p), + ] + lib.wcdb_exec_query.restype = ctypes.c_int + except Exception: + pass + + # Optional (newer DLLs): update a single message content in message db. + # Signature: wcdb_update_message(handle, sessionId, localId, createTime, newContent, outError) + try: + lib.wcdb_update_message.argtypes = [ + ctypes.c_int64, + ctypes.c_char_p, + ctypes.c_int64, + ctypes.c_int32, + ctypes.c_char_p, + ctypes.POINTER(ctypes.c_char_p), + ] + lib.wcdb_update_message.restype = ctypes.c_int + except Exception: + pass + + # Optional (newer DLLs): delete a single message in message db. + # Signature: wcdb_delete_message(handle, sessionId, localId, createTime, dbPathHint, outError) + try: + lib.wcdb_delete_message.argtypes = [ + ctypes.c_int64, + ctypes.c_char_p, + ctypes.c_int64, + ctypes.c_int32, + ctypes.c_char_p, + ctypes.POINTER(ctypes.c_char_p), + ] + lib.wcdb_delete_message.restype = ctypes.c_int + except Exception: + pass + + # Optional (newer DLLs): wcdb_get_sns_timeline(handle, limit, offset, usernames_json, keyword, start_time, end_time, out_json) + try: + lib.wcdb_get_sns_timeline.argtypes = [ + ctypes.c_int64, + ctypes.c_int32, + ctypes.c_int32, + ctypes.c_char_p, + ctypes.c_char_p, + ctypes.c_int32, + ctypes.c_int32, + ctypes.POINTER(ctypes.c_char_p), + ] + lib.wcdb_get_sns_timeline.restype = ctypes.c_int + except Exception: + # Older wcdb_api.dll may not expose this export. + pass + + # Optional (newer DLLs): wcdb_decrypt_sns_image(encrypted_data, len, key, out_hex) + # WeFlow uses this to decrypt Moments CDN images. 
+ try: + lib.wcdb_decrypt_sns_image.argtypes = [ + ctypes.c_void_p, + ctypes.c_int32, + ctypes.c_char_p, + ctypes.POINTER(ctypes.c_void_p), + ] + lib.wcdb_decrypt_sns_image.restype = ctypes.c_int32 + except Exception: + pass + lib.wcdb_get_logs.argtypes = [ctypes.POINTER(ctypes.c_char_p)] lib.wcdb_get_logs.restype = ctypes.c_int @@ -113,7 +253,9 @@ def _ensure_initialized() -> None: return rc = int(lib.wcdb_init()) if rc != 0: - raise WCDBRealtimeError(f"wcdb_init failed: {rc}") + logs = get_native_logs(require_initialized=False) + hint = f" logs={logs[:6]}" if logs else "" + raise WCDBRealtimeError(f"wcdb_init failed: {rc}.{hint}") _initialized = True @@ -149,11 +291,38 @@ def _call_out_json(fn, *args) -> str: pass -def get_native_logs() -> list[str]: +def _call_out_error(fn, *args) -> None: + lib = _load_wcdb_lib() + out = ctypes.c_char_p() + rc = int(fn(*args, ctypes.byref(out))) try: - _ensure_initialized() - except Exception: - return [] + if rc != 0: + err = "" + try: + if out.value: + err = (out.value or b"").decode("utf-8", errors="replace") + except Exception: + err = "" + + logs = get_native_logs() + hint = f" logs={logs[:6]}" if logs else "" + if err: + raise WCDBRealtimeError(f"wcdb api call failed: {rc}. 
error={err}.{hint}") + raise WCDBRealtimeError(f"wcdb api call failed: {rc}.{hint}") + finally: + try: + if out.value: + lib.wcdb_free_string(out) + except Exception: + pass + + +def get_native_logs(*, require_initialized: bool = True) -> list[str]: + if require_initialized: + try: + _ensure_initialized() + except Exception: + return [] lib = _load_wcdb_lib() out = ctypes.c_char_p() rc = int(lib.wcdb_get_logs(ctypes.byref(out))) @@ -195,6 +364,30 @@ def open_account(session_db_path: Path, key_hex: str) -> int: return int(out_handle.value) +def set_my_wxid(handle: int, wxid: str) -> bool: + """Best-effort set the "my wxid" context for some WCDB APIs.""" + try: + _ensure_initialized() + except Exception: + return False + + lib = _load_wcdb_lib() + fn = getattr(lib, "wcdb_set_my_wxid", None) + if not fn: + return False + + w = str(wxid or "").strip() + if not w: + return False + + try: + rc = int(fn(ctypes.c_int64(int(handle)), w.encode("utf-8"))) + except Exception: + return False + + return rc == 0 + + def close_account(handle: int) -> None: try: h = int(handle) @@ -293,6 +486,243 @@ def get_avatar_urls(handle: int, usernames: list[str]) -> dict[str, str]: return {} +def get_group_members(handle: int, chatroom_id: str) -> list[dict[str, Any]]: + _ensure_initialized() + lib = _load_wcdb_lib() + cid = str(chatroom_id or "").strip() + if not cid: + return [] + out_json = _call_out_json(lib.wcdb_get_group_members, ctypes.c_int64(int(handle)), cid.encode("utf-8")) + decoded = _safe_load_json(out_json) + if isinstance(decoded, list): + out: list[dict[str, Any]] = [] + for x in decoded: + if isinstance(x, dict): + out.append(x) + return out + return [] + + +def get_group_nicknames(handle: int, chatroom_id: str) -> dict[str, str]: + _ensure_initialized() + lib = _load_wcdb_lib() + fn = getattr(lib, "wcdb_get_group_nicknames", None) + if not fn: + return {} + + cid = str(chatroom_id or "").strip() + if not cid: + return {} + + out_json = _call_out_json(fn, 
ctypes.c_int64(int(handle)), cid.encode("utf-8")) + decoded = _safe_load_json(out_json) + if isinstance(decoded, dict): + return {str(k): str(v) for k, v in decoded.items()} + return {} + + +def exec_query(handle: int, *, kind: str, path: Optional[str], sql: str) -> list[dict[str, Any]]: + """Execute raw SQL on a specific db kind/path via WCDB. + + This is primarily used for SNS/other dbs that are not directly exposed by dedicated APIs. + """ + _ensure_initialized() + lib = _load_wcdb_lib() + fn = getattr(lib, "wcdb_exec_query", None) + if not fn: + raise WCDBRealtimeError("Current wcdb_api.dll does not support exec_query.") + + k = str(kind or "").strip() + if not k: + raise WCDBRealtimeError("Missing kind for exec_query.") + + s = str(sql or "").strip() + if not s: + return [] + + p = None if path is None else str(path or "").strip() + + out_json = _call_out_json( + fn, + ctypes.c_int64(int(handle)), + k.encode("utf-8"), + None if p is None else p.encode("utf-8"), + s.encode("utf-8"), + ) + decoded = _safe_load_json(out_json) + if isinstance(decoded, list): + out: list[dict[str, Any]] = [] + for x in decoded: + if isinstance(x, dict): + out.append(x) + return out + return [] + + +def update_message(handle: int, *, session_id: str, local_id: int, create_time: int, new_content: str) -> None: + """Update a single message content in the live encrypted db_storage via WCDB. + + Requires wcdb_update_message export in wcdb_api.dll. 
+ """ + _ensure_initialized() + lib = _load_wcdb_lib() + fn = getattr(lib, "wcdb_update_message", None) + if not fn: + raise WCDBRealtimeError("Current wcdb_api.dll does not support update_message.") + + sid = str(session_id or "").strip() + if not sid: + raise WCDBRealtimeError("Missing session_id for update_message.") + + _call_out_error( + fn, + ctypes.c_int64(int(handle)), + sid.encode("utf-8"), + ctypes.c_int64(int(local_id or 0)), + ctypes.c_int32(int(create_time or 0)), + str(new_content or "").encode("utf-8"), + ) + + +def delete_message( + handle: int, + *, + session_id: str, + local_id: int, + create_time: int, + db_path_hint: str | None = None, +) -> None: + """Delete a single message in the live encrypted db_storage via WCDB. + + Requires wcdb_delete_message export in wcdb_api.dll. + """ + _ensure_initialized() + lib = _load_wcdb_lib() + fn = getattr(lib, "wcdb_delete_message", None) + if not fn: + raise WCDBRealtimeError("Current wcdb_api.dll does not support delete_message.") + + sid = str(session_id or "").strip() + if not sid: + raise WCDBRealtimeError("Missing session_id for delete_message.") + + hint = str(db_path_hint or "").strip() + _call_out_error( + fn, + ctypes.c_int64(int(handle)), + sid.encode("utf-8"), + ctypes.c_int64(int(local_id or 0)), + ctypes.c_int32(int(create_time or 0)), + hint.encode("utf-8"), + ) + + +def get_sns_timeline( + handle: int, + *, + limit: int = 20, + offset: int = 0, + usernames: Optional[list[str]] = None, + keyword: str | None = None, + start_time: int = 0, + end_time: int = 0, +) -> list[dict[str, Any]]: + """Read Moments (SnsTimeLine) from the live encrypted db_storage via WCDB. + + Requires a newer wcdb_api.dll export: wcdb_get_sns_timeline. 
+ """ + _ensure_initialized() + lib = _load_wcdb_lib() + fn = getattr(lib, "wcdb_get_sns_timeline", None) + if not fn: + raise WCDBRealtimeError("Current wcdb_api.dll does not support sns timeline.") + + lim = max(0, int(limit or 0)) + off = max(0, int(offset or 0)) + + users = [str(u or "").strip() for u in (usernames or []) if str(u or "").strip()] + users = list(dict.fromkeys(users)) + users_json = json.dumps(users, ensure_ascii=False) if users else "" + + kw = str(keyword or "").strip() + + payload = _call_out_json( + fn, + ctypes.c_int64(int(handle)), + ctypes.c_int32(lim), + ctypes.c_int32(off), + users_json.encode("utf-8"), + kw.encode("utf-8"), + ctypes.c_int32(int(start_time or 0)), + ctypes.c_int32(int(end_time or 0)), + ) + decoded = _safe_load_json(payload) + if isinstance(decoded, list): + out: list[dict[str, Any]] = [] + for x in decoded: + if isinstance(x, dict): + out.append(x) + return out + return [] + + +def decrypt_sns_image(encrypted_data: bytes, key: str) -> bytes: + """Decrypt Moments CDN image bytes using WCDB DLL (WeFlow compatible). + + Notes: + - Requires a newer wcdb_api.dll export: wcdb_decrypt_sns_image. + - On failure, returns the original encrypted_data (best-effort behavior like WeFlow). 
+ """ + _ensure_initialized() + lib = _load_wcdb_lib() + fn = getattr(lib, "wcdb_decrypt_sns_image", None) + if not fn: + raise WCDBRealtimeError("Current wcdb_api.dll does not support sns image decryption.") + + raw = bytes(encrypted_data or b"") + if not raw: + return b"" + + k = str(key or "").strip() + if not k: + return raw + + out_ptr = ctypes.c_void_p() + buf = ctypes.create_string_buffer(raw, len(raw)) + rc = 0 + try: + rc = int( + fn( + ctypes.cast(buf, ctypes.c_void_p), + ctypes.c_int32(len(raw)), + k.encode("utf-8"), + ctypes.byref(out_ptr), + ) + ) + + if rc != 0 or not out_ptr.value: + return raw + + hex_bytes = ctypes.cast(out_ptr, ctypes.c_char_p).value or b"" + if not hex_bytes: + return raw + + # Defensive: keep only hex chars (some builds may include whitespace). + hex_clean = re.sub(rb"[^0-9a-fA-F]", b"", hex_bytes) + if not hex_clean: + return raw + try: + return binascii.unhexlify(hex_clean) + except Exception: + return raw + finally: + try: + if out_ptr.value: + lib.wcdb_free_string(ctypes.cast(out_ptr, ctypes.c_char_p)) + except Exception: + pass + + def shutdown() -> None: global _initialized lib = _load_wcdb_lib() @@ -357,10 +787,14 @@ class WCDBRealtimeConnection: class WCDBRealtimeManager: + _FAILED_TTL = 60.0 # seconds before retrying a failed connection + def __init__(self) -> None: self._mu = threading.Lock() self._conns: dict[str, WCDBRealtimeConnection] = {} self._connecting: dict[str, threading.Event] = {} + # Negative cache: accounts that failed to connect recently (avoids repeated timeouts). 
+ self._failed: dict[str, float] = {} # account -> monotonic timestamp of failure def get_status(self, account_dir: Path) -> dict[str, Any]: account = str(account_dir.name) @@ -378,11 +812,16 @@ def get_status(self, account_dir: Path) -> dict[str, Any]: except Exception as e: err = str(e) - dll_ok = _WCDB_API_DLL.exists() + dll_path = _resolve_wcdb_api_dll_path() + try: + dll_ok = bool(dll_path.exists()) + except Exception: + dll_ok = False connected = self.is_connected(account) return { "account": account, "dll_present": bool(dll_ok), + "wcdb_api_dll": str(dll_path), "key_present": bool(key_ok), "db_storage_dir": str(db_storage_dir) if db_storage_dir else "", "session_db_path": str(session_db_path) if session_db_path else "", @@ -395,9 +834,19 @@ def is_connected(self, account: str) -> bool: conn = self._conns.get(str(account)) return bool(conn and conn.handle > 0) - def ensure_connected(self, account_dir: Path, *, key_hex: Optional[str] = None) -> WCDBRealtimeConnection: + def ensure_connected( + self, account_dir: Path, *, key_hex: Optional[str] = None, timeout: float = 5.0 + ) -> WCDBRealtimeConnection: account = str(account_dir.name) + # Fast-reject if this account failed recently to avoid repeated timeouts. + with self._mu: + failed_at = self._failed.get(account) + if failed_at is not None and (time.monotonic() - failed_at) < self._FAILED_TTL: + raise WCDBRealtimeError("WCDB connection recently failed; retry after 60s.") + + deadline = time.monotonic() + timeout + while True: with self._mu: existing = self._conns.get(account) @@ -411,22 +860,64 @@ def ensure_connected(self, account_dir: Path, *, key_hex: Optional[str] = None) break # Another thread is connecting; wait a bit and retry. 
- waiter.wait(timeout=10.0) + remaining = deadline - time.monotonic() + if remaining <= 0: + raise WCDBRealtimeError("Timed out waiting for WCDB connection.") + waiter.wait(timeout=min(remaining, 10.0)) + if time.monotonic() >= deadline: + raise WCDBRealtimeError("Timed out waiting for WCDB connection.") key = str(key_hex or "").strip() if not key: key_item = get_account_keys_from_store(account) key = str((key_item or {}).get("db_key") or "").strip() - if len(key) != 64: - raise WCDBRealtimeError("Missing db key for this account (call /api/keys or decrypt first).") try: + if len(key) != 64: + with self._mu: + self._failed[account] = time.monotonic() + raise WCDBRealtimeError("Missing db key for this account (call /api/keys or decrypt first).") db_storage_dir = _resolve_account_db_storage_dir(account_dir) if db_storage_dir is None: raise WCDBRealtimeError("Cannot resolve db_storage directory for this account.") session_db_path = _resolve_session_db_path(db_storage_dir) - handle = open_account(session_db_path, key) + + # Run open_account in a daemon thread with a timeout to avoid + # blocking indefinitely when the native library hangs (locked DB). + _handle_box: list[int] = [] + _open_err: list[Exception] = [] + + def _do_open() -> None: + try: + _handle_box.append(open_account(session_db_path, key)) + except Exception as exc: + _open_err.append(exc) + + remaining = max(0.1, deadline - time.monotonic()) + open_thread = threading.Thread(target=_do_open, daemon=True) + open_thread.start() + open_thread.join(timeout=remaining) + + if open_thread.is_alive(): + with self._mu: + self._failed[account] = time.monotonic() + raise WCDBRealtimeError( + f"open_account timed out after {timeout:.0f}s for {session_db_path}" + ) + if _open_err: + with self._mu: + self._failed[account] = time.monotonic() + raise _open_err[0] + if not _handle_box: + raise WCDBRealtimeError("open_account returned no handle.") + + handle = _handle_box[0] + # Some WCDB APIs (e.g. 
exec_query on non-session DBs) may require this context. + try: + set_my_wxid(handle, account) + except Exception: + pass conn = WCDBRealtimeConnection( account=account, @@ -453,6 +944,7 @@ def disconnect(self, account: str) -> None: return with self._mu: conn = self._conns.pop(a, None) + self._failed.pop(a, None) # clear negative cache on explicit disconnect if conn is None: return try: @@ -461,16 +953,36 @@ def disconnect(self, account: str) -> None: except Exception: pass - def close_all(self) -> None: + def close_all(self, *, lock_timeout_s: float | None = None) -> bool: + """Close all known WCDB realtime connections. + + When `lock_timeout_s` is None, this waits indefinitely for per-connection locks. + When provided, this will skip busy connections after the timeout and return False. + """ with self._mu: conns = list(self._conns.values()) self._conns.clear() + ok = True for conn in conns: try: - with conn.lock: + if lock_timeout_s is None: + with conn.lock: + close_account(conn.handle) + continue + + acquired = conn.lock.acquire(timeout=float(lock_timeout_s)) + if not acquired: + ok = False + logger.warning("[wcdb] close_all skip busy conn account=%s", conn.account) + continue + try: close_account(conn.handle) + finally: + conn.lock.release() except Exception: + ok = False continue + return ok WCDB_REALTIME = WCDBRealtimeManager() diff --git a/src/wechat_decrypt_tool/wrapped/__init__.py b/src/wechat_decrypt_tool/wrapped/__init__.py new file mode 100644 index 0000000..7b8c7c8 --- /dev/null +++ b/src/wechat_decrypt_tool/wrapped/__init__.py @@ -0,0 +1,6 @@ +"""WeChat Wrapped (年度总结) backend modules. + +This package is intentionally split into small modules so we can implement +ideas incrementally (按点子编号依次实现), avoiding a single giant file. 
+""" + diff --git a/src/wechat_decrypt_tool/wrapped/cards/__init__.py b/src/wechat_decrypt_tool/wrapped/cards/__init__.py new file mode 100644 index 0000000..161b59e --- /dev/null +++ b/src/wechat_decrypt_tool/wrapped/cards/__init__.py @@ -0,0 +1,2 @@ +"""Card implementations for WeChat Wrapped (年度总结).""" + diff --git a/src/wechat_decrypt_tool/wrapped/cards/card_00_global_overview.py b/src/wechat_decrypt_tool/wrapped/cards/card_00_global_overview.py new file mode 100644 index 0000000..5fa9ebd --- /dev/null +++ b/src/wechat_decrypt_tool/wrapped/cards/card_00_global_overview.py @@ -0,0 +1,1064 @@ +from __future__ import annotations + +import hashlib +import re +import sqlite3 +import time +from collections import Counter +from dataclasses import dataclass +from datetime import datetime +from pathlib import Path +from typing import Any, Optional + +from .card_01_cyber_schedule import WeekdayHourHeatmap, compute_weekday_hour_heatmap +from ...chat_search_index import get_chat_search_index_db_path +from ...chat_helpers import ( + _build_avatar_url, + _decode_sqlite_text, + _iter_message_db_paths, + _load_contact_rows, + _pick_display_name, + _quote_ident, + _should_keep_session, + _to_char_token_text, +) +from ...logging_config import get_logger + +logger = get_logger(__name__) + + +_MD5_HEX_RE = re.compile(r"(?i)[0-9a-f]{32}") +# Best-effort heuristics for "new friends added" detection: WeChat system messages vary by version. +_ADDED_FRIEND_PATTERNS: tuple[str, ...] 
= ( + "你已添加了", + "你添加了", + "现在可以开始聊天了", + "以上是打招呼的消息", + "通过了你的朋友验证", + "通过你的朋友验证", +) + + +@dataclass(frozen=True) +class GlobalOverviewStats: + year: int + active_days: int + added_friends: int + local_type_counts: dict[int, int] + kind_counts: dict[str, int] + latest_ts: int + top_phrase: Optional[tuple[str, int]] + top_emoji: Optional[tuple[str, int]] + top_contact: Optional[tuple[str, int]] + top_group: Optional[tuple[str, int]] + + +def _year_range_epoch_seconds(year: int) -> tuple[int, int]: + # Keep the same semantics as other parts of the project: local time boundaries. + start = int(datetime(year, 1, 1).timestamp()) + end = int(datetime(year + 1, 1, 1).timestamp()) + return start, end + + +def _days_in_year(year: int) -> int: + try: + return int((datetime(int(year) + 1, 1, 1) - datetime(int(year), 1, 1)).days) + except Exception: + return 365 + + +def _list_message_tables(conn: sqlite3.Connection) -> list[str]: + try: + rows = conn.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall() + except Exception: + return [] + names: list[str] = [] + for r in rows: + if not r or not r[0]: + continue + name = _decode_sqlite_text(r[0]).strip() + if not name: + continue + ln = name.lower() + if ln.startswith(("msg_", "chat_")): + names.append(name) + return names + + +def _accumulate_db_daily_counts( + *, + db_path: Path, + start_ts: int, + end_ts: int, + counts: list[int], + sender_username: str | None = None, +) -> int: + """Accumulate per-day message counts from one message shard DB into counts list. + + Returns the number of messages counted. + """ + + if not db_path.exists(): + return 0 + + conn: sqlite3.Connection | None = None + try: + conn = sqlite3.connect(str(db_path)) + + tables = _list_message_tables(conn) + if not tables: + return 0 + + # Convert millisecond timestamps defensively. + # The expression yields epoch seconds as INTEGER. 
+ ts_expr = ( + "CASE WHEN create_time > 1000000000000 THEN CAST(create_time/1000 AS INTEGER) ELSE create_time END" + ) + + # Optional sender filter (best-effort). When provided, we only count + # messages whose `real_sender_id` maps to `sender_username`. + sender_rowid: int | None = None + if sender_username and str(sender_username).strip(): + try: + r = conn.execute( + "SELECT rowid FROM Name2Id WHERE user_name = ? LIMIT 1", + (str(sender_username).strip(),), + ).fetchone() + if r is not None and r[0] is not None: + sender_rowid = int(r[0]) + except Exception: + sender_rowid = None + + counted = 0 + for table_name in tables: + qt = _quote_ident(table_name) + sender_where = "" + params: tuple[Any, ...] + if sender_rowid is not None: + sender_where = " AND real_sender_id = ?" + params = (start_ts, end_ts, sender_rowid) + else: + params = (start_ts, end_ts) + + sql = ( + "SELECT CAST(strftime('%j', datetime(ts, 'unixepoch', 'localtime')) AS INTEGER) - 1 AS doy, " + "COUNT(1) AS cnt " + "FROM (" + f" SELECT {ts_expr} AS ts" + f" FROM {qt}" + f" WHERE {ts_expr} >= ? AND {ts_expr} < ?{sender_where}" + ") sub " + "GROUP BY doy" + ) + + try: + rows = conn.execute(sql, params).fetchall() + except Exception: + continue + + for doy, cnt in rows: + try: + d = int(doy if doy is not None else -1) + c = int(cnt or 0) + except Exception: + continue + if c <= 0 or d < 0 or d >= len(counts): + continue + counts[d] += c + counted += c + + return counted + finally: + try: + if conn is not None: + conn.close() + except Exception: + pass + + +def compute_annual_daily_counts(*, account_dir: Path, year: int, sender_username: str | None = None) -> list[int]: + """Compute per-day message counts for the given year. + + The output is a 0-indexed day-of-year list (length 365/366). Counts default to + "messages sent by me" when sender_username is provided. 
+ """ + + start_ts, end_ts = _year_range_epoch_seconds(year) + days = _days_in_year(year) + counts: list[int] = [0 for _ in range(days)] + + sender = str(sender_username or "").strip() + + # Prefer using our unified search index if available; it's much faster than scanning all msg tables. + index_path = get_chat_search_index_db_path(account_dir) + if index_path.exists(): + conn = sqlite3.connect(str(index_path)) + try: + has_fts = ( + conn.execute( + "SELECT 1 FROM sqlite_master WHERE type='table' AND name='message_fts' LIMIT 1" + ).fetchone() + is not None + ) + if has_fts: + # Convert millisecond timestamps defensively (some datasets store ms). + ts_expr = ( + "CASE " + "WHEN CAST(create_time AS INTEGER) > 1000000000000 " + "THEN CAST(CAST(create_time AS INTEGER)/1000 AS INTEGER) " + "ELSE CAST(create_time AS INTEGER) " + "END" + ) + sender_clause = "" + if sender: + sender_clause = " AND sender_username = ?" + + sql = ( + "SELECT " + "CAST(strftime('%j', datetime(ts, 'unixepoch', 'localtime')) AS INTEGER) - 1 AS doy, " + "COUNT(1) AS cnt " + "FROM (" + f" SELECT {ts_expr} AS ts" + " FROM message_fts" + f" WHERE {ts_expr} >= ? AND {ts_expr} < ?" + " AND db_stem NOT LIKE 'biz_message%'" + f"{sender_clause}" + ") sub " + "GROUP BY doy" + ) + + t0 = time.time() + try: + params: tuple[Any, ...] 
= (start_ts, end_ts) + if sender: + params = (start_ts, end_ts, sender) + rows = conn.execute(sql, params).fetchall() + except Exception: + rows = [] + + total = 0 + for r in rows: + if not r: + continue + try: + doy = int(r[0] if r[0] is not None else -1) + cnt = int(r[1] or 0) + except Exception: + continue + if cnt <= 0 or doy < 0 or doy >= days: + continue + counts[doy] += cnt + total += cnt + + logger.info( + "Wrapped annual heatmap computed (search index): account=%s year=%s total=%s sender=%s db=%s elapsed=%.2fs", + str(account_dir.name or "").strip(), + year, + total, + sender or "*", + str(index_path.name), + time.time() - t0, + ) + + return counts + finally: + try: + conn.close() + except Exception: + pass + + db_paths = _iter_message_db_paths(account_dir) + # Default: exclude official/biz shards (biz_message*.db) to reduce noise. + db_paths = [p for p in db_paths if not p.name.lower().startswith("biz_message")] + my_wxid = str(account_dir.name or "").strip() + t0 = time.time() + total = 0 + for db_path in db_paths: + total += _accumulate_db_daily_counts( + db_path=db_path, + start_ts=start_ts, + end_ts=end_ts, + counts=counts, + sender_username=sender or None, + ) + + logger.info( + "Wrapped annual heatmap computed: account=%s year=%s total=%s sender=%s dbs=%s elapsed=%.2fs", + my_wxid, + year, + total, + sender or "*", + len(db_paths), + time.time() - t0, + ) + + return counts + + +def _list_session_usernames(session_db_path: Path) -> list[str]: + if not session_db_path.exists(): + return [] + + conn = sqlite3.connect(str(session_db_path)) + try: + try: + rows = conn.execute("SELECT username FROM SessionTable").fetchall() + except sqlite3.OperationalError: + rows = conn.execute("SELECT username FROM Session").fetchall() + except Exception: + rows = [] + finally: + conn.close() + + out: list[str] = [] + for r in rows: + if not r or not r[0]: + continue + u = str(r[0]).strip() + if u: + out.append(u) + return out + + +def _mask_name(name: str) -> str: + s 
= str(name or "").strip() + if not s: + return "" + if len(s) == 1: + return "*" + if len(s) == 2: + return s[0] + "*" + return s[0] + ("*" * (len(s) - 2)) + s[-1] + + +def _normalize_phrase(v: Any) -> str: + s = _decode_sqlite_text(v).strip() + if not s: + return "" + s = re.sub(r"\s+", " ", s).strip() + if not s: + return "" + if len(s) > 12: + return "" + lower = s.lower() + if "http://" in lower or "https://" in lower: + return "" + if s.startswith("<"): + return "" + # Avoid pure punctuation / numbers. + if not re.search(r"[\u4e00-\u9fffA-Za-z]", s): + return "" + return s + + +def _normalize_emoji(v: Any) -> str: + s = _decode_sqlite_text(v).strip() + if not s: + return "" + s = re.sub(r"\s+", " ", s).strip() + if not s or len(s) > 48: + return "" + if s.startswith("<"): + return "" + # If it is an md5 or some opaque token, don't show it. + if re.fullmatch(r"(?i)[0-9a-f]{32}", s): + return "" + return s + + +def _kind_from_local_type(t: int) -> str: + # See `_infer_local_type` in chat_helpers for known values. 
+ if t == 1: + return "text" + if t == 3: + return "image" + if t == 34: + return "voice" + if t == 43: + return "video" + if t == 47: + return "emoji" + if t in (49, 17179869233, 21474836529, 154618822705, 12884901937, 270582939697): + return "link" + if t == 25769803825: + return "file" + if t == 10000: + return "system" + if t == 50: + return "voip" + if t == 244813135921: + return "quote" + if t == 8594229559345: + return "red_packet" + if t == 8589934592049: + return "transfer" + if t == 266287972401: + return "pat" + return "other" + + +def _weekday_name_zh(weekday_index: int) -> str: + labels = ["周一", "周二", "周三", "周四", "周五", "周六", "周日"] + if 0 <= weekday_index < len(labels): + return labels[weekday_index] + return "" + + +def _kind_label_zh(kind: str) -> str: + return { + "text": "文字", + "emoji": "表情包", + "voice": "语音", + "image": "图片", + "video": "视频", + "link": "链接/小程序", + "file": "文件", + "system": "系统消息", + "other": "其他", + }.get(kind, kind) + + +def compute_global_overview_stats( + *, + account_dir: Path, + year: int, + sender_username: str | None = None, +) -> GlobalOverviewStats: + """Compute global overview stats for wrapped. + + Notes: + - Best-effort only. Different WeChat versions may store different message types/values. + - We default to excluding `biz_message*.db` to reduce noise. + - If `sender_username` is provided, only messages sent by that sender are counted + (best-effort). + """ + + start_ts, end_ts = _year_range_epoch_seconds(year) + sender = str(sender_username).strip() if sender_username and str(sender_username).strip() else None + + # Prefer using the unified search index if available; it already merges all shards/tables. 
+ index_path = get_chat_search_index_db_path(account_dir) + if index_path.exists(): + conn = sqlite3.connect(str(index_path)) + try: + has_fts = ( + conn.execute( + "SELECT 1 FROM sqlite_master WHERE type='table' AND name='message_fts' LIMIT 1" + ).fetchone() + is not None + ) + if has_fts: + t0 = time.time() + + ts_expr = ( + "CASE " + "WHEN CAST(create_time AS INTEGER) > 1000000000000 " + "THEN CAST(CAST(create_time AS INTEGER)/1000 AS INTEGER) " + "ELSE CAST(create_time AS INTEGER) " + "END" + ) + where = f"{ts_expr} >= ? AND {ts_expr} < ? AND db_stem NOT LIKE 'biz_message%'" + params: tuple[Any, ...] = (start_ts, end_ts) + if sender: + where += " AND sender_username = ?" + params = (start_ts, end_ts, sender) + + # activeDays + latest_ts in one pass. + sql_meta = ( + "SELECT " + "COUNT(DISTINCT date(datetime(ts, 'unixepoch', 'localtime'))) AS active_days, " + "MAX(ts) AS latest_ts " + "FROM (" + f" SELECT {ts_expr} AS ts" + " FROM message_fts" + f" WHERE {where}" + ") sub" + ) + r = conn.execute(sql_meta, params).fetchone() + active_days_i = int((r[0] if r else 0) or 0) + latest_ts_i = int((r[1] if r else 0) or 0) + + # local_type distribution (for message kind). + local_type_counts_i: Counter[int] = Counter() + kind_counts_i: Counter[str] = Counter() + try: + rows = conn.execute( + f"SELECT CAST(local_type AS INTEGER) AS lt, COUNT(1) AS cnt " + f"FROM message_fts WHERE {where} GROUP BY lt", + params, + ).fetchall() + except Exception: + rows = [] + for rr in rows: + if not rr: + continue + try: + lt = int(rr[0] or 0) + cnt = int(rr[1] or 0) + except Exception: + continue + if cnt <= 0: + continue + local_type_counts_i[lt] += cnt + kind_counts_i[_kind_from_local_type(lt)] += cnt + + # Top conversations (best-effort: only needs a small LIMIT). 
+ per_username_counts_i: Counter[str] = Counter() + try: + rows_u = conn.execute( + f"SELECT username, COUNT(1) AS cnt " + f"FROM message_fts WHERE {where} " + "GROUP BY username ORDER BY cnt DESC LIMIT 400", + params, + ).fetchall() + except Exception: + rows_u = [] + for rr in rows_u: + if not rr: + continue + u = str(rr[0] or "").strip() + if not u: + continue + try: + cnt = int(rr[1] or 0) + except Exception: + cnt = 0 + if cnt > 0: + per_username_counts_i[u] = cnt + + # Top phrases (short text only). + phrase_counts_i: Counter[str] = Counter() + try: + rows_p = conn.execute( + f"SELECT \"text\" AS txt, COUNT(1) AS cnt " + f"FROM message_fts WHERE {where} AND render_type = 'text' " + " AND \"text\" IS NOT NULL " + " AND TRIM(\"text\") != '' " + " AND LENGTH(TRIM(\"text\")) <= 12 " + "GROUP BY txt ORDER BY cnt DESC LIMIT 400", + params, + ).fetchall() + except Exception: + rows_p = [] + for rr in rows_p: + if not rr: + continue + phrase = _normalize_phrase(rr[0]) + if not phrase: + continue + try: + cnt = int(rr[1] or 0) + except Exception: + cnt = 0 + if cnt > 0: + phrase_counts_i[phrase] += cnt + + def pick_top(counter: Counter[Any]) -> Optional[tuple[Any, int]]: + if not counter: + return None + best_item = max(counter.items(), key=lambda kv: (kv[1], str(kv[0]))) + if best_item[1] <= 0: + return None + return best_item[0], int(best_item[1]) + + def is_keep_username(u: str) -> bool: + return _should_keep_session(u, include_official=False) + + contact_counts_i = Counter( + { + u: c + for u, c in per_username_counts_i.items() + if (not u.endswith("@chatroom")) and is_keep_username(u) + } + ) + group_counts_i = Counter( + {u: c for u, c in per_username_counts_i.items() if u.endswith("@chatroom") and is_keep_username(u)} + ) + top_contact = pick_top(contact_counts_i) + top_group = pick_top(group_counts_i) + top_phrase = pick_top(phrase_counts_i) + + # New friends added in this year (best-effort via WeChat system messages). 
+ added_friend_usernames: set[str] = set() + try: + like_patterns: list[str] = [] + for pat in _ADDED_FRIEND_PATTERNS: + tok = _to_char_token_text(pat) + if tok: + like_patterns.append(f"%{tok}%") + + if like_patterns: + where_added = f"{ts_expr} >= ? AND {ts_expr} < ? AND db_stem NOT LIKE 'biz_message%'" + cond_added = " OR ".join(['\"text\" LIKE ?'] * len(like_patterns)) + rows_added = conn.execute( + f"SELECT DISTINCT username FROM message_fts " + f"WHERE {where_added} " + "AND CAST(local_type AS INTEGER) = 10000 " + f"AND ({cond_added})", + (start_ts, end_ts, *like_patterns), + ).fetchall() + for rr in rows_added: + if not rr or not rr[0]: + continue + u = str(rr[0] or "").strip() + if not u or u.endswith("@chatroom") or (not is_keep_username(u)): + continue + added_friend_usernames.add(u) + except Exception: + added_friend_usernames = set() + + added_friends_i = len(added_friend_usernames) + + total_messages = int(sum(local_type_counts_i.values())) + logger.info( + "Wrapped card#0 overview computed (search index): account=%s year=%s total=%s active_days=%s sender=%s db=%s elapsed=%.2fs", + str(account_dir.name or "").strip(), + year, + total_messages, + active_days_i, + sender or "*", + str(index_path.name), + time.time() - t0, + ) + + return GlobalOverviewStats( + year=year, + active_days=active_days_i, + added_friends=added_friends_i, + local_type_counts={int(k): int(v) for k, v in local_type_counts_i.items()}, + kind_counts={str(k): int(v) for k, v in kind_counts_i.items()}, + latest_ts=latest_ts_i, + top_phrase=(str(top_phrase[0]), int(top_phrase[1])) if top_phrase else None, + top_emoji=None, + top_contact=(str(top_contact[0]), int(top_contact[1])) if top_contact else None, + top_group=(str(top_group[0]), int(top_group[1])) if top_group else None, + ) + finally: + try: + conn.close() + except Exception: + pass + + # Resolve all sessions (usernames) so we can map msg_xxx/chat_xxx tables back to usernames. 
+ session_usernames = _list_session_usernames(account_dir / "session.db") + md5_to_username: dict[str, str] = {} + table_to_username: dict[str, str] = {} + for u in session_usernames: + md5_hex = hashlib.md5(u.encode("utf-8")).hexdigest().lower() + md5_to_username[md5_hex] = u + table_to_username[f"msg_{md5_hex}"] = u + table_to_username[f"chat_{md5_hex}"] = u + + def resolve_username_from_table(table_name: str) -> Optional[str]: + ln = str(table_name or "").lower() + u = table_to_username.get(ln) + if u: + return u + m = _MD5_HEX_RE.search(ln) + if m: + return md5_to_username.get(m.group(0).lower()) + return None + + db_paths = _iter_message_db_paths(account_dir) + db_paths = [p for p in db_paths if not p.name.lower().startswith("biz_message")] + + # Convert millisecond timestamps defensively. + ts_expr = ( + "CASE WHEN create_time > 1000000000000 THEN CAST(create_time/1000 AS INTEGER) ELSE create_time END" + ) + + local_type_counts: Counter[int] = Counter() + kind_counts: Counter[str] = Counter() + active_days: set[str] = set() + per_username_counts: Counter[str] = Counter() + phrase_counts: Counter[str] = Counter() + added_friend_usernames: set[str] = set() + added_like_patterns = [f"%{p}%" for p in _ADDED_FRIEND_PATTERNS if str(p or "").strip()] + + latest_ts = 0 + + t0 = time.time() + for db_path in db_paths: + if not db_path.exists(): + continue + conn: sqlite3.Connection | None = None + try: + conn = sqlite3.connect(str(db_path)) + tables = _list_message_tables(conn) + if not tables: + continue + + skip_sender_stats = False + sender_rowid: int | None = None + if sender: + try: + r2 = conn.execute( + "SELECT rowid FROM Name2Id WHERE user_name = ? LIMIT 1", + (sender,), + ).fetchone() + if r2 is not None and r2[0] is not None: + sender_rowid = int(r2[0]) + except Exception: + sender_rowid = None + # Can't reliably filter by sender for this shard; skip sender-only stats to avoid mixing directions. 
+ if sender_rowid is None: + skip_sender_stats = True + + for table_name in tables: + qt = _quote_ident(table_name) + username = resolve_username_from_table(table_name) + + # New friends added: detect common WeChat system messages within this year. + if ( + added_like_patterns + and username + and (not username.endswith("@chatroom")) + and _should_keep_session(username, include_official=False) + ): + cond_added = " OR ".join(["CAST(message_content AS TEXT) LIKE ?"] * len(added_like_patterns)) + sql_added = ( + f"SELECT 1 FROM {qt} " + f"WHERE local_type = 10000 " + f" AND {ts_expr} >= ? AND {ts_expr} < ? " + f" AND ({cond_added}) " + "LIMIT 1" + ) + try: + r_added = conn.execute(sql_added, (start_ts, end_ts, *added_like_patterns)).fetchone() + except Exception: + r_added = None + if r_added is not None: + added_friend_usernames.add(username) + + if skip_sender_stats: + continue + sender_where = " AND real_sender_id = ?" if sender_rowid is not None else "" + params = (start_ts, end_ts, sender_rowid) if sender_rowid is not None else (start_ts, end_ts) + + # 1) local_type distribution + table total + sql_types = ( + "SELECT local_type, COUNT(1) AS cnt " + "FROM (" + f" SELECT local_type, {ts_expr} AS ts " + f" FROM {qt} " + f" WHERE {ts_expr} >= ? 
AND {ts_expr} < ?{sender_where}" + ") sub " + "GROUP BY local_type" + ) + try: + rows = conn.execute(sql_types, params).fetchall() + except Exception: + continue + if not rows: + continue + + table_total = 0 + table_text_cnt = 0 + for r in rows: + if not r: + continue + try: + lt = int(r[0] or 0) + except Exception: + lt = 0 + try: + cnt = int(r[1] or 0) + except Exception: + cnt = 0 + if cnt <= 0: + continue + table_total += cnt + local_type_counts[lt] += cnt + kind_counts[_kind_from_local_type(lt)] += cnt + if lt == 1: + table_text_cnt = cnt + + if table_total <= 0: + continue + if username: + per_username_counts[username] += table_total + + # 3) active days (distinct dates) + sql_days = ( + "SELECT DISTINCT date(datetime(ts, 'unixepoch', 'localtime')) AS d " + "FROM (" + f" SELECT {ts_expr} AS ts" + f" FROM {qt}" + f" WHERE {ts_expr} >= ? AND {ts_expr} < ?{sender_where}" + ") sub" + ) + try: + rows_d = conn.execute(sql_days, params).fetchall() + except Exception: + rows_d = [] + for rd in rows_d: + if not rd or not rd[0]: + continue + active_days.add(str(rd[0])) + + # 4) latest timestamp within this year + sql_max_ts = f"SELECT MAX({ts_expr}) AS mx FROM {qt} WHERE {ts_expr} >= ? AND {ts_expr} < ?{sender_where}" + try: + rmax = conn.execute(sql_max_ts, params).fetchone() + except Exception: + rmax = None + try: + mx = int((rmax[0] if rmax else 0) or 0) + except Exception: + mx = 0 + if mx > latest_ts: + latest_ts = mx + + # 5) top phrases (best-effort via short, repeated text messages) + if table_text_cnt > 0: + sql_phrase = ( + "SELECT message_content AS txt, COUNT(1) AS cnt " + f"FROM {qt} " + f"WHERE local_type = 1 " + f" AND {ts_expr} >= ? 
AND {ts_expr} < ?{sender_where} " + " AND message_content IS NOT NULL " + " AND TRIM(CAST(message_content AS TEXT)) != '' " + " AND LENGTH(TRIM(CAST(message_content AS TEXT))) <= 12 " + "GROUP BY txt " + "ORDER BY cnt DESC " + "LIMIT 60" + ) + try: + rows_p = conn.execute(sql_phrase, params).fetchall() + except Exception: + rows_p = [] + for rp in rows_p: + if not rp: + continue + phrase = _normalize_phrase(rp[0]) + if not phrase: + continue + try: + cnt = int(rp[1] or 0) + except Exception: + cnt = 0 + if cnt > 0: + phrase_counts[phrase] += cnt + finally: + if conn is not None: + try: + conn.close() + except Exception: + pass + + def pick_top(counter: Counter[Any]) -> Optional[tuple[Any, int]]: + if not counter: + return None + # Deterministic tie-breaker: key string ascending. + best_item = max(counter.items(), key=lambda kv: (kv[1], str(kv[0]))) + if best_item[1] <= 0: + return None + return best_item[0], int(best_item[1]) + + # Pick top contact & group (exclude official/service accounts by default). 
+ def is_keep_username(u: str) -> bool: + return _should_keep_session(u, include_official=False) + + contact_counts = Counter({u: c for u, c in per_username_counts.items() if (not u.endswith("@chatroom")) and is_keep_username(u)}) + group_counts = Counter({u: c for u, c in per_username_counts.items() if u.endswith("@chatroom") and is_keep_username(u)}) + top_contact = pick_top(contact_counts) + top_group = pick_top(group_counts) + + top_phrase = pick_top(phrase_counts) + + total_messages = int(sum(local_type_counts.values())) + + logger.info( + "Wrapped card#0 overview computed: account=%s year=%s total=%s active_days=%s sender=%s dbs=%s elapsed=%.2fs", + str(account_dir.name or "").strip(), + year, + total_messages, + len(active_days), + sender or "*", + len(db_paths), + time.time() - t0, + ) + + return GlobalOverviewStats( + year=year, + active_days=len(active_days), + added_friends=len(added_friend_usernames), + local_type_counts={int(k): int(v) for k, v in local_type_counts.items()}, + kind_counts={str(k): int(v) for k, v in kind_counts.items()}, + latest_ts=int(latest_ts), + top_phrase=(str(top_phrase[0]), int(top_phrase[1])) if top_phrase else None, + top_emoji=None, + top_contact=(str(top_contact[0]), int(top_contact[1])) if top_contact else None, + top_group=(str(top_group[0]), int(top_group[1])) if top_group else None, + ) + + +def build_card_00_global_overview( + *, + account_dir: Path, + year: int, + heatmap: WeekdayHourHeatmap | None = None, +) -> dict[str, Any]: + """Card #0: 年度全局概览(开场综合页,建议作为第2页)。""" + + sender = str(account_dir.name or "").strip() + heatmap = heatmap or compute_weekday_hour_heatmap(account_dir=account_dir, year=year, sender_username=sender) + stats = compute_global_overview_stats(account_dir=account_dir, year=year, sender_username=sender) + + # Resolve display names for top sessions (best-effort). 
+ contact_db_path = account_dir / "contact.db" + top_usernames: list[str] = [] + if stats.top_contact: + top_usernames.append(stats.top_contact[0]) + if stats.top_group: + top_usernames.append(stats.top_group[0]) + contact_rows = _load_contact_rows(contact_db_path, top_usernames) if top_usernames else {} + + top_contact_obj = None + if stats.top_contact: + u, cnt = stats.top_contact + row = contact_rows.get(u) + display = _pick_display_name(row, u) + avatar = _build_avatar_url(str(account_dir.name or ""), u) if u else "" + top_contact_obj = { + "username": u, + "displayName": display, + "maskedName": _mask_name(display), + "avatarUrl": avatar, + "messages": int(cnt), + "isGroup": False, + } + + top_group_obj = None + if stats.top_group: + u, cnt = stats.top_group + row = contact_rows.get(u) + display = _pick_display_name(row, u) + avatar = _build_avatar_url(str(account_dir.name or ""), u) if u else "" + top_group_obj = { + "username": u, + "displayName": display, + "maskedName": _mask_name(display), + "avatarUrl": avatar, + "messages": int(cnt), + "isGroup": True, + } + + # Derive the top "message kind". + top_kind = None + if stats.kind_counts: + kc = Counter(stats.kind_counts) + # Exclude mostly-unhelpful kinds from the "top" pick. 
+ for drop in ("system", "other"): + if drop in kc: + del kc[drop] + if kc: + kind, count = max(kc.items(), key=lambda kv: (kv[1], str(kv[0]))) + ratio = (float(count) / float(heatmap.total_messages)) if heatmap.total_messages > 0 else 0.0 + top_kind = { + "kind": str(kind), + "label": _kind_label_zh(str(kind)), + "count": int(count), + "ratio": ratio, + } + + messages_per_day = 0.0 + if stats.active_days > 0: + messages_per_day = heatmap.total_messages / float(stats.active_days) + + most_active_hour: Optional[int] = None + most_active_weekday: Optional[int] = None + if heatmap.total_messages > 0: + hour_totals = [sum(heatmap.matrix[w][h] for w in range(7)) for h in range(24)] + most_active_hour = max(range(24), key=lambda h: (hour_totals[h], -h)) + + weekday_totals = [sum(heatmap.matrix[w][h] for h in range(24)) for w in range(7)] + most_active_weekday = max(range(7), key=lambda w: (weekday_totals[w], -w)) + + most_active_weekday_name = _weekday_name_zh(most_active_weekday or -1) if most_active_weekday is not None else "" + + highlight = None + if stats.latest_ts > 0: + dt = datetime.fromtimestamp(int(stats.latest_ts)) + highlight = { + "timestamp": int(stats.latest_ts), + "date": dt.strftime("%Y-%m-%d"), + "time": dt.strftime("%H:%M"), + # Keep it privacy-safe by default: no content/object here. + "action": "你还在微信里发送消息", + } + + daily_counts = compute_annual_daily_counts(account_dir=account_dir, year=year, sender_username=sender) + annual_heatmap = { + "year": int(year), + "startDate": f"{int(year)}-01-01", + "endDate": f"{int(year)}-12-31", + "days": int(len(daily_counts)), + "dailyCounts": daily_counts, + # Product decision: keep the calendar heatmap lightweight (no extra "best day" markers). 
+ "highlights": [], + } + + lines: list[str] = [] + if heatmap.total_messages > 0: + lines.append(f"今年以来,你在微信里发送了 {heatmap.total_messages:,} 条消息,平均每天 {messages_per_day:.1f} 条。") + else: + lines.append("今年以来,你在微信里还没有发出聊天消息。") + + if stats.active_days > 0: + if most_active_hour is not None and most_active_weekday_name: + lines.append(f"和微信共度的 {stats.active_days} 天里,你最常在 {most_active_hour} 点出没;{most_active_weekday_name}是你最爱聊天的日子。") + else: + lines.append(f"和微信共度的 {stats.active_days} 天里,你留下了很多对话的痕迹。") + + if top_contact_obj or top_group_obj: + parts: list[str] = [] + if top_contact_obj: + parts.append(f"你发消息最多的人是「{top_contact_obj['maskedName']}」({int(top_contact_obj['messages']):,} 条)") + if top_group_obj: + parts.append(f"你最常发言的群是「{top_group_obj['maskedName']}」({int(top_group_obj['messages']):,} 条)") + if parts: + lines.append(",".join(parts) + "。") + + if top_kind and top_kind.get("count", 0) > 0: + pct = float(top_kind.get("ratio") or 0.0) * 100.0 + lines.append(f"你最常用的表达方式是{top_kind['label']}(占 {pct:.0f}%)。") + + if stats.top_phrase and stats.top_phrase[0] and stats.top_phrase[1] > 0: + phrase, cnt = stats.top_phrase + lines.append(f"你今年说得最多的一句话是「{phrase}」(共 {cnt:,} 次)。") + + # NOTE: We keep the `highlight` field in `data` for future use, but do not + # surface it in the page narrative for now (per product requirement). 
+ + narrative = "一屏读懂你的年度微信聊天画像" + + return { + "id": 0, + "title": "这一年,你的微信都经历了什么?", + "scope": "global", + "category": "A", + "status": "ok", + "kind": "global/overview", + "narrative": narrative, + "data": { + "year": int(year), + "totalMessages": int(heatmap.total_messages), + "activeDays": int(stats.active_days), + "addedFriends": int(stats.added_friends), + "sentMediaCount": int(stats.kind_counts.get("image", 0) + stats.kind_counts.get("video", 0)), + "sentStickerCount": int(stats.kind_counts.get("emoji", 0)), + "messagesPerDay": messages_per_day, + "mostActiveHour": most_active_hour, + "mostActiveWeekday": most_active_weekday, + "mostActiveWeekdayName": most_active_weekday_name, + "topContact": top_contact_obj, + "topGroup": top_group_obj, + "topKind": top_kind, + "annualHeatmap": annual_heatmap, + "topPhrase": {"phrase": stats.top_phrase[0], "count": int(stats.top_phrase[1])} if stats.top_phrase else None, + "topEmoji": {"emoji": stats.top_emoji[0], "count": int(stats.top_emoji[1])} if stats.top_emoji else None, + "highlight": highlight, + "lines": lines, + }, + } diff --git a/src/wechat_decrypt_tool/wrapped/cards/card_01_cyber_schedule.py b/src/wechat_decrypt_tool/wrapped/cards/card_01_cyber_schedule.py new file mode 100644 index 0000000..4a66580 --- /dev/null +++ b/src/wechat_decrypt_tool/wrapped/cards/card_01_cyber_schedule.py @@ -0,0 +1,1101 @@ +from __future__ import annotations + +import hashlib +import re +import sqlite3 +import time +from dataclasses import dataclass +from datetime import datetime +from pathlib import Path +from typing import Any, Optional + +from ...chat_search_index import get_chat_search_index_db_path +from ...chat_helpers import ( + _build_avatar_url, + _decode_sqlite_text, + _iter_message_db_paths, + _load_contact_rows, + _pick_display_name, + _quote_ident, + _row_to_search_hit, +) +from ...logging_config import get_logger + +logger = get_logger(__name__) + + +_WEEKDAY_LABELS_ZH = ["周一", "周二", "周三", "周四", "周五", "周六", "周日"] 
+_HOUR_LABELS = [f"{h:02d}" for h in range(24)] + +_MD5_HEX_RE = re.compile(r"(?i)[0-9a-f]{32}") + + +@dataclass(frozen=True) +class WeekdayHourHeatmap: + weekday_labels: list[str] + hour_labels: list[str] + matrix: list[list[int]] # 7 x 24, weekday major (Mon..Sun) then hour + total_messages: int + + +@dataclass(frozen=True) +class _SentMomentRef: + """Lightweight reference to a sent message (for earliest/latest moment extraction).""" + + ts: int + score: int + username: str + db_stem: str + table_name: str + local_id: int + + +def _get_time_personality(hour: int) -> str: + if 5 <= hour <= 8: + return "early_bird" + if 9 <= hour <= 12: + return "office_worker" + if 13 <= hour <= 17: + return "afternoon" + if 18 <= hour <= 23: + return "night_owl" + if 0 <= hour <= 4: + return "late_night" + return "unknown" + + +def _get_weekday_name(weekday_index: int) -> str: + if 0 <= weekday_index < len(_WEEKDAY_LABELS_ZH): + return _WEEKDAY_LABELS_ZH[weekday_index] + return "" + + +def _build_narrative(*, hour: int, weekday: str, total: int) -> str: + personality = _get_time_personality(hour) + + templates: dict[str, str] = { + "early_bird": ( + f"清晨 {hour:02d}:00,当城市还在沉睡,你已经开始了新一天的问候。" + f"{weekday}是你最健谈的一天,这一年你用 {total:,} 条消息记录了这些早起时光。" + ), + "office_worker": ( + f"忙碌的上午 {hour:02d}:00,是你最常敲击键盘的时刻。" + f"{weekday}最活跃,这一年你用 {total:,} 条消息把工作与生活都留在了对话里。" + ), + "afternoon": ( + f"午后的阳光里,{hour:02d}:00 是你最爱分享的时刻。" + f"{weekday}的聊天最热闹,这一年共 {total:,} 条消息串起了你的午后时光。" + ), + "night_owl": ( + f"夜幕降临,{hour:02d}:00 是你最常出没的时刻。" + f"{weekday}最活跃,这一年 {total:,} 条消息陪你把每个夜晚都聊得更亮。" + ), + "late_night": ( + f"当世界沉睡,凌晨 {hour:02d}:00 的你依然在线。" + f"{weekday}最活跃,这一年 {total:,} 条深夜消息,是你与这个世界的悄悄话。" + ), + } + return templates.get(personality, f"你在 {hour:02d}:00 最活跃") + + +def _year_range_epoch_seconds(year: int) -> tuple[int, int]: + # Use local time boundaries (same semantics as sqlite "localtime"). 
+ start = int(datetime(year, 1, 1).timestamp()) + end = int(datetime(year + 1, 1, 1).timestamp()) + return start, end + + +def _mask_name(name: str) -> str: + s = str(name or "").strip() + if not s: + return "" + if len(s) == 1: + return "*" + if len(s) == 2: + return s[0] + "*" + return s[0] + ("*" * (len(s) - 2)) + s[-1] + + +def _list_session_usernames(session_db_path: Path) -> list[str]: + if not session_db_path.exists(): + return [] + + conn = sqlite3.connect(str(session_db_path)) + try: + try: + rows = conn.execute("SELECT username FROM SessionTable").fetchall() + except sqlite3.OperationalError: + rows = conn.execute("SELECT username FROM Session").fetchall() + except Exception: + rows = [] + finally: + conn.close() + + out: list[str] = [] + for r in rows: + if not r or not r[0]: + continue + u = str(r[0]).strip() + if u: + out.append(u) + return out + + +def _compute_year_first_last_from_index( + *, + account_dir: Path, + year: int, + sender_username: str, +) -> tuple[Optional[_SentMomentRef], Optional[_SentMomentRef]]: + """Find the chronologically first and last sent messages of the year (by timestamp).""" + start_ts, end_ts = _year_range_epoch_seconds(year) + sender = str(sender_username or "").strip() + if not sender: + return None, None + + index_path = get_chat_search_index_db_path(account_dir) + if not index_path.exists(): + return None, None + + conn = sqlite3.connect(str(index_path)) + try: + has_fts = ( + conn.execute("SELECT 1 FROM sqlite_master WHERE type='table' AND name='message_fts' LIMIT 1").fetchone() + is not None + ) + if not has_fts: + return None, None + + ts_expr = ( + "CASE " + "WHEN CAST(create_time AS INTEGER) > 1000000000000 " + "THEN CAST(CAST(create_time AS INTEGER)/1000 AS INTEGER) " + "ELSE CAST(create_time AS INTEGER) " + "END" + ) + + where = ( + f"{ts_expr} >= ? AND {ts_expr} < ? " + "AND db_stem NOT LIKE 'biz_message%' " + "AND sender_username = ? 
" + "AND CAST(local_type AS INTEGER) != 10000" + ) + + base_sql = ( + f"SELECT {ts_expr} AS ts, username, db_stem, table_name, CAST(local_id AS INTEGER) AS local_id " + "FROM message_fts " + f"WHERE {where} " + ) + + def row_to_ref(r: Any) -> Optional[_SentMomentRef]: + if not r: + return None + try: + ts = int(r[0] or 0) + except Exception: + ts = 0 + username = str(r[1] or "").strip() + db_stem = str(r[2] or "").strip() + table_name = str(r[3] or "").strip() + try: + local_id = int(r[4] or 0) + except Exception: + local_id = 0 + + if ts <= 0 or not username or not db_stem or not table_name or local_id <= 0: + return None + + return _SentMomentRef( + ts=int(ts), + score=0, # Not used for chronological ordering + username=username, + db_stem=db_stem, + table_name=table_name, + local_id=int(local_id), + ) + + params = (start_ts, end_ts, sender) + sql_first = base_sql + "ORDER BY ts ASC LIMIT 1" + sql_last = base_sql + "ORDER BY ts DESC LIMIT 1" + + first_ref = row_to_ref(conn.execute(sql_first, params).fetchone()) + last_ref = row_to_ref(conn.execute(sql_last, params).fetchone()) + return first_ref, last_ref + except Exception: + return None, None + finally: + try: + conn.close() + except Exception: + pass + + +def _compute_year_first_last_fallback( + *, + account_dir: Path, + year: int, + sender_username: str, +) -> tuple[Optional[_SentMomentRef], Optional[_SentMomentRef]]: + """Fallback: find chronologically first/last sent messages when no search index.""" + start_ts, end_ts = _year_range_epoch_seconds(year) + sender = str(sender_username or "").strip() + if not sender: + return None, None + + session_usernames = _list_session_usernames(account_dir / "session.db") + md5_to_username: dict[str, str] = {} + table_to_username: dict[str, str] = {} + for u in session_usernames: + md5_hex = hashlib.md5(u.encode("utf-8")).hexdigest().lower() + md5_to_username[md5_hex] = u + table_to_username[f"msg_{md5_hex}"] = u + table_to_username[f"chat_{md5_hex}"] = u + + def 
resolve_username_from_table(table_name: str) -> Optional[str]: + ln = str(table_name or "").lower() + u = table_to_username.get(ln) + if u: + return u + m = _MD5_HEX_RE.search(ln) + if m: + return md5_to_username.get(m.group(0).lower()) + return None + + db_paths = _iter_message_db_paths(account_dir) + db_paths = [p for p in db_paths if not p.name.lower().startswith("biz_message")] + + ts_expr = ( + "CASE WHEN create_time > 1000000000000 THEN CAST(create_time/1000 AS INTEGER) ELSE create_time END" + ) + + best_first: Optional[_SentMomentRef] = None + best_last: Optional[_SentMomentRef] = None + + for db_path in db_paths: + if not db_path.exists(): + continue + + conn: sqlite3.Connection | None = None + try: + conn = sqlite3.connect(str(db_path)) + conn.row_factory = sqlite3.Row + conn.text_factory = bytes + + try: + r2 = conn.execute("SELECT rowid FROM Name2Id WHERE user_name = ? LIMIT 1", (sender,)).fetchone() + sender_rowid = int(r2[0]) if r2 and r2[0] is not None else None + except Exception: + sender_rowid = None + if sender_rowid is None: + continue + + tables = _list_message_tables(conn) + if not tables: + continue + + for table_name in tables: + username = resolve_username_from_table(table_name) + if not username: + continue + + qt = _quote_ident(table_name) + params = (start_ts, end_ts, int(sender_rowid)) + + sql_base = ( + f"SELECT local_id, {ts_expr} AS ts " + f"FROM {qt} " + f"WHERE {ts_expr} >= ? AND {ts_expr} < ? " + "AND real_sender_id = ? 
" + "AND local_type != 10000 " + ) + sql_first = sql_base + "ORDER BY ts ASC LIMIT 1" + sql_last = sql_base + "ORDER BY ts DESC LIMIT 1" + + try: + r_first = conn.execute(sql_first, params).fetchone() + except Exception: + r_first = None + if r_first: + try: + local_id = int(r_first["local_id"] or 0) + ts = int(r_first["ts"] or 0) + except Exception: + local_id, ts = 0, 0 + if local_id > 0 and ts > 0: + ref = _SentMomentRef( + ts=int(ts), + score=0, + username=str(username), + db_stem=str(db_path.stem), + table_name=str(table_name), + local_id=int(local_id), + ) + if best_first is None or ref.ts < best_first.ts: + best_first = ref + + try: + r_last = conn.execute(sql_last, params).fetchone() + except Exception: + r_last = None + if r_last: + try: + local_id = int(r_last["local_id"] or 0) + ts = int(r_last["ts"] or 0) + except Exception: + local_id, ts = 0, 0 + if local_id > 0 and ts > 0: + ref = _SentMomentRef( + ts=int(ts), + score=0, + username=str(username), + db_stem=str(db_path.stem), + table_name=str(table_name), + local_id=int(local_id), + ) + if best_last is None or ref.ts > best_last.ts: + best_last = ref + finally: + try: + if conn is not None: + conn.close() + except Exception: + pass + + return best_first, best_last + + +def _compute_sent_moment_refs_from_index( + *, + account_dir: Path, + year: int, + sender_username: str, +) -> tuple[Optional[_SentMomentRef], Optional[_SentMomentRef]]: + start_ts, end_ts = _year_range_epoch_seconds(year) + sender = str(sender_username or "").strip() + if not sender: + return None, None + + index_path = get_chat_search_index_db_path(account_dir) + if not index_path.exists(): + return None, None + + conn = sqlite3.connect(str(index_path)) + try: + has_fts = ( + conn.execute("SELECT 1 FROM sqlite_master WHERE type='table' AND name='message_fts' LIMIT 1").fetchone() + is not None + ) + if not has_fts: + return None, None + + # Convert millisecond timestamps defensively (some datasets store ms). 
+ ts_expr = ( + "CASE " + "WHEN CAST(create_time AS INTEGER) > 1000000000000 " + "THEN CAST(CAST(create_time AS INTEGER)/1000 AS INTEGER) " + "ELSE CAST(create_time AS INTEGER) " + "END" + ) + + # NOTE: local_type=10000 are mostly system messages; exclude to make the moment nicer. + where = ( + f"{ts_expr} >= ? AND {ts_expr} < ? " + "AND db_stem NOT LIKE 'biz_message%' " + "AND sender_username = ? " + "AND CAST(local_type AS INTEGER) != 10000" + ) + + base_sql = ( + "SELECT ts, username, db_stem, table_name, CAST(local_id AS INTEGER) AS local_id, " + "CAST(strftime('%H', datetime(ts, 'unixepoch', 'localtime')) AS INTEGER) AS h, " + "CAST(strftime('%M', datetime(ts, 'unixepoch', 'localtime')) AS INTEGER) AS m, " + "CAST(strftime('%S', datetime(ts, 'unixepoch', 'localtime')) AS INTEGER) AS s " + "FROM (" + f" SELECT {ts_expr} AS ts, username, db_stem, table_name, local_id " + " FROM message_fts " + f" WHERE {where}" + ") sub " + ) + + def row_to_ref(r: Any) -> Optional[_SentMomentRef]: + if not r: + return None + try: + ts = int(r[0] or 0) + except Exception: + ts = 0 + username = str(r[1] or "").strip() + db_stem = str(r[2] or "").strip() + table_name = str(r[3] or "").strip() + try: + local_id = int(r[4] or 0) + except Exception: + local_id = 0 + try: + h = int(r[5] or 0) + m = int(r[6] or 0) + s = int(r[7] or 0) + except Exception: + h, m, s = 0, 0, 0 + + if ts <= 0 or not username or not db_stem or not table_name or local_id <= 0: + return None + + # Treat 00:00-04:59 as "late night": shift them +24h so they rank after 23:xx. 
+ score = (h * 3600 + m * 60 + s) + (86400 if h < 5 else 0) + + return _SentMomentRef( + ts=int(ts), + score=int(score), + username=username, + db_stem=db_stem, + table_name=table_name, + local_id=int(local_id), + ) + + params = (start_ts, end_ts, sender) + sql_earliest = ( + base_sql + + "ORDER BY (h*3600 + m*60 + s + CASE WHEN h < 5 THEN 86400 ELSE 0 END) ASC, ts ASC LIMIT 1" + ) + sql_latest = ( + base_sql + + "ORDER BY (h*3600 + m*60 + s + CASE WHEN h < 5 THEN 86400 ELSE 0 END) DESC, ts DESC LIMIT 1" + ) + + earliest_ref = row_to_ref(conn.execute(sql_earliest, params).fetchone()) + latest_ref = row_to_ref(conn.execute(sql_latest, params).fetchone()) + return earliest_ref, latest_ref + except Exception: + return None, None + finally: + try: + conn.close() + except Exception: + pass + + +def _compute_sent_moment_refs_fallback( + *, + account_dir: Path, + year: int, + sender_username: str, +) -> tuple[Optional[_SentMomentRef], Optional[_SentMomentRef]]: + """Fallback implementation when no search index is present.""" + + start_ts, end_ts = _year_range_epoch_seconds(year) + sender = str(sender_username or "").strip() + if not sender: + return None, None + + # Resolve all sessions (usernames) so we can map msg_xxx/chat_xxx tables back to usernames. 
+ session_usernames = _list_session_usernames(account_dir / "session.db") + md5_to_username: dict[str, str] = {} + table_to_username: dict[str, str] = {} + for u in session_usernames: + md5_hex = hashlib.md5(u.encode("utf-8")).hexdigest().lower() + md5_to_username[md5_hex] = u + table_to_username[f"msg_{md5_hex}"] = u + table_to_username[f"chat_{md5_hex}"] = u + + def resolve_username_from_table(table_name: str) -> Optional[str]: + ln = str(table_name or "").lower() + u = table_to_username.get(ln) + if u: + return u + m = _MD5_HEX_RE.search(ln) + if m: + return md5_to_username.get(m.group(0).lower()) + return None + + db_paths = _iter_message_db_paths(account_dir) + db_paths = [p for p in db_paths if not p.name.lower().startswith("biz_message")] + + ts_expr = ( + "CASE WHEN create_time > 1000000000000 THEN CAST(create_time/1000 AS INTEGER) ELSE create_time END" + ) + + best_earliest: Optional[_SentMomentRef] = None + best_latest: Optional[_SentMomentRef] = None + + for db_path in db_paths: + if not db_path.exists(): + continue + + conn: sqlite3.Connection | None = None + try: + conn = sqlite3.connect(str(db_path)) + conn.row_factory = sqlite3.Row + conn.text_factory = bytes + + # Resolve sender rowid for this shard so we can filter sent messages. + try: + r2 = conn.execute("SELECT rowid FROM Name2Id WHERE user_name = ? 
LIMIT 1", (sender,)).fetchone() + sender_rowid = int(r2[0]) if r2 and r2[0] is not None else None + except Exception: + sender_rowid = None + if sender_rowid is None: + continue + + tables = _list_message_tables(conn) + if not tables: + continue + + for table_name in tables: + username = resolve_username_from_table(table_name) + if not username: + continue + + qt = _quote_ident(table_name) + params = (start_ts, end_ts, int(sender_rowid)) + + sql_base = ( + "SELECT local_id, ts, " + "CAST(strftime('%H', datetime(ts, 'unixepoch', 'localtime')) AS INTEGER) AS h, " + "CAST(strftime('%M', datetime(ts, 'unixepoch', 'localtime')) AS INTEGER) AS m, " + "CAST(strftime('%S', datetime(ts, 'unixepoch', 'localtime')) AS INTEGER) AS s " + "FROM (" + f" SELECT local_id, {ts_expr} AS ts " + f" FROM {qt} " + f" WHERE {ts_expr} >= ? AND {ts_expr} < ? " + " AND real_sender_id = ? " + " AND local_type != 10000" + ") sub " + ) + sql_earliest = ( + sql_base + + "ORDER BY (h*3600 + m*60 + s + CASE WHEN h < 5 THEN 86400 ELSE 0 END) ASC, ts ASC LIMIT 1" + ) + sql_latest = ( + sql_base + + "ORDER BY (h*3600 + m*60 + s + CASE WHEN h < 5 THEN 86400 ELSE 0 END) DESC, ts DESC LIMIT 1" + ) + + try: + r_earliest = conn.execute(sql_earliest, params).fetchone() + except Exception: + r_earliest = None + if r_earliest: + try: + local_id = int(r_earliest["local_id"] or 0) + ts = int(r_earliest["ts"] or 0) + h = int(r_earliest["h"] or 0) + m = int(r_earliest["m"] or 0) + s = int(r_earliest["s"] or 0) + except Exception: + local_id, ts, h, m, s = 0, 0, 0, 0, 0 + if local_id > 0 and ts > 0: + score = (h * 3600 + m * 60 + s) + (86400 if h < 5 else 0) + ref = _SentMomentRef( + ts=int(ts), + score=int(score), + username=str(username), + db_stem=str(db_path.stem), + table_name=str(table_name), + local_id=int(local_id), + ) + if best_earliest is None or ref.score < best_earliest.score or ( + ref.score == best_earliest.score and ref.ts < best_earliest.ts + ): + best_earliest = ref + + try: + r_latest = 
conn.execute(sql_latest, params).fetchone() + except Exception: + r_latest = None + if r_latest: + try: + local_id = int(r_latest["local_id"] or 0) + ts = int(r_latest["ts"] or 0) + h = int(r_latest["h"] or 0) + m = int(r_latest["m"] or 0) + s = int(r_latest["s"] or 0) + except Exception: + local_id, ts, h, m, s = 0, 0, 0, 0, 0 + if local_id > 0 and ts > 0: + score = (h * 3600 + m * 60 + s) + (86400 if h < 5 else 0) + ref = _SentMomentRef( + ts=int(ts), + score=int(score), + username=str(username), + db_stem=str(db_path.stem), + table_name=str(table_name), + local_id=int(local_id), + ) + if best_latest is None or ref.score > best_latest.score or ( + ref.score == best_latest.score and ref.ts > best_latest.ts + ): + best_latest = ref + finally: + try: + if conn is not None: + conn.close() + except Exception: + pass + + return best_earliest, best_latest + + +def _fetch_message_moment_payload( + *, + account_dir: Path, + ref: _SentMomentRef, + contact_rows: dict[str, sqlite3.Row], +) -> Optional[dict[str, Any]]: + """Resolve ref -> a payload for the frontend card (content is blurred in UI).""" + + username = str(ref.username or "").strip() + if not username: + return None + + db_path = account_dir / f"{ref.db_stem}.db" + if not db_path.exists(): + return None + + conn: sqlite3.Connection | None = None + try: + conn = sqlite3.connect(str(db_path)) + conn.row_factory = sqlite3.Row + conn.text_factory = bytes + + my_rowid: Optional[int] + try: + r2 = conn.execute("SELECT rowid FROM Name2Id WHERE user_name = ? 
LIMIT 1", (str(account_dir.name),)).fetchone() + my_rowid = int(r2[0]) if r2 and r2[0] is not None else None + except Exception: + my_rowid = None + + qt = _quote_ident(ref.table_name) + sql_with_join = ( + "SELECT " + "m.local_id, m.server_id, m.local_type, m.sort_seq, m.real_sender_id, m.create_time, " + "m.message_content, m.compress_content, n.user_name AS sender_username " + f"FROM {qt} m " + "LEFT JOIN Name2Id n ON m.real_sender_id = n.rowid " + "WHERE m.local_id = ? LIMIT 1" + ) + sql_no_join = ( + "SELECT " + "m.local_id, m.server_id, m.local_type, m.sort_seq, m.real_sender_id, m.create_time, " + "m.message_content, m.compress_content, '' AS sender_username " + f"FROM {qt} m " + "WHERE m.local_id = ? LIMIT 1" + ) + + try: + row = conn.execute(sql_with_join, (int(ref.local_id),)).fetchone() + except Exception: + row = None + if row is None: + try: + row = conn.execute(sql_no_join, (int(ref.local_id),)).fetchone() + except Exception: + row = None + if row is None: + return None + + hit = _row_to_search_hit( + row, + db_path=db_path, + table_name=str(ref.table_name), + username=username, + account_dir=account_dir, + is_group=bool(username.endswith("@chatroom")), + my_rowid=my_rowid, + ) + + content = str(hit.get("content") or "").strip() + content = re.sub(r"\s+", " ", content).strip() + if len(content) > 120: + content = content[:117] + "..." 
+ + dt = datetime.fromtimestamp(int(ref.ts)) + + contact_row = contact_rows.get(username) + display = _pick_display_name(contact_row, username) + avatar = _build_avatar_url(str(account_dir.name or ""), username) if username else "" + + return { + "timestamp": int(ref.ts), + "date": dt.strftime("%Y-%m-%d"), + "time": dt.strftime("%H:%M"), + "username": username, + "displayName": display, + "maskedName": _mask_name(display), + "avatarUrl": avatar, + "content": content, + "renderType": str(hit.get("renderType") or ""), + "isGroup": bool(username.endswith("@chatroom")), + } + except Exception: + return None + finally: + try: + if conn is not None: + conn.close() + except Exception: + pass + + +def _list_message_tables(conn: sqlite3.Connection) -> list[str]: + try: + rows = conn.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall() + except Exception: + return [] + names: list[str] = [] + for r in rows: + if not r or not r[0]: + continue + name = _decode_sqlite_text(r[0]).strip() + if not name: + continue + ln = name.lower() + if ln.startswith(("msg_", "chat_")): + names.append(name) + return names + + +def _accumulate_db( + *, + db_path: Path, + start_ts: int, + end_ts: int, + matrix: list[list[int]], + sender_username: str | None = None, +) -> int: + """Accumulate message counts from one message shard DB into matrix. + + Returns the number of messages counted. + """ + + if not db_path.exists(): + return 0 + + conn: sqlite3.Connection | None = None + try: + conn = sqlite3.connect(str(db_path)) + + tables = _list_message_tables(conn) + if not tables: + return 0 + + # Convert millisecond timestamps defensively (some datasets store ms). + # The expression yields epoch seconds as INTEGER. + ts_expr = ( + "CASE WHEN create_time > 1000000000000 THEN CAST(create_time/1000 AS INTEGER) ELSE create_time END" + ) + + # Optional sender filter (best-effort). When provided, we only count + # messages whose `real_sender_id` maps to `sender_username`. 
+ sender_rowid: int | None = None + if sender_username and str(sender_username).strip(): + try: + r = conn.execute( + "SELECT rowid FROM Name2Id WHERE user_name = ? LIMIT 1", + (str(sender_username).strip(),), + ).fetchone() + if r is not None and r[0] is not None: + sender_rowid = int(r[0]) + except Exception: + sender_rowid = None + + counted = 0 + for table_name in tables: + qt = _quote_ident(table_name) + sender_where = "" + params: tuple[Any, ...] + if sender_rowid is not None: + sender_where = " AND real_sender_id = ?" + params = (start_ts, end_ts, sender_rowid) + else: + params = (start_ts, end_ts) + sql = ( + "SELECT " + # %w: 0..6 with Sunday=0, so shift to Monday=0..Sunday=6 + "((CAST(strftime('%w', datetime(ts, 'unixepoch', 'localtime')) AS INTEGER) + 6) % 7) AS weekday, " + "CAST(strftime('%H', datetime(ts, 'unixepoch', 'localtime')) AS INTEGER) AS hour, " + "COUNT(1) AS cnt " + "FROM (" + f" SELECT {ts_expr} AS ts" + f" FROM {qt}" + f" WHERE {ts_expr} >= ? AND {ts_expr} < ?{sender_where}" + ") sub " + "GROUP BY weekday, hour" + ) + try: + rows = conn.execute(sql, params).fetchall() + except Exception: + continue + + for weekday, hour, cnt in rows: + try: + w = int(weekday) + h = int(hour) + c = int(cnt) + except Exception: + continue + if not (0 <= w < 7 and 0 <= h < 24 and c > 0): + continue + matrix[w][h] += c + counted += c + + return counted + finally: + try: + if conn is not None: + conn.close() + except Exception: + pass + + +def compute_weekday_hour_heatmap(*, account_dir: Path, year: int, sender_username: str | None = None) -> WeekdayHourHeatmap: + start_ts, end_ts = _year_range_epoch_seconds(year) + + matrix: list[list[int]] = [[0 for _ in range(24)] for _ in range(7)] + total = 0 + + # Prefer using our unified search index if available; it's much faster than scanning all msg tables. 
+ index_path = get_chat_search_index_db_path(account_dir) + if index_path.exists(): + conn = sqlite3.connect(str(index_path)) + try: + has_fts = ( + conn.execute( + "SELECT 1 FROM sqlite_master WHERE type='table' AND name='message_fts' LIMIT 1" + ).fetchone() + is not None + ) + if has_fts: + # Convert millisecond timestamps defensively (some datasets store ms). + ts_expr = ( + "CASE " + "WHEN CAST(create_time AS INTEGER) > 1000000000000 " + "THEN CAST(CAST(create_time AS INTEGER)/1000 AS INTEGER) " + "ELSE CAST(create_time AS INTEGER) " + "END" + ) + sender_clause = "" + if sender_username and str(sender_username).strip(): + sender_clause = " AND sender_username = ?" + sql = ( + "SELECT " + "((CAST(strftime('%w', datetime(ts, 'unixepoch', 'localtime')) AS INTEGER) + 6) % 7) AS weekday, " + "CAST(strftime('%H', datetime(ts, 'unixepoch', 'localtime')) AS INTEGER) AS hour, " + "COUNT(1) AS cnt " + "FROM (" + f" SELECT {ts_expr} AS ts" + " FROM message_fts" + f" WHERE {ts_expr} >= ? AND {ts_expr} < ?" + " AND db_stem NOT LIKE 'biz_message%'" + f"{sender_clause}" + ") sub " + "GROUP BY weekday, hour" + ) + + t0 = time.time() + try: + params: tuple[Any, ...] 
= (start_ts, end_ts) + if sender_username and str(sender_username).strip(): + params = (start_ts, end_ts, str(sender_username).strip()) + rows = conn.execute(sql, params).fetchall() + except Exception: + rows = [] + + for r in rows: + if not r: + continue + try: + w = int(r[0] or 0) + h = int(r[1] or 0) + cnt = int(r[2] or 0) + except Exception: + continue + if 0 <= w < 7 and 0 <= h < 24 and cnt > 0: + matrix[w][h] += cnt + total += cnt + + logger.info( + "Wrapped heatmap computed (search index): account=%s year=%s total=%s sender=%s db=%s elapsed=%.2fs", + str(account_dir.name or "").strip(), + year, + total, + str(sender_username).strip() if sender_username else "*", + str(index_path.name), + time.time() - t0, + ) + + return WeekdayHourHeatmap( + weekday_labels=list(_WEEKDAY_LABELS_ZH), + hour_labels=list(_HOUR_LABELS), + matrix=matrix, + total_messages=total, + ) + finally: + try: + conn.close() + except Exception: + pass + + db_paths = _iter_message_db_paths(account_dir) + # Default: exclude official/biz shards (biz_message*.db) to reduce noise. + db_paths = [p for p in db_paths if not p.name.lower().startswith("biz_message")] + my_wxid = str(account_dir.name or "").strip() + t0 = time.time() + for db_path in db_paths: + total += _accumulate_db( + db_path=db_path, + start_ts=start_ts, + end_ts=end_ts, + matrix=matrix, + sender_username=str(sender_username).strip() if sender_username else None, + ) + + logger.info( + "Wrapped heatmap computed: account=%s year=%s total=%s sender=%s dbs=%s elapsed=%.2fs", + my_wxid, + year, + total, + str(sender_username).strip() if sender_username else "*", + len(db_paths), + time.time() - t0, + ) + + return WeekdayHourHeatmap( + weekday_labels=list(_WEEKDAY_LABELS_ZH), + hour_labels=list(_HOUR_LABELS), + matrix=matrix, + total_messages=total, + ) + + +def build_card_01_cyber_schedule( + *, + account_dir: Path, + year: int, + heatmap: WeekdayHourHeatmap | None = None, +) -> dict[str, Any]: + """Card #1: 年度赛博作息表 (24x7 heatmap). 
+ + `heatmap` can be provided by the caller to reuse computation across cards. + """ + + sender = str(account_dir.name or "").strip() + heatmap = heatmap or compute_weekday_hour_heatmap(account_dir=account_dir, year=year, sender_username=sender) + + narrative = "今年你没有发出聊天消息" + if heatmap.total_messages > 0: + hour_totals = [sum(heatmap.matrix[w][h] for w in range(7)) for h in range(24)] + # Deterministic: pick earliest hour on ties. + most_active_hour = max(range(24), key=lambda h: (hour_totals[h], -h)) + + weekday_totals = [sum(heatmap.matrix[w][h] for h in range(24)) for w in range(7)] + # Deterministic: pick earliest weekday on ties. + most_active_weekday = max(range(7), key=lambda w: (weekday_totals[w], -w)) + weekday_name = _get_weekday_name(most_active_weekday) + + narrative = _build_narrative( + hour=most_active_hour, + weekday=weekday_name, + total=heatmap.total_messages, + ) + + # Earliest/latest sent message moments (best-effort). + earliest_sent = None + latest_sent = None + if heatmap.total_messages > 0: + t0 = time.time() + ref_earliest, ref_latest = _compute_sent_moment_refs_from_index( + account_dir=account_dir, + year=year, + sender_username=sender, + ) + if ref_earliest is None and ref_latest is None: + ref_earliest, ref_latest = _compute_sent_moment_refs_fallback( + account_dir=account_dir, + year=year, + sender_username=sender, + ) + + usernames: list[str] = [] + if ref_earliest and ref_earliest.username: + usernames.append(ref_earliest.username) + if ref_latest and ref_latest.username and ref_latest.username not in usernames: + usernames.append(ref_latest.username) + contact_rows = _load_contact_rows(account_dir / "contact.db", usernames) if usernames else {} + + if ref_earliest is not None: + earliest_sent = _fetch_message_moment_payload(account_dir=account_dir, ref=ref_earliest, contact_rows=contact_rows) + if ref_latest is not None: + latest_sent = _fetch_message_moment_payload(account_dir=account_dir, ref=ref_latest, 
contact_rows=contact_rows) + + logger.info( + "Wrapped card#1 moments computed: account=%s year=%s earliest=%s latest=%s elapsed=%.2fs", + str(account_dir.name or "").strip(), + year, + "ok" if earliest_sent else "none", + "ok" if latest_sent else "none", + time.time() - t0, + ) + + # Year's chronologically first/last sent messages (by timestamp, not time-of-day). + year_first_sent = None + year_last_sent = None + if heatmap.total_messages > 0: + t0 = time.time() + ref_first, ref_last = _compute_year_first_last_from_index( + account_dir=account_dir, + year=year, + sender_username=sender, + ) + if ref_first is None and ref_last is None: + ref_first, ref_last = _compute_year_first_last_fallback( + account_dir=account_dir, + year=year, + sender_username=sender, + ) + + # Collect usernames for contact lookup (reuse existing contact_rows if possible). + extra_usernames: list[str] = [] + if ref_first and ref_first.username: + extra_usernames.append(ref_first.username) + if ref_last and ref_last.username and ref_last.username not in extra_usernames: + extra_usernames.append(ref_last.username) + # Load contacts for new usernames not already in contact_rows. 
+ new_usernames = [u for u in extra_usernames if u not in contact_rows] + if new_usernames: + extra_contacts = _load_contact_rows(account_dir / "contact.db", new_usernames) + contact_rows.update(extra_contacts) + + if ref_first is not None: + year_first_sent = _fetch_message_moment_payload(account_dir=account_dir, ref=ref_first, contact_rows=contact_rows) + if ref_last is not None: + year_last_sent = _fetch_message_moment_payload(account_dir=account_dir, ref=ref_last, contact_rows=contact_rows) + + logger.info( + "Wrapped card#1 year first/last computed: account=%s year=%s first=%s last=%s elapsed=%.2fs", + str(account_dir.name or "").strip(), + year, + "ok" if year_first_sent else "none", + "ok" if year_last_sent else "none", + time.time() - t0, + ) + + return { + "id": 1, + "title": "你是「早八人」还是「夜猫子」?", + "scope": "global", + "category": "A", + "status": "ok", + "kind": "time/weekday_hour_heatmap", + "narrative": narrative, + "data": { + "weekdayLabels": heatmap.weekday_labels, + "hourLabels": heatmap.hour_labels, + "matrix": heatmap.matrix, + "totalMessages": heatmap.total_messages, + "earliestSent": earliest_sent, + "latestSent": latest_sent, + "yearFirstSent": year_first_sent, + "yearLastSent": year_last_sent, + }, + } diff --git a/src/wechat_decrypt_tool/wrapped/cards/card_02_message_chars.py b/src/wechat_decrypt_tool/wrapped/cards/card_02_message_chars.py new file mode 100644 index 0000000..631ebff --- /dev/null +++ b/src/wechat_decrypt_tool/wrapped/cards/card_02_message_chars.py @@ -0,0 +1,806 @@ +from __future__ import annotations + +import math +import random +import sqlite3 +import time +from collections import Counter +from datetime import datetime +from pathlib import Path +from typing import Any, Optional + +from pypinyin import lazy_pinyin, Style + +from ...chat_helpers import _decode_message_content, _decode_sqlite_text, _iter_message_db_paths, _quote_ident +from ...chat_search_index import get_chat_search_index_db_path +from ...logging_config import 
get_logger + +logger = get_logger(__name__) + + +# 键盘布局中用于“磨损”展示的按键(字母 + 数字 + 常用标点)。 +# 注意:功能键(Tab/Enter/Backspace 等)不统计;空格键单独放在 spaceHits。 +_KEYBOARD_KEYS = ( + list("`1234567890-=") + + list("qwertyuiop[]\\") + + list("asdfghjkl;\'") + + list("zxcvbnm,./") +) +_KEYBOARD_KEY_SET = set(_KEYBOARD_KEYS) + +# 将“显示字符”映射到键盘上的“实际按键”(用基础键位表示,如 '!' => '1', '?' => '/')。 +_CHAR_TO_KEY: dict[str, str] = { + # ASCII shifted symbols + "~": "`", + "!": "1", + "@": "2", + "#": "3", + "$": "4", + "%": "5", + "^": "6", + "&": "7", + "*": "8", + "(": "9", + ")": "0", + "_": "-", + "+": "=", + "{": "[", + "}": "]", + "|": "\\", + ":": ";", + '"': "'", + "<": ",", + ">": ".", + "?": "/", + # Common fullwidth / CJK punctuation (approximate key mapping) + "~": "`", + "!": "1", + "@": "2", + "#": "3", + "$": "4", + "%": "5", + "^": "6", + "&": "7", + "*": "8", + "(": "9", + ")": "0", + "¥": "4", + "¥": "4", + "_": "-", + "+": "=", + "{": "[", + "}": "]", + "|": "\\", + ":": ";", + """: "'", + "<": ",", + ">": ".", + "?": "/", + ",": ",", + "、": ",", + "。": ".", + ".": ".", + ";": ";", + "“": "'", + "”": "'", + "‘": "'", + "’": "'", + "【": "[", + "】": "]", + "《": ",", + "》": ".", + "—": "-", + "-": "-", + "=": "=", + "/": "/", + "\": "\\", + "·": "`", # 常见:中文输入法下“·”常用 ` 键打出 + "…": ".", # 近似处理:省略号按 '.' 
计 +} + +# 默认拼音字母频率分布(用于:有中文但采样不足时的兜底估算) +_DEFAULT_PINYIN_FREQ = { + "a": 0.121, + "i": 0.118, + "n": 0.098, + "e": 0.089, + "u": 0.082, + "g": 0.072, + "h": 0.065, + "o": 0.052, + "z": 0.048, + "s": 0.042, + "x": 0.038, + "y": 0.036, + "d": 0.032, + "l": 0.028, + "j": 0.026, + "b": 0.022, + "c": 0.020, + "w": 0.018, + "m": 0.016, + "f": 0.014, + "t": 0.012, + "r": 0.010, + "p": 0.009, + "k": 0.007, + "q": 0.005, + "v": 0.001, +} +_AVG_PINYIN_LEN = 2.8 + + +def _is_cjk_han(ch: str) -> bool: + """是否为中文汉字(用于拼音估算)。""" + if not ch: + return False + o = ord(ch) + return (0x4E00 <= o <= 0x9FFF) or (0x3400 <= o <= 0x4DBF) + + +def _char_to_key(ch: str) -> str | None: + """将单个字符映射为键盘按键 code(与前端键盘布局的 code 保持一致)。""" + if not ch: + return None + + # Fullwidth digits: '0'..'9' + if "0" <= ch <= "9": + return chr(ord(ch) - ord("0") + ord("0")) + + if ch in _KEYBOARD_KEY_SET: + return ch + + mapped = _CHAR_TO_KEY.get(ch) + if mapped is not None: + return mapped + + if ch.isalpha(): + low = ch.lower() + if low in _KEYBOARD_KEY_SET: + return low + + return None + + +def _update_keyboard_counters( + text: str, + *, + direct_counter: Counter, + pinyin_counter: Counter, + pinyin_cache: dict[str, str], + do_pinyin: bool, +) -> tuple[int, int, int]: + """ + 扫描一条消息文本,累加: + - direct_counter: 非中文汉字部分(英文/数字/标点)可直接映射到按键的统计(精确) + - pinyin_counter: 中文汉字部分的拼音字母统计(仅当 do_pinyin=True 时才做;用于采样估算) + 并返回 (nonspace_chars, cjk_han_chars, space_chars)。 + """ + if not text: + return 0, 0, 0 + + nonspace = 0 + cjk = 0 + spaces = 0 + + for ch in text: + # 真实可见空格:统计进 spaceHits(不计入 sentChars/receivedChars 的口径) + if ch == " " or ch == "\u3000": + spaces += 1 + continue + if ch.isspace(): + continue + + nonspace += 1 + + if _is_cjk_han(ch): + cjk += 1 + if do_pinyin: + py = pinyin_cache.get(ch) + if py is None: + lst = lazy_pinyin(ch, style=Style.NORMAL) + py = (lst[0] or "").lower() if lst else "" + pinyin_cache[ch] = py + for letter in py: + # pypinyin 在 Style.NORMAL 下通常只会给出 a-z(含 ü=>v),这里再做一次过滤。 + if letter 
in _KEYBOARD_KEY_SET: + pinyin_counter[letter] += 1 + continue + + k = _char_to_key(ch) + if k is not None: + direct_counter[k] += 1 + + return nonspace, cjk, spaces + + +def compute_keyboard_stats(*, account_dir: Path, year: int, sample_rate: float = 1.0) -> dict[str, Any]: + """ + 统计键盘敲击数据。 + + - 英文/数字/标点:可直接从消息文本映射到按键(精确统计) + - 中文汉字:需要拼音转换,成本高;对“消息”做采样(sample_rate)后估算总体拼音字母分布 + """ + start_ts, end_ts = _year_range_epoch_seconds(year) + my_username = str(account_dir.name or "").strip() + + sample_rate = max(0.0, min(1.0, float(sample_rate))) + + direct_counter: Counter[str] = Counter() + pinyin_counter: Counter[str] = Counter() + pinyin_cache: dict[str, str] = {} + + total_cjk_chars = 0 + sampled_cjk_chars = 0 + actual_space_chars = 0 + + total_messages = 0 + sampled_messages = 0 + used_index = False + + # 优先使用搜索索引(更快) + index_path = get_chat_search_index_db_path(account_dir) + if index_path.exists(): + conn = sqlite3.connect(str(index_path)) + try: + has_fts = ( + conn.execute("SELECT 1 FROM sqlite_master WHERE type='table' AND name='message_fts' LIMIT 1").fetchone() + is not None + ) + if has_fts and my_username: + ts_expr = ( + "CASE " + "WHEN CAST(create_time AS INTEGER) > 1000000000000 " + "THEN CAST(CAST(create_time AS INTEGER)/1000 AS INTEGER) " + "ELSE CAST(create_time AS INTEGER) " + "END" + ) + where = ( + f"{ts_expr} >= ? AND {ts_expr} < ? " + "AND db_stem NOT LIKE 'biz_message%' " + "AND render_type = 'text' " + "AND \"text\" IS NOT NULL " + "AND TRIM(CAST(\"text\" AS TEXT)) != '' " + "AND sender_username = ?" 
+ ) + + sql = f"SELECT \"text\" FROM message_fts WHERE {where}" + try: + cur = conn.execute(sql, (start_ts, end_ts, my_username)) + used_index = True + for row in cur: + txt = str(row[0] or "").strip() + if not txt: + continue + total_messages += 1 + + if sample_rate >= 1.0: + do_sample = True + elif sample_rate <= 0.0: + do_sample = False + else: + do_sample = random.random() < sample_rate + + if do_sample: + sampled_messages += 1 + + _, cjk, spaces = _update_keyboard_counters( + txt, + direct_counter=direct_counter, + pinyin_counter=pinyin_counter, + pinyin_cache=pinyin_cache, + do_pinyin=do_sample, + ) + total_cjk_chars += cjk + actual_space_chars += spaces + if do_sample: + sampled_cjk_chars += cjk + except Exception: + used_index = False + finally: + try: + conn.close() + except Exception: + pass + + # 如果索引不可用,回退到直接扫描(慢,但兼容) + if not used_index: + db_paths = _iter_message_db_paths(account_dir) + for db_path in db_paths: + try: + if db_path.name.lower().startswith("biz_message"): + continue + except Exception: + pass + if not db_path.exists(): + continue + + conn: sqlite3.Connection | None = None + try: + conn = sqlite3.connect(str(db_path)) + conn.row_factory = sqlite3.Row + conn.text_factory = bytes + + my_rowid: Optional[int] + try: + r2 = conn.execute("SELECT rowid FROM Name2Id WHERE user_name = ? LIMIT 1", (my_username,)).fetchone() + my_rowid = int(r2[0]) if r2 and r2[0] is not None else None + except Exception: + my_rowid = None + + if my_rowid is None: + continue + + tables = _list_message_tables(conn) + if not tables: + continue + + ts_expr = ( + "CASE " + "WHEN CAST(create_time AS INTEGER) > 1000000000000 " + "THEN CAST(CAST(create_time AS INTEGER)/1000 AS INTEGER) " + "ELSE CAST(create_time AS INTEGER) " + "END" + ) + + for table in tables: + qt = _quote_ident(table) + sql = ( + "SELECT real_sender_id, message_content, compress_content " + f"FROM {qt} " + "WHERE local_type = 1 " + f" AND {ts_expr} >= ? AND {ts_expr} < ?" 
+ ) + try: + cur = conn.execute(sql, (start_ts, end_ts)) + except Exception: + continue + + for r in cur: + try: + rsid = int(r["real_sender_id"] or 0) + except Exception: + rsid = 0 + + if rsid != my_rowid: + continue + + txt = "" + try: + txt = _decode_message_content(r["compress_content"], r["message_content"]).strip() + except Exception: + txt = "" + if not txt: + continue + total_messages += 1 + if sample_rate >= 1.0: + do_sample = True + elif sample_rate <= 0.0: + do_sample = False + else: + do_sample = random.random() < sample_rate + if do_sample: + sampled_messages += 1 + _, cjk, spaces = _update_keyboard_counters( + txt, + direct_counter=direct_counter, + pinyin_counter=pinyin_counter, + pinyin_cache=pinyin_cache, + do_pinyin=do_sample, + ) + total_cjk_chars += cjk + actual_space_chars += spaces + if do_sample: + sampled_cjk_chars += cjk + finally: + if conn is not None: + try: + conn.close() + except Exception: + pass + + # 中文拼音部分:按“中文汉字数量”缩放(比按总字符缩放更合理,也能让数字/标点更准确) + est_pinyin_counter: Counter[str] = Counter() + sampled_pinyin_hits = int(sum(pinyin_counter.values())) + if total_cjk_chars > 0: + if sampled_cjk_chars > 0 and sampled_pinyin_hits > 0: + scale_factor = total_cjk_chars / sampled_cjk_chars + for k, cnt in pinyin_counter.items(): + est_pinyin_counter[k] = int(round(cnt * scale_factor)) + else: + # 兜底:有中文但采样不足(或采样中无法提取拼音),用默认分布估算 + total_pinyin_hits = int(total_cjk_chars * _AVG_PINYIN_LEN) + for k, freq in _DEFAULT_PINYIN_FREQ.items(): + est_pinyin_counter[k] = int(freq * total_pinyin_hits) + + key_hits_counter: Counter[str] = Counter() + key_hits_counter.update(direct_counter) + key_hits_counter.update(est_pinyin_counter) + + key_hits: dict[str, int] = {k: int(key_hits_counter.get(k, 0)) for k in _KEYBOARD_KEYS} + total_non_space_hits = int(sum(key_hits.values())) + + # 空格键:= 真实空格(如英文句子) + 中文拼音选词带来的“隐含空格”(粗略估算) + implied_space_hits = int(sum(est_pinyin_counter.values()) * 0.15) + space_hits = int(actual_space_chars + implied_space_hits) + + 
total_key_hits = int(total_non_space_hits + space_hits) + + # 频率只对“非空格键”归一化;空格频率由 spaceHits 单独给出 + key_frequency: dict[str, float] = {} + for k in _KEYBOARD_KEYS: + key_frequency[k] = (key_hits.get(k, 0) / total_non_space_hits) if total_non_space_hits > 0 else 0.0 + + logger.info( + "Keyboard stats computed: account=%s year=%s sample_rate=%.2f msgs=%d sampled=%d cjk=%d sampled_cjk=%d total_hits=%d", + my_username, + year, + float(sample_rate), + int(total_messages), + int(sampled_messages), + int(total_cjk_chars), + int(sampled_cjk_chars), + int(total_key_hits), + ) + + return { + "totalKeyHits": total_key_hits, + "keyHits": key_hits, + "keyFrequency": key_frequency, + "spaceHits": space_hits, + } + + +def _year_range_epoch_seconds(year: int) -> tuple[int, int]: + # Use local time boundaries (same semantics as sqlite "localtime"). + start = int(datetime(year, 1, 1).timestamp()) + end = int(datetime(year + 1, 1, 1).timestamp()) + return start, end + + +def _list_message_tables(conn: sqlite3.Connection) -> list[str]: + try: + rows = conn.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall() + except Exception: + return [] + names: list[str] = [] + for r in rows: + if not r or not r[0]: + continue + name = _decode_sqlite_text(r[0]).strip() + if not name: + continue + ln = name.lower() + if ln.startswith(("msg_", "chat_")): + names.append(name) + return names + + +# Book analogy table (for "sent chars"). 
+_BOOK_ANALOGIES: list[dict[str, Any]] = [ + {"min": 1, "max": 100_000, "level": "小量级", "options": ["一本《小王子》", "一本《解忧杂货店》"]}, + {"min": 100_000, "max": 500_000, "level": "中量级", "options": ["一本《三体Ⅰ:地球往事》", "一套《朝花夕拾+呐喊》(鲁迅经典合集)"]}, + {"min": 500_000, "max": 1_000_000, "level": "大量级", "options": ["一本《红楼梦》(全本)", "一本《百年孤独》(全本无删减)"]}, + {"min": 1_000_000, "max": 5_000_000, "level": "超大量级", "options": ["一套《三体》全三册", "一本《西游记》(全本白话文)"]}, + {"min": 5_000_000, "max": 10_000_000, "level": "千万级Ⅰ", "options": ["一套金庸武侠《射雕+神雕+倚天》(经典三部曲)", "一套《平凡的世界》全三册"]}, + {"min": 10_000_000, "max": 50_000_000, "level": "千万级Ⅱ", "options": ["一套《哈利·波特》全七册(中文版)", "一本《资治通鉴》(文白对照全本)"]}, + {"min": 50_000_000, "max": 100_000_000, "level": "亿级Ⅰ", "options": ["一套《冰与火之歌》全系列(中文版)", "一本《史记》(全本含集解索隐正义)"]}, + {"min": 100_000_000, "max": 500_000_000, "level": "亿级Ⅱ", "options": ["一套《中国大百科全书》(单卷本全册)", "一套《金庸武侠全集》(15部完整版)"]}, + {"min": 500_000_000, "max": None, "level": "亿级Ⅲ", "options": ["一套《四库全书》(文津阁精选集)", "一套《大英百科全书》(国际完整版)"]}, +] + + +# A4 analogy table (for "received chars"). +# Estimation assumptions: +# - A4 (single side) holds about 1700 chars (depends on font/spacing; this is an approximation). +# - 70g A4 paper thickness is roughly 0.1mm => 100 sheets ≈ 1cm. +_A4_CHARS_PER_SHEET = 1700 +_A4_SHEETS_PER_CM = 100.0 + +# "Level" is a coarse grouping by character count; the physical object analogy is picked by the +# estimated stacked height (so the text stays self-consistent). 
+_A4_LEVELS: list[dict[str, Any]] = [ + {"min": 1, "max": 100_000, "level": "小量级"}, + {"min": 100_000, "max": 500_000, "level": "中量级"}, + {"min": 500_000, "max": 1_000_000, "level": "大量级"}, + {"min": 1_000_000, "max": 5_000_000, "level": "超大量级"}, + {"min": 5_000_000, "max": 10_000_000, "level": "千万级Ⅰ"}, + {"min": 10_000_000, "max": 50_000_000, "level": "千万级Ⅱ"}, + {"min": 50_000_000, "max": 100_000_000, "level": "亿级Ⅰ"}, + {"min": 100_000_000, "max": 500_000_000, "level": "亿级Ⅱ"}, + {"min": 500_000_000, "max": None, "level": "亿级Ⅲ"}, +] + +# Physical object analogies by stacked height (cm). +_A4_HEIGHT_ANALOGIES: list[dict[str, Any]] = [ + {"minCm": 0.0, "maxCm": 0.5, "objects": ["1枚硬币的厚度", "1张银行卡的厚度"]}, + {"minCm": 0.5, "maxCm": 2.0, "objects": ["1叠便利贴", "1本薄款软皮笔记本"]}, + {"minCm": 2.0, "maxCm": 6.0, "objects": ["3-5本加厚硬壳笔记本", "1本厚词典"]}, + {"minCm": 6.0, "maxCm": 30.0, "objects": ["10本办公台账", "1个矮款文件柜单层满装"]}, + {"minCm": 30.0, "maxCm": 60.0, "objects": ["1个标准办公文件盒", "1个登机箱(约55cm)"]}, + {"minCm": 60.0, "maxCm": 200.0, "objects": ["1.7-1.8m成年人身高", "2个办公文件柜叠放"]}, + {"minCm": 200.0, "maxCm": 600.0, "objects": ["2层普通住宅层高", "1棵成年矮树(枇杷树/橘子树)"]}, + {"minCm": 600.0, "maxCm": 2500.0, "objects": ["4-8层居民楼层高", "1棵成年大树(梧桐树/樟树)"]}, + {"minCm": 2500.0, "maxCm": 5000.0, "objects": ["10-18层小高层住宅", "1栋小型临街写字楼"]}, + {"minCm": 5000.0, "maxCm": 25000.0, "objects": ["20-80层超高层住宅", "城市核心区小高层地标"]}, + {"minCm": 25000.0, "maxCm": None, "objects": ["1栋城市核心超高层写字楼", "国内中型摩天大楼(约100层)"]}, +] + + +def _pick_option(options: list[str], *, seed: int) -> str: + if not options: + return "" + idx = abs(int(seed)) % len(options) + return str(options[idx] or "").strip() + + +def _pick_book_analogy(chars: int) -> Optional[dict[str, Any]]: + n = int(chars or 0) + if n <= 0: + return None + + for row in _BOOK_ANALOGIES: + lo = int(row["min"] or 0) + hi = row.get("max") + if n < lo: + continue + if hi is None or n < int(hi): + picked = _pick_option(list(row.get("options") or []), seed=n) + return { + "level": 
str(row.get("level") or ""), + "book": picked, + "text": f"相当于写了{picked}" if picked else "", + } + return None + + +def _format_height(height_cm: float) -> str: + try: + cm = float(height_cm) + except Exception: + cm = 0.0 + if cm <= 0: + return "0cm" + if cm < 1: + mm = cm * 10.0 + return f"{mm:.1f}mm" + if cm < 100: + if cm < 10: + return f"{cm:.1f}cm" + return f"{cm:.0f}cm" + m = cm / 100.0 + if m < 10: + return f"{m:.1f}m" + return f"{m:.0f}m" + + +def _a4_stats(chars: int) -> dict[str, Any]: + # Rough estimate: 1 A4 page ~ 1700 chars; 100 pages ~ 1cm thick. + n = int(chars or 0) + if n <= 0: + return {"sheets": 0, "heightCm": 0.0, "heightText": "0cm"} + sheets = int(math.ceil(n / float(_A4_CHARS_PER_SHEET))) + height_cm = float(sheets) / float(_A4_SHEETS_PER_CM) + return {"sheets": int(sheets), "heightCm": float(height_cm), "heightText": _format_height(height_cm)} + + +def _pick_a4_analogy(chars: int) -> Optional[dict[str, Any]]: + n = int(chars or 0) + if n <= 0: + return None + + a4 = _a4_stats(n) + + level = "" + for row in _A4_LEVELS: + lo = int(row["min"] or 0) + hi = row.get("max") + if n < lo: + continue + if hi is None or n < int(hi): + level = str(row.get("level") or "") + break + + height_cm = float(a4.get("heightCm") or 0.0) + picked = "" + for row in _A4_HEIGHT_ANALOGIES: + lo = float(row.get("minCm") or 0.0) + hi = row.get("maxCm") + if height_cm < lo: + continue + if hi is None or height_cm < float(hi): + picked = _pick_option(list(row.get("objects") or []), seed=n) + break + + return { + "level": level, + "object": picked, + "a4": a4, + "text": ( + f"大约 {int(a4['sheets']):,} 张 A4,堆起来约 {a4['heightText']}" + (f",差不多是{picked}的高度" if picked else "") + ).strip(","), + } + + +def compute_text_message_char_counts(*, account_dir: Path, year: int) -> tuple[int, int]: + """Return (sent_chars, received_chars) for render_type='text' messages in the year.""" + + start_ts, end_ts = _year_range_epoch_seconds(year) + my_username = str(account_dir.name or 
"").strip() + + # Prefer search index when available. + index_path = get_chat_search_index_db_path(account_dir) + if index_path.exists(): + conn = sqlite3.connect(str(index_path)) + try: + has_fts = ( + conn.execute("SELECT 1 FROM sqlite_master WHERE type='table' AND name='message_fts' LIMIT 1").fetchone() + is not None + ) + if has_fts: + ts_expr = ( + "CASE " + "WHEN CAST(create_time AS INTEGER) > 1000000000000 " + "THEN CAST(CAST(create_time AS INTEGER)/1000 AS INTEGER) " + "ELSE CAST(create_time AS INTEGER) " + "END" + ) + where = ( + f"{ts_expr} >= ? AND {ts_expr} < ? " + "AND db_stem NOT LIKE 'biz_message%' " + "AND render_type = 'text' " + "AND \"text\" IS NOT NULL " + "AND TRIM(CAST(\"text\" AS TEXT)) != ''" + ) + + sql_total = f"SELECT COALESCE(SUM(LENGTH(REPLACE(\"text\", ' ', ''))), 0) AS chars FROM message_fts WHERE {where}" + r_total = conn.execute(sql_total, (start_ts, end_ts)).fetchone() + total_chars = int((r_total[0] if r_total else 0) or 0) + + if my_username: + sql_sent = f"{sql_total} AND sender_username = ?" + r_sent = conn.execute(sql_sent, (start_ts, end_ts, my_username)).fetchone() + sent_chars = int((r_sent[0] if r_sent else 0) or 0) + else: + sent_chars = 0 + + recv_chars = max(0, total_chars - sent_chars) + return sent_chars, recv_chars + finally: + try: + conn.close() + except Exception: + pass + + # Fallback: scan message shards directly (slower, but works without the index). + t0 = time.time() + sent_total = 0 + recv_total = 0 + + db_paths = _iter_message_db_paths(account_dir) + for db_path in db_paths: + try: + if db_path.name.lower().startswith("biz_message"): + continue + except Exception: + pass + if not db_path.exists(): + continue + + conn: sqlite3.Connection | None = None + try: + conn = sqlite3.connect(str(db_path)) + conn.row_factory = sqlite3.Row + conn.text_factory = bytes + + my_rowid: Optional[int] + try: + r2 = conn.execute("SELECT rowid FROM Name2Id WHERE user_name = ? 
LIMIT 1", (my_username,)).fetchone() + my_rowid = int(r2[0]) if r2 and r2[0] is not None else None + except Exception: + my_rowid = None + + tables = _list_message_tables(conn) + if not tables: + continue + + ts_expr = ( + "CASE " + "WHEN CAST(create_time AS INTEGER) > 1000000000000 " + "THEN CAST(CAST(create_time AS INTEGER)/1000 AS INTEGER) " + "ELSE CAST(create_time AS INTEGER) " + "END" + ) + + for table in tables: + qt = _quote_ident(table) + sql = ( + "SELECT real_sender_id, message_content, compress_content " + f"FROM {qt} " + "WHERE local_type = 1 " + f" AND {ts_expr} >= ? AND {ts_expr} < ?" + ) + try: + cur = conn.execute(sql, (start_ts, end_ts)) + except Exception: + continue + + for r in cur: + try: + rsid = int(r["real_sender_id"] or 0) + except Exception: + rsid = 0 + txt = "" + try: + txt = _decode_message_content(r["compress_content"], r["message_content"]).strip() + except Exception: + txt = "" + if not txt: + continue + + # Match search index semantics: count non-whitespace characters. 
+ cnt = 0 + for ch in txt: + if not ch.isspace(): + cnt += 1 + if cnt <= 0: + continue + + if my_rowid is not None and rsid == my_rowid: + sent_total += cnt + else: + recv_total += cnt + finally: + if conn is not None: + try: + conn.close() + except Exception: + pass + + logger.info( + "Wrapped card#2 message chars computed (fallback scan): account=%s year=%s sent=%s recv=%s dbs=%s elapsed=%.2fs", + str(account_dir.name or "").strip(), + year, + int(sent_total), + int(recv_total), + len(db_paths), + time.time() - t0, + ) + return int(sent_total), int(recv_total) + + +def build_card_02_message_chars(*, account_dir: Path, year: int) -> dict[str, Any]: + sent_chars, recv_chars = compute_text_message_char_counts(account_dir=account_dir, year=year) + + sent_book = _pick_book_analogy(sent_chars) + recv_a4 = _pick_a4_analogy(recv_chars) + + # 计算键盘敲击统计 + keyboard_stats = compute_keyboard_stats(account_dir=account_dir, year=year, sample_rate=1.0) + + if sent_chars > 0 and recv_chars > 0: + narrative = f"你今年在微信里打了 {sent_chars:,} 个字,也收到了 {recv_chars:,} 个字。" + elif sent_chars > 0: + narrative = f"你今年在微信里打了 {sent_chars:,} 个字。" + elif recv_chars > 0: + narrative = f"你今年在微信里收到了 {recv_chars:,} 个字。" + else: + narrative = "今年你还没有文字消息" + + return { + "id": 2, + "title": "你今年打了多少字?够写一本书吗?", + "scope": "global", + "category": "C", + "status": "ok", + "kind": "text/message_chars", + "narrative": narrative, + "data": { + "year": int(year), + "sentChars": int(sent_chars), + "receivedChars": int(recv_chars), + "sentBook": sent_book, + "receivedA4": recv_a4, + "keyboard": keyboard_stats, + }, + } diff --git a/src/wechat_decrypt_tool/wrapped/cards/card_03_reply_speed.py b/src/wechat_decrypt_tool/wrapped/cards/card_03_reply_speed.py new file mode 100644 index 0000000..0a938d0 --- /dev/null +++ b/src/wechat_decrypt_tool/wrapped/cards/card_03_reply_speed.py @@ -0,0 +1,859 @@ +from __future__ import annotations + +import heapq +import math +import sqlite3 +import time +from dataclasses import 
dataclass +from datetime import datetime +from pathlib import Path +from typing import Any, Optional + +from ...chat_helpers import ( + _build_avatar_url, + _load_contact_rows, + _pick_display_name, + _should_keep_session, +) +from ...chat_search_index import ( + get_chat_search_index_db_path, + get_chat_search_index_status, + start_chat_search_index_build, +) +from ...logging_config import get_logger + +logger = get_logger(__name__) + + +def _year_range_epoch_seconds(year: int) -> tuple[int, int]: + # Use local time boundaries (same semantics as sqlite "localtime"). + start = int(datetime(year, 1, 1).timestamp()) + end = int(datetime(year + 1, 1, 1).timestamp()) + return start, end + + +def _mask_name(name: str) -> str: + s = str(name or "").strip() + if not s: + return "" + if len(s) == 1: + return "*" + if len(s) == 2: + return s[0] + "*" + return s[0] + ("*" * (len(s) - 2)) + s[-1] + + +def _format_duration_zh(seconds: int | None) -> str: + if seconds is None: + return "" + try: + s = int(seconds) + except Exception: + s = 0 + if s < 0: + s = 0 + + if s < 60: + return f"{s}秒" + m, sec = divmod(s, 60) + if m < 60: + return f"{m}分{sec}秒" if sec else f"{m}分钟" + h, mm = divmod(m, 60) + if h < 24: + return f"{h}小时{mm}分钟" if mm else f"{h}小时" + d, hh = divmod(h, 24) + return f"{d}天{hh}小时" if hh else f"{d}天" + + +def _compute_streak_days(doys: list[int]) -> int: + if not doys: + return 0 + doys_sorted = sorted({int(x) for x in doys if int(x) > 0}) + if not doys_sorted: + return 0 + + best = 1 + cur = 1 + prev = doys_sorted[0] + for d in doys_sorted[1:]: + if d == prev + 1: + cur += 1 + else: + cur = 1 + if cur > best: + best = cur + prev = d + return int(best) + + +def _compute_best_buddy_extras_from_index(*, account_dir: Path, year: int, buddy_username: str) -> dict[str, Any]: + """Compute a few extra fields for Card07 Bento summary. 
+ + - longestStreakDays: longest consecutive days with any interaction + - peakHour/peakHourLabel: most active hour of day with this buddy + + Best-effort: returns empty dict on any failure. + """ + + buddy = str(buddy_username or "").strip() + if not buddy: + return {} + + index_path = get_chat_search_index_db_path(account_dir) + if not index_path.exists(): + return {} + + start_ts, end_ts = _year_range_epoch_seconds(int(year)) + + ts_expr = ( + "CASE " + "WHEN CAST(create_time AS INTEGER) > 1000000000000 " + "THEN CAST(CAST(create_time AS INTEGER)/1000 AS INTEGER) " + "ELSE CAST(create_time AS INTEGER) " + "END" + ) + where = ( + f"{ts_expr} >= ? AND {ts_expr} < ? " + "AND db_stem NOT LIKE 'biz_message%' " + "AND CAST(local_type AS INTEGER) != 10000 " + "AND username = ? " + "AND username NOT LIKE '%@chatroom'" + ) + + sql_days = ( + "SELECT DISTINCT " + "CAST(strftime('%j', datetime(ts, 'unixepoch', 'localtime')) AS INTEGER) AS doy " + "FROM (" + f" SELECT {ts_expr} AS ts " + " FROM message_fts " + f" WHERE {where}" + ") sub " + "WHERE ts > 0 " + "ORDER BY doy ASC" + ) + + sql_peak_hour = ( + "SELECT " + "CAST(strftime('%H', datetime(ts, 'unixepoch', 'localtime')) AS INTEGER) AS h, " + "COUNT(1) AS cnt " + "FROM (" + f" SELECT {ts_expr} AS ts " + " FROM message_fts " + f" WHERE {where}" + ") sub " + "WHERE ts > 0 " + "GROUP BY h " + "ORDER BY cnt DESC, h ASC " + "LIMIT 1" + ) + + conn = sqlite3.connect(str(index_path)) + try: + has_fts = ( + conn.execute("SELECT 1 FROM sqlite_master WHERE type='table' AND name='message_fts' LIMIT 1").fetchone() + is not None + ) + if not has_fts: + return {} + + params = (start_ts, end_ts, buddy) + + doys: list[int] = [] + try: + rows = conn.execute(sql_days, params).fetchall() + except Exception: + rows = [] + for r in rows: + if not r or r[0] is None: + continue + try: + doys.append(int(r[0])) + except Exception: + continue + + longest_streak_days = _compute_streak_days(doys) + + peak_hour: int | None = None + try: + row = 
conn.execute(sql_peak_hour, params).fetchone() + if row and row[0] is not None: + peak_hour = int(row[0]) + except Exception: + peak_hour = None + + out: dict[str, Any] = {"longestStreakDays": int(longest_streak_days)} + if peak_hour is not None and 0 <= peak_hour <= 23: + out["peakHour"] = int(peak_hour) + out["peakHourLabel"] = f"{int(peak_hour):02d}:00" + return out + except Exception: + return {} + finally: + try: + conn.close() + except Exception: + pass + + +@dataclass +class _ConvAgg: + username: str + incoming: int + outgoing: int + replies: int + sum_gap: int + sum_gap_capped: int + min_gap: int + max_gap: int + + @property + def total(self) -> int: + return int(self.incoming) + int(self.outgoing) + + def avg_gap(self) -> float: + return float(self.sum_gap) / float(self.replies) if self.replies > 0 else 0.0 + + def avg_gap_capped(self) -> float: + return float(self.sum_gap_capped) / float(self.replies) if self.replies > 0 else 0.0 + + +def _score_conv(*, agg: _ConvAgg, tau_seconds: float) -> float: + # "聊天频率":更偏向双向互动(取 min(in, out))。 + interaction = float(min(int(agg.incoming), int(agg.outgoing))) + if interaction <= 0.0 or agg.replies <= 0: + return 0.0 + + # "回复频率/速度":用 capped 平均耗时做一个饱和衰减,避免极端长等待把分数打穿。 + avg_s = float(agg.avg_gap_capped()) + speed_score = 1.0 / (1.0 + (avg_s / float(max(1.0, tau_seconds)))) + + volume_score = math.log1p(interaction) + return float(speed_score * volume_score) + + +def compute_reply_speed_stats(*, account_dir: Path, year: int) -> dict[str, Any]: + """ + 统计“回复速度”相关指标(全局 + 每个好友),用于 Wrapped 年度总结卡片。 + + Notes / 口径说明: + - 仅统计 1v1(非群聊)会话:username 不以 "@chatroom" 结尾。 + - “一次回复”定义:对方发出消息后,你发送的第一条消息(同一段连续你发的消息只计 1 次)。 + - 默认过滤系统消息(local_type=10000),并排除 biz_message*.db。 + - 优先使用 chat_search_index.db(全量合并所有 shard),没有索引时做 best-effort 降级。 + """ + + start_ts, end_ts = _year_range_epoch_seconds(int(year)) + my_username = str(account_dir.name or "").strip() + + # Scoring hyper-params (tuned for "更偏向聊天频率高的" 的直觉)。 + gap_cap_seconds = 6 * 60 
* 60 # 6h: scoring 上限(超过当作一样慢) + tau_seconds = 30 * 60 # 30min: 速度衰减的尺度 + + total_replies = 0 + global_fastest: int | None = None + global_fastest_u: str | None = None + global_slowest: int | None = None + global_slowest_u: str | None = None + + reply_gaps: list[int] = [] + reply_stats: dict[str, Any] | None = None + + best_score = -1.0 + best_agg: _ConvAgg | None = None + + # NOTE: Use (score, username, agg) so the heap is always comparable even when scores tie. + top_heap: list[tuple[float, str, _ConvAgg]] = [] + top_n = 8 + + # For "今年你总共给 xxx 人发送过消息" & top-total bar-race. + sent_to_contacts: set[str] = set() + # Collect totals for *all* 1v1 sessions so the frontend ranking can naturally grow over time. + all_totals: dict[str, int] = {} + # NOTE: Use (total, username, agg) so the heap is always comparable even when totals tie. + top_total_heap: list[tuple[int, str, _ConvAgg]] = [] + # Keep more than 10 so the bar-race "TOP10" can actually evolve (members can enter/leave over time). + top_total_n = 100 + + def consider_conv(agg: _ConvAgg) -> None: + nonlocal best_score, best_agg + if not agg.username: + return + if agg.replies <= 0: + return + if min(agg.incoming, agg.outgoing) <= 0: + return + + score = _score_conv(agg=agg, tau_seconds=tau_seconds) + if score > best_score: + best_score = float(score) + best_agg = agg + + if score <= 0: + return + key = (float(score), str(agg.username), agg) + if len(top_heap) < top_n: + heapq.heappush(top_heap, key) + else: + heapq.heappushpop(top_heap, key) + + def consider_total(agg: _ConvAgg) -> None: + if not agg.username: + return + if agg.total <= 0: + return + # Keep the same filtering behavior as other wrapped cards. 
+ if not _should_keep_session(agg.username, include_official=False): + return + + if agg.outgoing > 0: + sent_to_contacts.add(agg.username) + + total = int(agg.total) + all_totals[agg.username] = int(total) + key = (total, str(agg.username), agg) + if len(top_total_heap) < top_total_n: + heapq.heappush(top_total_heap, key) + else: + heapq.heappushpop(top_total_heap, key) + + used_index = False + + # -------- Preferred path: unified search index -------- + index_path = get_chat_search_index_db_path(account_dir) + if index_path.exists(): + conn = sqlite3.connect(str(index_path)) + try: + has_fts = ( + conn.execute("SELECT 1 FROM sqlite_master WHERE type='table' AND name='message_fts' LIMIT 1").fetchone() + is not None + ) + if has_fts and my_username: + used_index = True + t0 = time.time() + + ts_expr = ( + "CASE " + "WHEN CAST(create_time AS INTEGER) > 1000000000000 " + "THEN CAST(CAST(create_time AS INTEGER)/1000 AS INTEGER) " + "ELSE CAST(create_time AS INTEGER) " + "END" + ) + + where = ( + f"{ts_expr} >= ? AND {ts_expr} < ? " + "AND db_stem NOT LIKE 'biz_message%' " + "AND CAST(local_type AS INTEGER) != 10000 " + "AND username NOT LIKE '%@chatroom'" + ) + + # Order by username, then time (ties broken by sort_seq/local_id if possible). 
+ sql = ( + "SELECT " + "username, sender_username, " + f"{ts_expr} AS ts, " + "CAST(sort_seq AS INTEGER) AS sort_seq_i, " + "CAST(local_id AS INTEGER) AS local_id_i " + "FROM message_fts " + f"WHERE {where} " + "ORDER BY username ASC, ts ASC, sort_seq_i ASC, local_id_i ASC" + ) + + cur = conn.execute(sql, (start_ts, end_ts)) + + cur_username: str = "" + incoming = 0 + outgoing = 0 + replies = 0 + sum_gap = 0 + sum_gap_capped = 0 + min_gap = 0 + max_gap = 0 + prev_other_ts: int | None = None + + def flush() -> None: + nonlocal cur_username, incoming, outgoing, replies, sum_gap, sum_gap_capped, min_gap, max_gap + if not cur_username: + return + agg = _ConvAgg( + username=cur_username, + incoming=int(incoming), + outgoing=int(outgoing), + replies=int(replies), + sum_gap=int(sum_gap), + sum_gap_capped=int(sum_gap_capped), + min_gap=int(min_gap), + max_gap=int(max_gap), + ) + consider_total(agg) + consider_conv(agg) + + for row in cur: + try: + username = str(row[0] or "").strip() + sender = str(row[1] or "").strip() + ts = int(row[2] or 0) + except Exception: + continue + + if ts <= 0 or not username: + continue + if username != cur_username: + # flush old + flush() + # reset for new conversation + cur_username = username + incoming = outgoing = replies = 0 + sum_gap = sum_gap_capped = 0 + min_gap = max_gap = 0 + prev_other_ts = None + + # Drop system/official-ish sessions (best-effort). 
+ if not _should_keep_session(username, include_official=False): + continue + + is_me = sender == my_username + if is_me: + outgoing += 1 + if prev_other_ts is not None and ts >= prev_other_ts: + gap = int(ts - prev_other_ts) + replies += 1 + total_replies += 1 + sum_gap += gap + sum_gap_capped += min(gap, gap_cap_seconds) + reply_gaps.append(int(gap)) + + if replies == 1 or gap < min_gap: + min_gap = gap + if replies == 1 or gap > max_gap: + max_gap = gap + + if global_fastest is None or gap < global_fastest: + global_fastest = gap + global_fastest_u = username + if global_slowest is None or gap > global_slowest: + global_slowest = gap + global_slowest_u = username + + # Only count the first outgoing message as the "reply" to this prompt. + prev_other_ts = None + else: + incoming += 1 + prev_other_ts = ts + + flush() + + logger.info( + "Wrapped card#3 reply_speed computed (search index): account=%s year=%s conversations_top=%s replies=%s db=%s elapsed=%.2fs", + str(account_dir.name or "").strip(), + int(year), + len(top_heap), + int(total_replies), + str(index_path.name), + time.time() - t0, + ) + finally: + try: + conn.close() + except Exception: + pass + + if reply_gaps: + try: + reply_gaps.sort() + n = int(len(reply_gaps)) + # Nearest-rank quantiles (deterministic, integer seconds). + p50_idx = max(0, min(n - 1, int(math.ceil(0.50 * n) - 1))) + p90_idx = max(0, min(n - 1, int(math.ceil(0.90 * n) - 1))) + reply_stats = { + "p50Seconds": int(reply_gaps[p50_idx]), + "p90Seconds": int(reply_gaps[p90_idx]), + } + except Exception: + reply_stats = None + + # -------- Fallback path: no index -------- + # Best-effort: if the index doesn't exist / isn't ready, auto-start building it (async) so user can + # retry this page later. We intentionally do NOT block here. 
+ index_status: dict[str, Any] | None = None + if not used_index: + try: + index_status = get_chat_search_index_status(account_dir) + index = dict(index_status.get("index") or {}) + build = dict(index.get("build") or {}) + index_ready = bool(index.get("ready")) + build_status = str(build.get("status") or "") + index_exists = bool(index.get("exists")) + + if (not index_ready) and build_status not in {"building", "error"}: + start_chat_search_index_build(account_dir, rebuild=bool(index_exists)) + index_status = get_chat_search_index_status(account_dir) + except Exception: + index_status = None + + logger.warning( + "Wrapped card#3 reply_speed: search index missing/not ready; returning empty stats. account=%s year=%s index=%s", + str(account_dir.name or "").strip(), + int(year), + str(index_path), + ) + + # Sort top buddies by score desc. + top_buddies: list[tuple[float, _ConvAgg]] = sorted( + [(score, agg) for score, _, agg in top_heap], + key=lambda x: (-x[0], x[1].username), + ) + top_totals: list[tuple[int, _ConvAgg]] = sorted( + [(total, agg) for total, _, agg in top_total_heap], + key=lambda x: (-x[0], x[1].username), + ) + + # Resolve contact display names/avatars for a small set (bestBuddy + extremes + top list). 
+ need_usernames: list[str] = [] + if best_agg is not None: + need_usernames.append(best_agg.username) + if global_fastest_u: + need_usernames.append(global_fastest_u) + if global_slowest_u: + need_usernames.append(global_slowest_u) + for _, agg in top_buddies: + need_usernames.append(agg.username) + for _, agg in top_totals: + need_usernames.append(agg.username) + + uniq_usernames = [] + seen = set() + for u in need_usernames: + if u and u not in seen: + seen.add(u) + uniq_usernames.append(u) + + contact_rows = _load_contact_rows(account_dir / "contact.db", uniq_usernames) if uniq_usernames else {} + + def conv_to_obj(score: float | None, agg: _ConvAgg) -> dict[str, Any]: + row = contact_rows.get(agg.username) + display = _pick_display_name(row, agg.username) + avatar = _build_avatar_url(str(account_dir.name or ""), agg.username) if agg.username else "" + avg_s = agg.avg_gap() + out: dict[str, Any] = { + "username": agg.username, + "displayName": display, + "maskedName": _mask_name(display), + "avatarUrl": avatar, + "incomingMessages": int(agg.incoming), + "outgoingMessages": int(agg.outgoing), + "totalMessages": int(agg.total), + "replyCount": int(agg.replies), + "avgReplySeconds": float(avg_s), + "fastestReplySeconds": int(agg.min_gap) if agg.replies > 0 else None, + "slowestReplySeconds": int(agg.max_gap) if agg.replies > 0 else None, + } + if score is not None: + out["score"] = float(score) + return out + + best_buddy_obj = None + if best_agg is not None: + best_buddy_obj = conv_to_obj(best_score, best_agg) + if used_index and isinstance(best_buddy_obj, dict) and best_buddy_obj.get("username"): + extras = _compute_best_buddy_extras_from_index( + account_dir=account_dir, + year=int(year), + buddy_username=str(best_buddy_obj.get("username") or ""), + ) + if extras: + best_buddy_obj.update(extras) + + fastest_obj = None + if global_fastest is not None and global_fastest_u: + # Use the best agg if it matches; otherwise create a minimal object. 
+ agg = next((a for _, a in top_buddies if a.username == global_fastest_u), None) + if agg is None and best_agg is not None and best_agg.username == global_fastest_u: + agg = best_agg + if agg is not None: + fastest_obj = conv_to_obj(None, agg) + fastest_obj["seconds"] = int(global_fastest) + else: + row = contact_rows.get(global_fastest_u) + display = _pick_display_name(row, global_fastest_u) + avatar = _build_avatar_url(str(account_dir.name or ""), global_fastest_u) if global_fastest_u else "" + fastest_obj = { + "username": global_fastest_u, + "displayName": display, + "maskedName": _mask_name(display), + "avatarUrl": avatar, + "seconds": int(global_fastest), + } + + slowest_obj = None + if global_slowest is not None and global_slowest_u: + agg = next((a for _, a in top_buddies if a.username == global_slowest_u), None) + if agg is None and best_agg is not None and best_agg.username == global_slowest_u: + agg = best_agg + if agg is not None: + slowest_obj = conv_to_obj(None, agg) + slowest_obj["seconds"] = int(global_slowest) + else: + row = contact_rows.get(global_slowest_u) + display = _pick_display_name(row, global_slowest_u) + avatar = _build_avatar_url(str(account_dir.name or ""), global_slowest_u) if global_slowest_u else "" + slowest_obj = { + "username": global_slowest_u, + "displayName": display, + "maskedName": _mask_name(display), + "avatarUrl": avatar, + "seconds": int(global_slowest), + } + + top_list = [conv_to_obj(score, agg) for score, agg in top_buddies] + + top_totals_list = [ + { + **conv_to_obj(None, agg), + "totalMessages": int(total), + } + for total, agg in top_totals + ] + + # Prepare "bar race" data: all 1v1 sessions (exclude official/system), cumulative per day. 
+ race = None + if used_index and all_totals: + days_in_year = int((datetime(int(year) + 1, 1, 1) - datetime(int(year), 1, 1)).days) + u_list = [u for u, _ in sorted(all_totals.items(), key=lambda kv: (-int(kv[1] or 0), str(kv[0] or ""))) if u] + if days_in_year > 0 and u_list: + # Convert millisecond timestamps defensively. + ts_expr = ( + "CASE " + "WHEN CAST(create_time AS INTEGER) > 1000000000000 " + "THEN CAST(CAST(create_time AS INTEGER)/1000 AS INTEGER) " + "ELSE CAST(create_time AS INTEGER) " + "END" + ) + + base_where = ( + f"{ts_expr} >= ? AND {ts_expr} < ? " + "AND db_stem NOT LIKE 'biz_message%' " + "AND CAST(local_type AS INTEGER) != 10000 " + "AND username NOT LIKE '%@chatroom'" + ) + + sql_daily = ( + "SELECT username, " + "CAST(strftime('%j', datetime(ts, 'unixepoch', 'localtime')) AS INTEGER) - 1 AS doy, " + "sender_username, " + "COUNT(1) AS cnt " + "FROM (" + f" SELECT username, sender_username, {ts_expr} AS ts " + " FROM message_fts " + f" WHERE {base_where}" + ") sub " + "GROUP BY username, doy, sender_username" + ) + + u_set = set(u_list) + per_user_daily_total: dict[str, list[int]] = {} + per_user_daily_outgoing: dict[str, list[int]] = {} + per_user_daily_incoming: dict[str, list[int]] = {} + try: + conn2 = sqlite3.connect(str(index_path)) + try: + rows = conn2.execute(sql_daily, (start_ts, end_ts)).fetchall() + finally: + conn2.close() + except Exception: + rows = [] + + for r in rows: + if not r: + continue + u = str(r[0] or "").strip() + if not u or u not in u_set: + continue + try: + doy = int(r[1] if r[1] is not None else -1) + sender = str(r[2] or "").strip() + cnt = int(r[3] or 0) + except Exception: + continue + if cnt <= 0 or doy < 0 or doy >= days_in_year: + continue + daily_total = per_user_daily_total.get(u) + if daily_total is None: + daily_total = [0] * days_in_year + per_user_daily_total[u] = daily_total + daily_total[doy] += cnt + + if sender == my_username: + daily_outgoing = per_user_daily_outgoing.get(u) + if daily_outgoing 
is None: + daily_outgoing = [0] * days_in_year + per_user_daily_outgoing[u] = daily_outgoing + daily_outgoing[doy] += cnt + else: + daily_incoming = per_user_daily_incoming.get(u) + if daily_incoming is None: + daily_incoming = [0] * days_in_year + per_user_daily_incoming[u] = daily_incoming + daily_incoming[doy] += cnt + + # Ensure we can render display names/avatars for the whole race list. + extra_usernames = [u for u in u_list if u and u not in contact_rows] + if extra_usernames: + try: + # sqlite has a default var limit; query in chunks. + CHUNK = 900 + for i in range(0, len(extra_usernames), CHUNK): + contact_rows.update(_load_contact_rows(account_dir / "contact.db", extra_usernames[i : i + CHUNK])) + except Exception: + pass + + series: list[dict[str, Any]] = [] + for u in u_list: + daily_total = per_user_daily_total.get(u) + if not daily_total: + continue + daily_outgoing = per_user_daily_outgoing.get(u) or [0] * days_in_year + daily_incoming = per_user_daily_incoming.get(u) or [0] * days_in_year + cum_total: list[int] = [] + cum_outgoing: list[int] = [] + cum_incoming: list[int] = [] + running_total = 0 + running_outgoing = 0 + running_incoming = 0 + for i in range(days_in_year): + running_total += int(daily_total[i] or 0) + running_outgoing += int(daily_outgoing[i] or 0) + running_incoming += int(daily_incoming[i] or 0) + cum_total.append(int(running_total)) + cum_outgoing.append(int(running_outgoing)) + cum_incoming.append(int(running_incoming)) + + total_messages = int(cum_total[-1]) if cum_total else int(all_totals.get(u) or 0) + outgoing_messages = int(cum_outgoing[-1]) if cum_outgoing else 0 + incoming_messages = int(cum_incoming[-1]) if cum_incoming else 0 + + row = contact_rows.get(u) + display = _pick_display_name(row, u) + avatar = _build_avatar_url(str(account_dir.name or ""), u) if u else "" + series.append( + { + "username": u, + "displayName": display, + "maskedName": _mask_name(display), + "avatarUrl": avatar, + "totalMessages": 
int(total_messages), + "outgoingMessages": int(outgoing_messages), + "incomingMessages": int(incoming_messages), + "cumulativeCounts": cum_total, + "cumulativeOutgoingCounts": cum_outgoing, + "cumulativeIncomingCounts": cum_incoming, + } + ) + + race = { + "year": int(year), + "startDate": f"{int(year)}-01-01", + "endDate": f"{int(year)}-12-31", + "days": int(days_in_year), + "series": series, + } + + # Load all contacts for lottery animation (up to 50 random contacts) + all_contacts_list: list[dict[str, Any]] = [] + try: + contact_db_path = account_dir / "contact.db" + if contact_db_path.exists(): + conn = sqlite3.connect(str(contact_db_path)) + conn.row_factory = sqlite3.Row + try: + # Get contacts that are real users (not chatrooms, not official accounts) + sql = """ + SELECT username, remark, nick_name, alias, big_head_url, small_head_url + FROM contact + WHERE username NOT LIKE '%@chatroom' + AND username NOT LIKE 'gh_%' + AND username NOT LIKE 'weixin' + AND username NOT LIKE 'filehelper' + AND username NOT LIKE 'fmessage' + AND username NOT IN ('medianote', 'floatbottle', 'shakeapp', 'lbsapp', 'newsapp') + AND (nick_name IS NOT NULL AND nick_name != '') + ORDER BY RANDOM() + LIMIT 50 + """ + rows = conn.execute(sql).fetchall() + for r in rows: + u = str(r["username"] or "").strip() + if not u: + continue + display = _pick_display_name(r, u) + avatar = _build_avatar_url(str(account_dir.name or ""), u) if u else "" + all_contacts_list.append({ + "username": u, + "displayName": display, + "maskedName": _mask_name(display), + "avatarUrl": avatar, + }) + finally: + conn.close() + except Exception: + pass + + return { + "year": int(year), + "sentToContacts": int(len(sent_to_contacts)), + "replyEvents": int(total_replies), + "replyStats": reply_stats, + "fastestReplySeconds": int(global_fastest) if global_fastest is not None else None, + "longestReplySeconds": int(global_slowest) if global_slowest is not None else None, + "bestBuddy": best_buddy_obj, + "fastest": 
fastest_obj, + "slowest": slowest_obj, + "topBuddies": top_list, + "topTotals": top_totals_list, + "allContacts": all_contacts_list, + "race": race, + "settings": { + "gapCapSeconds": int(gap_cap_seconds), + "tauSeconds": int(tau_seconds), + "usedIndex": bool(used_index), + "indexStatus": index_status, + }, + } + + +def build_card_03_reply_speed(*, account_dir: Path, year: int) -> dict[str, Any]: + stats = compute_reply_speed_stats(account_dir=account_dir, year=year) + + fastest = stats.get("fastestReplySeconds") + longest = stats.get("longestReplySeconds") + best = stats.get("bestBuddy") or None + replies = int(stats.get("replyEvents") or 0) + + if replies <= 0: + narrative = "今年你还没有可统计的“回复”记录(或尚未构建搜索索引)。" + else: + parts: list[str] = [] + if fastest is not None: + parts.append(f"最快一次,你只用了 {_format_duration_zh(int(fastest))} 就回了消息。") + if longest is not None: + parts.append(f"最长一次,你让对方等了 {_format_duration_zh(int(longest))}。") + if best and isinstance(best, dict) and best.get("displayName"): + avg_s = best.get("avgReplySeconds") + try: + avg_i = int(round(float(avg_s or 0.0))) + except Exception: + avg_i = 0 + parts.append( + f"最像你的聊天搭子是「{_mask_name(str(best.get('displayName') or ''))}」,平均每条回复用时 {_format_duration_zh(avg_i)}。" + ) + narrative = "".join(parts) if parts else "你的回复速度,藏着你最在意的人。" + + return { + "id": 3, + "title": "谁是你「秒回」的置顶关心?", + "scope": "global", + "category": "B", + "status": "ok", + "kind": "chat/reply_speed", + "narrative": narrative, + "data": stats, + } diff --git a/src/wechat_decrypt_tool/wrapped/cards/card_04_emoji_universe.py b/src/wechat_decrypt_tool/wrapped/cards/card_04_emoji_universe.py new file mode 100644 index 0000000..dce3fd5 --- /dev/null +++ b/src/wechat_decrypt_tool/wrapped/cards/card_04_emoji_universe.py @@ -0,0 +1,1265 @@ +from __future__ import annotations + +import functools +import hashlib +import html +import re +import sqlite3 +from collections import Counter, defaultdict +from datetime import datetime +from pathlib import 
Path +from typing import Any, Optional +from urllib.parse import quote + +from ...chat_helpers import ( + _build_avatar_url, + _decode_message_content, + _extract_xml_attr, + _extract_xml_tag_text, + _iter_message_db_paths, + _load_contact_rows, + _lookup_resource_md5, + _pick_display_name, + _quote_ident, + _resource_lookup_chat_id, + _should_keep_session, +) +from ...chat_search_index import get_chat_search_index_db_path +from ...logging_config import get_logger + +logger = get_logger(__name__) + + +_TS_WECHAT_EMOJI_ENTRY_RE = re.compile(r'^\s*"(?P[^"]+)"\s*:\s*"(?P[^"]+)"\s*,?\s*$') +_MD5_HEX_RE = re.compile(r"(?i)[0-9a-f]{32}") +_EXPRESSION_ASSET_RE = re.compile(r"^Expression_(\d+)@2x\.png$") +_EMOJI_VS16 = "\ufe0f" +_EMOJI_ZWJ = "\u200d" +_EMOJI_KEYCAP = "\u20e3" + + +def _is_regional_indicator(ch: str) -> bool: + if not ch: + return False + cp = ord(ch) + return 0x1F1E6 <= cp <= 0x1F1FF + + +def _is_emoji_modifier(ch: str) -> bool: + if not ch: + return False + cp = ord(ch) + return 0x1F3FB <= cp <= 0x1F3FF + + +def _is_emoji_base(ch: str) -> bool: + if not ch: + return False + cp = ord(ch) + return ( + (0x1F300 <= cp <= 0x1FAFF) + or (0x2600 <= cp <= 0x26FF) + or (0x2700 <= cp <= 0x27BF) + or (0x1F1E6 <= cp <= 0x1F1FF) + or cp in {0x00A9, 0x00AE, 0x203C, 0x2049, 0x2122, 0x2139, 0x3030, 0x303D, 0x3297, 0x3299} + or cp == 0x1F004 + or (0x1F170 <= cp <= 0x1F251) + ) + + +def _extract_unicode_emoji_tokens(text: str) -> list[str]: + s = str(text or "") + if not s: + return [] + + out: list[str] = [] + i = 0 + n = len(s) + while i < n: + ch = s[i] + + # keycap emoji: [0-9#*][VS16]?U+20E3 + if ch in "0123456789#*": + j = i + 1 + if j < n and s[j] == _EMOJI_VS16: + j += 1 + if j < n and s[j] == _EMOJI_KEYCAP: + out.append(s[i : j + 1]) + i = j + 1 + continue + + # flags + if _is_regional_indicator(ch): + if (i + 1) < n and _is_regional_indicator(s[i + 1]): + out.append(s[i : i + 2]) + i += 2 + else: + out.append(ch) + i += 1 + continue + + if not _is_emoji_base(ch): 
+ i += 1 + continue + + token: list[str] = [ch] + j = i + 1 + if j < n and s[j] == _EMOJI_VS16: + token.append(s[j]) + j += 1 + if j < n and _is_emoji_modifier(s[j]): + token.append(s[j]) + j += 1 + + # Handle ZWJ chains. + while (j + 1) < n and s[j] == _EMOJI_ZWJ and _is_emoji_base(s[j + 1]): + token.append(s[j]) + token.append(s[j + 1]) + j += 2 + if j < n and s[j] == _EMOJI_VS16: + token.append(s[j]) + j += 1 + if j < n and _is_emoji_modifier(s[j]): + token.append(s[j]) + j += 1 + + out.append("".join(token)) + i = j + + return out + + +def _emoji_key_priority(key: str) -> tuple[int, int, str]: + s = str(key or "").strip() + if not s: + return (9, 9, "") + if re.fullmatch(r"\[[\u4e00-\u9fff]+\]", s): + return (0, len(s), s) + if re.fullmatch(r"/[\u4e00-\u9fff]+", s): + return (1, len(s), s) + if re.fullmatch(r"\[[A-Za-z][A-Za-z0-9_ ]*\]", s): + return (2, len(s), s) + if re.fullmatch(r"/:[^/\s]+", s): + return (3, len(s), s) + return (4, len(s), s) + + +def _normalize_index_text_for_emoji_match(text: str) -> str: + """ + Our chat search index stores `message_fts.text` as `_to_char_token_text`, i.e.: + - lowercased + - whitespace removed + - every character joined by single spaces + + Example: "[捂脸]" -> "[ 捂 脸 ]" + For emoji matching, we normalize it back by removing whitespace and lowercasing. 
+ """ + + return "".join(ch for ch in str(text or "").lower() if not ch.isspace()) + + +def _iter_protobuf_varints(blob: bytes) -> list[tuple[int, int]]: + out: list[tuple[int, int]] = [] + data = bytes(blob or b"") + if not data: + return out + + i = 0 + n = len(data) + while i < n: + key = int(data[i]) + i += 1 + field = int(key >> 3) + wire_type = int(key & 0x07) + + if wire_type == 0: # varint + shift = 0 + value = 0 + while i < n: + b = int(data[i]) + i += 1 + value |= (b & 0x7F) << shift + if b < 0x80: + break + shift += 7 + out.append((field, int(value))) + continue + + if wire_type == 1: # 64-bit + i += 8 + continue + + if wire_type == 2: # length-delimited + shift = 0 + ln = 0 + while i < n: + b = int(data[i]) + i += 1 + ln |= (b & 0x7F) << shift + if b < 0x80: + break + shift += 7 + i += int(ln) + continue + + if wire_type == 5: # 32-bit + i += 4 + continue + + break + + return out + + +def _extract_packed_emoji_meta(packed_info_data: Any) -> tuple[Optional[int], Optional[int]]: + data: bytes = b"" + if packed_info_data is None: + return None, None + if isinstance(packed_info_data, memoryview): + data = packed_info_data.tobytes() + elif isinstance(packed_info_data, (bytes, bytearray)): + data = bytes(packed_info_data) + elif isinstance(packed_info_data, str): + s = packed_info_data.strip() + if s: + try: + data = bytes.fromhex(s) if (len(s) % 2 == 0 and re.fullmatch(r"(?i)[0-9a-f]+", s)) else s.encode( + "utf-8", + errors="ignore", + ) + except Exception: + data = b"" + if not data: + return None, None + + field1: Optional[int] = None + field2: Optional[int] = None + for f, v in _iter_protobuf_varints(data): + if f == 1 and field1 is None: + field1 = int(v) + elif f == 2 and field2 is None: + field2 = int(v) + if field1 is not None and field2 is not None: + break + return field1, field2 + + +def _year_range_epoch_seconds(year: int) -> tuple[int, int]: + start = int(datetime(year, 1, 1).timestamp()) + end = int(datetime(year + 1, 1, 1).timestamp()) + 
return start, end + + +def _mask_name(name: str) -> str: + s = str(name or "").strip() + if not s: + return "" + if len(s) == 1: + return "*" + if len(s) == 2: + return s[0] + "*" + return s[0] + ("*" * (len(s) - 2)) + s[-1] + + +def _weekday_name_zh(weekday_index: int) -> str: + labels = ["周一", "周二", "周三", "周四", "周五", "周六", "周日"] + if 0 <= weekday_index < len(labels): + return labels[weekday_index] + return "" + + +def _list_message_tables(conn: sqlite3.Connection) -> list[str]: + try: + rows = conn.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall() + except Exception: + return [] + out: list[str] = [] + for r in rows: + if not r or not r[0]: + continue + raw_name = r[0] + if isinstance(raw_name, memoryview): + raw_name = raw_name.tobytes() + if isinstance(raw_name, (bytes, bytearray)): + try: + name = bytes(raw_name).decode("utf-8", errors="ignore") + except Exception: + continue + else: + name = str(raw_name) + ln = name.lower() + if ln.startswith(("msg_", "chat_")): + out.append(name) + return out + + +def _list_session_usernames(session_db_path: Path) -> list[str]: + if not session_db_path.exists(): + return [] + conn = sqlite3.connect(str(session_db_path)) + try: + try: + rows = conn.execute("SELECT username FROM SessionTable").fetchall() + except sqlite3.OperationalError: + rows = conn.execute("SELECT username FROM Session").fetchall() + except Exception: + rows = [] + finally: + try: + conn.close() + except Exception: + pass + + out: list[str] = [] + for r in rows: + if not r or not r[0]: + continue + u = str(r[0]).strip() + if u: + out.append(u) + return out + + +@functools.lru_cache(maxsize=1) +def _load_wechat_emoji_table() -> dict[str, str]: + repo_root = Path(__file__).resolve().parents[4] + path = repo_root / "frontend" / "utils" / "wechat-emojis.ts" + try: + text = path.read_text(encoding="utf-8") + except Exception: + return {} + + table: dict[str, str] = {} + for line in text.splitlines(): + stripped = line.strip() + if (not 
stripped) or stripped.startswith("//"): + continue + m = _TS_WECHAT_EMOJI_ENTRY_RE.match(line) + if not m: + continue + key = str(m.group("key") or "") + value = str(m.group("value") or "") + if key and value: + table[key] = value + return table + + +@functools.lru_cache(maxsize=1) +def _load_wechat_emoji_regex() -> Optional[re.Pattern[str]]: + table = _load_wechat_emoji_table() + if not table: + return None + keys = sorted(table.keys(), key=len, reverse=True) + escaped = [re.escape(k) for k in keys if k] + if not escaped: + return None + try: + return re.compile(f"({'|'.join(escaped)})") + except Exception: + return None + + +@functools.lru_cache(maxsize=1) +def _load_wechat_text_emoji_matcher() -> tuple[Optional[re.Pattern[str]], dict[str, str]]: + """ + Build a matcher for extracting WeChat "small yellow face" codes from `message_fts.text`. + + Note: `message_fts.text` is stored as a char-tokenized string (see `_normalize_index_text_for_emoji_match`), + so we match against normalized keys (lowercased + whitespace removed). 
+ + Returns: + - regex: matches normalized keys + - norm_key -> canonical key (used as the public label) + """ + + table = _load_wechat_emoji_table() + if not table: + return None, {} + + asset_to_keys: dict[str, list[str]] = {} + for key, value in table.items(): + asset = str(value or "").strip() + if not asset: + continue + asset_to_keys.setdefault(asset, []).append(str(key or "")) + + asset_to_label: dict[str, str] = {} + for asset, keys in asset_to_keys.items(): + keys2 = [k for k in keys if k] + if not keys2: + continue + asset_to_label[asset] = sorted(keys2, key=_emoji_key_priority)[0] + + norm_to_label: dict[str, str] = {} + for key, value in table.items(): + asset = str(value or "").strip() + label = asset_to_label.get(asset) + if not label: + continue + nk = _normalize_index_text_for_emoji_match(str(key or "")) + if not nk: + continue + norm_to_label.setdefault(nk, label) + + keys_norm = sorted(norm_to_label.keys(), key=len, reverse=True) + escaped = [re.escape(k) for k in keys_norm if k] + if not escaped: + return None, norm_to_label + try: + return re.compile(f"({'|'.join(escaped)})"), norm_to_label + except Exception: + return None, norm_to_label + + +@functools.lru_cache(maxsize=1) +def _load_wechat_expression_catalog() -> tuple[dict[int, str], dict[int, str]]: + table = _load_wechat_emoji_table() + if not table: + return {}, {} + + id_to_asset: dict[int, str] = {} + asset_to_keys: dict[str, list[str]] = {} + for key, value in table.items(): + asset = str(value or "").strip() + m = _EXPRESSION_ASSET_RE.fullmatch(asset) + if not m: + continue + try: + expr_id = int(m.group(1)) + except Exception: + continue + if expr_id <= 0: + continue + id_to_asset.setdefault(expr_id, asset) + asset_to_keys.setdefault(asset, []).append(str(key or "")) + + id_to_label: dict[int, str] = {} + for expr_id, asset in id_to_asset.items(): + keys = [k for k in asset_to_keys.get(asset, []) if k] + if not keys: + continue + keys_sorted = sorted(keys, key=_emoji_key_priority) + 
id_to_label[expr_id] = keys_sorted[0] + + return id_to_asset, id_to_label + + +def _pick_persona( + *, + sent_sticker_count: int, + sticker_share: float, + peak_hour: Optional[int], + top_text_emoji_count: int, +) -> dict[str, str]: + if sent_sticker_count <= 0 and top_text_emoji_count <= 0: + return {"code": "quiet_observer", "label": "静默观察员", "reason": "你今年几乎没靠表情表达。"} + + if peak_hour is not None and 0 <= int(peak_hour) <= 4 and sent_sticker_count >= 50: + return {"code": "midnight_sticker_king", "label": "午夜斗图王", "reason": "高峰活跃在深夜,夜聊斗图火力很足。"} + + if top_text_emoji_count >= 20 and top_text_emoji_count >= int(sent_sticker_count * 0.6): + return {"code": "text_emoji_narrator", "label": "小黄脸叙事家", "reason": "你更常把小黄脸嵌进文字,表达更细腻。"} + + if sticker_share >= 0.45 and sent_sticker_count >= 80: + return {"code": "sticker_machine_gun", "label": "表情包机关枪", "reason": "在你的表达里,表情包占比非常高。"} + + return {"code": "steady_fighter", "label": "稳健斗图手", "reason": "斗图稳定输出,节奏和分寸都在线。"} + + +def _build_local_emoji_url( + *, + account_name: str, + md5: str, + username: str, + emoji_remote_url: str, +) -> str: + base = f"/api/chat/media/emoji?account={quote(account_name)}&md5={quote(md5)}" + if username: + base += f"&username={quote(username)}" + if emoji_remote_url: + base += f"&emoji_url={quote(emoji_remote_url, safe='')}" + return base + + +def compute_emoji_universe_stats(*, account_dir: Path, year: int) -> dict[str, Any]: + start_ts, end_ts = _year_range_epoch_seconds(year) + my_username = str(account_dir.name or "").strip() + + sent_sticker_count = 0 + total_sent_messages = 0 + sticker_active_days: set[str] = set() + hour_counts: Counter[int] = Counter() + weekday_counts: Counter[int] = Counter() + sticker_by_username: Counter[str] = Counter() + text_emoji_counts: Counter[str] = Counter() + unicode_emoji_counts: Counter[str] = Counter() + wechat_emoji_counts: Counter[int] = Counter() + + sticker_key_counts: Counter[str] = Counter() + sticker_key_md5: dict[str, str] = {} + 
sticker_key_expr_id: dict[str, int] = {} + sticker_url_map: dict[str, str] = {} + sticker_sample_username: dict[str, str] = {} + sticker_key_username_counts: dict[str, Counter[str]] = defaultdict(Counter) + sticker_key_first_ts_in_year: dict[str, int] = {} + + used_index = False + + emoji_table = _load_wechat_emoji_table() + emoji_regex, emoji_norm_to_key = _load_wechat_text_emoji_matcher() + expression_id_to_asset, expression_id_to_label = _load_wechat_expression_catalog() + + index_path = get_chat_search_index_db_path(account_dir) + if index_path.exists(): + conn = sqlite3.connect(str(index_path)) + try: + has_fts = ( + conn.execute( + "SELECT 1 FROM sqlite_master WHERE type='table' AND name='message_fts' LIMIT 1" + ).fetchone() + is not None + ) + if has_fts and my_username: + used_index = True + ts_expr = ( + "CASE " + "WHEN CAST(create_time AS INTEGER) > 1000000000000 " + "THEN CAST(CAST(create_time AS INTEGER)/1000 AS INTEGER) " + "ELSE CAST(create_time AS INTEGER) " + "END" + ) + where_base = ( + f"{ts_expr} >= ? AND {ts_expr} < ? " + "AND db_stem NOT LIKE 'biz_message%' " + "AND sender_username = ?" 
+ ) + + try: + r_total = conn.execute( + f"SELECT COUNT(1) FROM message_fts WHERE {where_base} AND CAST(local_type AS INTEGER) != 10000", + (start_ts, end_ts, my_username), + ).fetchone() + total_sent_messages = int((r_total[0] if r_total else 0) or 0) + except Exception: + total_sent_messages = 0 + + try: + r_sticker = conn.execute( + f"SELECT COUNT(1) FROM message_fts WHERE {where_base} AND CAST(local_type AS INTEGER) = 47", + (start_ts, end_ts, my_username), + ).fetchone() + sent_sticker_count = int((r_sticker[0] if r_sticker else 0) or 0) + except Exception: + sent_sticker_count = 0 + + try: + rows_u = conn.execute( + f"SELECT username, COUNT(1) AS cnt " + f"FROM message_fts WHERE {where_base} AND CAST(local_type AS INTEGER) = 47 " + "GROUP BY username", + (start_ts, end_ts, my_username), + ).fetchall() + except Exception: + rows_u = [] + for r in rows_u: + if not r: + continue + username = str(r[0] or "").strip() + if not username: + continue + try: + cnt = int(r[1] or 0) + except Exception: + cnt = 0 + if cnt > 0: + sticker_by_username[username] += cnt + + try: + rows_t = conn.execute( + "SELECT " + "date(datetime(ts, 'unixepoch', 'localtime')) AS d, " + "CAST(strftime('%H', datetime(ts, 'unixepoch', 'localtime')) AS INTEGER) AS h, " + "CAST(strftime('%w', datetime(ts, 'unixepoch', 'localtime')) AS INTEGER) AS w " + "FROM (" + f" SELECT {ts_expr} AS ts " + " FROM message_fts " + f" WHERE {where_base} AND CAST(local_type AS INTEGER) = 47" + ") sub", + (start_ts, end_ts, my_username), + ).fetchall() + except Exception: + rows_t = [] + for r in rows_t: + if not r: + continue + d = str(r[0] or "").strip() + try: + h = int(r[1] if r[1] is not None else -1) + except Exception: + h = -1 + try: + w0 = int(r[2] if r[2] is not None else -1) + except Exception: + w0 = -1 + if d: + sticker_active_days.add(d) + if 0 <= h <= 23: + hour_counts[h] += 1 + if 0 <= w0 <= 6: + # sqlite: 0=Sun..6=Sat -> 0=Mon..6=Sun + w = 6 if w0 == 0 else (w0 - 1) + weekday_counts[w] += 1 + + 
try: + rows_text = conn.execute( + f"SELECT \"text\" FROM message_fts " + f"WHERE {where_base} AND render_type = 'text' " + "AND \"text\" IS NOT NULL AND TRIM(\"text\") != ''", + (start_ts, end_ts, my_username), + ).fetchall() + except Exception: + rows_text = [] + for r in rows_text: + txt = str((r[0] if r else "") or "") + if not txt: + continue + txt_norm = _normalize_index_text_for_emoji_match(txt) + if emoji_regex is not None and txt_norm: + for m in emoji_regex.finditer(txt_norm): + nk = str(m.group(0) or "") + k = emoji_norm_to_key.get(nk) or nk + if k: + text_emoji_counts[k] += 1 + for u in _extract_unicode_emoji_tokens(txt_norm): + if u: + unicode_emoji_counts[u] += 1 + finally: + try: + conn.close() + except Exception: + pass + + # Parse local_type=47 payloads from raw message DBs (md5/cdnurl), plus fallback counters when index missing. + session_usernames = _list_session_usernames(account_dir / "session.db") + md5_to_username: dict[str, str] = {} + table_to_username: dict[str, str] = {} + for u in session_usernames: + md5_hex = hashlib.md5(u.encode("utf-8")).hexdigest().lower() + md5_to_username[md5_hex] = u + table_to_username[f"msg_{md5_hex}"] = u + table_to_username[f"chat_{md5_hex}"] = u + + def resolve_username_from_table(table_name: str) -> str: + ln = str(table_name or "").lower() + x = table_to_username.get(ln) + if x: + return x + m = _MD5_HEX_RE.search(ln) + if m: + return str(md5_to_username.get(m.group(0).lower()) or "") + return "" + + resource_conn: sqlite3.Connection | None = None + resource_chat_id_cache: dict[str, Optional[int]] = {} + resource_db_path = account_dir / "message_resource.db" + if resource_db_path.exists(): + try: + resource_conn = sqlite3.connect(str(resource_db_path)) + except Exception: + resource_conn = None + + ts_expr = ( + "CASE " + "WHEN CAST(create_time AS INTEGER) > 1000000000000 " + "THEN CAST(CAST(create_time AS INTEGER)/1000 AS INTEGER) " + "ELSE CAST(create_time AS INTEGER) " + "END" + ) + + def 
_has_packed_info_data_column(conn: sqlite3.Connection, quoted_table: str) -> bool: + try: + cols = conn.execute(f"PRAGMA table_info({quoted_table})").fetchall() + except Exception: + return False + for c in cols: + if not c or len(c) < 2: + continue + name0 = c[1] + if isinstance(name0, memoryview): + name0 = name0.tobytes() + if isinstance(name0, (bytes, bytearray)): + try: + name0 = bytes(name0).decode("utf-8", errors="ignore") + except Exception: + name0 = "" + if str(name0 or "").strip().lower() == "packed_info_data": + return True + return False + + def _extract_sticker_from_row( + *, + row: sqlite3.Row, + username: str, + record_maps: bool, + count_wechat_builtin: bool, + ) -> tuple[int, str, str]: + create_time_raw = int(row["create_time"] or 0) + ts = create_time_raw + if ts > 1_000_000_000_000: + ts = int(ts / 1000) + + raw_text = "" + try: + raw_text = _decode_message_content(row["compress_content"], row["message_content"]).strip() + except Exception: + raw_text = "" + + emoji_md5 = _extract_xml_attr(raw_text, "md5") or _extract_xml_tag_text(raw_text, "md5") + emoji_md5 = str(emoji_md5 or "").strip().lower() + + emoji_url = _extract_xml_attr(raw_text, "cdnurl") or _extract_xml_tag_text(raw_text, "cdn_url") + emoji_url = html.unescape(str(emoji_url or "").strip()) + + packed_emoji_id: Optional[int] = None + try: + _, packed_emoji_id = _extract_packed_emoji_meta(row["packed_info_data"]) + except Exception: + packed_emoji_id = None + + if (not emoji_md5) and resource_conn is not None: + chat_id = resource_chat_id_cache.get(username) + if username not in resource_chat_id_cache: + chat_id = _resource_lookup_chat_id(resource_conn, username) + resource_chat_id_cache[username] = chat_id + try: + emoji_md5 = _lookup_resource_md5( + resource_conn, + chat_id, + message_local_type=47, + server_id=int(row["server_id"] or 0), + local_id=int(row["local_id"] or 0), + create_time=create_time_raw, + ) + except Exception: + emoji_md5 = "" + + emoji_md5 = str(emoji_md5 or 
"").strip().lower() + sticker_key = "" + if emoji_md5: + sticker_key = f"md5:{emoji_md5}" + if record_maps: + sticker_key_md5[sticker_key] = emoji_md5 + elif packed_emoji_id is not None and int(packed_emoji_id) > 0: + expr_id = int(packed_emoji_id) + sticker_key = f"expr:{expr_id}" + if record_maps: + sticker_key_expr_id[sticker_key] = expr_id + if count_wechat_builtin and expr_id in expression_id_to_asset: + wechat_emoji_counts[expr_id] += 1 + + return ts, sticker_key, emoji_url + + db_paths = [p for p in _iter_message_db_paths(account_dir) if not p.name.lower().startswith("biz_message")] + for db_path in db_paths: + if not db_path.exists(): + continue + + conn: sqlite3.Connection | None = None + try: + conn = sqlite3.connect(str(db_path)) + conn.row_factory = sqlite3.Row + conn.text_factory = bytes + + my_rowid: Optional[int] = None + try: + r2 = conn.execute( + "SELECT rowid FROM Name2Id WHERE user_name = ? LIMIT 1", + (my_username,), + ).fetchone() + if r2 is not None and r2[0] is not None: + my_rowid = int(r2[0]) + except Exception: + my_rowid = None + if my_rowid is None: + continue + + tables = _list_message_tables(conn) + for table_name in tables: + username = resolve_username_from_table(table_name) + qt = _quote_ident(table_name) + + # Fallback-only counters when search index is unavailable. + if not used_index: + try: + r_total = conn.execute( + f"SELECT COUNT(1) FROM {qt} " + f"WHERE {ts_expr} >= ? AND {ts_expr} < ? " + "AND real_sender_id = ? AND CAST(local_type AS INTEGER) != 10000", + (start_ts, end_ts, my_rowid), + ).fetchone() + total_sent_messages += int((r_total[0] if r_total else 0) or 0) + except Exception: + pass + + try: + rows_text = conn.execute( + f"SELECT message_content, compress_content FROM {qt} " + f"WHERE {ts_expr} >= ? AND {ts_expr} < ? " + "AND real_sender_id = ? 
AND CAST(local_type AS INTEGER) = 1", + (start_ts, end_ts, my_rowid), + ).fetchall() + except Exception: + rows_text = [] + for rt in rows_text: + try: + txt = _decode_message_content(rt["compress_content"], rt["message_content"]).strip() + except Exception: + txt = "" + if not txt: + continue + txt_norm = _normalize_index_text_for_emoji_match(txt) + if emoji_regex is not None and txt_norm: + for m in emoji_regex.finditer(txt_norm): + nk = str(m.group(0) or "") + k = emoji_norm_to_key.get(nk) or nk + if k: + text_emoji_counts[k] += 1 + for u in _extract_unicode_emoji_tokens(txt_norm): + if u: + unicode_emoji_counts[u] += 1 + + try: + packed_info_expr = "packed_info_data" if _has_packed_info_data_column(conn, qt) else "NULL AS packed_info_data" + rows_emoji = conn.execute( + f"SELECT server_id, local_id, create_time, message_content, compress_content, {packed_info_expr} " + f"FROM {qt} " + f"WHERE {ts_expr} >= ? AND {ts_expr} < ? " + "AND real_sender_id = ? AND CAST(local_type AS INTEGER) = 47", + (start_ts, end_ts, my_rowid), + ).fetchall() + except Exception: + rows_emoji = [] + + for r in rows_emoji: + ts, sticker_key, emoji_url = _extract_sticker_from_row( + row=r, + username=username, + record_maps=True, + count_wechat_builtin=True, + ) + + if not used_index: + sent_sticker_count += 1 + if ts > 0: + dt = datetime.fromtimestamp(ts) + sticker_active_days.add(dt.strftime("%Y-%m-%d")) + hour_counts[dt.hour] += 1 + sticker_by_username[username] += 1 + weekday_counts[dt.weekday()] += 1 + + if not sticker_key: + continue + + sticker_key_counts[sticker_key] += 1 + prev_first_ts = sticker_key_first_ts_in_year.get(sticker_key) + if ts > 0 and (prev_first_ts is None or ts < prev_first_ts): + sticker_key_first_ts_in_year[sticker_key] = ts + if emoji_url and (sticker_key not in sticker_url_map): + sticker_url_map[sticker_key] = emoji_url + if username and (sticker_key not in sticker_sample_username): + sticker_sample_username[sticker_key] = username + if username: + 
sticker_key_username_counts[sticker_key][username] += 1 + finally: + if conn is not None: + try: + conn.close() + except Exception: + pass + + sticker_keys_in_year = set(sticker_key_counts.keys()) + sticker_key_last_ts_before_year: dict[str, int] = {} + if sticker_keys_in_year and my_username: + for db_path in db_paths: + if not db_path.exists(): + continue + + conn: sqlite3.Connection | None = None + try: + conn = sqlite3.connect(str(db_path)) + conn.row_factory = sqlite3.Row + conn.text_factory = bytes + + my_rowid: Optional[int] = None + try: + r2 = conn.execute( + "SELECT rowid FROM Name2Id WHERE user_name = ? LIMIT 1", + (my_username,), + ).fetchone() + if r2 is not None and r2[0] is not None: + my_rowid = int(r2[0]) + except Exception: + my_rowid = None + if my_rowid is None: + continue + + tables = _list_message_tables(conn) + for table_name in tables: + username = resolve_username_from_table(table_name) + qt = _quote_ident(table_name) + packed_info_expr = ( + "packed_info_data" if _has_packed_info_data_column(conn, qt) else "NULL AS packed_info_data" + ) + try: + rows_hist = conn.execute( + f"SELECT server_id, local_id, create_time, message_content, compress_content, {packed_info_expr} " + f"FROM {qt} " + f"WHERE {ts_expr} < ? " + "AND real_sender_id = ? 
AND CAST(local_type AS INTEGER) = 47", + (start_ts, my_rowid), + ) + except Exception: + rows_hist = [] + + for r in rows_hist: + ts, sticker_key, _ = _extract_sticker_from_row( + row=r, + username=username, + record_maps=False, + count_wechat_builtin=False, + ) + if (not sticker_key) or (sticker_key not in sticker_keys_in_year) or ts <= 0: + continue + prev_ts = sticker_key_last_ts_before_year.get(sticker_key) + if prev_ts is None or ts > prev_ts: + sticker_key_last_ts_before_year[sticker_key] = ts + finally: + if conn is not None: + try: + conn.close() + except Exception: + pass + + if resource_conn is not None: + try: + resource_conn.close() + except Exception: + pass + + # Prefer index total when available, but keep non-negative relationship. + if used_index: + sent_sticker_count = max(int(sent_sticker_count), int(sum(sticker_key_counts.values())), int(sent_sticker_count)) + + sent_sticker_count = int(sent_sticker_count) + sticker_days = int(len(sticker_active_days)) + sticker_per_day = (float(sent_sticker_count) / float(sticker_days)) if sticker_days > 0 else 0.0 + sticker_share = (float(sent_sticker_count) / float(total_sent_messages)) if total_sent_messages > 0 else 0.0 + unique_sticker_type_count = int(len(sticker_key_counts)) + revive_gap_days_threshold = 60 + new_sticker_count_this_year = 0 + revived_sticker_count = 0 + revived_max_gap_days = 0 + new_sticker_keys_in_year: set[str] = set() + revived_sticker_keys_in_year: set[str] = set() + revived_gap_days_by_key: dict[str, int] = {} + for sticker_key, first_ts in sticker_key_first_ts_in_year.items(): + if first_ts <= 0: + continue + prev_ts = sticker_key_last_ts_before_year.get(sticker_key) + if prev_ts is None or prev_ts <= 0: + new_sticker_count_this_year += 1 + new_sticker_keys_in_year.add(sticker_key) + continue + gap_days = int(max(0, (int(first_ts) - int(prev_ts))) // 86400) + if gap_days >= revive_gap_days_threshold: + revived_sticker_count += 1 + revived_sticker_keys_in_year.add(sticker_key) + 
revived_gap_days_by_key[sticker_key] = int(gap_days) + if gap_days > revived_max_gap_days: + revived_max_gap_days = gap_days + new_sticker_share = ( + float(new_sticker_count_this_year) / float(unique_sticker_type_count) + if unique_sticker_type_count > 0 + else 0.0 + ) + revived_sticker_share = ( + float(revived_sticker_count) / float(unique_sticker_type_count) + if unique_sticker_type_count > 0 + else 0.0 + ) + + peak_hour: Optional[int] = None + if hour_counts: + peak_hour = max(range(24), key=lambda h: (int(hour_counts.get(h, 0)), -h)) + + peak_weekday: Optional[int] = None + if weekday_counts: + peak_weekday = max(range(7), key=lambda w: (int(weekday_counts.get(w, 0)), -w)) + peak_weekday_name = _weekday_name_zh(peak_weekday if peak_weekday is not None else -1) + + def pick_sticker_owner_username(sticker_key: str) -> str: + counts = sticker_key_username_counts.get(sticker_key) + if counts: + try: + return sorted(counts.items(), key=lambda kv: (-int(kv[1]), str(kv[0])))[0][0] + except Exception: + pass + return str(sticker_sample_username.get(sticker_key) or "") + + top_stickers_raw = sorted(sticker_key_counts.items(), key=lambda kv: (-int(kv[1]), str(kv[0])))[:6] + new_sticker_samples_raw = sorted( + [ + (k, int(sticker_key_counts.get(k, 0))) + for k in new_sticker_keys_in_year + if int(sticker_key_counts.get(k, 0)) > 0 + ], + key=lambda kv: (-int(kv[1]), str(kv[0])), + )[:4] + revived_sticker_samples_raw = sorted( + [ + (k, int(sticker_key_counts.get(k, 0))) + for k in revived_sticker_keys_in_year + if int(sticker_key_counts.get(k, 0)) > 0 + ], + key=lambda kv: (-int(kv[1]), str(kv[0])), + )[:4] + + sample_sticker_keys = [k for k, _ in top_stickers_raw + new_sticker_samples_raw + revived_sticker_samples_raw] + sample_usernames = [pick_sticker_owner_username(key) for key in sample_sticker_keys] + sample_contact_rows = _load_contact_rows( + account_dir / "contact.db", + [u for u in sample_usernames if u], + ) + + def build_sticker_stat_item(key: str, cnt: int) 
-> dict[str, Any]: + md5 = str(sticker_key_md5.get(key) or "") + expr_id = int(sticker_key_expr_id.get(key) or 0) + sample_username = pick_sticker_owner_username(key) + remote_url = str(sticker_url_map.get(key) or "") + sample_row = sample_contact_rows.get(sample_username) if sample_username else None + sample_display = _pick_display_name(sample_row, sample_username) if sample_username else "" + sample_avatar_url = _build_avatar_url(str(account_dir.name or ""), sample_username) if sample_username else "" + expr_asset = str(expression_id_to_asset.get(expr_id) or "") if expr_id > 0 else "" + expr_label = str(expression_id_to_label.get(expr_id) or "") if expr_id > 0 else "" + local_url = ( + _build_local_emoji_url( + account_name=str(account_dir.name or ""), + md5=str(md5), + username=sample_username, + emoji_remote_url=remote_url, + ) + if md5 + else (f"/wxemoji/{expr_asset}" if expr_asset else "") + ) + ratio = (float(cnt) / float(sent_sticker_count)) if sent_sticker_count > 0 else 0.0 + return { + "md5": str(md5 or key), + "count": int(cnt), + "ratio": float(ratio), + "emojiUrl": local_url, + "emojiRemoteUrl": remote_url, + "emojiId": int(expr_id) if expr_id > 0 else None, + "emojiAssetPath": f"/wxemoji/{expr_asset}" if expr_asset else "", + "emojiLabel": expr_label, + "sampleUsername": sample_username, + "sampleDisplayName": sample_display, + "sampleAvatarUrl": sample_avatar_url, + } + + top_stickers: list[dict[str, Any]] = [build_sticker_stat_item(key, cnt) for key, cnt in top_stickers_raw] + new_sticker_samples: list[dict[str, Any]] = [ + build_sticker_stat_item(key, cnt) for key, cnt in new_sticker_samples_raw + ] + revived_sticker_samples: list[dict[str, Any]] = [] + for key, cnt in revived_sticker_samples_raw: + item = build_sticker_stat_item(key, cnt) + item["gapDays"] = int(revived_gap_days_by_key.get(key) or 0) + revived_sticker_samples.append(item) + + top_wechat_emojis_raw = sorted(wechat_emoji_counts.items(), key=lambda kv: (-int(kv[1]), 
int(kv[0])))[:8] + top_wechat_emojis: list[dict[str, Any]] = [] + for expr_id, cnt in top_wechat_emojis_raw: + expr_asset = str(expression_id_to_asset.get(int(expr_id)) or "") + expr_label = str(expression_id_to_label.get(int(expr_id)) or f"[表情{int(expr_id)}]") + top_wechat_emojis.append( + { + "id": int(expr_id), + "key": expr_label, + "count": int(cnt), + "assetPath": f"/wxemoji/{expr_asset}" if expr_asset else "", + } + ) + + top_text_emojis_raw = sorted(text_emoji_counts.items(), key=lambda kv: (-int(kv[1]), str(kv[0])))[:6] + top_text_emojis: list[dict[str, Any]] = [] + for key, cnt in top_text_emojis_raw: + asset = str(emoji_table.get(key) or "") + top_text_emojis.append( + { + "key": str(key), + "count": int(cnt), + "assetPath": f"/wxemoji/{asset}" if asset else "", + } + ) + + top_unicode_emojis_raw = sorted(unicode_emoji_counts.items(), key=lambda kv: (-int(kv[1]), str(kv[0])))[:8] + top_unicode_emojis: list[dict[str, Any]] = [] + for key, cnt in top_unicode_emojis_raw: + top_unicode_emojis.append({"emoji": str(key), "count": int(cnt)}) + + top_battle_partner_obj: dict[str, Any] = { + "username": "", + "displayName": "", + "maskedName": "", + "avatarUrl": "", + "stickerCount": 0, + } + battle_candidates = [ + (u, c) + for u, c in sticker_by_username.items() + if u + and (not u.endswith("@chatroom")) + and _should_keep_session(u, include_official=False) + and int(c) > 0 + ] + if battle_candidates: + top_u, top_cnt = sorted(battle_candidates, key=lambda kv: (-int(kv[1]), str(kv[0])))[0] + rows = _load_contact_rows(account_dir / "contact.db", [top_u]) + row = rows.get(top_u) + display = _pick_display_name(row, top_u) + top_battle_partner_obj = { + "username": top_u, + "displayName": display, + "maskedName": display, + "avatarUrl": _build_avatar_url(str(account_dir.name or ""), top_u), + "stickerCount": int(top_cnt), + } + + top_text = top_text_emojis[0] if top_text_emojis else None + top_wechat = top_wechat_emojis[0] if top_wechat_emojis else None + persona = 
_pick_persona( + sent_sticker_count=sent_sticker_count, + sticker_share=float(sticker_share), + peak_hour=peak_hour, + top_text_emoji_count=int((top_text.get("count") if top_text else 0) or 0) + + int((top_wechat.get("count") if top_wechat else 0) or 0), + ) + + lines: list[str] = [] + if sent_sticker_count > 0: + lines.append( + f"这一年,你用 {sent_sticker_count:,} 张表情包把聊天变得更有温度;在 {sticker_days:,} 个活跃日里,日均 {sticker_per_day:.1f} 张。" + ) + else: + lines.append("这一年你几乎没发过表情包。") + + if peak_hour is not None and peak_weekday_name: + lines.append(f"你最活跃的时刻是 {peak_weekday_name} {peak_hour}:00。") + + if top_stickers: + top0 = top_stickers[0] + label0 = str(top0.get("emojiLabel") or "") + if label0: + lines.append(f"年度 C 位表情是 {label0}({int(top0['count']):,} 次)。") + else: + lines.append(f"年度 C 位表情是 {top0['md5'][:8]}…({int(top0['count']):,} 次)。") + + if top_wechat: + lines.append(f"你最常用的小黄脸是 {top_wechat['key']},共 {int(top_wechat['count']):,} 次。") + elif top_text: + lines.append(f"在文字聊天里,你最常打的小黄脸是 {top_text['key']},共 {int(top_text['count']):,} 次。") + if top_unicode_emojis: + lines.append(f"普通 Emoji 最常用 {top_unicode_emojis[0]['emoji']},共 {int(top_unicode_emojis[0]['count']):,} 次。") + + if int(top_battle_partner_obj.get("stickerCount") or 0) > 0: + lines.append( + f"和你斗图最狠的是 {top_battle_partner_obj['displayName']}({int(top_battle_partner_obj['stickerCount']):,} 发)。" + ) + + lines.append(f"年度人格:{persona['label']}。") + + return { + "year": int(year), + "sentStickerCount": int(sent_sticker_count), + "stickerActiveDays": int(sticker_days), + "stickerPerActiveDay": float(sticker_per_day), + "stickerShareOfSentMessages": float(sticker_share), + "uniqueStickerTypeCount": int(unique_sticker_type_count), + "newStickerCountThisYear": int(new_sticker_count_this_year), + "newStickerShare": float(new_sticker_share), + "newStickerSamples": new_sticker_samples, + "revivedStickerCount": int(revived_sticker_count), + "revivedStickerShare": float(revived_sticker_share), + "revivedMinGapDays": 
int(revive_gap_days_threshold), + "revivedMaxGapDays": int(revived_max_gap_days), + "revivedStickerSamples": revived_sticker_samples, + "peakHour": int(peak_hour) if peak_hour is not None else None, + "peakWeekday": int(peak_weekday) if peak_weekday is not None else None, + "peakWeekdayName": peak_weekday_name, + "stickerHourCounts": [int(hour_counts.get(h, 0)) for h in range(24)], + "stickerWeekdayCounts": [int(weekday_counts.get(w, 0)) for w in range(7)], + "topStickers": top_stickers, + "topWechatEmojis": top_wechat_emojis, + "topTextEmojis": top_text_emojis, + "topUnicodeEmojis": top_unicode_emojis, + "topBattlePartner": top_battle_partner_obj, + "persona": persona, + "lines": lines, + "settings": {"usedIndex": bool(used_index)}, + } + + +def build_card_04_emoji_universe(*, account_dir: Path, year: int) -> dict[str, Any]: + data = compute_emoji_universe_stats(account_dir=account_dir, year=year) + + sent_sticker_count = int(data.get("sentStickerCount") or 0) + sticker_days = int(data.get("stickerActiveDays") or 0) + sticker_per_day = float(data.get("stickerPerActiveDay") or 0.0) + top_stickers = list(data.get("topStickers") or []) + top_wechat_emojis = list(data.get("topWechatEmojis") or []) + top_text_emojis = list(data.get("topTextEmojis") or []) + top_unicode_emojis = list(data.get("topUnicodeEmojis") or []) + peak_weekday_name = str(data.get("peakWeekdayName") or "") + peak_hour = data.get("peakHour") + + if sent_sticker_count <= 0 and (not top_wechat_emojis) and (not top_text_emojis) and (not top_unicode_emojis): + narrative = "今年你几乎没用表情表达。" + else: + parts: list[str] = [] + if sent_sticker_count > 0: + parts.append( + f"这一年,你用 {sent_sticker_count:,} 张表情包把聊天变得更有温度;在 {sticker_days:,} 个活跃日里,日均 {sticker_per_day:.1f} 张。" + ) + if peak_hour is not None and peak_weekday_name: + parts.append(f"你最活跃的时刻是 {peak_weekday_name} {int(peak_hour)}:00。") + tail_parts: list[str] = [] + if top_stickers: + x = top_stickers[0] + label0 = str(x.get("emojiLabel") or "").strip() + 
if label0: + tail_parts.append(f"年度 C 位表情是 {label0}({int(x.get('count') or 0):,} 次)") + else: + tail_parts.append(f"年度 C 位表情是 {str(x.get('md5') or '')[:8]}…({int(x.get('count') or 0):,} 次)") + if top_wechat_emojis: + x = top_wechat_emojis[0] + tail_parts.append(f"你最常用的小黄脸是 {str(x.get('key') or '')}({int(x.get('count') or 0):,} 次)") + elif top_text_emojis: + x = top_text_emojis[0] + tail_parts.append(f"在文字聊天里,你最常打的小黄脸是 {str(x.get('key') or '')}({int(x.get('count') or 0):,} 次)") + if top_unicode_emojis: + x = top_unicode_emojis[0] + tail_parts.append(f"普通 Emoji 最常用 {str(x.get('emoji') or '')}({int(x.get('count') or 0):,} 次)") + if tail_parts: + parts.append(",".join(tail_parts) + "。") + narrative = "".join(parts) + + return { + "id": 5, + "title": "这一年,你的表情包里藏了多少心情?", + "scope": "global", + "category": "B", + "status": "ok", + "kind": "emoji/annual_universe", + "narrative": narrative, + "data": data, + } diff --git a/src/wechat_decrypt_tool/wrapped/cards/card_04_monthly_best_friends_wall.py b/src/wechat_decrypt_tool/wrapped/cards/card_04_monthly_best_friends_wall.py new file mode 100644 index 0000000..705406f --- /dev/null +++ b/src/wechat_decrypt_tool/wrapped/cards/card_04_monthly_best_friends_wall.py @@ -0,0 +1,452 @@ +from __future__ import annotations + +import math +import sqlite3 +import time +from dataclasses import dataclass, field +from datetime import datetime +from pathlib import Path +from typing import Any + +from ...chat_helpers import ( + _build_avatar_url, + _load_contact_rows, + _pick_display_name, + _should_keep_session, +) +from ...chat_search_index import ( + get_chat_search_index_db_path, + get_chat_search_index_status, + start_chat_search_index_build, +) +from ...logging_config import get_logger + +logger = get_logger(__name__) + + +def _year_range_epoch_seconds(year: int) -> tuple[int, int]: + start = int(datetime(year, 1, 1).timestamp()) + end = int(datetime(year + 1, 1, 1).timestamp()) + return start, end + + +def _mask_name(name: str) -> 
str: + s = str(name or "").strip() + if not s: + return "" + if len(s) == 1: + return "*" + if len(s) == 2: + return s[0] + "*" + return s[0] + ("*" * (len(s) - 2)) + s[-1] + + +@dataclass +class _MonthConvAgg: + username: str + month: int + incoming: int = 0 + outgoing: int = 0 + replies: int = 0 + sum_gap: int = 0 + sum_gap_capped: int = 0 + active_days: set[int] = field(default_factory=set) + time_bucket_mask: int = 0 + + @property + def total(self) -> int: + return int(self.incoming) + int(self.outgoing) + + @property + def interaction(self) -> int: + return min(int(self.incoming), int(self.outgoing)) + + @property + def active_days_count(self) -> int: + return len(self.active_days) + + @property + def time_bucket_count(self) -> int: + m = int(self.time_bucket_mask) & 0xF + return (m & 1) + ((m >> 1) & 1) + ((m >> 2) & 1) + ((m >> 3) & 1) + + def avg_reply_seconds(self) -> float: + if self.replies <= 0: + return 0.0 + return float(self.sum_gap) / float(self.replies) + + def avg_reply_seconds_capped(self) -> float: + if self.replies <= 0: + return 0.0 + return float(self.sum_gap_capped) / float(self.replies) + + def observe(self, *, day: int, hour: int) -> None: + if 1 <= day <= 31: + self.active_days.add(int(day)) + bucket = max(0, min(3, int(hour) // 6)) + self.time_bucket_mask |= 1 << bucket + + +def _score_month_agg( + *, + agg: _MonthConvAgg, + month_max_interaction: int, + month_max_active_days: int, + tau_seconds: float, + weights: dict[str, float], +) -> dict[str, float]: + max_interaction = max(1, int(month_max_interaction)) + max_active = max(1, int(month_max_active_days)) + interaction_score = math.log1p(float(agg.interaction)) / math.log1p(float(max_interaction)) + speed_score = 1.0 / (1.0 + (float(agg.avg_reply_seconds_capped()) / float(max(1.0, tau_seconds)))) + continuity_score = float(agg.active_days_count) / float(max_active) + coverage_score = float(agg.time_bucket_count) / 4.0 + final_score = ( + float(weights["interaction"]) * 
interaction_score + + float(weights["speed"]) * speed_score + + float(weights["continuity"]) * continuity_score + + float(weights["coverage"]) * coverage_score + ) + return { + "interaction": float(interaction_score), + "speed": float(speed_score), + "continuity": float(continuity_score), + "coverage": float(coverage_score), + "final": float(final_score), + } + + +def compute_monthly_best_friends_wall_stats(*, account_dir: Path, year: int) -> dict[str, Any]: + start_ts, end_ts = _year_range_epoch_seconds(int(year)) + my_username = str(account_dir.name or "").strip() + + gap_cap_seconds = 6 * 60 * 60 + tau_seconds = 30 * 60 + weights = { + "interaction": 0.40, + "speed": 0.30, + "continuity": 0.20, + "coverage": 0.10, + } + eligibility = { + "minTotalMessages": 8, + "minInteraction": 3, + "minReplyCount": 1, + "minActiveDays": 2, + } + + per_month_aggs: dict[int, list[_MonthConvAgg]] = {m: [] for m in range(1, 13)} + used_index = False + index_status: dict[str, Any] | None = None + + index_path = get_chat_search_index_db_path(account_dir) + if index_path.exists(): + conn = sqlite3.connect(str(index_path)) + try: + has_fts = ( + conn.execute("SELECT 1 FROM sqlite_master WHERE type='table' AND name='message_fts' LIMIT 1").fetchone() + is not None + ) + if has_fts and my_username: + used_index = True + t0 = time.time() + + ts_expr = ( + "CASE " + "WHEN CAST(create_time AS INTEGER) > 1000000000000 " + "THEN CAST(CAST(create_time AS INTEGER)/1000 AS INTEGER) " + "ELSE CAST(create_time AS INTEGER) " + "END" + ) + + where = ( + f"{ts_expr} >= ? AND {ts_expr} < ? 
" + "AND db_stem NOT LIKE 'biz_message%' " + "AND CAST(local_type AS INTEGER) != 10000 " + "AND username NOT LIKE '%@chatroom'" + ) + + sql = ( + "SELECT " + "username, sender_username, " + f"{ts_expr} AS ts, " + "CAST(sort_seq AS INTEGER) AS sort_seq_i, " + "CAST(local_id AS INTEGER) AS local_id_i " + "FROM message_fts " + f"WHERE {where} " + "ORDER BY username ASC, ts ASC, sort_seq_i ASC, local_id_i ASC" + ) + + cur = conn.execute(sql, (start_ts, end_ts)) + + cur_username = "" + conv_month_aggs: dict[int, _MonthConvAgg] = {} + prev_other_ts: int | None = None + + def flush_conv() -> None: + nonlocal cur_username, conv_month_aggs, prev_other_ts + if not cur_username: + return + for m, agg in conv_month_aggs.items(): + if 1 <= int(m) <= 12 and agg.total > 0: + per_month_aggs[int(m)].append(agg) + conv_month_aggs = {} + prev_other_ts = None + + for row in cur: + try: + username = str(row[0] or "").strip() + sender = str(row[1] or "").strip() + ts = int(row[2] or 0) + except Exception: + continue + + if ts <= 0 or not username: + continue + + if username != cur_username: + flush_conv() + cur_username = username + + if not _should_keep_session(username, include_official=False): + continue + + dt = datetime.fromtimestamp(ts) + month = int(dt.month) + if month < 1 or month > 12: + continue + agg = conv_month_aggs.get(month) + if agg is None: + agg = _MonthConvAgg(username=username, month=month) + conv_month_aggs[month] = agg + agg.observe(day=int(dt.day), hour=int(dt.hour)) + + is_me = sender == my_username + if is_me: + agg.outgoing += 1 + if prev_other_ts is not None and ts >= prev_other_ts: + gap = int(ts - prev_other_ts) + agg.replies += 1 + agg.sum_gap += gap + agg.sum_gap_capped += min(gap, gap_cap_seconds) + prev_other_ts = None + else: + agg.incoming += 1 + prev_other_ts = ts + + flush_conv() + + logger.info( + "Wrapped card#4 monthly_best_friends computed (search index): account=%s year=%s elapsed=%.2fs", + str(account_dir.name or "").strip(), + int(year), + 
time.time() - t0, + ) + finally: + try: + conn.close() + except Exception: + pass + + if not used_index: + try: + index_status = get_chat_search_index_status(account_dir) + index = dict(index_status.get("index") or {}) + build = dict(index.get("build") or {}) + index_ready = bool(index.get("ready")) + build_status = str(build.get("status") or "") + index_exists = bool(index.get("exists")) + if (not index_ready) and build_status not in {"building", "error"}: + start_chat_search_index_build(account_dir, rebuild=bool(index_exists)) + index_status = get_chat_search_index_status(account_dir) + except Exception: + index_status = None + + month_winner_raw: dict[int, dict[str, Any]] = {} + winner_usernames: list[str] = [] + for month in range(1, 13): + aggs = list(per_month_aggs.get(month) or []) + eligible: list[_MonthConvAgg] = [] + for agg in aggs: + if agg.total < int(eligibility["minTotalMessages"]): + continue + if agg.interaction < int(eligibility["minInteraction"]): + continue + if agg.replies < int(eligibility["minReplyCount"]): + continue + if agg.active_days_count < int(eligibility["minActiveDays"]): + continue + eligible.append(agg) + + if not eligible: + continue + + month_max_interaction = max(agg.interaction for agg in eligible) + month_max_active_days = max(agg.active_days_count for agg in eligible) + scored: list[tuple[tuple[float, float, float, float, str], _MonthConvAgg, dict[str, float]]] = [] + for agg in eligible: + score = _score_month_agg( + agg=agg, + month_max_interaction=month_max_interaction, + month_max_active_days=month_max_active_days, + tau_seconds=float(tau_seconds), + weights=weights, + ) + tie_key = ( + -float(score["final"]), + -float(agg.interaction), + float(agg.avg_reply_seconds_capped()), + -float(agg.active_days_count), + str(agg.username), + ) + scored.append((tie_key, agg, score)) + scored.sort(key=lambda x: x[0]) + _, winner_agg, winner_score = scored[0] + month_winner_raw[month] = { + "agg": winner_agg, + "score": winner_score, 
+ } + winner_usernames.append(winner_agg.username) + + uniq_winner_usernames: list[str] = [] + seen: set[str] = set() + for u in winner_usernames: + if u and u not in seen: + seen.add(u) + uniq_winner_usernames.append(u) + + contact_rows = _load_contact_rows(account_dir / "contact.db", uniq_winner_usernames) if uniq_winner_usernames else {} + + months: list[dict[str, Any]] = [] + for month in range(1, 13): + winner_pack = month_winner_raw.get(month) + if not winner_pack: + months.append( + { + "month": month, + "winner": None, + "metrics": None, + "raw": None, + "isFallback": False, + "reason": "insufficient_data", + } + ) + continue + + agg: _MonthConvAgg = winner_pack["agg"] + score = dict(winner_pack["score"] or {}) + row = contact_rows.get(agg.username) + display = _pick_display_name(row, agg.username) + avatar = _build_avatar_url(str(account_dir.name or ""), agg.username) if agg.username else "" + + months.append( + { + "month": month, + "winner": { + "username": agg.username, + "displayName": display, + "maskedName": _mask_name(display), + "avatarUrl": avatar, + "score": float(score.get("final") or 0.0), + "score100": round(float(score.get("final") or 0.0) * 100.0, 1), + }, + "metrics": { + "interactionScore": float(score.get("interaction") or 0.0), + "speedScore": float(score.get("speed") or 0.0), + "continuityScore": float(score.get("continuity") or 0.0), + "coverageScore": float(score.get("coverage") or 0.0), + }, + "raw": { + "incomingMessages": int(agg.incoming), + "outgoingMessages": int(agg.outgoing), + "totalMessages": int(agg.total), + "interaction": int(agg.interaction), + "replyCount": int(agg.replies), + "avgReplySeconds": float(agg.avg_reply_seconds()), + "avgReplySecondsCapped": float(agg.avg_reply_seconds_capped()), + "activeDays": int(agg.active_days_count), + "timeBucketsCount": int(agg.time_bucket_count), + }, + "isFallback": False, + } + ) + + winner_month_counts: dict[str, int] = {} + for item in months: + w = item.get("winner") + if not 
isinstance(w, dict): + continue + u = str(w.get("username") or "").strip() + if not u: + continue + winner_month_counts[u] = int(winner_month_counts.get(u, 0)) + 1 + + top_champion = None + if winner_month_counts: + champion_username = sorted(winner_month_counts.items(), key=lambda kv: (-int(kv[1]), str(kv[0])))[0][0] + champion_months = int(winner_month_counts.get(champion_username) or 0) + row = contact_rows.get(champion_username) + display = _pick_display_name(row, champion_username) + top_champion = { + "username": champion_username, + "displayName": display, + "maskedName": _mask_name(display), + "monthsWon": champion_months, + } + + filled_months = [int(x.get("month") or 0) for x in months if isinstance(x.get("winner"), dict)] + + return { + "year": int(year), + "months": months, + "summary": { + "monthsWithWinner": int(len(filled_months)), + "topChampion": top_champion, + "filledMonths": filled_months, + }, + "settings": { + "weights": { + "interaction": float(weights["interaction"]), + "speed": float(weights["speed"]), + "continuity": float(weights["continuity"]), + "coverage": float(weights["coverage"]), + }, + "tauSeconds": int(tau_seconds), + "gapCapSeconds": int(gap_cap_seconds), + "eligibility": { + "minTotalMessages": int(eligibility["minTotalMessages"]), + "minInteraction": int(eligibility["minInteraction"]), + "minReplyCount": int(eligibility["minReplyCount"]), + "minActiveDays": int(eligibility["minActiveDays"]), + }, + "usedIndex": bool(used_index), + "indexStatus": index_status, + }, + } + + +def build_card_04_monthly_best_friends_wall(*, account_dir: Path, year: int) -> dict[str, Any]: + data = compute_monthly_best_friends_wall_stats(account_dir=account_dir, year=year) + summary = dict(data.get("summary") or {}) + top_champion = summary.get("topChampion") + months_with_winner = int(summary.get("monthsWithWinner") or 0) + + if months_with_winner <= 0: + narrative = "今年还没有足够的聊天互动数据来评选每月最佳好友(或搜索索引尚未就绪)。" + elif isinstance(top_champion, dict) and 
top_champion.get("displayName"): + champ_name = str(top_champion.get("displayName") or "") + months_won = int(top_champion.get("monthsWon") or 0) + narrative = f"{champ_name} 拿下了 {months_won} 个月的月度最佳好友;这一年你们的聊天默契很稳定。" + else: + narrative = f"你在 {months_with_winner} 个月里都出现了稳定的“月度最佳好友”。" + + return { + "id": 4, + "title": "陪你走过每个月的人", + "scope": "global", + "category": "B", + "status": "ok", + "kind": "chat/monthly_best_friends_wall", + "narrative": narrative, + "data": data, + } diff --git a/src/wechat_decrypt_tool/wrapped/cards/card_05_keywords_wordcloud.py b/src/wechat_decrypt_tool/wrapped/cards/card_05_keywords_wordcloud.py new file mode 100644 index 0000000..307ff13 --- /dev/null +++ b/src/wechat_decrypt_tool/wrapped/cards/card_05_keywords_wordcloud.py @@ -0,0 +1,849 @@ +from __future__ import annotations + +import hashlib +import logging +import math +import random +import re +import sqlite3 +import time +from collections import Counter +from datetime import datetime +from pathlib import Path +from typing import Any + +import jieba + +from ...chat_helpers import _decode_message_content, _decode_sqlite_text, _iter_message_db_paths, _quote_ident +from ...logging_config import get_logger + +logger = get_logger(__name__) +try: + jieba.setLogLevel(logging.ERROR) +except Exception: + pass + + +_MD5_HEX_RE = re.compile(r"(?i)\b[0-9a-f]{32}\b") +_URL_RE = re.compile(r"(?i)\bhttps?://\S+") +_CTRL_RE = re.compile(r"[\x00-\x08\x0b\x0c\x0e-\x1f]") +_HAS_CJK_RE = re.compile(r"[\u4e00-\u9fff]") +_CJK_SEQ_RE = re.compile(r"[\u4e00-\u9fff]+") +_HAS_ALNUM_RE = re.compile(r"[\u4e00-\u9fffA-Za-z0-9]") +_EN_WORD_RE = re.compile(r"^[A-Za-z]{3,16}$") +_DATEISH_RE = re.compile( + r"^(?:" + r"\d{4}[-/]\d{1,2}[-/]\d{1,2}" + r"|" + r"\d{1,2}:\d{2}" + r"|" + r"\d{1,2}月\d{1,2}日" + r")$" +) + +# Align with WeFlow Annual Report "年度常用语" logic. +# WeFlow counts repeated *phrases* (full short sent messages), not jieba tokens. 
+_WEFLOW_COMMON_PHRASE_LOCAL_TYPES = (1, 244813135921) + +# Small but practical stopword list for chat keywords. +_STOPWORDS_ZH = { + "的", + "了", + "是", + "我", + "你", + "他", + "她", + "它", + "我们", + "你们", + "他们", + "她们", + "它们", + "这", + "那", + "这个", + "那个", + "这里", + "那里", + "这样", + "那样", + "就是", + "也是", + "还有", + "因为", + "所以", + "但是", + "如果", + "然后", + "已经", + "可以", + "还是", + "可能", + "不会", + "没有", + "不是", + "一个", + "一下", + "一下子", + "一下下", + "哈哈", + "哈哈哈", + "嘿嘿", + "呜呜", + "嗯", + "哦", + "啊", + "呀", + "啦", + "嘛", + "呢", + "吧", + "额", + "诶", + "哇", + "唉", + "好", + "行", + "可以", + "ok", + "OK", +} + +_STOPWORDS_EN = { + "the", + "a", + "an", + "and", + "or", + "but", + "to", + "of", + "in", + "on", + "for", + "with", + "at", + "from", + "as", + "is", + "are", + "was", + "were", + "be", + "been", + "being", + "i", + "me", + "my", + "you", + "your", + "he", + "she", + "it", + "we", + "they", + "them", + "this", + "that", + "these", + "those", + "yeah", + "haha", + "ok", + "okay", + "pls", + "lol", +} + + +def _year_range_epoch_seconds(year: int) -> tuple[int, int]: + start = int(datetime(int(year), 1, 1).timestamp()) + end = int(datetime(int(year) + 1, 1, 1).timestamp()) + return start, end + + +def _stable_seed(account_name: str, year: int) -> int: + s = f"{str(account_name or '').strip()}|{int(year)}|wrapped_keywords" + h = hashlib.sha256(s.encode("utf-8")).hexdigest() + return int(h[:8], 16) + + +def _list_message_tables(conn: sqlite3.Connection) -> list[str]: + try: + rows = conn.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall() + except Exception: + return [] + names: list[str] = [] + for r in rows: + if not r or not r[0]: + continue + name = _decode_sqlite_text(r[0]).strip() + if not name: + continue + ln = name.lower() + if ln.startswith(("msg_", "chat_")): + names.append(name) + return names + + +def _clean_text(text: str) -> str: + s = str(text or "") + if not s: + return "" + s = s.replace("\u200b", "").replace("\ufeff", "") + s = 
_CTRL_RE.sub("", s) + s = _URL_RE.sub("", s) + s = re.sub(r"\s+", " ", s).strip() + if not s: + return "" + # XML-like payloads are rarely useful as bubbles/keywords. + if s.startswith("<") or s.startswith('"<'): + return "" + return s + + +def _is_good_bubble_text(text: str) -> bool: + s = _clean_text(text) + if not s: + return False + # 仅过滤极短噪声,不对消息长度设置上限。 + if len(s) < 2: + return False + if _URL_RE.search(s): + return False + if _MD5_HEX_RE.fullmatch(s.replace(" ", "")): + return False + # Avoid pure punctuation / emoji / digits. + if not re.search(r"[\u4e00-\u9fffA-Za-z]", s): + return False + if not _HAS_ALNUM_RE.search(s): + return False + if re.fullmatch(r"[0-9]+", s): + return False + return True + + +def _is_good_example_text(text: str) -> bool: + s = _clean_text(text) + if not s: + return False + # 常用语卡片需要保留短句(如“在吗”“好的”),仅过滤 1 字噪声。 + if len(s) < 2: + return False + if _URL_RE.search(s): + return False + if _MD5_HEX_RE.search(s): + return False + if not re.search(r"[\u4e00-\u9fffA-Za-z]", s): + return False + return True + + +def _normalize_token(tok: str) -> str: + s = str(tok or "").strip() + if not s: + return "" + if len(s) > 32: + return "" + + # Trim punctuation on both sides. + s = re.sub(r"^[^\w\u4e00-\u9fff]+|[^\w\u4e00-\u9fff]+$", "", s, flags=re.UNICODE).strip() + if not s: + return "" + + if _MD5_HEX_RE.fullmatch(s) or _MD5_HEX_RE.search(s): + return "" + if _DATEISH_RE.fullmatch(s): + return "" + + # Discard if contains obvious long ids (alnum with many digits). + if len(s) >= 18 and re.fullmatch(r"[A-Za-z0-9_-]+", s) and sum(ch.isdigit() for ch in s) >= 6: + return "" + + # Remove tokens with digits. 
+ if any(ch.isdigit() for ch in s): + return "" + + has_cjk = bool(_HAS_CJK_RE.search(s)) + if has_cjk: + if not (2 <= len(s) <= 8): + return "" + if s in _STOPWORDS_ZH: + return "" + return s + + if _EN_WORD_RE.fullmatch(s): + low = s.lower() + if low in _STOPWORDS_EN: + return "" + return low + + return "" + + +def extract_keywords_jieba(texts: list[str], *, top_n: int = 40) -> list[dict[str, Any]]: + counter: Counter[str] = Counter() + for raw in texts: + s = _clean_text(raw) + if not s: + continue + try: + toks = jieba.lcut(s, cut_all=False) + except Exception: + toks = [] + had_token = False + for tok in toks: + w = _normalize_token(tok) + if not w: + continue + counter[w] += 1 + had_token = True + + # Fallback for short chat phrases that Jieba often splits into single characters + # (e.g. "在吗" -> ["在","吗"]) which we intentionally filter out. + if not had_token and _HAS_CJK_RE.search(s): + for seg in _CJK_SEQ_RE.findall(s): + if len(seg) < 2: + continue + for i in range(0, len(seg) - 1): + w = _normalize_token(seg[i : i + 2]) + if not w: + continue + counter[w] += 1 + + if not counter: + return [] + + items = [(w, int(c)) for w, c in counter.items() if int(c) > 1] + if not items: + # If everything is singleton, still provide something. 
+ items = [(w, int(c)) for w, c in counter.items() if int(c) > 0] + + items.sort(key=lambda kv: (-kv[1], kv[0])) + items = items[: max(0, int(top_n or 0))] + if not items: + return [] + + vals = [math.sqrt(max(0, c)) for _, c in items] + minv = min(vals) if vals else 0.0 + maxv = max(vals) if vals else 0.0 + + out: list[dict[str, Any]] = [] + for (w, c), v in zip(items, vals): + if maxv <= minv: + weight = 1.0 + else: + weight = 0.2 + 0.8 * ((v - minv) / (maxv - minv)) + out.append({"word": w, "count": int(c), "weight": round(float(weight), 4)}) + return out + + +def pick_examples( + keywords: list[dict[str, Any]], + message_pool: list[str], + *, + per_word: int = 3, +) -> list[dict[str, Any]]: + all_msgs = [_clean_text(x) for x in (message_pool or []) if _clean_text(x)] + uniq_msgs = list(dict.fromkeys(all_msgs)) + out: list[dict[str, Any]] = [] + + for kw in keywords: + word = str(kw.get("word") or "").strip() + if not word: + continue + count = int(kw.get("count") or 0) + + hits: list[str] = [] + limit = max(1, int(per_word)) + + def _match(msg: str) -> bool: + if not _is_good_example_text(msg): + return False + if _HAS_CJK_RE.search(word): + return word in msg + return word.lower() in msg.lower() + + # Pass 1: prefer unique samples for diversity. + for msg in uniq_msgs: + if len(hits) >= limit: + break + if _match(msg): + hits.append(msg) + + # Pass 2: if still not enough, allow repeated samples from original pool. 
+        if len(hits) < limit:
+            for msg in all_msgs:
+                if len(hits) >= limit:
+                    break
+                if _match(msg):
+                    hits.append(msg)
+
+        out.append({"word": word, "count": int(count), "messages": hits})
+
+    return out
+
+
+def build_keywords_payload(
+    *,
+    texts: list[str],
+    seed: int,
+    top_n: int = 40,
+    bubble_limit: int = 180,
+    examples_per_word: int = 3,
+) -> dict[str, Any]:
+    _ = seed  # 保留参数以兼容现有调用/测试;随机采样不再使用固定 seed。
+    keywords = extract_keywords_jieba(list(texts or []), top_n=top_n)
+
+    bubble_candidates = [_clean_text(x) for x in (texts or [])]
+    bubble_candidates = [x for x in bubble_candidates if _is_good_bubble_text(x)]
+    bubble_candidates = list(dict.fromkeys(bubble_candidates))
+
+    rnd = random.SystemRandom()
+    rnd.shuffle(bubble_candidates)
+    bubble_messages = bubble_candidates[: max(0, int(bubble_limit or 0))]
+
+    examples = pick_examples(keywords, texts, per_word=examples_per_word)
+
+    top_kw = None
+    if keywords:
+        top_kw = {"word": str(keywords[0]["word"]), "count": int(keywords[0]["count"])}
+
+    return {
+        "topKeyword": top_kw,
+        "keywords": keywords,
+        "bubbleMessages": bubble_messages,
+        "examples": examples,
+    }
+
+
+def _weflow_common_phrase_or_empty(text: Any) -> str:
+    """
+    Match WeFlow "年度常用语" filter:
+    - Only short messages: 2 <= len <= 20
+    - Exclude links/markup: contains "http" or "<"
+    - Exclude bracketed / xml-like payloads: startswith "[" or "<"
+    """
+    s = _clean_text(text)
+    if not s:
+        return ""
+    if len(s) < 2 or len(s) > 20:
+        return ""
+    if "http" in s:
+        return ""
+    if "<" in s:
+        return ""
+    if s.startswith("[") or s.startswith("<"):
+        return ""
+    return s
+
+
+def build_common_phrases_payload(
+    *,
+    phrase_counts: dict[str, int],
+    seed: int,
+    example_texts: list[str] | None = None,
+    top_n: int = 40,
+    bubble_limit: int = 180,
+    examples_per_word: int = 3,
+) -> dict[str, Any]:
+    _ = seed  # 保留参数以兼容现有调用/测试;气泡抽样不再使用固定 seed。
+
+    items = [(p, int(c)) for p, c in (phrase_counts or {}).items() if int(c) >= 2]
+    if not items:
+        return {"topKeyword": None, "keywords": [], "bubbleMessages": [], "examples": []}
+
+    items.sort(key=lambda kv: (-kv[1], kv[0]))
+    items = items[: max(0, int(top_n or 0))]
+    if not items:
+        return {"topKeyword": None, "keywords": [], "bubbleMessages": [], "examples": []}
+
+    vals = [math.sqrt(max(0, c)) for _, c in items]
+    minv = 
min(vals) if vals else 0.0 + maxv = max(vals) if vals else 0.0 + + keywords: list[dict[str, Any]] = [] + for (phrase, count), v in zip(items, vals): + if maxv <= minv: + weight = 1.0 + else: + weight = 0.2 + 0.8 * ((v - minv) / (maxv - minv)) + keywords.append({"word": phrase, "count": int(count), "weight": round(float(weight), 4)}) + + # Bubble pool: unique phrases (not all raw messages). Keep it diverse and lightweight. + bubble_candidates = list(dict.fromkeys([str(p or "").strip() for p in phrase_counts.keys()])) + bubble_candidates = [p for p in bubble_candidates if p] + rnd = random.SystemRandom() + rnd.shuffle(bubble_candidates) + bubble_messages = bubble_candidates[: max(0, int(bubble_limit or 0))] + + # Examples: prefer real sampled messages; fallback to phrase itself. + if example_texts: + per_word = max(1, int(examples_per_word or 1)) + examples = pick_examples(keywords, list(example_texts), per_word=per_word) + for ex in examples: + msgs = [str(m or "").strip() for m in (ex.get("messages") or []) if str(m or "").strip()] + if not msgs: + w = str(ex.get("word") or "").strip() + ex["messages"] = [w] if w else [] + else: + ex["messages"] = msgs[:per_word] + else: + examples = [{"word": kw["word"], "count": int(kw["count"]), "messages": [kw["word"]]} for kw in keywords] + + top_kw = {"word": str(keywords[0]["word"]), "count": int(keywords[0]["count"])} if keywords else None + + return { + "topKeyword": top_kw, + "keywords": keywords, + "bubbleMessages": bubble_messages, + "examples": examples, + } + + +def _scan_common_phrase_counts( + *, + account_dir: Path, + year: int, + outgoing_only: bool, + seed: int, + max_seen: int | None = None, +) -> tuple[Counter[str], dict[str, Any]]: + start_ts, end_ts = _year_range_epoch_seconds(int(year)) + _ = seed # 保留参数以兼容现有调用;扫描顺序不再使用随机。 + + db_paths = _iter_message_db_paths(account_dir) + # Prefer chat shards; biz_message often contains service/ads content. 
+ db_paths = [p for p in db_paths if not p.name.lower().startswith("biz_message")] + + phrase_counts: Counter[str] = Counter() + scanned = 0 + matched = 0 + capped = False + + t0 = time.time() + for db_path in db_paths: + if not db_path.exists(): + continue + + conn: sqlite3.Connection | None = None + try: + conn = sqlite3.connect(str(db_path)) + conn.row_factory = sqlite3.Row + conn.text_factory = bytes + + my_rowid: int | None = None + if outgoing_only: + try: + r = conn.execute( + "SELECT rowid FROM Name2Id WHERE user_name = ? LIMIT 1", + (str(account_dir.name),), + ).fetchone() + if r is not None and r[0] is not None: + my_rowid = int(r[0]) + except Exception: + my_rowid = None + if my_rowid is None: + continue + + tables = _list_message_tables(conn) + if not tables: + continue + tables.sort() + + ts_expr = ( + "CASE " + "WHEN CAST(create_time AS INTEGER) > 1000000000000 " + "THEN CAST(CAST(create_time AS INTEGER)/1000 AS INTEGER) " + "ELSE CAST(create_time AS INTEGER) " + "END" + ) + + local_types_csv = ",".join(str(int(x)) for x in _WEFLOW_COMMON_PHRASE_LOCAL_TYPES) + + for table in tables: + if max_seen is not None and scanned >= int(max_seen): + capped = True + break + + qt = _quote_ident(table) + where_sender = "" + params: tuple[Any, ...] + if outgoing_only and my_rowid is not None: + where_sender = " AND CAST(real_sender_id AS INTEGER) = ?" + params = (start_ts, end_ts, int(my_rowid)) + else: + params = (start_ts, end_ts) + + sql = ( + "SELECT message_content, compress_content " + f"FROM {qt} " + f"WHERE CAST(local_type AS INTEGER) IN ({local_types_csv}) " + f" AND {ts_expr} >= ? AND {ts_expr} < ?" 
+ f"{where_sender}" + ) + + try: + cur = conn.execute(sql, params) + except Exception: + continue + + for r in cur: + if max_seen is not None and scanned >= int(max_seen): + capped = True + break + + scanned += 1 + try: + raw_txt = _decode_message_content(r["compress_content"], r["message_content"]) + except Exception: + continue + + phrase = _weflow_common_phrase_or_empty(raw_txt) + if not phrase: + continue + phrase_counts[phrase] += 1 + matched += 1 + finally: + if conn is not None: + try: + conn.close() + except Exception: + pass + + if max_seen is not None and scanned >= int(max_seen): + break + + elapsed = time.time() - t0 + meta = { + "scannedCandidates": int(scanned), + "matchedCandidates": int(matched), + "uniquePhrases": int(len(phrase_counts)), + "capped": bool(capped), + "elapsedSec": round(float(elapsed), 3), + "localTypes": list(_WEFLOW_COMMON_PHRASE_LOCAL_TYPES), + } + return phrase_counts, meta + + +def _scan_message_pool( + *, + account_dir: Path, + year: int, + outgoing_only: bool, + seed: int, + max_pool: int = 3000, + max_seen: int = 120_000, +) -> tuple[list[str], dict[str, Any]]: + start_ts, end_ts = _year_range_epoch_seconds(int(year)) + _ = seed # 保留参数以兼容现有调用;抽样本身使用非确定性随机。 + rnd = random.SystemRandom() + + db_paths = _iter_message_db_paths(account_dir) + # Prefer chat shards; biz_message often contains service/ads content. + db_paths = [p for p in db_paths if not p.name.lower().startswith("biz_message")] + rnd.shuffle(db_paths) + + pool: list[str] = [] + seen = 0 + + t0 = time.time() + for db_path in db_paths: + if not db_path.exists(): + continue + + conn: sqlite3.Connection | None = None + try: + conn = sqlite3.connect(str(db_path)) + conn.row_factory = sqlite3.Row + conn.text_factory = bytes + + my_rowid: int | None = None + if outgoing_only: + try: + r = conn.execute( + "SELECT rowid FROM Name2Id WHERE user_name = ? 
LIMIT 1", + (str(account_dir.name),), + ).fetchone() + if r is not None and r[0] is not None: + my_rowid = int(r[0]) + except Exception: + my_rowid = None + if my_rowid is None: + continue + + tables = _list_message_tables(conn) + if not tables: + continue + rnd.shuffle(tables) + + ts_expr = ( + "CASE " + "WHEN CAST(create_time AS INTEGER) > 1000000000000 " + "THEN CAST(CAST(create_time AS INTEGER)/1000 AS INTEGER) " + "ELSE CAST(create_time AS INTEGER) " + "END" + ) + + for table in tables: + if seen >= int(max_seen): + break + qt = _quote_ident(table) + where_sender = "" + params: tuple[Any, ...] + if outgoing_only and my_rowid is not None: + where_sender = " AND CAST(real_sender_id AS INTEGER) = ?" + params = (start_ts, end_ts, int(my_rowid)) + else: + params = (start_ts, end_ts) + sql = ( + "SELECT message_content, compress_content " + f"FROM {qt} " + "WHERE CAST(local_type AS INTEGER) = 1 " + f" AND {ts_expr} >= ? AND {ts_expr} < ?" + f"{where_sender}" + ) + + try: + cur = conn.execute(sql, params) + except Exception: + continue + + for r in cur: + if seen >= int(max_seen): + break + raw_txt = "" + try: + raw_txt = _decode_message_content(r["compress_content"], r["message_content"]).strip() + except Exception: + raw_txt = "" + cleaned = _clean_text(raw_txt) + if not cleaned: + continue + seen += 1 + + if len(pool) < int(max_pool): + pool.append(cleaned) + continue + + # Reservoir sampling over the accepted stream. 
+ j = rnd.randrange(seen) + if j < int(max_pool): + pool[j] = cleaned + finally: + if conn is not None: + try: + conn.close() + except Exception: + pass + + if seen >= int(max_seen): + break + + elapsed = time.time() - t0 + meta = { + "scannedMessages": int(seen), + "sampledMessages": int(len(pool)), + "sampleRate": round(float(len(pool)) / float(seen), 6) if seen > 0 else 0.0, + "elapsedSec": round(float(elapsed), 3), + } + return pool, meta + + +def build_card_05_keywords_wordcloud(*, account_dir: Path, year: int) -> dict[str, Any]: + title = "这一年,你把哪些话说了一遍又一遍?" + seed = _stable_seed(str(account_dir.name or ""), int(year)) + + phrase_counts, scan_meta = _scan_common_phrase_counts( + account_dir=account_dir, + year=year, + outgoing_only=True, + seed=seed, + ) + # Fallback only when we cannot scan any candidate rows (e.g. Name2Id row missing). + if int(scan_meta.get("scannedCandidates") or 0) <= 0: + phrase_counts, scan_meta = _scan_common_phrase_counts( + account_dir=account_dir, + year=year, + outgoing_only=False, + seed=seed ^ 0x1234, + ) + scan_meta["outgoingOnlyFallback"] = True + + example_pool: list[str] = [] + pool_meta: dict[str, Any] = {} + if phrase_counts: + use_outgoing_only = not bool(scan_meta.get("outgoingOnlyFallback") or False) + example_pool, pool_meta = _scan_message_pool( + account_dir=account_dir, + year=year, + outgoing_only=use_outgoing_only, + seed=seed ^ 0x9E37, + max_pool=3000, + max_seen=120_000, + ) + if (not example_pool) and use_outgoing_only: + example_pool, pool_meta = _scan_message_pool( + account_dir=account_dir, + year=year, + outgoing_only=False, + seed=seed ^ 0xA53C, + max_pool=3000, + max_seen=120_000, + ) + pool_meta["outgoingOnlyFallback"] = True + + payload = build_common_phrases_payload( + phrase_counts=phrase_counts, + seed=seed, + example_texts=example_pool, + examples_per_word=3, + ) + + logger.info( + "Wrapped card#6 common phrases computed: account=%s year=%s phrases=%s bubble=%s scanned=%s matched=%s capped=%s 
elapsed=%.2fs", + str(account_dir.name or "").strip(), + int(year), + len(payload.get("keywords") or []), + len(payload.get("bubbleMessages") or []), + int(scan_meta.get("scannedCandidates") or 0), + int(scan_meta.get("matchedCandidates") or 0), + bool(scan_meta.get("capped") or False), + float(scan_meta.get("elapsedSec") or 0.0), + ) + + return { + "id": 6, + "title": title, + "scope": "global", + "category": "C", + "status": "ok", + "kind": "text/keywords_wordcloud", + "narrative": "你的年度常用语词云", + "data": { + "year": int(year), + **payload, + "meta": { + "scannedCandidates": int(scan_meta.get("scannedCandidates") or 0), + "matchedCandidates": int(scan_meta.get("matchedCandidates") or 0), + "uniquePhrases": int(scan_meta.get("uniquePhrases") or 0), + "capped": bool(scan_meta.get("capped") or False), + "localTypes": list(scan_meta.get("localTypes") or []), + "outgoingOnlyFallback": bool(scan_meta.get("outgoingOnlyFallback") or False), + "examplePoolScannedMessages": int(pool_meta.get("scannedMessages") or 0), + "examplePoolSampledMessages": int(pool_meta.get("sampledMessages") or 0), + "examplePoolOutgoingOnlyFallback": bool(pool_meta.get("outgoingOnlyFallback") or False), + }, + }, + } diff --git a/src/wechat_decrypt_tool/wrapped/cards/card_07_bento_summary.py b/src/wechat_decrypt_tool/wrapped/cards/card_07_bento_summary.py new file mode 100644 index 0000000..2727582 --- /dev/null +++ b/src/wechat_decrypt_tool/wrapped/cards/card_07_bento_summary.py @@ -0,0 +1,292 @@ +from __future__ import annotations + +from typing import Any + + +def _as_data(obj: Any) -> dict[str, Any]: + if not isinstance(obj, dict): + return {} + data = obj.get("data") + if isinstance(data, dict): + return data + return obj + + +def _pick_int(x: Any, default: int = 0) -> int: + try: + return int(x) + except Exception: + return int(default) + + +def _pick_float(x: Any, default: float = 0.0) -> float: + try: + v = float(x) + return v if v == v else float(default) # NaN guard + except Exception: 
+ return float(default) + + +def _pick_str(x: Any, default: str = "") -> str: + s = str(x or "").strip() + return s if s else str(default) + + +def _pick_obj(d: Any, keys: tuple[str, ...]) -> dict[str, Any] | None: + if not isinstance(d, dict): + return None + out: dict[str, Any] = {} + for k in keys: + if k in d: + out[k] = d.get(k) + return out if out else None + + +def build_card_07_bento_summary_from_sources( + *, + year: int, + overview: dict[str, Any], + heatmap: dict[str, Any], + message_chars: dict[str, Any], + reply_speed: dict[str, Any], + monthly: dict[str, Any], + emoji: dict[str, Any], +) -> dict[str, Any]: + """Card #7: Bento Summary (prototype style merged into Wrapped deck). + + The frontend expects a stable `data.snapshot` object to render without running extra JS. + """ + + overview_d = _as_data(overview) + heatmap_d = _as_data(heatmap) + message_chars_d = _as_data(message_chars) + reply_speed_d = _as_data(reply_speed) + monthly_d = _as_data(monthly) + emoji_d = _as_data(emoji) + + top_group_raw = overview_d.get("topGroup") + top_group = None + if isinstance(top_group_raw, dict): + display = _pick_str(top_group_raw.get("displayName"), "--") + top_group = { + "displayName": display, + "maskedName": display, + "avatarUrl": _pick_str(top_group_raw.get("avatarUrl"), ""), + "messages": _pick_int(top_group_raw.get("messages"), 0), + } + + best_buddy_raw = reply_speed_d.get("bestBuddy") + best_buddy = None + if isinstance(best_buddy_raw, dict): + display = _pick_str(best_buddy_raw.get("displayName"), "--") + best_buddy = { + "displayName": display, + "maskedName": display, + "avatarUrl": _pick_str(best_buddy_raw.get("avatarUrl"), ""), + "totalMessages": _pick_int(best_buddy_raw.get("totalMessages"), 0), + "longestStreakDays": _pick_int(best_buddy_raw.get("longestStreakDays"), 0), + "peakHour": best_buddy_raw.get("peakHour"), + "peakHourLabel": _pick_str(best_buddy_raw.get("peakHourLabel"), ""), + } + + fastest_raw = reply_speed_d.get("fastest") + fastest 
= None + if isinstance(fastest_raw, dict): + display = _pick_str(fastest_raw.get("displayName"), "--") + fastest = { + "displayName": display, + "maskedName": display, + "avatarUrl": _pick_str(fastest_raw.get("avatarUrl"), ""), + "seconds": _pick_int(fastest_raw.get("seconds"), 0), + } + + slowest_raw = reply_speed_d.get("slowest") + slowest = None + if isinstance(slowest_raw, dict): + display = _pick_str(slowest_raw.get("displayName"), "--") + slowest = { + "displayName": display, + "maskedName": display, + "avatarUrl": _pick_str(slowest_raw.get("avatarUrl"), ""), + "seconds": _pick_int(slowest_raw.get("seconds"), 0), + } + + reply_stats_raw = reply_speed_d.get("replyStats") + reply_stats = None + if isinstance(reply_stats_raw, dict): + reply_stats = { + "p50Seconds": reply_stats_raw.get("p50Seconds"), + "p90Seconds": reply_stats_raw.get("p90Seconds"), + } + + top_phrase_raw = overview_d.get("topPhrase") + top_phrase = None + if isinstance(top_phrase_raw, dict): + phrase = _pick_str(top_phrase_raw.get("phrase"), "") + count = _pick_int(top_phrase_raw.get("count"), 0) + if phrase and count > 0: + top_phrase = {"phrase": phrase, "count": count} + + sent_sticker_count = _pick_int(emoji_d.get("sentStickerCount"), _pick_int(overview_d.get("sentStickerCount"), 0)) + top_sticker = None + top_stickers = emoji_d.get("topStickers") + if isinstance(top_stickers, list) and top_stickers: + x0 = top_stickers[0] if isinstance(top_stickers[0], dict) else None + if x0: + url = _pick_str(x0.get("emojiUrl") or x0.get("imageUrl") or x0.get("url"), "") + cnt = _pick_int(x0.get("count"), 0) + if url: + top_sticker = {"imageUrl": url, "count": cnt} + + top_unicode_emoji = "" + top_unicode_emoji_count = 0 + top_unicode_emojis = emoji_d.get("topUnicodeEmojis") + if isinstance(top_unicode_emojis, list) and top_unicode_emojis: + x0 = top_unicode_emojis[0] if isinstance(top_unicode_emojis[0], dict) else None + if x0: + top_unicode_emoji = _pick_str(x0.get("emoji"), "") + 
top_unicode_emoji_count = _pick_int(x0.get("count"), 0) + + # "Top emoji" should be picked across both unicode emoji and WeChat built-in emoji. + # The deck has a separate "sticker" card; here we focus on emoji-like items. + top_emoji: dict[str, Any] | None = None + emoji_candidates: list[dict[str, Any]] = [] + + top_wechat_emojis = emoji_d.get("topWechatEmojis") + if isinstance(top_wechat_emojis, list) and top_wechat_emojis: + for item in top_wechat_emojis: + if not isinstance(item, dict): + continue + key = _pick_str(item.get("key"), "") + cnt = _pick_int(item.get("count"), 0) + if key and cnt > 0: + emoji_candidates.append( + { + "kind": "wechat", + "key": key, + "count": cnt, + "assetPath": _pick_str(item.get("assetPath"), ""), + } + ) + + top_text_emojis = emoji_d.get("topTextEmojis") + if isinstance(top_text_emojis, list) and top_text_emojis: + for item in top_text_emojis: + if not isinstance(item, dict): + continue + key = _pick_str(item.get("key"), "") + cnt = _pick_int(item.get("count"), 0) + if key and cnt > 0: + emoji_candidates.append( + { + "kind": "wechat", + "key": key, + "count": cnt, + "assetPath": _pick_str(item.get("assetPath"), ""), + } + ) + + if isinstance(top_unicode_emojis, list) and top_unicode_emojis: + for item in top_unicode_emojis: + if not isinstance(item, dict): + continue + emo = _pick_str(item.get("emoji"), "") + cnt = _pick_int(item.get("count"), 0) + if emo and cnt > 0: + emoji_candidates.append({"kind": "unicode", "emoji": emo, "count": cnt}) + + if emoji_candidates: + best = max( + emoji_candidates, + key=lambda x: ( + _pick_int(x.get("count"), 0), + 1 if str(x.get("kind")) == "wechat" else 0, + _pick_str(x.get("key") or x.get("emoji"), ""), + ), + ) + if str(best.get("kind")) == "wechat": + top_emoji = { + "kind": "wechat", + "key": _pick_str(best.get("key"), ""), + "count": _pick_int(best.get("count"), 0), + "assetPath": _pick_str(best.get("assetPath"), ""), + } + else: + top_emoji = { + "kind": "unicode", + "emoji": 
_pick_str(best.get("emoji"), ""), + "count": _pick_int(best.get("count"), 0), + } + + monthly_best_buddies: list[dict[str, Any]] = [] + months = monthly_d.get("months") + if isinstance(months, list) and months: + for item in months: + if not isinstance(item, dict): + continue + m = _pick_int(item.get("month"), 0) + winner = item.get("winner") if isinstance(item.get("winner"), dict) else None + metrics = item.get("metrics") if isinstance(item.get("metrics"), dict) else None + raw = item.get("raw") if isinstance(item.get("raw"), dict) else None + monthly_best_buddies.append( + { + "month": m, + "displayName": _pick_str((winner or {}).get("displayName"), "--"), + "maskedName": _pick_str((winner or {}).get("displayName"), "--"), + "avatarUrl": _pick_str((winner or {}).get("avatarUrl"), ""), + "messages": _pick_int((raw or {}).get("totalMessages"), 0), + "metrics": metrics if metrics else None, + } + ) + + # Ensure we always return 12 items for the grid. + if len(monthly_best_buddies) != 12: + fixed = {int(x.get("month") or 0): x for x in monthly_best_buddies if isinstance(x, dict)} + monthly_best_buddies = [] + for m in range(1, 13): + monthly_best_buddies.append( + fixed.get(m) + or { + "month": m, + "displayName": "--", + "maskedName": "--", + "avatarUrl": "", + "messages": 0, + "metrics": None, + } + ) + + snapshot: dict[str, Any] = { + "year": _pick_int(year), + "totalMessages": _pick_int(overview_d.get("totalMessages"), _pick_int(heatmap_d.get("totalMessages"), 0)), + "messagesPerDay": _pick_float(overview_d.get("messagesPerDay"), 0.0), + "sentChars": _pick_int(message_chars_d.get("sentChars"), 0), + "addedFriends": _pick_int(overview_d.get("addedFriends"), 0), + "mostActiveHour": overview_d.get("mostActiveHour"), + "topGroup": top_group, + "bestBuddy": best_buddy, + "fastest": fastest, + "slowest": slowest, + "replyStats": reply_stats, + "topPhrase": top_phrase, + "sentStickerCount": int(sent_sticker_count), + "topSticker": top_sticker, + "topEmoji": top_emoji, + 
"topUnicodeEmoji": top_unicode_emoji, + "topUnicodeEmojiCount": int(top_unicode_emoji_count), + "monthlyBestBuddies": monthly_best_buddies, + "weekdayLabels": heatmap_d.get("weekdayLabels") or [], + "hourLabels": heatmap_d.get("hourLabels") or [], + "weekdayHourMatrix": heatmap_d.get("matrix") or [], + } + + return { + "id": 7, + "title": "便当总览:一屏看完这一年", + "scope": "global", + "category": "A", + "status": "ok", + "kind": "global/bento_summary", + "narrative": "把这一年的关键信息装进一份便当。", + "data": {"snapshot": snapshot}, + } diff --git a/src/wechat_decrypt_tool/wrapped/service.py b/src/wechat_decrypt_tool/wrapped/service.py new file mode 100644 index 0000000..16b7c1c --- /dev/null +++ b/src/wechat_decrypt_tool/wrapped/service.py @@ -0,0 +1,613 @@ +from __future__ import annotations + +import json +import sqlite3 +import threading +import time +from datetime import datetime +from pathlib import Path +from typing import Any, Optional + +from ..chat_helpers import _decode_sqlite_text, _iter_message_db_paths, _quote_ident, _resolve_account_dir +from ..chat_search_index import get_chat_search_index_db_path +from ..logging_config import get_logger +from .storage import wrapped_cache_dir, wrapped_cache_path +from .cards.card_00_global_overview import build_card_00_global_overview +from .cards.card_01_cyber_schedule import WeekdayHourHeatmap, build_card_01_cyber_schedule, compute_weekday_hour_heatmap +from .cards.card_02_message_chars import build_card_02_message_chars +from .cards.card_05_keywords_wordcloud import build_card_05_keywords_wordcloud +from .cards.card_03_reply_speed import build_card_03_reply_speed +from .cards.card_04_monthly_best_friends_wall import build_card_04_monthly_best_friends_wall +from .cards.card_04_emoji_universe import build_card_04_emoji_universe +from .cards.card_07_bento_summary import build_card_07_bento_summary_from_sources + +logger = get_logger(__name__) + + +# We use this number to version the cache filename so adding more cards won't 
accidentally serve +# an older partial cache. +_IMPLEMENTED_UPTO_ID = 7 +# Bump this when we change card payloads/ordering while keeping the same implemented_upto. +_CACHE_VERSION = 26 + + +# "Manifest" is used by the frontend to render the deck quickly, then lazily fetch each card. +# Keep this list in display order (same as the old monolithic `/api/wrapped/annual` response). +_WRAPPED_CARD_MANIFEST: tuple[dict[str, Any], ...] = ( + { + "id": 0, + "title": "这一年,你的微信都经历了什么?", + "scope": "global", + "category": "A", + "kind": "global/overview", + }, + { + "id": 1, + "title": "你是「早八人」还是「夜猫子」?", + "scope": "global", + "category": "A", + "kind": "time/weekday_hour_heatmap", + }, + { + "id": 2, + "title": "你今年打了多少字?够写一本书吗?", + "scope": "global", + "category": "C", + "kind": "text/message_chars", + }, + { + "id": 6, + "title": "这一年,你把哪些词说了一遍又一遍?", + "scope": "global", + "category": "C", + "kind": "text/keywords_wordcloud", + }, + { + "id": 3, + "title": "谁是你「秒回」的置顶关心?", + "scope": "global", + "category": "B", + "kind": "chat/reply_speed", + }, + { + "id": 4, + "title": "这一年,每个月谁最懂你?", + "scope": "global", + "category": "B", + "kind": "chat/monthly_best_friends_wall", + }, + { + "id": 5, + "title": "这一年,你的表情包里藏了多少心情?", + "scope": "global", + "category": "B", + "kind": "emoji/annual_universe", + }, + { + "id": 7, + "title": "便当总览:一屏看完这一年", + "scope": "global", + "category": "A", + "kind": "global/bento_summary", + }, +) +_WRAPPED_CARD_ID_SET = {int(c["id"]) for c in _WRAPPED_CARD_MANIFEST} + + +# Prevent duplicated heavy computations when multiple card endpoints are hit concurrently. 
+_LOCKS: dict[str, threading.Lock] = {} +_LOCKS_GUARD = threading.Lock() + + +def _get_lock(key: str) -> threading.Lock: + with _LOCKS_GUARD: + lock = _LOCKS.get(key) + if lock is None: + lock = threading.Lock() + _LOCKS[key] = lock + return lock + + +def _default_year() -> int: + return datetime.now().year + + +def _list_message_tables(conn: sqlite3.Connection) -> list[str]: + try: + rows = conn.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall() + except Exception: + return [] + names: list[str] = [] + for r in rows: + if not r or not r[0]: + continue + name = _decode_sqlite_text(r[0]).strip() + if not name: + continue + ln = name.lower() + if ln.startswith(("msg_", "chat_")): + names.append(name) + return names + + +def list_wrapped_available_years(*, account_dir: Path) -> list[int]: + """List years that have *any* chat messages for the account (best-effort). + + Prefer using `chat_search_index.db` (fast). If not available, fall back to scanning message + shard databases (slower, but works without the index). + """ + + # Try a tiny cache first (years don't change often, but scanning can be expensive). 
+ cache_path = wrapped_cache_dir(account_dir) / "available_years.json" + max_mtime = 0 + try: + index_path = get_chat_search_index_db_path(account_dir) + if index_path.exists(): + max_mtime = max(max_mtime, int(index_path.stat().st_mtime)) + except Exception: + pass + try: + for p in _iter_message_db_paths(account_dir): + try: + if p.name.lower().startswith("biz_message"): + continue + if p.exists(): + max_mtime = max(max_mtime, int(p.stat().st_mtime)) + except Exception: + continue + except Exception: + pass + + if cache_path.exists(): + try: + cached = json.loads(cache_path.read_text(encoding="utf-8")) + if isinstance(cached, dict): + sig = int(cached.get("max_mtime") or 0) + years = cached.get("years") + if sig == max_mtime and isinstance(years, list): + out: list[int] = [] + for x in years: + try: + y = int(x) + except Exception: + continue + if y > 0: + out.append(y) + out.sort(reverse=True) + return out + except Exception: + pass + + # Convert millisecond timestamps defensively (some datasets store ms). + # The expression yields epoch seconds as INTEGER. + ts_expr = ( + "CASE " + "WHEN CAST(create_time AS INTEGER) > 1000000000000 " + "THEN CAST(CAST(create_time AS INTEGER)/1000 AS INTEGER) " + "ELSE CAST(create_time AS INTEGER) " + "END" + ) + + # Fast path: use our unified search index when available. 
+ index_path = get_chat_search_index_db_path(account_dir) + if index_path.exists(): + conn = sqlite3.connect(str(index_path)) + try: + has_fts = ( + conn.execute("SELECT 1 FROM sqlite_master WHERE type='table' AND name='message_fts' LIMIT 1").fetchone() + is not None + ) + if has_fts: + sql = ( + "SELECT " + "CAST(strftime('%Y', datetime(ts, 'unixepoch', 'localtime')) AS INTEGER) AS y, " + "COUNT(1) AS cnt " + "FROM (" + f" SELECT {ts_expr} AS ts" + " FROM message_fts" + f" WHERE {ts_expr} > 0" + " AND db_stem NOT LIKE 'biz_message%'" + ") sub " + "GROUP BY y " + "HAVING cnt > 0 " + "ORDER BY y DESC" + ) + try: + rows = conn.execute(sql).fetchall() + except Exception: + rows = [] + years: list[int] = [] + for r in rows: + if not r: + continue + try: + y = int(r[0]) + cnt = int(r[1] or 0) + except Exception: + continue + if y > 0 and cnt > 0: + years.append(y) + years.sort(reverse=True) + try: + cache_path.write_text( + json.dumps({"max_mtime": max_mtime, "years": years}, ensure_ascii=False, indent=2), + encoding="utf-8", + ) + except Exception: + pass + return years + finally: + try: + conn.close() + except Exception: + pass + + # Fallback: scan message shard DBs (may be slow on very large datasets, but only runs + # when the index does not exist). 
+ year_counts: dict[int, int] = {} + db_paths = _iter_message_db_paths(account_dir) + db_paths = [p for p in db_paths if not p.name.lower().startswith("biz_message")] + for db_path in db_paths: + if not db_path.exists(): + continue + conn = sqlite3.connect(str(db_path)) + try: + tables = _list_message_tables(conn) + if not tables: + continue + for table_name in tables: + qt = _quote_ident(table_name) + sql = ( + "SELECT " + "CAST(strftime('%Y', datetime(ts, 'unixepoch', 'localtime')) AS INTEGER) AS y, " + "COUNT(1) AS cnt " + "FROM (" + f" SELECT {ts_expr} AS ts" + f" FROM {qt}" + f" WHERE {ts_expr} > 0" + ") sub " + "GROUP BY y" + ) + try: + rows = conn.execute(sql).fetchall() + except Exception: + continue + for r in rows: + if not r: + continue + try: + y = int(r[0]) + cnt = int(r[1] or 0) + except Exception: + continue + if y > 0 and cnt > 0: + year_counts[y] = int(year_counts.get(y, 0)) + cnt + finally: + try: + conn.close() + except Exception: + pass + + years = [y for y, cnt in year_counts.items() if int(cnt) > 0] + years.sort(reverse=True) + try: + cache_path.write_text( + json.dumps({"max_mtime": max_mtime, "years": years}, ensure_ascii=False, indent=2), + encoding="utf-8", + ) + except Exception: + pass + return years + + +def build_wrapped_annual_response( + *, + account: Optional[str], + year: Optional[int], + refresh: bool = False, +) -> dict[str, Any]: + """Build annual wrapped response for the given account/year. + + For now we implement cards up to id=7 (plus a meta overview card id=0). + """ + + account_dir = _resolve_account_dir(account) + + available_years = list_wrapped_available_years(account_dir=account_dir) + + # If the requested year has no messages, snap to the latest available year so the selector only + # shows years with data. 
+ y = int(year or _default_year()) + if available_years and y not in available_years: + y = int(available_years[0]) + scope = "global" + + cache_path = wrapped_cache_path( + account_dir=account_dir, + scope=scope, + year=y, + implemented_upto=_IMPLEMENTED_UPTO_ID, + options_tag=f"v{_CACHE_VERSION}", + ) + if (not refresh) and cache_path.exists(): + try: + cached_obj = json.loads(cache_path.read_text(encoding="utf-8")) + if isinstance(cached_obj, dict) and isinstance(cached_obj.get("cards"), list): + # Card#6(关键词词云)要求每次请求返回随机消息批次,不复用旧卡片内容。 + for idx, c in enumerate(cached_obj.get("cards") or []): + try: + if int((c or {}).get("id") or -1) != 6: + continue + except Exception: + continue + cached_obj["cards"][idx] = build_card_05_keywords_wordcloud(account_dir=account_dir, year=y) + break + cached_obj["cached"] = True + cached_obj["availableYears"] = available_years + return cached_obj + except Exception: + pass + + cards: list[dict[str, Any]] = [] + # Wrapped cards default to "messages sent by me" (outgoing), to avoid mixing directions + # in first-person narratives like "你最常...". + heatmap_sent = _get_or_compute_heatmap_sent(account_dir=account_dir, scope=scope, year=y, refresh=refresh) + # Page 2: global overview (page 1 is the frontend cover slide). + card_overview = build_card_00_global_overview(account_dir=account_dir, year=y, heatmap=heatmap_sent) + cards.append(card_overview) + # Page 3: cyber schedule heatmap. + card_heatmap = build_card_01_cyber_schedule(account_dir=account_dir, year=y, heatmap=heatmap_sent) + cards.append(card_heatmap) + # Page 4: message char counts (sent vs received). + card_message_chars = build_card_02_message_chars(account_dir=account_dir, year=y) + cards.append(card_message_chars) + # Page 5: annual keywords (bubble storm -> word cloud). + cards.append(build_card_05_keywords_wordcloud(account_dir=account_dir, year=y)) + # Page 6: reply speed / best chat buddy. 
+ card_reply_speed = build_card_03_reply_speed(account_dir=account_dir, year=y) + cards.append(card_reply_speed) + # Page 7: monthly best friends wall (photo wall). + card_monthly = build_card_04_monthly_best_friends_wall(account_dir=account_dir, year=y) + cards.append(card_monthly) + # Page 8: annual emoji universe / meme almanac. + card_emoji = build_card_04_emoji_universe(account_dir=account_dir, year=y) + cards.append(card_emoji) + # Page 9: bento summary (prototype). Build from prior cards for consistency. + cards.append( + build_card_07_bento_summary_from_sources( + year=y, + overview=card_overview, + heatmap=card_heatmap, + message_chars=card_message_chars, + reply_speed=card_reply_speed, + monthly=card_monthly, + emoji=card_emoji, + ) + ) + + obj: dict[str, Any] = { + "account": account_dir.name, + "year": y, + "scope": scope, + "username": None, + "generated_at": int(time.time()), + "cached": False, + "availableYears": available_years, + "cards": cards, + } + + try: + cache_path.write_text(json.dumps(obj, ensure_ascii=False, indent=2), encoding="utf-8") + except Exception: + logger.exception("Failed to write wrapped cache: %s", cache_path) + + return obj + + +def build_wrapped_annual_meta( + *, + account: Optional[str], + year: Optional[int], + refresh: bool = False, +) -> dict[str, Any]: + """Return a light-weight manifest for the Wrapped annual deck. + + This is meant to be fast so the frontend can render the deck first, then + request each page (card) lazily to avoid freezing on initial load. + """ + + account_dir = _resolve_account_dir(account) + + available_years = list_wrapped_available_years(account_dir=account_dir) + + # Keep the same year snapping semantics as `build_wrapped_annual_response`. + y = int(year or _default_year()) + if available_years and y not in available_years: + y = int(available_years[0]) + + if refresh: + # The manifest itself is static today, but we keep the flag for API symmetry. 
+ pass + + return { + "account": account_dir.name, + "year": y, + "scope": "global", + "availableYears": available_years, + # Shallow copy so callers can't mutate our module-level tuple. + "cards": [dict(c) for c in _WRAPPED_CARD_MANIFEST], + } + + +def _wrapped_cache_suffix() -> str: + return f"_v{_CACHE_VERSION}" + + +def _wrapped_card_cache_path(*, account_dir: Path, scope: str, year: int, card_id: int) -> Path: + # Keep stable names; per-account directory already namespaces the files. + return wrapped_cache_dir(account_dir) / f"{scope}_{year}_card_{card_id}{_wrapped_cache_suffix()}.json" + + +def _wrapped_heatmap_sent_cache_path(*, account_dir: Path, scope: str, year: int) -> Path: + return wrapped_cache_dir(account_dir) / f"{scope}_{year}_heatmap_sent{_wrapped_cache_suffix()}.json" + + +def _load_cached_heatmap_sent(path: Path) -> WeekdayHourHeatmap | None: + if not path.exists(): + return None + try: + obj = json.loads(path.read_text(encoding="utf-8")) + except Exception: + return None + + if not isinstance(obj, dict): + return None + + weekday_labels = obj.get("weekdayLabels") + hour_labels = obj.get("hourLabels") + matrix = obj.get("matrix") + total = obj.get("totalMessages") + + if not isinstance(weekday_labels, list) or not isinstance(hour_labels, list) or not isinstance(matrix, list): + return None + + try: + total_i = int(total or 0) + except Exception: + total_i = 0 + + # Best-effort sanitize matrix to ints; keep shape if possible. 
+ out_matrix: list[list[int]] = [] + for row in matrix: + if not isinstance(row, list): + return None + out_row: list[int] = [] + for v in row: + try: + out_row.append(int(v or 0)) + except Exception: + out_row.append(0) + out_matrix.append(out_row) + + return WeekdayHourHeatmap( + weekday_labels=[str(x) for x in weekday_labels], + hour_labels=[str(x) for x in hour_labels], + matrix=out_matrix, + total_messages=total_i, + ) + + +def _get_or_compute_heatmap_sent(*, account_dir: Path, scope: str, year: int, refresh: bool) -> WeekdayHourHeatmap: + path = _wrapped_heatmap_sent_cache_path(account_dir=account_dir, scope=scope, year=year) + lock = _get_lock(str(path)) + with lock: + if not refresh: + cached = _load_cached_heatmap_sent(path) + if cached is not None: + return cached + + heatmap = compute_weekday_hour_heatmap(account_dir=account_dir, year=year, sender_username=account_dir.name) + try: + path.write_text( + json.dumps( + { + "weekdayLabels": heatmap.weekday_labels, + "hourLabels": heatmap.hour_labels, + "matrix": heatmap.matrix, + "totalMessages": heatmap.total_messages, + }, + ensure_ascii=False, + indent=2, + ), + encoding="utf-8", + ) + except Exception: + logger.exception("Failed to write wrapped heatmap cache: %s", path) + return heatmap + + +def build_wrapped_annual_card( + *, + account: Optional[str], + year: Optional[int], + card_id: int, + refresh: bool = False, +) -> dict[str, Any]: + """Build one Wrapped card (page) on-demand. + + The result is cached per account/year/card_id to avoid recomputing when users + flip back and forth between pages. 
+ """ + + cid = int(card_id) + if cid not in _WRAPPED_CARD_ID_SET: + raise ValueError(f"Unknown Wrapped card id: {cid}") + + account_dir = _resolve_account_dir(account) + + available_years = list_wrapped_available_years(account_dir=account_dir) + y = int(year or _default_year()) + if available_years and y not in available_years: + y = int(available_years[0]) + + scope = "global" + cache_path = _wrapped_card_cache_path(account_dir=account_dir, scope=scope, year=y, card_id=cid) + # Card#6 需要每次随机抽样,不使用按卡片缓存。 + cacheable = cid != 6 + + lock = _get_lock(str(cache_path)) + with lock: + if cacheable and (not refresh) and cache_path.exists(): + try: + cached_obj = json.loads(cache_path.read_text(encoding="utf-8")) + if isinstance(cached_obj, dict) and int(cached_obj.get("id") or -1) == cid: + return cached_obj + except Exception: + pass + + heatmap_sent: WeekdayHourHeatmap | None = None + if cid in (0, 1): + heatmap_sent = _get_or_compute_heatmap_sent(account_dir=account_dir, scope=scope, year=y, refresh=refresh) + + if cid == 0: + card = build_card_00_global_overview(account_dir=account_dir, year=y, heatmap=heatmap_sent) + elif cid == 1: + card = build_card_01_cyber_schedule(account_dir=account_dir, year=y, heatmap=heatmap_sent) + elif cid == 2: + card = build_card_02_message_chars(account_dir=account_dir, year=y) + elif cid == 6: + card = build_card_05_keywords_wordcloud(account_dir=account_dir, year=y) + elif cid == 3: + card = build_card_03_reply_speed(account_dir=account_dir, year=y) + elif cid == 4: + card = build_card_04_monthly_best_friends_wall(account_dir=account_dir, year=y) + elif cid == 5: + card = build_card_04_emoji_universe(account_dir=account_dir, year=y) + elif cid == 7: + # Build from already-implemented cards so we can reuse their caches if available. 
+ overview = build_wrapped_annual_card(account=account_dir.name, year=y, card_id=0, refresh=refresh) + heatmap = build_wrapped_annual_card(account=account_dir.name, year=y, card_id=1, refresh=refresh) + message_chars = build_wrapped_annual_card(account=account_dir.name, year=y, card_id=2, refresh=refresh) + reply_speed = build_wrapped_annual_card(account=account_dir.name, year=y, card_id=3, refresh=refresh) + monthly = build_wrapped_annual_card(account=account_dir.name, year=y, card_id=4, refresh=refresh) + emoji = build_wrapped_annual_card(account=account_dir.name, year=y, card_id=5, refresh=refresh) + card = build_card_07_bento_summary_from_sources( + year=y, + overview=overview, + heatmap=heatmap, + message_chars=message_chars, + reply_speed=reply_speed, + monthly=monthly, + emoji=emoji, + ) + else: + # Should be unreachable due to _WRAPPED_CARD_ID_SET check. + raise ValueError(f"Unknown Wrapped card id: {cid}") + + if cacheable: + try: + cache_path.write_text(json.dumps(card, ensure_ascii=False, indent=2), encoding="utf-8") + except Exception: + logger.exception("Failed to write wrapped card cache: %s", cache_path) + + return card diff --git a/src/wechat_decrypt_tool/wrapped/storage.py b/src/wechat_decrypt_tool/wrapped/storage.py new file mode 100644 index 0000000..f79651b --- /dev/null +++ b/src/wechat_decrypt_tool/wrapped/storage.py @@ -0,0 +1,33 @@ +from __future__ import annotations + +from pathlib import Path + + +def wrapped_account_dir(account_dir: Path) -> Path: + """Return the per-account wrapped working directory. + + We keep all wrapped artifacts under `/_wrapped` so they travel + with the decrypted databases and are easy to inspect/backup. 
+ """ + + return account_dir / "_wrapped" + + +def wrapped_cache_dir(account_dir: Path) -> Path: + d = wrapped_account_dir(account_dir) / "cache" + d.mkdir(parents=True, exist_ok=True) + return d + + +def wrapped_cache_path( + *, + account_dir: Path, + scope: str, + year: int, + implemented_upto: int, + options_tag: str | None = None, +) -> Path: + # NOTE: Keep the filename stable and versioned by "implemented_upto" so when we + # add more cards later we don't accidentally serve a partial cache. + suffix = f"_{options_tag}" if options_tag else "" + return wrapped_cache_dir(account_dir) / f"{scope}_{year}_upto_{implemented_upto}{suffix}.json" diff --git a/tests/test_admin_server_error_logging.py b/tests/test_admin_server_error_logging.py new file mode 100644 index 0000000..6ffd669 --- /dev/null +++ b/tests/test_admin_server_error_logging.py @@ -0,0 +1,174 @@ +import importlib +import os +import sys +import unittest +from pathlib import Path +from tempfile import TemporaryDirectory +from unittest.mock import patch + +from fastapi import FastAPI, HTTPException +from fastapi.testclient import TestClient + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +def _close_logging_handlers() -> None: + import logging + + for logger_name in ("", "uvicorn", "uvicorn.access", "uvicorn.error", "fastapi"): + lg = logging.getLogger(logger_name) + for h in lg.handlers[:]: + try: + h.close() + except Exception: + pass + try: + lg.removeHandler(h) + except Exception: + pass + + +class TestAdminServerErrorLogging(unittest.TestCase): + def setUp(self): + self._prev_data_dir = os.environ.get("WECHAT_TOOL_DATA_DIR") + self._td = TemporaryDirectory() + os.environ["WECHAT_TOOL_DATA_DIR"] = self._td.name + + import wechat_decrypt_tool.app_paths as app_paths + import wechat_decrypt_tool.logging_config as logging_config + import wechat_decrypt_tool.request_logging as request_logging + import wechat_decrypt_tool.routers.admin as admin_router + + 
importlib.reload(app_paths) + importlib.reload(logging_config) + importlib.reload(request_logging) + importlib.reload(admin_router) + + self.logging_config = logging_config + self.request_logging = request_logging + self.admin_router = admin_router + self.log_file = self.logging_config.setup_logging() + + def tearDown(self): + _close_logging_handlers() + + if self._prev_data_dir is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = self._prev_data_dir + + self._td.cleanup() + + def _read_log(self) -> str: + return self.log_file.read_text(encoding="utf-8") + + def _make_admin_app(self) -> FastAPI: + app = FastAPI() + app.include_router(self.admin_router.router) + return app + + def _make_logged_app(self) -> FastAPI: + app = FastAPI() + + @app.middleware("http") + async def _log_server_errors(request, call_next): + return await self.request_logging.log_server_errors_middleware( + self.logging_config.get_logger("tests.server_error_logging"), + request, + call_next, + ) + + @app.get("/boom-http") + async def _boom_http(): + raise HTTPException(status_code=500, detail="planned http failure") + + @app.get("/boom-exception") + async def _boom_exception(): + raise RuntimeError("planned unhandled failure") + + return app + + def test_get_log_file_returns_current_backend_log_path(self): + client = TestClient(self._make_admin_app(), client=("127.0.0.1", 52000)) + + resp = client.get("/api/admin/log-file") + + self.assertEqual(resp.status_code, 200) + payload = resp.json() + self.assertEqual(Path(payload["path"]), self.log_file) + self.assertTrue(payload["exists"]) + self.assertTrue(self.log_file.is_relative_to(Path(self._td.name) / "output" / "logs")) + + def test_open_log_file_requires_loopback(self): + client = TestClient(self._make_admin_app(), client=("203.0.113.8", 52001)) + + resp = client.post("/api/admin/log-file/open") + + self.assertEqual(resp.status_code, 403) + + def 
test_open_log_file_uses_default_opener_for_loopback(self): + client = TestClient(self._make_admin_app(), client=("127.0.0.1", 52002)) + + with patch.object(self.admin_router, "_open_path_with_default_app") as mocked_open: + resp = client.post("/api/admin/log-file/open") + + self.assertEqual(resp.status_code, 200) + mocked_open.assert_called_once_with(self.log_file) + self.assertEqual(resp.json()["path"], str(self.log_file)) + + def test_frontend_server_error_endpoint_writes_log(self): + client = TestClient(self._make_admin_app(), client=("127.0.0.1", 52003)) + + resp = client.post( + "/api/admin/log-frontend-server-error", + json={ + "status": 503, + "method": "GET", + "request_url": "http://127.0.0.1:10392/api/chat/accounts", + "message": "fetch failed", + "backend_detail": "upstream timeout", + "source": "useApi", + "page_url": "http://127.0.0.1:10392/chat", + }, + ) + + self.assertEqual(resp.status_code, 200) + text = self._read_log() + self.assertIn("[frontend-server-error]", text) + self.assertIn("status=503", text) + self.assertIn("source=useApi", text) + self.assertIn("upstream timeout", text) + + def test_http_500_response_is_logged(self): + client = TestClient(self._make_logged_app(), client=("127.0.0.1", 52004)) + + resp = client.get("/boom-http") + + self.assertEqual(resp.status_code, 500) + text = self._read_log() + self.assertIn("[server-5xx]", text) + self.assertIn("status=500", text) + self.assertIn("path=/boom-http", text) + self.assertIn("planned http failure", text) + + def test_unhandled_exception_is_logged_with_traceback(self): + client = TestClient( + self._make_logged_app(), + client=("127.0.0.1", 52005), + raise_server_exceptions=False, + ) + + resp = client.get("/boom-exception") + + self.assertEqual(resp.status_code, 500) + text = self._read_log() + self.assertIn("[server-exception]", text) + self.assertIn("path=/boom-exception", text) + self.assertIn("planned unhandled failure", text) + self.assertIn("Traceback", text) + + +if __name__ == 
"__main__": + unittest.main() diff --git a/tests/test_avatar_cache_chat_media.py b/tests/test_avatar_cache_chat_media.py new file mode 100644 index 0000000..4b8db11 --- /dev/null +++ b/tests/test_avatar_cache_chat_media.py @@ -0,0 +1,173 @@ +import os +import sqlite3 +import sys +import unittest +import importlib +from pathlib import Path +from tempfile import TemporaryDirectory + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class TestAvatarCacheChatMedia(unittest.TestCase): + def _seed_contact_db(self, path: Path, *, username: str = "wxid_friend") -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE contact ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + """ + CREATE TABLE stranger ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + ( + username, + "", + "测试好友", + "", + 1, + 0, + "https://wx.qlogo.cn/mmhead/ver_1/test_remote_avatar/132", + "", + ), + ) + conn.commit() + finally: + conn.close() + + def _seed_session_db(self, path: Path, *, username: str = "wxid_friend") -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE SessionTable ( + username TEXT, + sort_timestamp INTEGER, + last_timestamp INTEGER + ) + """ + ) + conn.execute("INSERT INTO SessionTable VALUES (?, ?, ?)", (username, 200, 200)) + conn.commit() + finally: + conn.close() + + def _seed_head_image_db(self, path: Path, *, username: str = "wxid_friend") -> None: + # 1x1 PNG + png = bytes.fromhex( + "89504E470D0A1A0A" + "0000000D49484452000000010000000108060000001F15C489" + "0000000D49444154789C6360606060000000050001A5F64540" + "0000000049454E44AE426082" + ) + conn 
= sqlite3.connect(str(path)) + try: + conn.execute("CREATE TABLE head_image(username TEXT PRIMARY KEY, md5 TEXT, image_buffer BLOB, update_time INTEGER)") + conn.execute( + "INSERT INTO head_image VALUES (?, ?, ?, ?)", + (username, "0123456789abcdef0123456789abcdef", sqlite3.Binary(png), 1735689600), + ) + conn.commit() + finally: + conn.close() + + def test_chat_avatar_caches_to_output_avatar_cache(self): + from fastapi import FastAPI + from fastapi.testclient import TestClient + + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + username = "wxid_friend" + account_dir = root / "output" / "databases" / account + account_dir.mkdir(parents=True, exist_ok=True) + + self._seed_contact_db(account_dir / "contact.db", username=username) + self._seed_session_db(account_dir / "session.db", username=username) + self._seed_head_image_db(account_dir / "head_image.db", username=username) + + prev_data = None + prev_cache = None + try: + prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR") + prev_cache = os.environ.get("WECHAT_TOOL_AVATAR_CACHE_ENABLED") + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + os.environ["WECHAT_TOOL_AVATAR_CACHE_ENABLED"] = "1" + + import wechat_decrypt_tool.app_paths as app_paths + import wechat_decrypt_tool.chat_helpers as chat_helpers + import wechat_decrypt_tool.avatar_cache as avatar_cache + import wechat_decrypt_tool.routers.chat_media as chat_media + + importlib.reload(app_paths) + importlib.reload(chat_helpers) + importlib.reload(avatar_cache) + importlib.reload(chat_media) + + app = FastAPI() + app.include_router(chat_media.router) + client = TestClient(app) + + resp = client.get("/api/chat/avatar", params={"account": account, "username": username}) + self.assertEqual(resp.status_code, 200) + self.assertTrue(resp.headers.get("content-type", "").startswith("image/")) + + cache_db = root / "output" / "avatar_cache" / account / "avatar_cache.db" + self.assertTrue(cache_db.exists()) + + conn = 
sqlite3.connect(str(cache_db)) + try: + row = conn.execute( + "SELECT cache_key, source_kind, username, rel_path, media_type FROM avatar_cache_entries WHERE source_kind = 'user' LIMIT 1" + ).fetchone() + self.assertIsNotNone(row) + rel_path = str(row[3] or "") + finally: + conn.close() + + self.assertTrue(rel_path) + cache_file = (root / "output" / "avatar_cache" / account / rel_path).resolve() + self.assertTrue(cache_file.exists()) + + resp2 = client.get("/api/chat/avatar", params={"account": account, "username": username}) + self.assertEqual(resp2.status_code, 200) + self.assertEqual(resp2.content, resp.content) + finally: + if prev_data is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data + if prev_cache is None: + os.environ.pop("WECHAT_TOOL_AVATAR_CACHE_ENABLED", None) + else: + os.environ["WECHAT_TOOL_AVATAR_CACHE_ENABLED"] = prev_cache + + +if __name__ == "__main__": + unittest.main() + diff --git a/tests/test_chat_app_message_type4_patmsg_regression.py b/tests/test_chat_app_message_type4_patmsg_regression.py new file mode 100644 index 0000000..d5e7777 --- /dev/null +++ b/tests/test_chat_app_message_type4_patmsg_regression.py @@ -0,0 +1,50 @@ +import sys +import unittest +from pathlib import Path + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + +from wechat_decrypt_tool.chat_helpers import _parse_app_message + + +class TestChatAppMessageType4PatMsgRegression(unittest.TestCase): + def test_type4_link_with_patmsg_metadata_is_not_misclassified_as_pat(self): + raw_text = ( + "" + '' + "【中配】抽象可能让你的代码变差 - CodeAesthetic" + "UP主:黑纹白斑马" + "4" + "https://b23.tv/au68guF" + "哔哩哔哩" + "3057020100044b30" + "" + "" + "" + ) + + parsed = _parse_app_message(raw_text) + self.assertEqual(parsed.get("renderType"), "link") + self.assertEqual(parsed.get("url"), "https://b23.tv/au68guF") + self.assertEqual(parsed.get("title"), "【中配】抽象可能让你的代码变差 - CodeAesthetic") + 
self.assertEqual(parsed.get("from"), "哔哩哔哩") + self.assertNotEqual(parsed.get("content"), "[拍一拍]") + + def test_type62_is_still_pat(self): + raw_text = '"A" 拍了拍 "B"62' + parsed = _parse_app_message(raw_text) + self.assertEqual(parsed.get("renderType"), "system") + self.assertEqual(parsed.get("content"), "[拍一拍]") + + def test_sysmsg_type_patmsg_attr_is_still_pat(self): + raw_text = 'bar' + parsed = _parse_app_message(raw_text) + self.assertEqual(parsed.get("renderType"), "system") + self.assertEqual(parsed.get("content"), "[拍一拍]") + + +if __name__ == "__main__": + unittest.main() + diff --git a/tests/test_chat_edit_store.py b/tests/test_chat_edit_store.py new file mode 100644 index 0000000..c8ae040 --- /dev/null +++ b/tests/test_chat_edit_store.py @@ -0,0 +1,182 @@ +import os +import sys +import json +import sqlite3 +import unittest +import importlib +from pathlib import Path +from tempfile import TemporaryDirectory + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class TestChatEditStore(unittest.TestCase): + def setUp(self): + self._prev_data_dir = os.environ.get("WECHAT_TOOL_DATA_DIR") + self._td = TemporaryDirectory() + os.environ["WECHAT_TOOL_DATA_DIR"] = self._td.name + + import wechat_decrypt_tool.app_paths as app_paths + import wechat_decrypt_tool.chat_edit_store as chat_edit_store + + importlib.reload(app_paths) + importlib.reload(chat_edit_store) + + self.app_paths = app_paths + self.store = chat_edit_store + + def tearDown(self): + if self._prev_data_dir is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = self._prev_data_dir + self._td.cleanup() + + def test_ensure_schema_creates_db(self): + self.store.ensure_schema() + db_path = self.app_paths.get_output_dir() / "message_edits.db" + self.assertTrue(db_path.exists()) + + conn = sqlite3.connect(str(db_path)) + try: + row = conn.execute( + "SELECT name FROM sqlite_master WHERE type='table' AND name='message_edits' 
LIMIT 1" + ).fetchone() + self.assertIsNotNone(row) + finally: + conn.close() + + def test_blob_hex_roundtrip(self): + payload = {"a": b"\x00\xff", "nested": {"b": memoryview(b"\x01\x02")}} + dumped = self.store.dumps_json_with_blobs(payload) + self.assertIn("0x00ff", dumped.lower()) + self.assertIn("0x0102", dumped.lower()) + + loaded = self.store.loads_json_with_blobs(dumped) + self.assertEqual(loaded["a"], b"\x00\xff") + self.assertEqual(loaded["nested"]["b"], b"\x01\x02") + + def test_message_id_format_parse(self): + mid = self.store.format_message_id("message_0", "Msg_foo", 123) + self.assertEqual(mid, "message_0:Msg_foo:123") + + db, table, local_id = self.store.parse_message_id(mid) + self.assertEqual(db, "message_0") + self.assertEqual(table, "Msg_foo") + self.assertEqual(local_id, 123) + + with self.assertRaises(ValueError): + self.store.parse_message_id("bad") + + def test_upsert_original_once_does_not_overwrite_snapshot(self): + now1 = 1000 + now2 = 2000 + self.store.upsert_original_once( + account="wxid_me", + session_id="wxid_you", + db="message_0", + table_name="Msg_foo", + local_id=1, + original_msg={"local_id": 1, "message_content": "hello", "compress_content": b"\x01"}, + original_resource={"message_id": 9, "packed_info": b"\x02"}, + now_ms=now1, + ) + + self.store.upsert_original_once( + account="wxid_me", + session_id="wxid_you", + db="message_0", + table_name="Msg_foo", + local_id=1, + original_msg={"local_id": 1, "message_content": "SHOULD_NOT_OVERWRITE", "compress_content": b"\x03"}, + original_resource={"message_id": 9, "packed_info": b"\x04"}, + now_ms=now2, + ) + + mid = self.store.format_message_id("message_0", "Msg_foo", 1) + item = self.store.get_message_edit("wxid_me", "wxid_you", mid) + self.assertIsNotNone(item) + self.assertEqual(int(item["first_edited_at"]), now1) + self.assertEqual(int(item["last_edited_at"]), now2) + self.assertEqual(int(item["edit_count"]), 2) + + original_msg = 
self.store.loads_json_with_blobs(item["original_msg_json"]) + self.assertEqual(original_msg["message_content"], "hello") + self.assertEqual(original_msg["compress_content"], b"\x01") + + original_res = self.store.loads_json_with_blobs(item["original_resource_json"]) + self.assertEqual(int(original_res["message_id"]), 9) + self.assertEqual(original_res["packed_info"], b"\x02") + + def test_update_message_edit_local_id_moves_primary_key(self): + self.store.upsert_original_once( + account="wxid_me", + session_id="wxid_you", + db="message_0", + table_name="Msg_foo", + local_id=10, + original_msg={"local_id": 10, "message_content": "hello"}, + original_resource=None, + now_ms=1234, + ) + + ok = self.store.update_message_edit_local_id( + account="wxid_me", + session_id="wxid_you", + db="message_0", + table_name="Msg_foo", + old_local_id=10, + new_local_id=11, + ) + self.assertTrue(ok) + + old_mid = self.store.format_message_id("message_0", "Msg_foo", 10) + new_mid = self.store.format_message_id("message_0", "Msg_foo", 11) + self.assertIsNone(self.store.get_message_edit("wxid_me", "wxid_you", old_mid)) + self.assertIsNotNone(self.store.get_message_edit("wxid_me", "wxid_you", new_mid)) + + def test_list_sessions_counts(self): + self.store.upsert_original_once( + account="wxid_me", + session_id="u1", + db="message_0", + table_name="Msg_foo", + local_id=1, + original_msg={"local_id": 1, "message_content": "a"}, + original_resource=None, + now_ms=100, + ) + self.store.upsert_original_once( + account="wxid_me", + session_id="u1", + db="message_0", + table_name="Msg_foo", + local_id=2, + original_msg={"local_id": 2, "message_content": "b"}, + original_resource=None, + now_ms=200, + ) + self.store.upsert_original_once( + account="wxid_me", + session_id="u2", + db="message_0", + table_name="Msg_foo", + local_id=3, + original_msg={"local_id": 3, "message_content": "c"}, + original_resource=None, + now_ms=300, + ) + + stats = self.store.list_sessions("wxid_me") + by_sid = 
{s["session_id"]: s for s in stats} + self.assertEqual(int(by_sid["u1"]["msg_count"]), 2) + self.assertEqual(int(by_sid["u1"]["last_edited_at"]), 200) + self.assertEqual(int(by_sid["u2"]["msg_count"]), 1) + self.assertEqual(int(by_sid["u2"]["last_edited_at"]), 300) + + +if __name__ == "__main__": + unittest.main() + diff --git a/tests/test_chat_export_chat_history_modal.py b/tests/test_chat_export_chat_history_modal.py new file mode 100644 index 0000000..94c8af9 --- /dev/null +++ b/tests/test_chat_export_chat_history_modal.py @@ -0,0 +1,217 @@ +import os +import hashlib +import sqlite3 +import sys +import unittest +import zipfile +import importlib +from pathlib import Path +from tempfile import TemporaryDirectory + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class TestChatExportChatHistoryModal(unittest.TestCase): + _MD5 = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + + def _reload_export_modules(self): + import wechat_decrypt_tool.app_paths as app_paths + import wechat_decrypt_tool.chat_helpers as chat_helpers + import wechat_decrypt_tool.media_helpers as media_helpers + import wechat_decrypt_tool.chat_export_service as chat_export_service + + importlib.reload(app_paths) + importlib.reload(chat_helpers) + importlib.reload(media_helpers) + importlib.reload(chat_export_service) + return chat_export_service + + def _seed_contact_db(self, path: Path, *, account: str, username: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE contact ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + """ + CREATE TABLE stranger ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + 
(account, "", "我", "", 1, 0, "", ""), + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (username, "", "测试好友", "", 1, 0, "", ""), + ) + conn.commit() + finally: + conn.close() + + def _seed_session_db(self, path: Path, *, username: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE SessionTable ( + username TEXT, + is_hidden INTEGER, + sort_timestamp INTEGER + ) + """ + ) + conn.execute( + "INSERT INTO SessionTable VALUES (?, ?, ?)", + (username, 0, 1735689600), + ) + conn.commit() + finally: + conn.close() + + def _seed_message_db(self, path: Path, *, account: str, username: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute("CREATE TABLE Name2Id (rowid INTEGER PRIMARY KEY, user_name TEXT)") + conn.execute("INSERT INTO Name2Id(rowid, user_name) VALUES (?, ?)", (1, account)) + conn.execute("INSERT INTO Name2Id(rowid, user_name) VALUES (?, ?)", (2, username)) + + table_name = f"msg_{hashlib.md5(username.encode('utf-8')).hexdigest()}" + conn.execute( + f""" + CREATE TABLE {table_name} ( + local_id INTEGER, + server_id INTEGER, + local_type INTEGER, + sort_seq INTEGER, + real_sender_id INTEGER, + create_time INTEGER, + message_content TEXT, + compress_content BLOB + ) + """ + ) + + record_item = ( + "" + "" + "" + "2" + f"{self._MD5}" + "" + "" + "" + ) + chat_history_xml = ( + "" + "19" + "聊天记录" + "记录预览" + f"" + "" + ) + + conn.execute( + f"INSERT INTO {table_name} (local_id, server_id, local_type, sort_seq, real_sender_id, create_time, message_content, compress_content) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (1, 1001, 49, 1, 2, 1735689601, chat_history_xml, None), + ) + conn.commit() + finally: + conn.close() + + def _seed_media_files(self, account_dir: Path) -> None: + resource_root = account_dir / "resource" + (resource_root / "aa").mkdir(parents=True, exist_ok=True) + (resource_root / "aa" / f"{self._MD5}.jpg").write_bytes(b"\xff\xd8\xff\xd9") + + def _prepare_account(self, 
root: Path, *, account: str, username: str) -> Path: + account_dir = root / "output" / "databases" / account + account_dir.mkdir(parents=True, exist_ok=True) + + self._seed_contact_db(account_dir / "contact.db", account=account, username=username) + self._seed_session_db(account_dir / "session.db", username=username) + self._seed_message_db(account_dir / "message_0.db", account=account, username=username) + self._seed_media_files(account_dir) + return account_dir + + def _create_job(self, manager, *, account: str, username: str): + job = manager.create_job( + account=account, + scope="selected", + usernames=[username], + export_format="html", + start_time=None, + end_time=None, + include_hidden=False, + include_official=False, + include_media=True, + media_kinds=["image"], + message_types=["chatHistory", "image"], + output_dir=None, + allow_process_key_extract=False, + download_remote_media=False, + privacy_mode=False, + file_name=None, + ) + + for _ in range(200): + latest = manager.get_job(job.export_id) + if latest and latest.status in {"done", "error", "cancelled"}: + return latest + import time as _time + + _time.sleep(0.05) + self.fail("export job did not finish in time") + + def test_chat_history_modal_has_media_index_and_record_item(self): + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + username = "wxid_friend" + self._prepare_account(root, account=account, username=username) + + prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR") + try: + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + svc = self._reload_export_modules() + job = self._create_job(svc.CHAT_EXPORT_MANAGER, account=account, username=username) + self.assertEqual(job.status, "done", msg=job.error) + + with zipfile.ZipFile(job.zip_path, "r") as zf: + names = set(zf.namelist()) + self.assertIn(f"media/images/{self._MD5}.jpg", names) + + html_path = next((n for n in names if n.endswith("/messages.html")), "") + self.assertTrue(html_path) + html_text = 
zf.read(html_path).decode("utf-8") + self.assertIn('data-wce-chat-history="1"', html_text) + self.assertIn('data-record-item-b64="', html_text) + self.assertIn('id="wceMediaIndex"', html_text) + self.assertIn(self._MD5, html_text) + finally: + if prev_data is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data diff --git a/tests/test_chat_export_html_format.py b/tests/test_chat_export_html_format.py new file mode 100644 index 0000000..1f5047f --- /dev/null +++ b/tests/test_chat_export_html_format.py @@ -0,0 +1,460 @@ +import os +import json +import hashlib +import logging +import re +import sqlite3 +import sys +import unittest +import zipfile +import importlib +from pathlib import Path +from tempfile import TemporaryDirectory + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class TestChatExportHtmlFormat(unittest.TestCase): + _FILE_MD5 = "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" + _VOICE_SERVER_ID = 2001 + + def _reload_export_modules(self): + import wechat_decrypt_tool.app_paths as app_paths + import wechat_decrypt_tool.chat_helpers as chat_helpers + import wechat_decrypt_tool.media_helpers as media_helpers + import wechat_decrypt_tool.chat_export_service as chat_export_service + + importlib.reload(app_paths) + importlib.reload(chat_helpers) + importlib.reload(media_helpers) + importlib.reload(chat_export_service) + return chat_export_service + + def _seed_contact_db(self, path: Path, *, account: str, username: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE contact ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + """ + CREATE TABLE stranger ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT 
+ ) + """ + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (account, "", "我", "", 1, 0, "", ""), + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (username, "", "测试好友", "", 1, 0, "", ""), + ) + conn.commit() + finally: + conn.close() + + def _seed_session_db(self, path: Path, *, username: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE SessionTable ( + username TEXT, + is_hidden INTEGER, + sort_timestamp INTEGER + ) + """ + ) + conn.execute( + "INSERT INTO SessionTable VALUES (?, ?, ?)", + (username, 0, 1735689600), + ) + conn.commit() + finally: + conn.close() + + def _seed_message_db(self, path: Path, *, account: str, username: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute("CREATE TABLE Name2Id (rowid INTEGER PRIMARY KEY, user_name TEXT)") + conn.execute("INSERT INTO Name2Id(rowid, user_name) VALUES (?, ?)", (1, account)) + conn.execute("INSERT INTO Name2Id(rowid, user_name) VALUES (?, ?)", (2, username)) + + table_name = f"msg_{hashlib.md5(username.encode('utf-8')).hexdigest()}" + conn.execute( + f""" + CREATE TABLE {table_name} ( + local_id INTEGER, + server_id INTEGER, + local_type INTEGER, + sort_seq INTEGER, + real_sender_id INTEGER, + create_time INTEGER, + message_content TEXT, + compress_content BLOB + ) + """ + ) + + image_xml = '' + voice_xml = '' + file_md5 = self._FILE_MD5 + file_xml = ( + "" + "6" + "demo.pdf" + "2048" + f"{file_md5}" + "" + ) + link_xml = ( + "" + "5" + "示例链接" + "这是描述" + "https://example.com/" + "https://example.com/thumb.jpg" + "gh_test" + "测试公众号" + "" + ) + chat_history_xml = ( + "" + "19" + "聊天记录" + "记录预览" + "张三: hi\n李四: ok" + "" + ) + transfer_xml = ( + "" + "2000" + "微信转账" + "" + "转账备注" + "¥1.23" + "3" + "transfer_123" + "" + "" + ) + red_packet_xml = ( + "" + "2001" + "红包" + "" + "恭喜发财,大吉大利" + "微信红包" + "" + "" + ) + voip_xml = ( + "" + "1" + "语音通话" + "" + ) + quote_voice_xml = ( + "" + "57" + 
"回复语音" + "" + "34" + f"{self._VOICE_SERVER_ID}" + "wxid_friend" + "测试好友" + "wxid_friend:3000:1:" + "" + "" + ) + rows = [ + (1, 1001, 3, 1, 2, 1735689601, image_xml, None), + (2, 1002, 1, 2, 2, 1735689602, "普通文本消息[微笑]", None), + (3, 1003, 49, 3, 1, 1735689603, transfer_xml, None), + (4, 1004, 49, 4, 2, 1735689604, red_packet_xml, None), + (5, 1005, 49, 5, 1, 1735689605, file_xml, None), + (6, 1006, 49, 6, 2, 1735689606, link_xml, None), + (7, 1007, 49, 7, 2, 1735689607, chat_history_xml, None), + (8, 1008, 50, 8, 2, 1735689608, voip_xml, None), + (9, self._VOICE_SERVER_ID, 34, 9, 1, 1735689609, voice_xml, None), + (10, 1010, 49, 10, 1, 1735689610, quote_voice_xml, None), + ] + conn.executemany( + f"INSERT INTO {table_name} (local_id, server_id, local_type, sort_seq, real_sender_id, create_time, message_content, compress_content) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + rows, + ) + conn.commit() + finally: + conn.close() + + def _seed_media_files(self, account_dir: Path) -> None: + resource_root = account_dir / "resource" + (resource_root / "aa").mkdir(parents=True, exist_ok=True) + (resource_root / "aa" / "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.jpg").write_bytes(b"\xff\xd8\xff\xd9") + (resource_root / "bb").mkdir(parents=True, exist_ok=True) + (resource_root / "bb" / f"{self._FILE_MD5}.dat").write_bytes(b"dummy") + + conn = sqlite3.connect(str(account_dir / "media_0.db")) + try: + conn.execute( + """ + CREATE TABLE VoiceInfo ( + svr_id INTEGER, + create_time INTEGER, + voice_data BLOB + ) + """ + ) + conn.execute( + "INSERT INTO VoiceInfo VALUES (?, ?, ?)", + (self._VOICE_SERVER_ID, 1735689609, b"SILK_VOICE_DATA"), + ) + conn.commit() + finally: + conn.close() + + def _prepare_account(self, root: Path, *, account: str, username: str) -> Path: + account_dir = root / "output" / "databases" / account + account_dir.mkdir(parents=True, exist_ok=True) + + self._seed_contact_db(account_dir / "contact.db", account=account, username=username) + self._seed_session_db(account_dir / 
"session.db", username=username) + self._seed_message_db(account_dir / "message_0.db", account=account, username=username) + self._seed_media_files(account_dir) + return account_dir + + def _insert_missing_voice_message(self, account_dir: Path, *, username: str, server_id: int, duration_ms: int) -> None: + conn = sqlite3.connect(str(account_dir / "message_0.db")) + try: + table_name = f"msg_{hashlib.md5(username.encode('utf-8')).hexdigest()}" + row = conn.execute(f"SELECT COALESCE(MAX(local_id), 0), COALESCE(MAX(sort_seq), 0) FROM {table_name}").fetchone() + next_local_id = int((row[0] or 0)) + 1 + next_sort_seq = int((row[1] or 0)) + 1 + voice_xml = f'' + conn.execute( + f"INSERT INTO {table_name} (local_id, server_id, local_type, sort_seq, real_sender_id, create_time, message_content, compress_content) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (next_local_id, int(server_id), 34, next_sort_seq, 2, 1735689700, voice_xml, None), + ) + conn.commit() + finally: + conn.close() + + def _create_job(self, manager, *, account: str, username: str): + job = manager.create_job( + account=account, + scope="selected", + usernames=[username], + export_format="html", + start_time=None, + end_time=None, + include_hidden=False, + include_official=False, + include_media=True, + media_kinds=["image", "emoji", "video", "video_thumb", "voice", "file"], + message_types=[], + output_dir=None, + allow_process_key_extract=False, + download_remote_media=False, + privacy_mode=False, + file_name=None, + ) + + for _ in range(200): + latest = manager.get_job(job.export_id) + if latest and latest.status in {"done", "error", "cancelled"}: + return latest + import time as _time + + _time.sleep(0.05) + self.fail("export job did not finish in time") + + def test_html_export_contains_index_and_conversation_page(self): + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + username = "wxid_friend" + self._prepare_account(root, account=account, username=username) + + prev_data = 
os.environ.get("WECHAT_TOOL_DATA_DIR") + try: + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + svc = self._reload_export_modules() + original_converter = svc._convert_silk_to_browser_audio + svc._convert_silk_to_browser_audio = ( + lambda data, preferred_format="mp3": (bytes(data or b""), "silk", "audio/silk") + ) + try: + job = self._create_job(svc.CHAT_EXPORT_MANAGER, account=account, username=username) + finally: + svc._convert_silk_to_browser_audio = original_converter + self.assertEqual(job.status, "done", msg=job.error) + + self.assertTrue(job.zip_path and job.zip_path.exists()) + with zipfile.ZipFile(job.zip_path, "r") as zf: + names = set(zf.namelist()) + + self.assertIn("index.html", names) + self.assertIn("assets/wechat-chat-export.css", names) + self.assertIn("assets/wechat-chat-export.js", names) + + manifest = json.loads(zf.read("manifest.json").decode("utf-8")) + self.assertEqual(manifest.get("format"), "html") + + html_path = next((n for n in names if n.endswith("/messages.html")), "") + self.assertTrue(html_path) + + html_text = zf.read(html_path).decode("utf-8") + self.assertIn('data-wce-rail-avatar="1"', html_text) + self.assertIn('data-wce-session-list="1"', html_text) + self.assertIn('id="sessionSearchInput"', html_text) + self.assertIn('data-wce-time-divider="1"', html_text) + self.assertIn('id="messageTypeFilter"', html_text) + self.assertIn('value="chatHistory"', html_text) + self.assertIn('data-wce-chat-history="1"', html_text) + self.assertIn('data-record-item-b64="', html_text) + self.assertIn('id="wceMediaIndex"', html_text) + self.assertIn('data-wce-quote-voice-btn="1"', html_text) + self.assertNotIn('title="刷新消息"', html_text) + self.assertNotIn('title="导出聊天记录"', html_text) + self.assertNotIn("搜索聊天记录", html_text) + self.assertNotIn("朋友圈", html_text) + self.assertNotIn("年度总结", html_text) + self.assertNotIn("设置", html_text) + self.assertNotIn("隐私模式", html_text) + + self.assertTrue(any(n.startswith("media/images/") for n in names)) + 
self.assertIn("../../media/images/", html_text) + + self.assertIn("wechat-transfer-card", html_text) + self.assertIn("wechat-redpacket-card", html_text) + self.assertIn("wechat-chat-history-card", html_text) + self.assertIn("wechat-voip-bubble", html_text) + self.assertIn("wechat-link-card", html_text) + self.assertIn("wechat-file-card", html_text) + self.assertIn("wechat-voice-wrapper", html_text) + + css_text = zf.read("assets/wechat-chat-export.css").decode("utf-8", errors="ignore") + self.assertIn("wechat-transfer-card", css_text) + self.assertRegex(css_text, re.compile(r"\.wechat-voice-sent(?::|::)after")) + self.assertRegex(css_text, re.compile(r"\.wechat-voice-received(?::|::)before")) + self.assertNotIn("wechat-transfer-card[data-v-", css_text) + self.assertNotIn("bento-container", css_text) + + js_text = zf.read("assets/wechat-chat-export.js").decode("utf-8", errors="ignore") + self.assertIn("wechat-voice-bubble", js_text) + self.assertIn("voice-playing", js_text) + self.assertIn("data-wce-quote-voice-btn", js_text) + + self.assertIn("assets/images/wechat/wechat-trans-icon1.png", names) + self.assertIn("assets/images/wechat/zip.png", names) + self.assertIn("assets/images/wechat/WeChat-Icon-Logo.wine.svg", names) + self.assertIn("wxemoji/Expression_1@2x.png", names) + self.assertIn("../../wxemoji/Expression_1@2x.png", html_text) + finally: + logging.shutdown() + if prev_data is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data + + def test_html_export_prefers_mp3_for_voice_assets(self): + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + username = "wxid_friend" + self._prepare_account(root, account=account, username=username) + + prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR") + try: + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + svc = self._reload_export_modules() + + original_converter = svc._convert_silk_to_browser_audio + svc._convert_silk_to_browser_audio = ( 
+ lambda data, preferred_format="mp3": (b"ID3FAKE_MP3_DATA", "mp3", "audio/mpeg") + ) + try: + job = self._create_job(svc.CHAT_EXPORT_MANAGER, account=account, username=username) + finally: + svc._convert_silk_to_browser_audio = original_converter + + self.assertEqual(job.status, "done", msg=job.error) + + self.assertTrue(job.zip_path and job.zip_path.exists()) + with zipfile.ZipFile(job.zip_path, "r") as zf: + names = set(zf.namelist()) + voice_path = f"media/voices/voice_{self._VOICE_SERVER_ID}.mp3" + self.assertIn(voice_path, names) + self.assertNotIn(f"media/voices/voice_{self._VOICE_SERVER_ID}.wav", names) + + html_path = next((n for n in names if n.endswith("/messages.html")), "") + self.assertTrue(html_path) + html_text = zf.read(html_path).decode("utf-8") + self.assertIn(f"../../{voice_path}", html_text) + finally: + logging.shutdown() + if prev_data is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data + + def test_html_export_keeps_voice_bubble_when_audio_file_missing(self): + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + username = "wxid_friend" + account_dir = self._prepare_account(root, account=account, username=username) + self._insert_missing_voice_message(account_dir, username=username, server_id=999999, duration_ms=6543) + + prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR") + try: + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + svc = self._reload_export_modules() + original_converter = svc._convert_silk_to_browser_audio + svc._convert_silk_to_browser_audio = ( + lambda data, preferred_format="mp3": (bytes(data or b""), "silk", "audio/silk") + ) + try: + job = self._create_job(svc.CHAT_EXPORT_MANAGER, account=account, username=username) + finally: + svc._convert_silk_to_browser_audio = original_converter + self.assertEqual(job.status, "done", msg=job.error) + + self.assertTrue(job.zip_path and job.zip_path.exists()) + with zipfile.ZipFile(job.zip_path, "r") 
as zf: + names = set(zf.namelist()) + html_path = next((n for n in names if n.endswith("/messages.html")), "") + self.assertTrue(html_path) + html_text = zf.read(html_path).decode("utf-8") + self.assertIn("wechat-voice-wrapper", html_text) + self.assertIn('data-render-type="voice"', html_text) + self.assertIn('data-voice-id="message_0:msg_d5616d78f22fe35c632f66cabecfc82d:11"', html_text) + self.assertIn('class="wechat-voice-duration">7"', html_text) + finally: + logging.shutdown() + if prev_data is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data diff --git a/tests/test_chat_export_html_paging.py b/tests/test_chat_export_html_paging.py new file mode 100644 index 0000000..ca71f61 --- /dev/null +++ b/tests/test_chat_export_html_paging.py @@ -0,0 +1,223 @@ +import os +import json +import hashlib +import logging +import sqlite3 +import sys +import unittest +import zipfile +import importlib +from pathlib import Path +from tempfile import TemporaryDirectory + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class TestChatExportHtmlPaging(unittest.TestCase): + def _reload_export_modules(self): + import wechat_decrypt_tool.app_paths as app_paths + import wechat_decrypt_tool.chat_helpers as chat_helpers + import wechat_decrypt_tool.media_helpers as media_helpers + import wechat_decrypt_tool.chat_export_service as chat_export_service + + importlib.reload(app_paths) + importlib.reload(chat_helpers) + importlib.reload(media_helpers) + importlib.reload(chat_export_service) + return chat_export_service + + def _seed_contact_db(self, path: Path, *, account: str, username: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE contact ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + """ + CREATE TABLE 
stranger ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (account, "", "Me", "", 1, 0, "", ""), + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (username, "", "Friend", "", 1, 0, "", ""), + ) + conn.commit() + finally: + conn.close() + + def _seed_session_db(self, path: Path, *, username: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE SessionTable ( + username TEXT, + is_hidden INTEGER, + sort_timestamp INTEGER + ) + """ + ) + conn.execute( + "INSERT INTO SessionTable VALUES (?, ?, ?)", + (username, 0, 1735689600), + ) + conn.commit() + finally: + conn.close() + + def _seed_message_db(self, path: Path, *, account: str, username: str, total: int) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute("CREATE TABLE Name2Id (rowid INTEGER PRIMARY KEY, user_name TEXT)") + conn.execute("INSERT INTO Name2Id(rowid, user_name) VALUES (?, ?)", (1, account)) + conn.execute("INSERT INTO Name2Id(rowid, user_name) VALUES (?, ?)", (2, username)) + + table_name = f"msg_{hashlib.md5(username.encode('utf-8')).hexdigest()}" + conn.execute( + f""" + CREATE TABLE {table_name} ( + local_id INTEGER, + server_id INTEGER, + local_type INTEGER, + sort_seq INTEGER, + real_sender_id INTEGER, + create_time INTEGER, + message_content TEXT, + compress_content BLOB + ) + """ + ) + + # Generate lots of plain text messages with unique markers. 
+ rows = [] + base_ts = 1735689600 + for i in range(1, total + 1): + marker = f"MSG{i:04d}" + real_sender_id = 1 if (i % 2 == 0) else 2 + rows.append((i, 100000 + i, 1, i, real_sender_id, base_ts + i, marker, None)) + + conn.executemany( + f"INSERT INTO {table_name} (local_id, server_id, local_type, sort_seq, real_sender_id, create_time, message_content, compress_content) " + "VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + rows, + ) + conn.commit() + finally: + conn.close() + + def _prepare_account(self, root: Path, *, account: str, username: str, total: int) -> Path: + account_dir = root / "output" / "databases" / account + account_dir.mkdir(parents=True, exist_ok=True) + self._seed_contact_db(account_dir / "contact.db", account=account, username=username) + self._seed_session_db(account_dir / "session.db", username=username) + self._seed_message_db(account_dir / "message_0.db", account=account, username=username, total=total) + return account_dir + + def _create_job(self, manager, *, account: str, username: str, html_page_size: int): + job = manager.create_job( + account=account, + scope="selected", + usernames=[username], + export_format="html", + start_time=None, + end_time=None, + include_hidden=False, + include_official=False, + include_media=False, + media_kinds=[], + message_types=[], + output_dir=None, + allow_process_key_extract=False, + download_remote_media=False, + html_page_size=html_page_size, + privacy_mode=False, + file_name=None, + ) + + # Export is async (thread). Allow enough time for a few thousand messages + zip writes. 
+ for _ in range(600): + latest = manager.get_job(job.export_id) + if latest and latest.status in {"done", "error", "cancelled"}: + return latest + import time as _time + + _time.sleep(0.05) + self.fail("export job did not finish in time") + + def test_html_export_paging_inlines_latest_page_only(self): + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + username = "wxid_friend" + + total_messages = 2300 + page_size = 1000 + self._prepare_account(root, account=account, username=username, total=total_messages) + + prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR") + try: + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + svc = self._reload_export_modules() + job = self._create_job( + svc.CHAT_EXPORT_MANAGER, + account=account, + username=username, + html_page_size=page_size, + ) + self.assertEqual(job.status, "done", msg=job.error) + + self.assertTrue(job.zip_path and job.zip_path.exists()) + with zipfile.ZipFile(job.zip_path, "r") as zf: + names = set(zf.namelist()) + + html_path = next((n for n in names if n.endswith("/messages.html")), "") + self.assertTrue(html_path, msg="missing messages.html") + html_text = zf.read(html_path).decode("utf-8", errors="ignore") + + # Paging UI + meta should exist for multi-page exports. + self.assertIn('id="wcePageMeta"', html_text) + self.assertIn('id="wcePager"', html_text) + self.assertIn('id="wceMessageList"', html_text) + self.assertIn('id="wceLoadPrevBtn"', html_text) + + # Latest page is inlined; earliest page should not be present in messages.html. 
+ self.assertIn("MSG2300", html_text) + self.assertNotIn("MSG0001", html_text) + + conv_dir = html_path.rsplit("/", 1)[0] + page1_js = f"{conv_dir}/pages/page-0001.js" + self.assertIn(page1_js, names) + page1_text = zf.read(page1_js).decode("utf-8", errors="ignore") + self.assertIn("MSG0001", page1_text) + finally: + logging.shutdown() + if prev_data is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data diff --git a/tests/test_chat_export_image_md5_candidate_fallback.py b/tests/test_chat_export_image_md5_candidate_fallback.py new file mode 100644 index 0000000..401b716 --- /dev/null +++ b/tests/test_chat_export_image_md5_candidate_fallback.py @@ -0,0 +1,199 @@ +import os +import hashlib +import sqlite3 +import sys +import unittest +import zipfile +import importlib +from pathlib import Path +from tempfile import TemporaryDirectory + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class TestChatExportImageMd5CandidateFallback(unittest.TestCase): + def _reload_export_modules(self): + import wechat_decrypt_tool.app_paths as app_paths + import wechat_decrypt_tool.chat_helpers as chat_helpers + import wechat_decrypt_tool.media_helpers as media_helpers + import wechat_decrypt_tool.chat_export_service as chat_export_service + + importlib.reload(app_paths) + importlib.reload(chat_helpers) + importlib.reload(media_helpers) + importlib.reload(chat_export_service) + return chat_export_service + + def _seed_contact_db(self, path: Path, *, account: str, username: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE contact ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + """ + CREATE TABLE stranger ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag 
INTEGER, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (account, "", "我", "", 1, 0, "", ""), + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (username, "", "测试好友", "", 1, 0, "", ""), + ) + conn.commit() + finally: + conn.close() + + def _seed_session_db(self, path: Path, *, username: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE SessionTable ( + username TEXT, + is_hidden INTEGER, + sort_timestamp INTEGER + ) + """ + ) + conn.execute( + "INSERT INTO SessionTable VALUES (?, ?, ?)", + (username, 0, 1735689600), + ) + conn.commit() + finally: + conn.close() + + def _seed_message_db(self, path: Path, *, account: str, username: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute("CREATE TABLE Name2Id (rowid INTEGER PRIMARY KEY, user_name TEXT)") + conn.execute("INSERT INTO Name2Id(rowid, user_name) VALUES (?, ?)", (1, account)) + conn.execute("INSERT INTO Name2Id(rowid, user_name) VALUES (?, ?)", (2, username)) + + table_name = f"msg_{hashlib.md5(username.encode('utf-8')).hexdigest()}" + conn.execute( + f""" + CREATE TABLE {table_name} ( + local_id INTEGER, + server_id INTEGER, + local_type INTEGER, + sort_seq INTEGER, + real_sender_id INTEGER, + create_time INTEGER, + message_content TEXT, + compress_content BLOB + ) + """ + ) + + good_md5 = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + bad_md5 = "ffffffffffffffffffffffffffffffff" + image_xml = f'' + + conn.execute( + f"INSERT INTO {table_name} (local_id, server_id, local_type, sort_seq, real_sender_id, create_time, message_content, compress_content) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (1, 1001, 3, 1, 2, 1735689601, image_xml, None), + ) + conn.commit() + finally: + conn.close() + + def _seed_decrypted_resource(self, account_dir: Path) -> None: + resource_root = account_dir / "resource" + (resource_root / "aa").mkdir(parents=True, exist_ok=True) 
+ (resource_root / "aa" / "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.jpg").write_bytes(b"\xff\xd8\xff\xd9") + + def _prepare_account(self, root: Path, *, account: str, username: str) -> Path: + account_dir = root / "output" / "databases" / account + account_dir.mkdir(parents=True, exist_ok=True) + + self._seed_contact_db(account_dir / "contact.db", account=account, username=username) + self._seed_session_db(account_dir / "session.db", username=username) + self._seed_message_db(account_dir / "message_0.db", account=account, username=username) + self._seed_decrypted_resource(account_dir) + return account_dir + + def _create_job(self, manager, *, account: str, username: str): + job = manager.create_job( + account=account, + scope="selected", + usernames=[username], + export_format="html", + start_time=None, + end_time=None, + include_hidden=False, + include_official=False, + include_media=True, + media_kinds=["image"], + message_types=[], + output_dir=None, + allow_process_key_extract=False, + download_remote_media=False, + privacy_mode=False, + file_name=None, + ) + + for _ in range(200): + latest = manager.get_job(job.export_id) + if latest and latest.status in {"done", "error", "cancelled"}: + return latest + import time as _time + + _time.sleep(0.05) + self.fail("export job did not finish in time") + + def test_falls_back_to_secondary_md5_candidate(self): + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + username = "wxid_friend" + self._prepare_account(root, account=account, username=username) + + prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR") + try: + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + svc = self._reload_export_modules() + job = self._create_job(svc.CHAT_EXPORT_MANAGER, account=account, username=username) + self.assertEqual(job.status, "done", msg=job.error) + + with zipfile.ZipFile(job.zip_path, "r") as zf: + names = set(zf.namelist()) + self.assertIn("media/images/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.jpg", names) + 
self.assertFalse(any("ffffffffffffffffffffffffffffffff" in n for n in names if n.startswith("media/images/"))) + + html_path = next((n for n in names if n.endswith("/messages.html")), "") + self.assertTrue(html_path) + html_text = zf.read(html_path).decode("utf-8", errors="ignore") + self.assertIn("../../media/images/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.jpg", html_text) + finally: + if prev_data is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data + diff --git a/tests/test_chat_export_image_md5_prefers_message_resource.py b/tests/test_chat_export_image_md5_prefers_message_resource.py new file mode 100644 index 0000000..1b9d942 --- /dev/null +++ b/tests/test_chat_export_image_md5_prefers_message_resource.py @@ -0,0 +1,235 @@ +import os +import hashlib +import sqlite3 +import sys +import unittest +import zipfile +import importlib +from pathlib import Path +from tempfile import TemporaryDirectory + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class TestChatExportImageMd5PrefersMessageResource(unittest.TestCase): + def _reload_export_modules(self): + import wechat_decrypt_tool.app_paths as app_paths + import wechat_decrypt_tool.chat_helpers as chat_helpers + import wechat_decrypt_tool.media_helpers as media_helpers + import wechat_decrypt_tool.chat_export_service as chat_export_service + + importlib.reload(app_paths) + importlib.reload(chat_helpers) + importlib.reload(media_helpers) + importlib.reload(chat_export_service) + return chat_export_service + + def _seed_source_info(self, account_dir: Path) -> None: + wxid_dir = account_dir / "_wxid_dummy" + db_storage_dir = account_dir / "_db_storage_dummy" + wxid_dir.mkdir(parents=True, exist_ok=True) + db_storage_dir.mkdir(parents=True, exist_ok=True) + (account_dir / "_source.json").write_text( + '{"wxid_dir": "' + str(wxid_dir).replace("\\", "\\\\") + '", "db_storage_path": "' + str(db_storage_dir).replace("\\", "\\\\") + 
'"}', + encoding="utf-8", + ) + + def _seed_contact_db(self, path: Path, *, account: str, username: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE contact ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + """ + CREATE TABLE stranger ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (account, "", "我", "", 1, 0, "", ""), + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (username, "", "测试好友", "", 1, 0, "", ""), + ) + conn.commit() + finally: + conn.close() + + def _seed_session_db(self, path: Path, *, username: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE SessionTable ( + username TEXT, + is_hidden INTEGER, + sort_timestamp INTEGER + ) + """ + ) + conn.execute( + "INSERT INTO SessionTable VALUES (?, ?, ?)", + (username, 0, 1735689600), + ) + conn.commit() + finally: + conn.close() + + def _seed_message_db(self, path: Path, *, account: str, username: str, bad_md5: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute("CREATE TABLE Name2Id (rowid INTEGER PRIMARY KEY, user_name TEXT)") + conn.execute("INSERT INTO Name2Id(rowid, user_name) VALUES (?, ?)", (1, account)) + conn.execute("INSERT INTO Name2Id(rowid, user_name) VALUES (?, ?)", (2, username)) + + table_name = f"msg_{hashlib.md5(username.encode('utf-8')).hexdigest()}" + conn.execute( + f""" + CREATE TABLE {table_name} ( + local_id INTEGER, + server_id INTEGER, + local_type INTEGER, + sort_seq INTEGER, + real_sender_id INTEGER, + create_time INTEGER, + message_content TEXT, + compress_content BLOB + ) + """ + ) + + image_xml = f'' + conn.execute( + 
f"INSERT INTO {table_name} (local_id, server_id, local_type, sort_seq, real_sender_id, create_time, message_content, compress_content) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (1, 1001, 3, 1, 2, 1735689601, image_xml, None), + ) + conn.commit() + finally: + conn.close() + + def _seed_message_resource_db(self, path: Path, *, good_md5: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE MessageResourceInfo ( + message_id INTEGER, + message_svr_id INTEGER, + message_local_type INTEGER, + chat_id INTEGER, + message_local_id INTEGER, + message_create_time INTEGER, + packed_info BLOB + ) + """ + ) + # packed_info may contain multiple tokens; include a realistic *.dat reference so the extractor prefers it. + packed_info = f"{good_md5}_t.dat".encode("ascii") + conn.execute( + "INSERT INTO MessageResourceInfo VALUES (?, ?, ?, ?, ?, ?, ?)", + (1, 1001, 3, 0, 1, 1735689601, packed_info), + ) + conn.commit() + finally: + conn.close() + + def _seed_decrypted_resource(self, account_dir: Path, *, good_md5: str) -> None: + resource_root = account_dir / "resource" + (resource_root / good_md5[:2]).mkdir(parents=True, exist_ok=True) + # Minimal JPEG payload (valid SOI/EOI). 
+ (resource_root / good_md5[:2] / f"{good_md5}.jpg").write_bytes(b"\xff\xd8\xff\xd9") + + def _prepare_account(self, root: Path, *, account: str, username: str, bad_md5: str, good_md5: str) -> Path: + account_dir = root / "output" / "databases" / account + account_dir.mkdir(parents=True, exist_ok=True) + self._seed_source_info(account_dir) + self._seed_contact_db(account_dir / "contact.db", account=account, username=username) + self._seed_session_db(account_dir / "session.db", username=username) + self._seed_message_db(account_dir / "message_0.db", account=account, username=username, bad_md5=bad_md5) + self._seed_message_resource_db(account_dir / "message_resource.db", good_md5=good_md5) + self._seed_decrypted_resource(account_dir, good_md5=good_md5) + return account_dir + + def _create_job(self, manager, *, account: str, username: str): + job = manager.create_job( + account=account, + scope="selected", + usernames=[username], + export_format="html", + start_time=None, + end_time=None, + include_hidden=False, + include_official=False, + include_media=True, + media_kinds=["image"], + message_types=["image"], + output_dir=None, + allow_process_key_extract=False, + download_remote_media=False, + privacy_mode=False, + file_name=None, + ) + + for _ in range(200): + latest = manager.get_job(job.export_id) + if latest and latest.status in {"done", "error", "cancelled"}: + return latest + import time as _time + + _time.sleep(0.05) + self.fail("export job did not finish in time") + + def test_prefers_message_resource_md5_over_xml_md5(self): + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + username = "wxid_friend" + bad_md5 = "ffffffffffffffffffffffffffffffff" + good_md5 = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + self._prepare_account(root, account=account, username=username, bad_md5=bad_md5, good_md5=good_md5) + + prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR") + try: + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + svc = 
self._reload_export_modules() + job = self._create_job(svc.CHAT_EXPORT_MANAGER, account=account, username=username) + self.assertEqual(job.status, "done", msg=job.error) + + with zipfile.ZipFile(job.zip_path, "r") as zf: + names = set(zf.namelist()) + self.assertIn(f"media/images/{good_md5}.jpg", names) + + html_path = next((n for n in names if n.endswith("/messages.html")), "") + self.assertTrue(html_path) + html_text = zf.read(html_path).decode("utf-8", errors="ignore") + self.assertIn(f"../../media/images/{good_md5}.jpg", html_text) + finally: + if prev_data is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data + diff --git a/tests/test_chat_export_message_types_semantics.py b/tests/test_chat_export_message_types_semantics.py new file mode 100644 index 0000000..2b587a3 --- /dev/null +++ b/tests/test_chat_export_message_types_semantics.py @@ -0,0 +1,505 @@ +import os +import json +import hashlib +import sqlite3 +import sys +import unittest +import zipfile +import importlib +from pathlib import Path +from tempfile import TemporaryDirectory + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class TestChatExportMessageTypesSemantics(unittest.TestCase): + def _reload_export_modules(self): + import wechat_decrypt_tool.app_paths as app_paths + import wechat_decrypt_tool.chat_helpers as chat_helpers + import wechat_decrypt_tool.media_helpers as media_helpers + import wechat_decrypt_tool.chat_export_service as chat_export_service + + importlib.reload(app_paths) + importlib.reload(chat_helpers) + importlib.reload(media_helpers) + importlib.reload(chat_export_service) + return chat_export_service + + def _seed_contact_db(self, path: Path, *, account: str, username: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE contact ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, 
+ big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + """ + CREATE TABLE stranger ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (account, "", "我", "", 1, 0, "", ""), + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (username, "", "测试好友", "", 1, 0, "", ""), + ) + conn.commit() + finally: + conn.close() + + def _seed_session_db(self, path: Path, *, username: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE SessionTable ( + username TEXT, + is_hidden INTEGER, + sort_timestamp INTEGER + ) + """ + ) + conn.execute( + "INSERT INTO SessionTable VALUES (?, ?, ?)", + (username, 0, 1735689600), + ) + conn.commit() + finally: + conn.close() + + def _seed_message_db(self, path: Path, *, account: str, username: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute("CREATE TABLE Name2Id (rowid INTEGER PRIMARY KEY, user_name TEXT)") + conn.execute("INSERT INTO Name2Id(rowid, user_name) VALUES (?, ?)", (1, account)) + conn.execute("INSERT INTO Name2Id(rowid, user_name) VALUES (?, ?)", (2, username)) + + table_name = f"msg_{hashlib.md5(username.encode('utf-8')).hexdigest()}" + conn.execute( + f""" + CREATE TABLE {table_name} ( + local_id INTEGER, + server_id INTEGER, + local_type INTEGER, + sort_seq INTEGER, + real_sender_id INTEGER, + create_time INTEGER, + message_content TEXT, + compress_content BLOB + ) + """ + ) + + image_xml = '' + video_xml = '' + + rows = [ + (1, 1001, 3, 1, 2, 1735689601, image_xml, None), + (2, 1002, 43, 2, 2, 1735689602, video_xml, None), + (3, 1003, 49, 3, 2, 1735689603, '2000收到转账0.01元', None), + (4, 1004, 1, 4, 2, 1735689604, '普通文本消息', None), + (5, 1005, 10000, 5, 2, 1735689605, '系统提示消息', None), + ( + 6, + 1006, + 10000, + 6, + 2, + 1735689606, + 
'', + None, + ), + ( + 7, + 1007, + 48, + 7, + 2, + 1735689607, + '', + None, + ), + ] + conn.executemany( + f"INSERT INTO {table_name} (local_id, server_id, local_type, sort_seq, real_sender_id, create_time, message_content, compress_content) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + rows, + ) + conn.commit() + finally: + conn.close() + + def _seed_media_files(self, account_dir: Path) -> None: + resource_root = account_dir / "resource" + (resource_root / "aa").mkdir(parents=True, exist_ok=True) + (resource_root / "bb").mkdir(parents=True, exist_ok=True) + (resource_root / "cc").mkdir(parents=True, exist_ok=True) + + (resource_root / "aa" / "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.jpg").write_bytes(b"\xff\xd8\xff\xd9") + (resource_root / "bb" / "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb.mp4").write_bytes(b"video-bytes") + (resource_root / "cc" / "cccccccccccccccccccccccccccccccc.jpg").write_bytes(b"\xff\xd8\xff\xd9") + + def _seed_source_info(self, account_dir: Path, wxid_dir: Path) -> None: + payload = { + "wxid_dir": str(wxid_dir), + "db_storage_path": str(wxid_dir / "db_storage"), + } + (account_dir / "_source.json").write_text(json.dumps(payload, ensure_ascii=False), encoding="utf-8") + + def _seed_wxid_media_files(self, wxid_dir: Path) -> None: + (wxid_dir / "msg" / "video").mkdir(parents=True, exist_ok=True) + (wxid_dir / "msg" / "attach").mkdir(parents=True, exist_ok=True) + (wxid_dir / "cache").mkdir(parents=True, exist_ok=True) + (wxid_dir / "db_storage").mkdir(parents=True, exist_ok=True) + + (wxid_dir / "msg" / "video" / "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb.mp4").write_bytes(b"video-bytes") + (wxid_dir / "msg" / "video" / "cccccccccccccccccccccccccccccccc.jpg").write_bytes(b"\xff\xd8\xff\xd9") + + def _prepare_account(self, root: Path, *, account: str, username: str) -> Path: + account_dir = root / "output" / "databases" / account + account_dir.mkdir(parents=True, exist_ok=True) + wxid_dir = root / "wxid_data" / account + + self._seed_contact_db(account_dir / "contact.db", 
account=account, username=username) + self._seed_session_db(account_dir / "session.db", username=username) + self._seed_message_db(account_dir / "message_0.db", account=account, username=username) + self._seed_media_files(account_dir) + self._seed_wxid_media_files(wxid_dir) + self._seed_source_info(account_dir, wxid_dir) + return account_dir + + def _create_job(self, manager, *, account: str, username: str, message_types, include_media=True, media_kinds=None, privacy_mode=False): + if media_kinds is None: + media_kinds = ["image", "emoji", "video", "video_thumb", "voice", "file"] + + job = manager.create_job( + account=account, + scope="selected", + usernames=[username], + export_format="json", + start_time=None, + end_time=None, + include_hidden=False, + include_official=False, + include_media=include_media, + media_kinds=media_kinds, + message_types=message_types, + output_dir=None, + allow_process_key_extract=False, + download_remote_media=False, + privacy_mode=privacy_mode, + file_name=None, + ) + + for _ in range(200): + latest = manager.get_job(job.export_id) + if latest and latest.status in {"done", "error", "cancelled"}: + return latest + import time as _time + + _time.sleep(0.05) + self.fail("export job did not finish in time") + + def _load_export_payload(self, zip_path: Path): + self.assertTrue(zip_path.exists()) + with zipfile.ZipFile(zip_path, "r") as zf: + names = set(zf.namelist()) + msg_path = next((n for n in names if n.endswith("/messages.json")), "") + self.assertTrue(msg_path) + import json as _json + + payload = _json.loads(zf.read(msg_path).decode("utf-8")) + manifest = _json.loads(zf.read("manifest.json").decode("utf-8")) + return payload, manifest, names + + def test_unchecked_image_is_filtered_out(self): + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + username = "wxid_friend" + self._prepare_account(root, account=account, username=username) + + prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR") + try: + 
os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + svc = self._reload_export_modules() + job = self._create_job( + svc.CHAT_EXPORT_MANAGER, + account=account, + username=username, + message_types=["text", "transfer"], + include_media=True, + ) + self.assertEqual(job.status, "done", msg=job.error) + + payload, _, names = self._load_export_payload(job.zip_path) + image_msg = next((m for m in payload.get("messages", []) if int(m.get("type") or 0) == 3), None) + self.assertIsNone(image_msg) + render_types = {str(m.get("renderType") or "") for m in payload.get("messages", [])} + self.assertTrue(render_types.issubset({"text", "transfer"})) + self.assertFalse(any(n.startswith("media/images/") for n in names)) + finally: + if prev_data is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data + + def test_checked_image_exports_media_file(self): + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + username = "wxid_friend" + self._prepare_account(root, account=account, username=username) + + prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR") + try: + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + svc = self._reload_export_modules() + job = self._create_job( + svc.CHAT_EXPORT_MANAGER, + account=account, + username=username, + message_types=["image", "text"], + include_media=True, + ) + self.assertEqual(job.status, "done", msg=job.error) + + payload, _, names = self._load_export_payload(job.zip_path) + image_msg = next((m for m in payload.get("messages", []) if int(m.get("type") or 0) == 3), None) + self.assertIsNotNone(image_msg) + self.assertEqual(str(image_msg.get("renderType") or ""), "image") + self.assertTrue(isinstance(image_msg.get("offlineMedia"), list) and image_msg.get("offlineMedia")) + self.assertTrue(any(n.startswith("media/images/") for n in names)) + finally: + if prev_data is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = 
prev_data + + def test_unchecked_non_media_type_is_filtered_out(self): + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + username = "wxid_friend" + self._prepare_account(root, account=account, username=username) + + prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR") + try: + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + svc = self._reload_export_modules() + job = self._create_job( + svc.CHAT_EXPORT_MANAGER, + account=account, + username=username, + message_types=["text"], + include_media=True, + ) + self.assertEqual(job.status, "done", msg=job.error) + + payload, manifest, _ = self._load_export_payload(job.zip_path) + system_msg = next((m for m in payload.get("messages", []) if int(m.get("type") or 0) == 10000), None) + self.assertIsNone(system_msg) + self.assertTrue(all(str(m.get("renderType") or "") == "text" for m in payload.get("messages", []))) + self.assertEqual(manifest.get("filters", {}).get("messageTypes"), ["text"]) + finally: + if prev_data is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data + + def test_checked_video_exports_video_and_thumb(self): + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + username = "wxid_friend" + self._prepare_account(root, account=account, username=username) + + prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR") + try: + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + svc = self._reload_export_modules() + job = self._create_job( + svc.CHAT_EXPORT_MANAGER, + account=account, + username=username, + message_types=["video", "text"], + include_media=True, + ) + self.assertEqual(job.status, "done", msg=job.error) + + payload, _, names = self._load_export_payload(job.zip_path) + video_msg = next((m for m in payload.get("messages", []) if int(m.get("type") or 0) == 43), None) + self.assertIsNotNone(video_msg) + self.assertEqual(str(video_msg.get("renderType") or ""), "video") + image_msg = next((m for m in 
payload.get("messages", []) if int(m.get("type") or 0) == 3), None) + self.assertIsNone(image_msg) + media_items = video_msg.get("offlineMedia") or [] + kinds = sorted(str(x.get("kind") or "") for x in media_items) + self.assertIn("video", kinds) + self.assertIn("video_thumb", kinds) + self.assertTrue(any(n.startswith("media/videos/") for n in names)) + self.assertTrue(any(n.startswith("media/video_thumbs/") for n in names)) + finally: + if prev_data is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data + + def test_checked_location_exports_location_fields(self): + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + username = "wxid_friend" + self._prepare_account(root, account=account, username=username) + + prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR") + try: + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + svc = self._reload_export_modules() + job = self._create_job( + svc.CHAT_EXPORT_MANAGER, + account=account, + username=username, + message_types=["location"], + include_media=False, + ) + self.assertEqual(job.status, "done", msg=job.error) + + payload, manifest, _ = self._load_export_payload(job.zip_path) + location_msg = next((m for m in payload.get("messages", []) if int(m.get("type") or 0) == 48), None) + self.assertIsNotNone(location_msg) + self.assertEqual(str(location_msg.get("renderType") or ""), "location") + self.assertEqual(str(location_msg.get("locationPoiname") or ""), "天安门") + self.assertEqual(str(location_msg.get("locationLabel") or ""), "北京市东城区东华门街道") + self.assertAlmostEqual(float(location_msg.get("locationLat") or 0), 39.9042, places=4) + self.assertAlmostEqual(float(location_msg.get("locationLng") or 0), 116.4074, places=4) + self.assertEqual(manifest.get("filters", {}).get("messageTypes"), ["location"]) + finally: + if prev_data is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data + + def 
test_privacy_mode_never_exports_media(self): + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + username = "wxid_friend" + self._prepare_account(root, account=account, username=username) + + prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR") + try: + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + svc = self._reload_export_modules() + job = self._create_job( + svc.CHAT_EXPORT_MANAGER, + account=account, + username=username, + message_types=["image", "video", "text"], + include_media=True, + privacy_mode=True, + ) + self.assertEqual(job.status, "done", msg=job.error) + + payload, manifest, names = self._load_export_payload(job.zip_path) + self.assertFalse(any(n.startswith("media/images/") for n in names)) + self.assertFalse(any(n.startswith("media/videos/") for n in names)) + self.assertFalse(any(n.startswith("media/video_thumbs/") for n in names)) + + for msg in payload.get("messages", []): + self.assertFalse(msg.get("offlineMedia")) + + self.assertFalse(bool(manifest.get("options", {}).get("includeMedia"))) + finally: + if prev_data is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data + + def test_transfer_only_exports_transfer_messages(self): + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + username = "wxid_friend" + self._prepare_account(root, account=account, username=username) + + prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR") + try: + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + svc = self._reload_export_modules() + job = self._create_job( + svc.CHAT_EXPORT_MANAGER, + account=account, + username=username, + message_types=["transfer"], + include_media=True, + ) + self.assertEqual(job.status, "done", msg=job.error) + + payload, manifest, _ = self._load_export_payload(job.zip_path) + messages = list(payload.get("messages", [])) + self.assertEqual(len(messages), 1) + self.assertTrue(all(str(m.get("renderType") or "") == "transfer" for m 
in messages)) + self.assertEqual(manifest.get("filters", {}).get("messageTypes"), ["transfer"]) + finally: + if prev_data is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data + + def test_system_revoke_exports_readable_revoker_content(self): + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + username = "wxid_friend" + self._prepare_account(root, account=account, username=username) + + prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR") + try: + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + svc = self._reload_export_modules() + job = self._create_job( + svc.CHAT_EXPORT_MANAGER, + account=account, + username=username, + message_types=["system"], + include_media=False, + ) + self.assertEqual(job.status, "done", msg=job.error) + + payload, _, _ = self._load_export_payload(job.zip_path) + revoke_msg = next((m for m in payload.get("messages", []) if int(m.get("serverId") or 0) == 1006), None) + self.assertIsNotNone(revoke_msg) + self.assertEqual(str(revoke_msg.get("renderType") or ""), "system") + self.assertEqual(str(revoke_msg.get("content") or ""), "“测试好友”撤回了一条消息") + finally: + if prev_data is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_chat_export_remote_thumb_option.py b/tests/test_chat_export_remote_thumb_option.py new file mode 100644 index 0000000..e587fef --- /dev/null +++ b/tests/test_chat_export_remote_thumb_option.py @@ -0,0 +1,304 @@ +import os +import hashlib +import sqlite3 +import sys +import unittest +import zipfile +import importlib +from pathlib import Path +from tempfile import TemporaryDirectory +from unittest import mock + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class _FakeResponse: + def __init__(self, body: bytes, *, content_type: str) -> None: + self.status_code = 200 + 
self.headers = { + "Content-Type": str(content_type or "").strip(), + "Content-Length": str(len(body)), + } + self._body = body + + def iter_content(self, chunk_size=65536): + data = self._body or b"" + for i in range(0, len(data), int(chunk_size or 65536)): + yield data[i : i + int(chunk_size or 65536)] + + def close(self): + return None + + +class TestChatExportRemoteThumbOption(unittest.TestCase): + def _reload_export_modules(self): + import wechat_decrypt_tool.app_paths as app_paths + import wechat_decrypt_tool.chat_helpers as chat_helpers + import wechat_decrypt_tool.media_helpers as media_helpers + import wechat_decrypt_tool.chat_export_service as chat_export_service + + importlib.reload(app_paths) + importlib.reload(chat_helpers) + importlib.reload(media_helpers) + importlib.reload(chat_export_service) + return chat_export_service + + def _seed_contact_db(self, path: Path, *, account: str, username: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE contact ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + """ + CREATE TABLE stranger ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (account, "", "我", "", 1, 0, "", ""), + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (username, "", "测试好友", "", 1, 0, "", ""), + ) + conn.commit() + finally: + conn.close() + + def _seed_session_db(self, path: Path, *, username: str) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE SessionTable ( + username TEXT, + is_hidden INTEGER, + sort_timestamp INTEGER + ) + """ + ) + conn.execute( + "INSERT INTO SessionTable VALUES (?, ?, ?)", + 
(username, 0, 1735689600), + ) + conn.commit() + finally: + conn.close() + + def _seed_message_db(self, path: Path, *, account: str, username: str) -> tuple[str, str]: + conn = sqlite3.connect(str(path)) + try: + conn.execute("CREATE TABLE Name2Id (rowid INTEGER PRIMARY KEY, user_name TEXT)") + conn.execute("INSERT INTO Name2Id(rowid, user_name) VALUES (?, ?)", (1, account)) + conn.execute("INSERT INTO Name2Id(rowid, user_name) VALUES (?, ?)", (2, username)) + + table_name = f"msg_{hashlib.md5(username.encode('utf-8')).hexdigest()}" + conn.execute( + f""" + CREATE TABLE {table_name} ( + local_id INTEGER, + server_id INTEGER, + local_type INTEGER, + sort_seq INTEGER, + real_sender_id INTEGER, + create_time INTEGER, + message_content TEXT, + compress_content BLOB + ) + """ + ) + + link_thumb = "https://1.1.1.1/thumb.png" + quote_thumb = "https://1.1.1.1/quote.png" + + link_xml = ( + "" + "5" + "示例链接" + "这是描述" + "https://example.com/" + f"{link_thumb}" + "" + ) + quote_xml = ( + "" + "57" + "回复" + "" + "49" + "8888" + "wxid_other" + "对方" + "" + "5被引用链接https://example.com/" + f"{quote_thumb}" + "" + "" + "" + "" + ) + + rows = [ + (1, 1001, 49, 1, 2, 1735689601, link_xml, None), + (2, 1002, 49, 2, 2, 1735689602, quote_xml, None), + ] + conn.executemany( + f"INSERT INTO {table_name} (local_id, server_id, local_type, sort_seq, real_sender_id, create_time, message_content, compress_content) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + rows, + ) + conn.commit() + return link_thumb, quote_thumb + finally: + conn.close() + + def _prepare_account(self, root: Path, *, account: str, username: str) -> tuple[Path, str, str]: + account_dir = root / "output" / "databases" / account + account_dir.mkdir(parents=True, exist_ok=True) + + self._seed_contact_db(account_dir / "contact.db", account=account, username=username) + self._seed_session_db(account_dir / "session.db", username=username) + link_thumb, quote_thumb = self._seed_message_db(account_dir / "message_0.db", account=account, 
username=username) + return account_dir, link_thumb, quote_thumb + + def _create_job(self, manager, *, account: str, username: str, download_remote_media: bool): + job = manager.create_job( + account=account, + scope="selected", + usernames=[username], + export_format="html", + start_time=None, + end_time=None, + include_hidden=False, + include_official=False, + include_media=True, + media_kinds=["image", "emoji", "video", "video_thumb", "voice", "file"], + message_types=["link", "quote", "image"], + output_dir=None, + allow_process_key_extract=False, + download_remote_media=download_remote_media, + privacy_mode=False, + file_name=None, + ) + + for _ in range(200): + latest = manager.get_job(job.export_id) + if latest and latest.status in {"done", "error", "cancelled"}: + return latest + import time as _time + + _time.sleep(0.05) + self.fail("export job did not finish in time") + + def test_remote_thumb_disabled_does_not_download(self): + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + username = "wxid_friend" + _, link_thumb, quote_thumb = self._prepare_account(root, account=account, username=username) + + prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR") + try: + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + svc = self._reload_export_modules() + + with mock.patch.object( + svc.requests, + "get", + side_effect=AssertionError("requests.get should not be called when download_remote_media=False"), + ) as m_get: + job = self._create_job( + svc.CHAT_EXPORT_MANAGER, + account=account, + username=username, + download_remote_media=False, + ) + self.assertEqual(job.status, "done", msg=job.error) + self.assertEqual(m_get.call_count, 0) + + with zipfile.ZipFile(job.zip_path, "r") as zf: + names = set(zf.namelist()) + html_path = next((n for n in names if n.endswith("/messages.html")), "") + self.assertTrue(html_path) + html_text = zf.read(html_path).decode("utf-8") + self.assertIn(f'src="{link_thumb}"', html_text) + 
self.assertIn(f'src="{quote_thumb}"', html_text) + self.assertFalse(any(n.startswith("media/remote/") for n in names)) + finally: + if prev_data is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data + + def test_remote_thumb_enabled_downloads_and_rewrites(self): + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + username = "wxid_friend" + _, link_thumb, quote_thumb = self._prepare_account(root, account=account, username=username) + + prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR") + try: + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + svc = self._reload_export_modules() + + fake_png = b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01\x00\x00\x00\x01\x08\x02\x00\x00\x00\x90wS\xde" + + def _fake_get(url, **_kwargs): + return _FakeResponse(fake_png, content_type="image/png") + + with mock.patch.object(svc.requests, "get", side_effect=_fake_get) as m_get: + job = self._create_job( + svc.CHAT_EXPORT_MANAGER, + account=account, + username=username, + download_remote_media=True, + ) + self.assertEqual(job.status, "done", msg=job.error) + self.assertGreaterEqual(m_get.call_count, 1) + + with zipfile.ZipFile(job.zip_path, "r") as zf: + names = set(zf.namelist()) + html_path = next((n for n in names if n.endswith("/messages.html")), "") + self.assertTrue(html_path) + html_text = zf.read(html_path).decode("utf-8") + + h1 = hashlib.sha256(link_thumb.encode("utf-8", errors="ignore")).hexdigest() + arc1 = f"media/remote/{h1[:32]}.png" + self.assertIn(arc1, names) + self.assertIn(f"../../{arc1}", html_text) + self.assertNotIn(f'src="{link_thumb}"', html_text) + + h2 = hashlib.sha256(quote_thumb.encode("utf-8", errors="ignore")).hexdigest() + arc2 = f"media/remote/{h2[:32]}.png" + self.assertIn(arc2, names) + self.assertIn(f"../../{arc2}", html_text) + self.assertNotIn(f'src="{quote_thumb}"', html_text) + finally: + if prev_data is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + 
else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data + diff --git a/tests/test_chat_list_messages_re_scope.py b/tests/test_chat_list_messages_re_scope.py new file mode 100644 index 0000000..583d441 --- /dev/null +++ b/tests/test_chat_list_messages_re_scope.py @@ -0,0 +1,67 @@ +import sys +import unittest +from pathlib import Path +from tempfile import TemporaryDirectory +from unittest.mock import patch + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class TestChatListMessagesReScope(unittest.TestCase): + def test_list_chat_messages_does_not_shadow_re(self): + from starlette.requests import Request + + import wechat_decrypt_tool.routers.chat as chat + + class _Sentinel(Exception): + pass + + def fake_collect_chat_messages(**_kwargs): + merged = [ + { + "id": "1", + "sortSeq": 0, + "createTime": 1, + "localId": 1, + "type": 266287972401, + "_rawText": "", + "renderType": "appmsg", + } + ] + return merged, False, [], [], set() + + scope = { + "type": "http", + "method": "GET", + "path": "/api/chat/messages", + "raw_path": b"/api/chat/messages", + "query_string": b"", + "headers": [], + "client": ("testclient", 12345), + "server": ("testserver", 80), + "scheme": "http", + } + request = Request(scope) + + with TemporaryDirectory() as td: + account_dir = Path(td) / "acc" + account_dir.mkdir(parents=True, exist_ok=True) + + sentinel = _Sentinel("stop-after-template-parse") + + with patch.object(chat, "_resolve_account_dir", return_value=account_dir), patch.object( + chat, "_iter_message_db_paths", return_value=[account_dir / "msg_0.db"] + ), patch.object(chat, "_collect_chat_messages", side_effect=fake_collect_chat_messages), patch.object( + chat, "_postprocess_transfer_messages", lambda _merged: None + ), patch.object(chat, "_extract_xml_tag_text", return_value="${wxid_abc}"), patch.object( + chat, "_load_contact_rows", side_effect=sentinel + ): + with self.assertRaises(_Sentinel): + chat.list_chat_messages(request=request, 
username="44372432598@chatroom", account="acc") + + +if __name__ == "__main__": + unittest.main() + diff --git a/tests/test_chat_media_favicon.py b/tests/test_chat_media_favicon.py new file mode 100644 index 0000000..516ef84 --- /dev/null +++ b/tests/test_chat_media_favicon.py @@ -0,0 +1,133 @@ +import os +import sqlite3 +import sys +import unittest +import importlib +from pathlib import Path +from tempfile import TemporaryDirectory +from unittest.mock import patch + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class _FakeResponse: + def __init__(self, *, status_code: int = 200, headers: dict | None = None, url: str = "", body: bytes = b""): + self.status_code = int(status_code) + self.headers = dict(headers or {}) + self.url = str(url or "") + self._body = bytes(body or b"") + + def iter_content(self, chunk_size: int = 64 * 1024): + yield self._body + + def close(self) -> None: + return None + + +class TestChatMediaFavicon(unittest.TestCase): + def test_chat_media_favicon_caches(self): + from fastapi import FastAPI + from fastapi.testclient import TestClient + + # 1x1 PNG (same as other avatar cache tests) + png = bytes.fromhex( + "89504E470D0A1A0A" + "0000000D49484452000000010000000108060000001F15C489" + "0000000D49444154789C6360606060000000050001A5F64540" + "0000000049454E44AE426082" + ) + + with TemporaryDirectory() as td: + root = Path(td) + + prev_data = None + prev_cache = None + try: + prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR") + prev_cache = os.environ.get("WECHAT_TOOL_AVATAR_CACHE_ENABLED") + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + os.environ["WECHAT_TOOL_AVATAR_CACHE_ENABLED"] = "1" + + import wechat_decrypt_tool.app_paths as app_paths + import wechat_decrypt_tool.avatar_cache as avatar_cache + import wechat_decrypt_tool.routers.chat_media as chat_media + + importlib.reload(app_paths) + importlib.reload(avatar_cache) + importlib.reload(chat_media) + + def fake_head(url, **_kwargs): + # Pretend 
short-link resolves to bilibili. + return _FakeResponse( + status_code=200, + headers={}, + url="https://www.bilibili.com/video/BV1Au4tzNEq2", + body=b"", + ) + + def fake_get(url, **_kwargs): + u = str(url or "") + if "www.bilibili.com/favicon.ico" in u: + return _FakeResponse( + status_code=200, + headers={"Content-Type": "image/png", "content-length": str(len(png))}, + url=u, + body=png, + ) + return _FakeResponse( + status_code=404, + headers={"Content-Type": "text/html"}, + url=u, + body=b"", + ) + + app = FastAPI() + app.include_router(chat_media.router) + client = TestClient(app) + + with patch("wechat_decrypt_tool.routers.chat_media.requests.head", side_effect=fake_head) as mock_head, patch( + "wechat_decrypt_tool.routers.chat_media.requests.get", side_effect=fake_get + ) as mock_get: + resp = client.get("/api/chat/media/favicon", params={"url": "https://b23.tv/au68guF"}) + self.assertEqual(resp.status_code, 200) + self.assertTrue(resp.headers.get("content-type", "").startswith("image/")) + self.assertEqual(resp.content, png) + + # Second call should hit disk cache (no extra favicon download). 
+ resp2 = client.get("/api/chat/media/favicon", params={"url": "https://b23.tv/au68guF"}) + self.assertEqual(resp2.status_code, 200) + self.assertEqual(resp2.content, png) + + self.assertGreaterEqual(mock_head.call_count, 1) + self.assertEqual(mock_get.call_count, 1) + + cache_db = root / "output" / "avatar_cache" / "favicon" / "avatar_cache.db" + self.assertTrue(cache_db.exists()) + + conn = sqlite3.connect(str(cache_db)) + try: + row = conn.execute( + "SELECT source_kind, source_url, media_type FROM avatar_cache_entries WHERE source_kind = 'url' LIMIT 1" + ).fetchone() + self.assertIsNotNone(row) + self.assertEqual(str(row[0] or ""), "url") + self.assertIn("favicon.ico", str(row[1] or "")) + self.assertTrue(str(row[2] or "").startswith("image/")) + finally: + conn.close() + finally: + if prev_data is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data + if prev_cache is None: + os.environ.pop("WECHAT_TOOL_AVATAR_CACHE_ENABLED", None) + else: + os.environ["WECHAT_TOOL_AVATAR_CACHE_ENABLED"] = prev_cache + + +if __name__ == "__main__": + unittest.main() + diff --git a/tests/test_chat_message_calendar_heatmap.py b/tests/test_chat_message_calendar_heatmap.py new file mode 100644 index 0000000..1075ae5 --- /dev/null +++ b/tests/test_chat_message_calendar_heatmap.py @@ -0,0 +1,292 @@ +import hashlib +import sqlite3 +import sys +import unittest +from datetime import datetime +from pathlib import Path +from tempfile import TemporaryDirectory +from unittest.mock import patch + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +from wechat_decrypt_tool.routers import chat as chat_router + + +def _msg_table_name(username: str) -> str: + md5_hex = hashlib.md5(username.encode("utf-8")).hexdigest() + return f"Msg_{md5_hex}" + + +def _seed_message_db(path: Path, *, username: str, rows: list[tuple[int, int]]) -> None: + """rows: [(create_time, sort_seq), ...]""" + table = 
_msg_table_name(username) + conn = sqlite3.connect(str(path)) + try: + conn.execute( + f""" + CREATE TABLE "{table}"( + local_id INTEGER PRIMARY KEY AUTOINCREMENT, + create_time INTEGER, + sort_seq INTEGER + ) + """ + ) + for create_time, sort_seq in rows: + conn.execute( + f'INSERT INTO "{table}"(create_time, sort_seq) VALUES (?, ?)', + (int(create_time), int(sort_seq)), + ) + conn.commit() + finally: + conn.close() + + +def _seed_message_db_full(path: Path, *, username: str, rows: list[tuple[int, int, str]]) -> None: + """rows: [(create_time, sort_seq, text), ...] - minimal schema for /api/chat/messages/around.""" + + table = _msg_table_name(username) + conn = sqlite3.connect(str(path)) + try: + conn.execute( + f""" + CREATE TABLE "{table}"( + local_id INTEGER PRIMARY KEY AUTOINCREMENT, + server_id INTEGER, + local_type INTEGER, + sort_seq INTEGER, + real_sender_id INTEGER, + create_time INTEGER, + message_content TEXT, + compress_content BLOB + ) + """ + ) + for create_time, sort_seq, text in rows: + conn.execute( + f'INSERT INTO "{table}"(server_id, local_type, sort_seq, real_sender_id, create_time, message_content, compress_content) ' + "VALUES (?, ?, ?, ?, ?, ?, ?)", + (0, 1, int(sort_seq), 0, int(create_time), str(text), None), + ) + conn.commit() + finally: + conn.close() + + +def _seed_contact_db_minimal(path: Path) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE contact ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + """ + CREATE TABLE stranger ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.commit() + finally: + conn.close() + + +class TestChatMessageCalendarHeatmap(unittest.TestCase): + def test_daily_counts_aggregates_per_day_and_respects_month_range(self): + with TemporaryDirectory() as td: + account_dir = Path(td) / "acc" + 
account_dir.mkdir(parents=True, exist_ok=True) + + username = "wxid_test_user" + + ts_jan31_23 = int(datetime(2026, 1, 31, 23, 0, 0).timestamp()) + ts_feb01_10 = int(datetime(2026, 2, 1, 10, 0, 0).timestamp()) + ts_feb14_12 = int(datetime(2026, 2, 14, 12, 0, 0).timestamp()) + + _seed_message_db( + account_dir / "message.db", + username=username, + rows=[ + (ts_jan31_23, 0), + (ts_feb01_10, 5), + (ts_feb01_10, 2), + (ts_feb14_12, 0), + ], + ) + + with patch.object(chat_router, "_resolve_account_dir", return_value=account_dir): + resp = chat_router.get_chat_message_daily_counts( + username=username, + year=2026, + month=2, + account="acc", + ) + + self.assertEqual(resp.get("status"), "success") + self.assertEqual(resp.get("username"), username) + self.assertEqual(resp.get("year"), 2026) + self.assertEqual(resp.get("month"), 2) + + counts = resp.get("counts") or {} + self.assertEqual(counts.get("2026-02-01"), 2) + self.assertEqual(counts.get("2026-02-14"), 1) + self.assertIsNone(counts.get("2026-01-31")) + + self.assertEqual(resp.get("total"), 3) + self.assertEqual(resp.get("max"), 2) + + def test_anchor_day_picks_earliest_by_create_time_then_sort_seq_then_local_id(self): + with TemporaryDirectory() as td: + account_dir = Path(td) / "acc" + account_dir.mkdir(parents=True, exist_ok=True) + + username = "wxid_test_user" + + ts_jan31_23 = int(datetime(2026, 1, 31, 23, 0, 0).timestamp()) + ts_feb01_10 = int(datetime(2026, 2, 1, 10, 0, 0).timestamp()) + + _seed_message_db( + account_dir / "message.db", + username=username, + rows=[ + (ts_jan31_23, 0), # local_id = 1 + (ts_feb01_10, 5), # local_id = 2 + (ts_feb01_10, 2), # local_id = 3 <- expected (sort_seq smaller) + ], + ) + + with patch.object(chat_router, "_resolve_account_dir", return_value=account_dir): + resp = chat_router.get_chat_message_anchor( + username=username, + kind="day", + account="acc", + date="2026-02-01", + ) + + self.assertEqual(resp.get("status"), "success") + self.assertEqual(resp.get("kind"), "day") 
+ self.assertEqual(resp.get("date"), "2026-02-01") + anchor_id = str(resp.get("anchorId") or "") + self.assertTrue(anchor_id.startswith("message:"), anchor_id) + self.assertTrue(anchor_id.endswith(":3"), anchor_id) + + def test_anchor_first_picks_global_earliest(self): + with TemporaryDirectory() as td: + account_dir = Path(td) / "acc" + account_dir.mkdir(parents=True, exist_ok=True) + + username = "wxid_test_user" + + ts_jan31_23 = int(datetime(2026, 1, 31, 23, 0, 0).timestamp()) + ts_feb01_10 = int(datetime(2026, 2, 1, 10, 0, 0).timestamp()) + + _seed_message_db( + account_dir / "message.db", + username=username, + rows=[ + (ts_feb01_10, 2), # local_id = 1 + (ts_jan31_23, 0), # local_id = 2, but earlier create_time -> should win even if local_id bigger + ], + ) + + with patch.object(chat_router, "_resolve_account_dir", return_value=account_dir): + resp = chat_router.get_chat_message_anchor( + username=username, + kind="first", + account="acc", + ) + + self.assertEqual(resp.get("status"), "success") + self.assertEqual(resp.get("kind"), "first") + anchor_id = str(resp.get("anchorId") or "") + self.assertTrue(anchor_id.startswith("message:"), anchor_id) + self.assertTrue(anchor_id.endswith(":2"), anchor_id) + + def test_anchor_day_empty_returns_empty_status(self): + with TemporaryDirectory() as td: + account_dir = Path(td) / "acc" + account_dir.mkdir(parents=True, exist_ok=True) + + username = "wxid_test_user" + ts_feb01_10 = int(datetime(2026, 2, 1, 10, 0, 0).timestamp()) + + _seed_message_db(account_dir / "message.db", username=username, rows=[(ts_feb01_10, 0)]) + + with patch.object(chat_router, "_resolve_account_dir", return_value=account_dir): + resp = chat_router.get_chat_message_anchor( + username=username, + kind="day", + account="acc", + date="2026-02-02", + ) + + self.assertEqual(resp.get("status"), "empty") + self.assertEqual(resp.get("anchorId"), "") + + def test_around_can_span_multiple_message_dbs_for_pagination(self): + from fastapi import FastAPI + 
from fastapi.testclient import TestClient + + with TemporaryDirectory() as td: + account_dir = Path(td) / "acc" + account_dir.mkdir(parents=True, exist_ok=True) + + username = "wxid_test_user" + table = _msg_table_name(username) + + # Anchor in message.db, next message in message_1.db + _seed_message_db_full( + account_dir / "message.db", + username=username, + rows=[(1000, 0, "A")], # local_id=1 + ) + _seed_message_db_full( + account_dir / "message_1.db", + username=username, + rows=[(2000, 0, "B")], # local_id=1 + ) + _seed_contact_db_minimal(account_dir / "contact.db") + + app = FastAPI() + app.include_router(chat_router.router) + client = TestClient(app) + + with patch.object(chat_router, "_resolve_account_dir", return_value=account_dir): + resp = client.get( + "/api/chat/messages/around", + params={ + "account": "acc", + "username": username, + "anchor_id": f"message:{table}:1", + "before": 0, + "after": 10, + }, + ) + + self.assertEqual(resp.status_code, 200, resp.text) + data = resp.json() + self.assertEqual(data.get("status"), "success") + self.assertEqual(data.get("username"), username) + self.assertEqual(data.get("anchorId"), f"message:{table}:1") + self.assertEqual(data.get("anchorIndex"), 0) + + msgs = data.get("messages") or [] + self.assertEqual(len(msgs), 2) + self.assertEqual(msgs[0].get("id"), f"message:{table}:1") + self.assertEqual(msgs[1].get("id"), f"message_1:{table}:1") diff --git a/tests/test_chat_official_article_cover_style.py b/tests/test_chat_official_article_cover_style.py new file mode 100644 index 0000000..40f303c --- /dev/null +++ b/tests/test_chat_official_article_cover_style.py @@ -0,0 +1,58 @@ +import sys +import unittest +from pathlib import Path + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + +from wechat_decrypt_tool.chat_helpers import _parse_app_message + + +class TestChatOfficialArticleCoverStyle(unittest.TestCase): + def test_mp_weixin_feed_url_is_cover_style(self): + raw_text = ( + 
"" + "" + "时尚穿搭:「这样的jk你喜欢吗」" + "这样的jk你喜欢吗?" + "5" + "" + "http://mp.weixin.qq.com/s?__biz=MzkxOTY4MjIxOA==&mid=2247508015&idx=1&sn=931dce677c6e70b4365792b14e7e8ff0" + "&exptype=masonry_feed_brief_content_elite_for_pcfeeds_u2i&ranksessionid=1770868256_1&req_id=1770867949535989#rd" + "" + "https://mmbiz.qpic.cn/sz_mmbiz_jpg/foo/640?wx_fmt=jpeg&wxfrom=401" + "甜图社" + "gh_abc123" + "" + "" + ) + + parsed = _parse_app_message(raw_text) + self.assertEqual(parsed.get("renderType"), "link") + self.assertEqual(parsed.get("linkType"), "official_article") + self.assertEqual(parsed.get("linkStyle"), "cover") + + def test_mp_weixin_non_feed_url_keeps_default_style(self): + raw_text = ( + "" + "" + "普通分享" + "这样的jk你喜欢吗?" + "5" + "http://mp.weixin.qq.com/s?__biz=foo&mid=1&idx=1&sn=bar#rd" + "甜图社" + "gh_abc123" + "" + "" + ) + + parsed = _parse_app_message(raw_text) + self.assertEqual(parsed.get("renderType"), "link") + self.assertEqual(parsed.get("linkType"), "official_article") + self.assertEqual(parsed.get("linkStyle"), "default") + + +if __name__ == "__main__": + unittest.main() + diff --git a/tests/test_chat_realtime_sync_all_updates_sender_display_name.py b/tests/test_chat_realtime_sync_all_updates_sender_display_name.py new file mode 100644 index 0000000..db60770 --- /dev/null +++ b/tests/test_chat_realtime_sync_all_updates_sender_display_name.py @@ -0,0 +1,111 @@ +import sqlite3 +import sys +import threading +import unittest +from pathlib import Path +from tempfile import TemporaryDirectory +from unittest.mock import patch + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + +from wechat_decrypt_tool.routers import chat as chat_router + + +class _DummyRequest: + base_url = "http://testserver/" + + +class _DummyConn: + def __init__(self) -> None: + self.handle = 1 + self.lock = threading.Lock() + + +def _seed_session_db(session_db_path: Path) -> None: + conn = sqlite3.connect(str(session_db_path)) + try: + conn.execute( + """ + CREATE TABLE 
SessionTable ( + username TEXT PRIMARY KEY, + unread_count INTEGER DEFAULT 0, + is_hidden INTEGER DEFAULT 0, + summary TEXT DEFAULT '', + draft TEXT DEFAULT '', + last_timestamp INTEGER DEFAULT 0, + sort_timestamp INTEGER DEFAULT 0, + last_msg_locald_id INTEGER DEFAULT 0, + last_msg_type INTEGER DEFAULT 0, + last_msg_sub_type INTEGER DEFAULT 0, + last_msg_sender TEXT DEFAULT '', + last_sender_display_name TEXT DEFAULT '' + ) + """ + ) + conn.commit() + finally: + conn.close() + + +class TestChatRealtimeSyncAllUpdatesSenderDisplayName(unittest.TestCase): + def test_sync_all_upserts_last_sender_display_name(self): + with TemporaryDirectory() as td: + account_dir = Path(td) / "acc" + account_dir.mkdir(parents=True, exist_ok=True) + _seed_session_db(account_dir / "session.db") + + conn = _DummyConn() + sessions_rows = [ + { + "username": "demo@chatroom", + "unread_count": 0, + "is_hidden": 0, + "summary": "hello", + "draft": "", + "last_timestamp": 123, + "sort_timestamp": 123, + "last_msg_type": 1, + "last_msg_sub_type": 0, + "last_msg_sender": "wxid_demo", + "last_sender_display_name": "群名片A", + "last_msg_locald_id": 777, + } + ] + + with ( + patch.object(chat_router, "_resolve_account_dir", return_value=account_dir), + patch.object(chat_router.WCDB_REALTIME, "ensure_connected", return_value=conn), + patch.object(chat_router, "_wcdb_get_sessions", return_value=sessions_rows), + patch.object(chat_router, "_ensure_decrypted_message_tables", return_value={}), + patch.object(chat_router, "_should_keep_session", return_value=True), + ): + resp = chat_router.sync_chat_realtime_messages_all( + _DummyRequest(), + account="acc", + max_scan=20, + include_hidden=True, + include_official=True, + ) + + self.assertEqual(resp.get("status"), "success") + + db = sqlite3.connect(str(account_dir / "session.db")) + try: + row = db.execute( + "SELECT last_sender_display_name, last_msg_sender, last_msg_locald_id FROM SessionTable WHERE username = ? 
LIMIT 1", + ("demo@chatroom",), + ).fetchone() + finally: + db.close() + + self.assertIsNotNone(row) + self.assertEqual(str(row[0] or ""), "群名片A") + self.assertEqual(str(row[1] or ""), "wxid_demo") + self.assertEqual(int(row[2] or 0), 777) + + +if __name__ == "__main__": + unittest.main() + diff --git a/tests/test_chat_realtime_video_thumb_md5_from_packed_info.py b/tests/test_chat_realtime_video_thumb_md5_from_packed_info.py new file mode 100644 index 0000000..d35a547 --- /dev/null +++ b/tests/test_chat_realtime_video_thumb_md5_from_packed_info.py @@ -0,0 +1,93 @@ +import sys +import threading +import unittest +from pathlib import Path +from tempfile import TemporaryDirectory +from unittest.mock import patch + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +from wechat_decrypt_tool.routers import chat as chat_router + + +class _DummyRequest: + base_url = "http://testserver/" + + +class _DummyConn: + def __init__(self) -> None: + self.handle = 1 + self.lock = threading.Lock() + + +class TestChatRealtimeVideoThumbMd5FromPackedInfo(unittest.TestCase): + def test_video_thumb_md5_filled_from_packed_info(self): + packed_md5 = "faff984641f9dd174e01c74f0796c9ae" + file_id = "3057020100044b3049020100020445eb9d5102032f54690204749999db0204698c336b0424deadbeef" + video_md5 = "22e6612411898b6d43b7e773e504d506" + xml = ( + '\n' + "\n" + f' \n' + "\n" + ) + + wcdb_rows = [ + { + "localId": 1, + "serverId": 123, + "localType": 43, + "sortSeq": 1700000000000, + "realSenderId": 1, + "createTime": 1700000000, + "messageContent": xml, + "compressContent": None, + "packedInfoData": packed_md5.encode("ascii"), + "senderUsername": "wxid_sender", + } + ] + + with TemporaryDirectory() as td: + account_dir = Path(td) / "acc" + account_dir.mkdir(parents=True, exist_ok=True) + conn = _DummyConn() + + with ( + patch.object(chat_router, "_resolve_account_dir", return_value=account_dir), + patch.object(chat_router.WCDB_REALTIME, "ensure_connected", 
return_value=conn), + patch.object(chat_router, "_wcdb_get_messages", return_value=wcdb_rows), + patch.object(chat_router, "_load_contact_rows", return_value={}), + patch.object(chat_router, "_query_head_image_usernames", return_value=set()), + patch.object(chat_router, "_wcdb_get_display_names", return_value={}), + patch.object(chat_router, "_wcdb_get_avatar_urls", return_value={}), + patch.object(chat_router, "_load_usernames_by_display_names", return_value={}), + patch.object(chat_router, "_load_group_nickname_map", return_value={}), + ): + resp = chat_router.list_chat_messages( + _DummyRequest(), + username="demo@chatroom", + account="acc", + limit=50, + offset=0, + order="asc", + render_types=None, + source="realtime", + ) + + self.assertEqual(resp.get("status"), "success") + messages = resp.get("messages") or [] + self.assertEqual(len(messages), 1) + msg = messages[0] + self.assertEqual(msg.get("renderType"), "video") + self.assertEqual(msg.get("videoThumbMd5"), packed_md5) + thumb_url = str(msg.get("videoThumbUrl") or "") + self.assertIn(f"md5={packed_md5}", thumb_url) + self.assertNotIn("file_id=", thumb_url) + + +if __name__ == "__main__": + unittest.main() + diff --git a/tests/test_chat_session_preview_formatting.py b/tests/test_chat_session_preview_formatting.py new file mode 100644 index 0000000..55466a7 --- /dev/null +++ b/tests/test_chat_session_preview_formatting.py @@ -0,0 +1,68 @@ +import sqlite3 +import sys +import unittest +from pathlib import Path +from tempfile import TemporaryDirectory + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + +from wechat_decrypt_tool.chat_helpers import ( + _build_group_sender_display_name_map, + _normalize_session_preview_text, + _replace_preview_sender_prefix, +) + + +class TestChatSessionPreviewFormatting(unittest.TestCase): + def test_normalize_session_preview_emoji_label(self): + out = _normalize_session_preview_text("[表情]", is_group=False, sender_display_names={}) + 
self.assertEqual(out, "[动画表情]") + + def test_normalize_group_preview_sender_display_name(self): + out = _normalize_session_preview_text( + "wxid_u3gwceqvne2m22: [表情]", + is_group=True, + sender_display_names={"wxid_u3gwceqvne2m22": "食神"}, + ) + self.assertEqual(out, "食神: [动画表情]") + + def test_build_group_sender_display_name_map_from_contact_db(self): + with TemporaryDirectory() as td: + contact_db_path = Path(td) / "contact.db" + conn = sqlite3.connect(str(contact_db_path)) + try: + conn.execute( + """ + CREATE TABLE contact ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?)", + ("wxid_u3gwceqvne2m22", "", "食神", "", "", ""), + ) + conn.commit() + finally: + conn.close() + + mapping = _build_group_sender_display_name_map( + contact_db_path, + {"demo@chatroom": "wxid_u3gwceqvne2m22: [动画表情]"}, + ) + self.assertEqual(mapping.get("wxid_u3gwceqvne2m22"), "食神") + + def test_replace_preview_sender_prefix_uses_group_nickname(self): + out = _replace_preview_sender_prefix("去码头整点🍟: [动画表情]", "麻辣香锅") + self.assertEqual(out, "麻辣香锅: [动画表情]") + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_chat_sessions_pinning.py b/tests/test_chat_sessions_pinning.py new file mode 100644 index 0000000..f2da9f6 --- /dev/null +++ b/tests/test_chat_sessions_pinning.py @@ -0,0 +1,211 @@ +import sqlite3 +import sys +import unittest +from pathlib import Path +from tempfile import TemporaryDirectory +from unittest.mock import patch + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +from wechat_decrypt_tool.routers import chat as chat_router + + +class _DummyRequest: + base_url = "http://testserver/" + + +def _seed_session_db(path: Path, rows: list[tuple[str, int, int, str]]) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE SessionTable( + username TEXT PRIMARY KEY, 
+ unread_count INTEGER, + is_hidden INTEGER, + summary TEXT, + draft TEXT, + last_timestamp INTEGER, + sort_timestamp INTEGER, + last_msg_type INTEGER, + last_msg_sub_type INTEGER + ) + """ + ) + for username, sort_timestamp, last_timestamp, summary in rows: + conn.execute( + """ + INSERT INTO SessionTable( + username, unread_count, is_hidden, summary, draft, + last_timestamp, sort_timestamp, last_msg_type, last_msg_sub_type + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) + """, + ( + username, + 0, + 0, + summary, + "", + int(last_timestamp), + int(sort_timestamp), + 1, + 0, + ), + ) + conn.commit() + finally: + conn.close() + + +def _seed_contact_db_with_flag(path: Path, flags: dict[str, int]) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE contact( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + big_head_url TEXT, + small_head_url TEXT, + flag INTEGER + ) + """ + ) + conn.execute( + """ + CREATE TABLE stranger( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + big_head_url TEXT, + small_head_url TEXT, + flag INTEGER + ) + """ + ) + for username, flag in flags.items(): + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?)", + (username, "", "", "", "", "", int(flag)), + ) + conn.commit() + finally: + conn.close() + + +def _seed_contact_db_without_flag(path: Path, usernames: list[str]) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE contact( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + """ + CREATE TABLE stranger( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + for username in usernames: + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?)", + (username, "", "", "", "", ""), + ) + conn.commit() + finally: + conn.close() + + +class 
TestChatSessionsPinning(unittest.TestCase): + def test_pinned_session_is_sorted_first_and_has_is_top(self): + with TemporaryDirectory() as td: + account_dir = Path(td) / "acc" + account_dir.mkdir(parents=True, exist_ok=True) + + _seed_session_db( + account_dir / "session.db", + [ + ("wxid_new", 200, 200, "new message"), + ("wxid_top", 100, 100, "top older message"), + ], + ) + _seed_contact_db_with_flag( + account_dir / "contact.db", + { + "wxid_new": 0, + "wxid_top": 1 << 11, + }, + ) + + with patch.object(chat_router, "_resolve_account_dir", return_value=account_dir): + resp = chat_router.list_chat_sessions( + _DummyRequest(), + account="acc", + limit=50, + include_hidden=True, + include_official=True, + preview="session", + source="", + ) + + self.assertEqual(resp.get("status"), "success") + sessions = resp.get("sessions") or [] + self.assertEqual(len(sessions), 2) + self.assertEqual(sessions[0].get("username"), "wxid_top") + self.assertTrue(bool(sessions[0].get("isTop"))) + self.assertEqual(sessions[1].get("username"), "wxid_new") + self.assertFalse(bool(sessions[1].get("isTop"))) + + def test_missing_flag_column_does_not_error_and_defaults_false(self): + with TemporaryDirectory() as td: + account_dir = Path(td) / "acc" + account_dir.mkdir(parents=True, exist_ok=True) + + _seed_session_db( + account_dir / "session.db", + [ + ("wxid_top", 100, 100, "hello"), + ], + ) + _seed_contact_db_without_flag(account_dir / "contact.db", ["wxid_top"]) + + with patch.object(chat_router, "_resolve_account_dir", return_value=account_dir): + resp = chat_router.list_chat_sessions( + _DummyRequest(), + account="acc", + limit=50, + include_hidden=True, + include_official=True, + preview="session", + source="", + ) + + self.assertEqual(resp.get("status"), "success") + sessions = resp.get("sessions") or [] + self.assertEqual(len(sessions), 1) + self.assertFalse(bool(sessions[0].get("isTop"))) + + +if __name__ == "__main__": + unittest.main() + diff --git 
a/tests/test_chat_sessions_realtime_sender_preview.py b/tests/test_chat_sessions_realtime_sender_preview.py new file mode 100644 index 0000000..dd92bad --- /dev/null +++ b/tests/test_chat_sessions_realtime_sender_preview.py @@ -0,0 +1,103 @@ +import sys +import threading +import unittest +from pathlib import Path +from tempfile import TemporaryDirectory +from unittest.mock import patch + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +from wechat_decrypt_tool.routers import chat as chat_router + + +class _DummyRequest: + base_url = "http://testserver/" + + +class _DummyConn: + def __init__(self) -> None: + self.handle = 1 + self.lock = threading.Lock() + + +class TestChatSessionsRealtimeSenderPreview(unittest.TestCase): + def _run(self, sessions_rows: list[dict]) -> dict: + with TemporaryDirectory() as td: + account_dir = Path(td) / "acc" + account_dir.mkdir(parents=True, exist_ok=True) + + conn = _DummyConn() + with ( + patch.object(chat_router, "_resolve_account_dir", return_value=account_dir), + patch.object(chat_router.WCDB_REALTIME, "ensure_connected", return_value=conn), + patch.object(chat_router, "_wcdb_get_sessions", return_value=sessions_rows), + patch.object(chat_router, "_wcdb_get_display_names", return_value={}), + patch.object(chat_router, "_wcdb_get_avatar_urls", return_value={}), + patch.object(chat_router, "_load_contact_rows", return_value={}), + patch.object(chat_router, "_query_head_image_usernames", return_value=set()), + patch.object(chat_router, "_should_keep_session", return_value=True), + patch.object(chat_router, "_avatar_url_unified", return_value="/avatar"), + ): + return chat_router.list_chat_sessions( + _DummyRequest(), + account="acc", + limit=50, + include_hidden=True, + include_official=True, + preview="latest", + source="realtime", + ) + + def test_realtime_sessions_group_summary_prefixed_by_sender_display_name(self): + resp = self._run( + [ + { + "username": "demo@chatroom", + "summary": 
"hello", + "draft": "", + "unread_count": 0, + "is_hidden": 0, + "last_timestamp": 123, + "sort_timestamp": 123, + "last_msg_type": 1, + "last_msg_sub_type": 0, + "last_msg_sender": "wxid_demo", + "last_sender_display_name": "群名片A", + } + ] + ) + self.assertEqual(resp.get("status"), "success") + sessions = resp.get("sessions") or [] + self.assertEqual(len(sessions), 1) + self.assertEqual(sessions[0].get("lastMessage"), "群名片A: hello") + + def test_realtime_sessions_group_url_summary_keeps_scheme(self): + resp = self._run( + [ + { + "username": "url@chatroom", + "summary": "https://example.com/x", + "draft": "", + "unread_count": 0, + "is_hidden": 0, + "last_timestamp": 123, + "sort_timestamp": 123, + "last_msg_type": 1, + "last_msg_sub_type": 0, + "last_msg_sender": "wxid_demo", + "last_sender_display_name": "群名片B", + } + ] + ) + self.assertEqual(resp.get("status"), "success") + sessions = resp.get("sessions") or [] + self.assertEqual(len(sessions), 1) + self.assertEqual(sessions[0].get("lastMessage"), "群名片B: https://example.com/x") + + +if __name__ == "__main__": + unittest.main() + diff --git a/tests/test_chat_system_message_parsing.py b/tests/test_chat_system_message_parsing.py new file mode 100644 index 0000000..7828dfc --- /dev/null +++ b/tests/test_chat_system_message_parsing.py @@ -0,0 +1,42 @@ +import sys +import unittest +from pathlib import Path + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + +from wechat_decrypt_tool.chat_helpers import _parse_system_message_content + + +class TestChatSystemMessageParsing(unittest.TestCase): + def test_extract_replacemsg_for_revoke(self): + raw_text = ( + '' + "" + ) + self.assertEqual(_parse_system_message_content(raw_text), "“张三”撤回了一条消息") + + def test_extract_nested_content_in_replacemsg(self): + raw_text = ( + '"黄智欢" 撤回了一条消息0' + ']]>' + ) + self.assertEqual(_parse_system_message_content(raw_text), '"黄智欢" 撤回了一条消息') + + def 
test_extract_revokemsg_text_when_replacemsg_missing(self): + raw_text = "你撤回了一条消息" + self.assertEqual(_parse_system_message_content(raw_text), "你撤回了一条消息") + + def test_revoke_fallback_when_no_readable_text(self): + raw_text = '' + self.assertEqual(_parse_system_message_content(raw_text), "撤回了一条消息") + + def test_normal_system_message_still_cleaned(self): + raw_text = "" + self.assertEqual(_parse_system_message_content(raw_text), "张三 加入了群聊") + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_contact_type_detection.py b/tests/test_contact_type_detection.py new file mode 100644 index 0000000..df38af4 --- /dev/null +++ b/tests/test_contact_type_detection.py @@ -0,0 +1,71 @@ +import sys +import unittest +from pathlib import Path + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class TestContactTypeDetection(unittest.TestCase): + def test_infer_group(self): + from wechat_decrypt_tool.routers.chat_contacts import _infer_contact_type + + row = {"local_type": 0, "alias": "", "remark": "", "nick_name": ""} + self.assertEqual(_infer_contact_type("123@chatroom", row), "group") + + def test_infer_official_by_prefix(self): + from wechat_decrypt_tool.routers.chat_contacts import _infer_contact_type + + row = {"local_type": 0, "verify_flag": 0, "alias": "", "remark": "", "nick_name": ""} + self.assertEqual(_infer_contact_type("gh_xxx", row), "official") + + def test_infer_official_by_verify_flag(self): + from wechat_decrypt_tool.routers.chat_contacts import _infer_contact_type + + row = {"local_type": 1, "verify_flag": 24, "alias": "", "remark": "", "nick_name": ""} + self.assertEqual(_infer_contact_type("wxid_xxx", row), "official") + + def test_infer_none_for_local_type_3_without_verify(self): + from wechat_decrypt_tool.routers.chat_contacts import _infer_contact_type + + row = {"local_type": 3, "verify_flag": 0, "alias": "", "remark": "", "nick_name": "普通联系人"} + self.assertIsNone(_infer_contact_type("wxid_xxx", 
row)) + + def test_infer_none_from_wxid_alias_when_local_type_not_1(self): + from wechat_decrypt_tool.routers.chat_contacts import _infer_contact_type + + row = {"local_type": 0, "verify_flag": 0, "alias": "wechat_id", "remark": "", "nick_name": ""} + self.assertIsNone(_infer_contact_type("wxid_xxx", row)) + + def test_infer_friend_from_local_type_1(self): + from wechat_decrypt_tool.routers.chat_contacts import _infer_contact_type + + row = {"local_type": 1, "verify_flag": 0, "alias": "", "remark": "", "nick_name": ""} + self.assertEqual(_infer_contact_type("wxid_xxx", row), "friend") + + def test_infer_none_from_local_type_2(self): + from wechat_decrypt_tool.routers.chat_contacts import _infer_contact_type + + row = {"local_type": 2, "verify_flag": 0, "alias": "", "remark": "", "nick_name": ""} + self.assertIsNone(_infer_contact_type("wxid_xxx", row)) + + def test_infer_none_when_empty_type_0(self): + from wechat_decrypt_tool.routers.chat_contacts import _infer_contact_type + + row = {"local_type": 0, "verify_flag": 0, "alias": "", "remark": "", "nick_name": ""} + self.assertIsNone(_infer_contact_type("wxid_xxx", row)) + + def test_valid_contact_username_filters_system_accounts(self): + from wechat_decrypt_tool.routers.chat_contacts import _is_valid_contact_username + + self.assertFalse(_is_valid_contact_username("filehelper")) + self.assertFalse(_is_valid_contact_username("notifymessage")) + self.assertFalse(_is_valid_contact_username("fake_abc")) + self.assertTrue(_is_valid_contact_username("weixin")) + self.assertTrue(_is_valid_contact_username("wxid_abc")) + self.assertTrue(_is_valid_contact_username("123@chatroom")) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_contacts_export.py b/tests/test_contacts_export.py new file mode 100644 index 0000000..e773396 --- /dev/null +++ b/tests/test_contacts_export.py @@ -0,0 +1,561 @@ +import json +import os +import sqlite3 +import sys +import unittest +import importlib +from pathlib import Path 
+from tempfile import TemporaryDirectory + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class TestContactsExport(unittest.TestCase): + @staticmethod + def _encode_varint(value: int) -> bytes: + v = int(value) + out = bytearray() + while True: + b = v & 0x7F + v >>= 7 + if v: + out.append(b | 0x80) + else: + out.append(b) + break + return bytes(out) + + @classmethod + def _encode_field_len(cls, field_no: int, raw: bytes) -> bytes: + tag = (int(field_no) << 3) | 2 + payload = bytes(raw) + return cls._encode_varint(tag) + cls._encode_varint(len(payload)) + payload + + @classmethod + def _encode_field_varint(cls, field_no: int, value: int) -> bytes: + tag = int(field_no) << 3 + return cls._encode_varint(tag) + cls._encode_varint(int(value)) + + @classmethod + def _build_extra_buffer( + cls, + *, + country: str, + province: str, + city: str, + source_scene: int, + gender: int = 0, + signature: str = "", + ) -> bytes: + return b"".join( + [ + cls._encode_field_varint(2, gender), + cls._encode_field_len(4, signature.encode("utf-8")), + cls._encode_field_len(5, country.encode("utf-8")), + cls._encode_field_len(6, province.encode("utf-8")), + cls._encode_field_len(7, city.encode("utf-8")), + cls._encode_field_varint(8, source_scene), + ] + ) + + def _seed_contact_db(self, path: Path) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE contact ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT, + extra_buffer BLOB + ) + """ + ) + conn.execute( + """ + CREATE TABLE stranger ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT, + extra_buffer BLOB + ) + """ + ) + + friend_extra_buffer = self._build_extra_buffer( + country="CN", + province="Sichuan", + city="Chengdu", + source_scene=14, + 
gender=1, + signature="自助者天助!!!", + ) + + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", + ( + "wxid_friend", + "好友备注", + "好友昵称", + "friend_alias", + 1, + 0, + "https://cdn.example.com/friend_big.jpg", + "https://cdn.example.com/friend_small.jpg", + friend_extra_buffer, + ), + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", + ( + "room@chatroom", + "", + "测试群", + "", + 0, + 0, + "https://cdn.example.com/group_big.jpg", + "", + b"", + ), + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", + ( + "gh_official", + "", + "公众号", + "", + 4, + 8, + "", + "https://cdn.example.com/official_small.jpg", + b"", + ), + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", + ( + "wxid_local_type_3", + "", + "不应计入联系人", + "", + 3, + 0, + "", + "", + b"", + ), + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", + ( + "weixin", + "", + "微信团队", + "", + 1, + 56, + "", + "", + b"", + ), + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", + ( + "filehelper", + "", + "文件传输助手", + "", + 0, + 0, + "", + "", + b"", + ), + ) + conn.execute( + "INSERT INTO stranger VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", + ( + "stranger_verified", + "", + "陌生人认证号", + "", + 4, + 24, + "", + "", + b"", + ), + ) + conn.commit() + finally: + conn.close() + + def _seed_session_db(self, path: Path) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE SessionTable ( + username TEXT, + sort_timestamp INTEGER, + last_timestamp INTEGER + ) + """ + ) + conn.execute("INSERT INTO SessionTable VALUES (?, ?, ?)", ("room@chatroom", 300, 300)) + conn.execute("INSERT INTO SessionTable VALUES (?, ?, ?)", ("wxid_friend", 200, 200)) + conn.execute("INSERT INTO SessionTable VALUES (?, ?, ?)", ("gh_official", 100, 100)) + conn.execute("INSERT INTO SessionTable VALUES (?, ?, ?)", ("missing@chatroom", 250, 250)) + conn.commit() + finally: + 
conn.close() + + def _seed_contact_db_legacy(self, path: Path) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE contact ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + """ + CREATE TABLE stranger ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + ( + "wxid_legacy_friend", + "旧版好友备注", + "旧版好友昵称", + "legacy_friend_alias", + 1, + 0, + "", + "", + ), + ) + conn.commit() + finally: + conn.close() + + def test_export_json_and_csv(self): + from fastapi import FastAPI + from fastapi.testclient import TestClient + + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + account_dir = root / "output" / "databases" / account + account_dir.mkdir(parents=True, exist_ok=True) + + self._seed_contact_db(account_dir / "contact.db") + self._seed_session_db(account_dir / "session.db") + + prev = None + try: + prev = os.environ.get("WECHAT_TOOL_DATA_DIR") + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + + import wechat_decrypt_tool.chat_helpers as chat_helpers + import wechat_decrypt_tool.routers.chat_contacts as chat_contacts + + importlib.reload(chat_helpers) + importlib.reload(chat_contacts) + + app = FastAPI() + app.include_router(chat_contacts.router) + + client = TestClient(app) + + list_resp = client.get( + "/api/chat/contacts", + params={ + "account": account, + "include_friends": True, + "include_groups": True, + "include_officials": True, + }, + ) + self.assertEqual(list_resp.status_code, 200) + list_payload = list_resp.json() + self.assertEqual(list_payload["status"], "success") + self.assertEqual(list_payload["total"], 6) + self.assertEqual(list_payload["counts"]["friends"], 1) + 
self.assertEqual(list_payload["counts"]["groups"], 2) + self.assertEqual(list_payload["counts"]["officials"], 3) + usernames = {str(x.get("username")) for x in list_payload.get("contacts", [])} + self.assertIn("missing@chatroom", usernames) + self.assertIn("weixin", usernames) + self.assertNotIn("wxid_local_type_3", usernames) + first = list_payload["contacts"][0] + self.assertIn("avatarLink", first) + + friend_contact = next( + (x for x in list_payload.get("contacts", []) if str(x.get("username")) == "wxid_friend"), + {}, + ) + self.assertEqual(friend_contact.get("country"), "CN") + self.assertEqual(friend_contact.get("province"), "Sichuan") + self.assertEqual(friend_contact.get("city"), "Chengdu") + self.assertEqual(friend_contact.get("region"), "中国大陆·Sichuan·Chengdu") + self.assertEqual(friend_contact.get("gender"), 1) + self.assertEqual(friend_contact.get("signature"), "自助者天助!!!") + self.assertEqual(friend_contact.get("sourceScene"), 14) + self.assertEqual(friend_contact.get("source"), "通过群聊添加") + + export_dir = root / "exports" + export_dir.mkdir(parents=True, exist_ok=True) + + json_resp = client.post( + "/api/chat/contacts/export", + json={ + "account": account, + "output_dir": str(export_dir), + "format": "json", + "include_avatar_link": True, + "contact_types": { + "friends": True, + "groups": True, + "officials": True, + }, + }, + ) + self.assertEqual(json_resp.status_code, 200) + json_payload = json_resp.json() + self.assertEqual(json_payload["status"], "success") + self.assertEqual(json_payload["count"], 6) + json_path = Path(json_payload["outputPath"]) + self.assertTrue(json_path.exists()) + + data = json.loads(json_path.read_text(encoding="utf-8")) + self.assertEqual(data["count"], 6) + self.assertIn("avatarLink", data["contacts"][0]) + self.assertIn("region", data["contacts"][0]) + self.assertIn("country", data["contacts"][0]) + self.assertIn("province", data["contacts"][0]) + self.assertIn("city", data["contacts"][0]) + self.assertIn("source", 
data["contacts"][0]) + self.assertIn("sourceScene", data["contacts"][0]) + export_usernames = {str(x.get("username")) for x in data.get("contacts", [])} + self.assertIn("missing@chatroom", export_usernames) + self.assertNotIn("wxid_local_type_3", export_usernames) + + friend_export = next( + (x for x in data.get("contacts", []) if str(x.get("username")) == "wxid_friend"), + {}, + ) + self.assertEqual(friend_export.get("region"), "中国大陆·Sichuan·Chengdu") + self.assertEqual(friend_export.get("sourceScene"), 14) + self.assertEqual(friend_export.get("source"), "通过群聊添加") + + csv_resp = client.post( + "/api/chat/contacts/export", + json={ + "account": account, + "output_dir": str(export_dir), + "format": "csv", + "include_avatar_link": False, + "contact_types": { + "friends": True, + "groups": False, + "officials": False, + }, + }, + ) + self.assertEqual(csv_resp.status_code, 200) + csv_payload = csv_resp.json() + self.assertEqual(csv_payload["count"], 1) + csv_path = Path(csv_payload["outputPath"]) + text = csv_path.read_text(encoding="utf-8-sig") + self.assertIn("用户名,显示名称,备注,昵称,微信号,类型,地区,国家/地区码,省份,城市,来源,来源场景码", text.splitlines()[0]) + self.assertNotIn("头像链接", text.splitlines()[0]) + self.assertIn("wxid_friend", text) + self.assertIn("中国大陆·Sichuan·Chengdu", text) + self.assertIn("通过群聊添加", text) + self.assertIn(",14", text) + self.assertNotIn("wxid_local_type_3", text) + finally: + if prev is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev + + def test_export_invalid_format_returns_400(self): + from fastapi import FastAPI + from fastapi.testclient import TestClient + + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + account_dir = root / "output" / "databases" / account + account_dir.mkdir(parents=True, exist_ok=True) + + self._seed_contact_db(account_dir / "contact.db") + self._seed_session_db(account_dir / "session.db") + + prev = None + try: + prev = 
os.environ.get("WECHAT_TOOL_DATA_DIR") + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + + import wechat_decrypt_tool.chat_helpers as chat_helpers + import wechat_decrypt_tool.routers.chat_contacts as chat_contacts + + importlib.reload(chat_helpers) + importlib.reload(chat_contacts) + + app = FastAPI() + app.include_router(chat_contacts.router) + + client = TestClient(app) + resp = client.post( + "/api/chat/contacts/export", + json={ + "account": account, + "output_dir": str(root / "exports"), + "format": "vcf", + "include_avatar_link": True, + "contact_types": { + "friends": True, + "groups": True, + "officials": True, + }, + }, + ) + self.assertEqual(resp.status_code, 400) + finally: + if prev is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev + + def test_missing_contact_db_returns_404(self): + from fastapi import FastAPI + from fastapi.testclient import TestClient + + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_test" + account_dir = root / "output" / "databases" / account + account_dir.mkdir(parents=True, exist_ok=True) + + # only session.db exists + self._seed_session_db(account_dir / "session.db") + + prev = None + try: + prev = os.environ.get("WECHAT_TOOL_DATA_DIR") + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + + import wechat_decrypt_tool.chat_helpers as chat_helpers + import wechat_decrypt_tool.routers.chat_contacts as chat_contacts + + importlib.reload(chat_helpers) + importlib.reload(chat_contacts) + + app = FastAPI() + app.include_router(chat_contacts.router) + client = TestClient(app) + + resp = client.get("/api/chat/contacts", params={"account": account}) + self.assertEqual(resp.status_code, 404) + finally: + if prev is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev + + def test_legacy_schema_without_extra_buffer_is_compatible(self): + from fastapi import FastAPI + from fastapi.testclient import TestClient + 
+ with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_legacy" + account_dir = root / "output" / "databases" / account + account_dir.mkdir(parents=True, exist_ok=True) + + self._seed_contact_db_legacy(account_dir / "contact.db") + self._seed_session_db(account_dir / "session.db") + + prev = None + try: + prev = os.environ.get("WECHAT_TOOL_DATA_DIR") + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + + import wechat_decrypt_tool.chat_helpers as chat_helpers + import wechat_decrypt_tool.routers.chat_contacts as chat_contacts + + importlib.reload(chat_helpers) + importlib.reload(chat_contacts) + + app = FastAPI() + app.include_router(chat_contacts.router) + client = TestClient(app) + + resp = client.get( + "/api/chat/contacts", + params={ + "account": account, + "include_friends": True, + "include_groups": False, + "include_officials": False, + }, + ) + self.assertEqual(resp.status_code, 200) + payload = resp.json() + self.assertEqual(payload.get("status"), "success") + self.assertEqual(int(payload.get("total", 0)), 1) + + contact = payload.get("contacts", [])[0] + self.assertEqual(contact.get("username"), "wxid_legacy_friend") + self.assertEqual(contact.get("country"), "") + self.assertEqual(contact.get("province"), "") + self.assertEqual(contact.get("city"), "") + self.assertEqual(contact.get("region"), "") + self.assertIsNone(contact.get("sourceScene")) + self.assertEqual(contact.get("source"), "") + finally: + if prev is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_decrypt_stream_sse.py b/tests/test_decrypt_stream_sse.py new file mode 100644 index 0000000..c041630 --- /dev/null +++ b/tests/test_decrypt_stream_sse.py @@ -0,0 +1,91 @@ +import json +import os +import sys +import unittest +import importlib +from pathlib import Path +from tempfile import TemporaryDirectory + + +ROOT = Path(__file__).resolve().parents[1] 
+sys.path.insert(0, str(ROOT / "src")) + + +class TestDecryptStreamSSE(unittest.TestCase): + def test_decrypt_stream_reports_progress(self): + from fastapi import FastAPI + from fastapi.testclient import TestClient + + from wechat_decrypt_tool.wechat_decrypt import SQLITE_HEADER + + with TemporaryDirectory() as td: + root = Path(td) + + prev_data_dir = os.environ.get("WECHAT_TOOL_DATA_DIR") + prev_build_cache = os.environ.get("WECHAT_TOOL_BUILD_SESSION_LAST_MESSAGE") + try: + os.environ["WECHAT_TOOL_DATA_DIR"] = str(root) + os.environ["WECHAT_TOOL_BUILD_SESSION_LAST_MESSAGE"] = "0" + + import wechat_decrypt_tool.app_paths as app_paths + import wechat_decrypt_tool.routers.decrypt as decrypt_router + + importlib.reload(app_paths) + importlib.reload(decrypt_router) + + db_storage = root / "xwechat_files" / "wxid_foo_bar" / "db_storage" + db_storage.mkdir(parents=True, exist_ok=True) + + # Fake a decrypted sqlite db (>= 4096 bytes) so decryptor falls back to copy. + (db_storage / "MSG0.db").write_bytes(SQLITE_HEADER + b"\x00" * (4096 - len(SQLITE_HEADER))) + + app = FastAPI() + app.include_router(decrypt_router.router) + client = TestClient(app) + + events: list[dict] = [] + with client.stream( + "GET", + "/api/decrypt_stream", + params={"key": "00" * 32, "db_storage_path": str(db_storage)}, + ) as resp: + self.assertEqual(resp.status_code, 200) + self.assertIn("text/event-stream", resp.headers.get("content-type", "")) + + for line in resp.iter_lines(): + if not line: + continue + if isinstance(line, bytes): + line = line.decode("utf-8", errors="ignore") + line = str(line) + + if line.startswith(":"): + continue + if not line.startswith("data: "): + continue + payload = json.loads(line[len("data: ") :]) + events.append(payload) + if payload.get("type") in {"complete", "error"}: + break + + types = {e.get("type") for e in events} + self.assertIn("start", types) + self.assertIn("progress", types) + self.assertEqual(events[-1].get("type"), "complete") + + out = root / 
"output" / "databases" / "wxid_foo" / "MSG0.db" + self.assertTrue(out.exists()) + finally: + if prev_data_dir is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data_dir + if prev_build_cache is None: + os.environ.pop("WECHAT_TOOL_BUILD_SESSION_LAST_MESSAGE", None) + else: + os.environ["WECHAT_TOOL_BUILD_SESSION_LAST_MESSAGE"] = prev_build_cache + + +if __name__ == "__main__": + unittest.main() + diff --git a/tests/test_group_nickname_ext_buffer_parsing.py b/tests/test_group_nickname_ext_buffer_parsing.py new file mode 100644 index 0000000..40558c4 --- /dev/null +++ b/tests/test_group_nickname_ext_buffer_parsing.py @@ -0,0 +1,114 @@ +import sqlite3 +import sys +import unittest +from pathlib import Path +from tempfile import TemporaryDirectory + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + +from wechat_decrypt_tool.chat_helpers import _load_group_nickname_map_from_contact_db + + +def _enc_varint(n: int) -> bytes: + v = int(n) + out = bytearray() + while True: + b = v & 0x7F + v >>= 7 + if v: + out.append(b | 0x80) + else: + out.append(b) + break + return bytes(out) + + +def _enc_tag(field_no: int, wire_type: int) -> bytes: + return _enc_varint((int(field_no) << 3) | int(wire_type)) + + +def _enc_len(field_no: int, data: bytes) -> bytes: + b = bytes(data or b"") + return _enc_tag(field_no, 2) + _enc_varint(len(b)) + b + + +def _member_entry(*, inner: bytes) -> bytes: + # contact.db ext_buffer uses repeated length-delimited submessages; the top-level field number is not important + # for our best-effort parser, so we use field 1. 
+ return _enc_len(1, inner) + + +class TestGroupNicknameExtBufferParsing(unittest.TestCase): + def test_parse_pattern_a_field1_username_field2_display(self): + chatroom = "demo@chatroom" + username = "wxid_demo_123456" + display = "群名片A" + + inner = _enc_len(1, username.encode("utf-8")) + _enc_len(2, display.encode("utf-8")) + ext_buffer = _member_entry(inner=inner) + + with TemporaryDirectory() as td: + contact_db_path = Path(td) / "contact.db" + conn = sqlite3.connect(str(contact_db_path)) + try: + conn.execute( + "CREATE TABLE chat_room(id INTEGER PRIMARY KEY, username TEXT, owner TEXT, ext_buffer BLOB)" + ) + conn.execute( + "INSERT INTO chat_room(id, username, owner, ext_buffer) VALUES (?, ?, ?, ?)", + (1, chatroom, "", ext_buffer), + ) + conn.commit() + finally: + conn.close() + + out = _load_group_nickname_map_from_contact_db(contact_db_path, chatroom, [username]) + self.assertEqual(out.get(username), display) + + def test_parse_pattern_b_field4_username_field1_display(self): + chatroom = "demo2@chatroom" + username = "wxid_demo_abcdef" + display = "hjlbingo" + + inner = _enc_len(4, username.encode("utf-8")) + _enc_len(1, display.encode("utf-8")) + ext_buffer = _member_entry(inner=inner) + + with TemporaryDirectory() as td: + contact_db_path = Path(td) / "contact.db" + conn = sqlite3.connect(str(contact_db_path)) + try: + conn.execute( + "CREATE TABLE chat_room(id INTEGER PRIMARY KEY, username TEXT, owner TEXT, ext_buffer BLOB)" + ) + conn.execute( + "INSERT INTO chat_room(id, username, owner, ext_buffer) VALUES (?, ?, ?, ?)", + (1, chatroom, "", ext_buffer), + ) + conn.commit() + finally: + conn.close() + + out = _load_group_nickname_map_from_contact_db(contact_db_path, chatroom, [username]) + self.assertEqual(out.get(username), display) + + def test_non_chatroom_returns_empty(self): + with TemporaryDirectory() as td: + contact_db_path = Path(td) / "contact.db" + conn = sqlite3.connect(str(contact_db_path)) + try: + conn.execute( + "CREATE TABLE 
chat_room(id INTEGER PRIMARY KEY, username TEXT, owner TEXT, ext_buffer BLOB)" + ) + conn.commit() + finally: + conn.close() + + out = _load_group_nickname_map_from_contact_db(contact_db_path, "wxid_not_chatroom", ["wxid_xxx"]) + self.assertEqual(out, {}) + + +if __name__ == "__main__": + unittest.main() + diff --git a/tests/test_group_xml_sender_extraction.py b/tests/test_group_xml_sender_extraction.py new file mode 100644 index 0000000..c719bdc --- /dev/null +++ b/tests/test_group_xml_sender_extraction.py @@ -0,0 +1,23 @@ +import sys +import unittest +from pathlib import Path + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + +from wechat_decrypt_tool.chat_helpers import _extract_sender_from_group_xml + + +class TestGroupXmlSenderExtraction(unittest.TestCase): + def test_prefers_outer_fromusername_over_nested_refermsg(self): + xml_text = ( + '57' + 'quoted_user@chatroom' + 'actual_sender@chatroom' + ) + self.assertEqual(_extract_sender_from_group_xml(xml_text), "actual_sender@chatroom") + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_logging_config_data_dir.py b/tests/test_logging_config_data_dir.py new file mode 100644 index 0000000..90ca1ee --- /dev/null +++ b/tests/test_logging_config_data_dir.py @@ -0,0 +1,63 @@ +import os +import sys +import unittest +import importlib +from pathlib import Path +from tempfile import TemporaryDirectory + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +def _close_logging_handlers() -> None: + # Close handlers to avoid Windows temp dir cleanup failures (FileHandler holds a lock). 
+ import logging + + for logger_name in ("", "uvicorn", "uvicorn.access", "uvicorn.error", "fastapi"): + lg = logging.getLogger(logger_name) + for h in lg.handlers[:]: + try: + h.close() + except Exception: + pass + try: + lg.removeHandler(h) + except Exception: + pass + + +class TestLoggingConfigDataDir(unittest.TestCase): + def setUp(self): + self._prev_data_dir = os.environ.get("WECHAT_TOOL_DATA_DIR") + self._td = TemporaryDirectory() + os.environ["WECHAT_TOOL_DATA_DIR"] = self._td.name + + import wechat_decrypt_tool.app_paths as app_paths + import wechat_decrypt_tool.logging_config as logging_config + + importlib.reload(app_paths) + importlib.reload(logging_config) + + self.logging_config = logging_config + + def tearDown(self): + _close_logging_handlers() + + if self._prev_data_dir is None: + os.environ.pop("WECHAT_TOOL_DATA_DIR", None) + else: + os.environ["WECHAT_TOOL_DATA_DIR"] = self._prev_data_dir + self._td.cleanup() + + def test_setup_logging_uses_wechat_tool_data_dir(self): + log_file = self.logging_config.setup_logging() + + base = Path(self._td.name) / "output" / "logs" + self.assertTrue(log_file.is_relative_to(base)) + self.assertTrue(log_file.exists()) + + +if __name__ == "__main__": + unittest.main() + diff --git a/tests/test_parse_app_message.py b/tests/test_parse_app_message.py new file mode 100644 index 0000000..3148d07 --- /dev/null +++ b/tests/test_parse_app_message.py @@ -0,0 +1,143 @@ +import sys +import unittest +from pathlib import Path + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + +from wechat_decrypt_tool.chat_helpers import _parse_app_message + + +class TestParseAppMessage(unittest.TestCase): + def test_mini_program_type_33_parses_as_link(self): + # 小程序分享是 appmsg type=33/36。部分 payload 会在 内嵌一个 0, + # 并且出现在外层 33 之前,因此解析必须避免被嵌套 误导。 + raw_text = ( + "" + "锦城苑房源详情分享给你,点击查看哦~" + "" + "" + "0" + "" + "" + "" + "33" + "" + "https://example.com/thumb.jpg" + "" + "" + ) + + parsed = 
_parse_app_message(raw_text) + + self.assertEqual(parsed.get("renderType"), "link") + self.assertEqual(parsed.get("linkType"), "mini_program") + self.assertEqual(parsed.get("title"), "锦城苑房源详情分享给你,点击查看哦~") + self.assertEqual(parsed.get("from"), "成都购房通") + self.assertEqual(parsed.get("fromUsername"), "gh_xxx@app") + self.assertEqual(parsed.get("thumbUrl"), "https://example.com/thumb.jpg") + + def test_quote_type_57_nested_refermsg_uses_inner_title(self): + raw_text = ( + '' + '一松一紧57' + '00' + '' + '0' + '' + '' + '571173057991425172913' + '44372432598@chatroom44372432598@chatroom' + '' + '那里紧?哪里张?' + '5700' + '' + '0' + '' + ']]>' + '' + ) + + parsed = _parse_app_message(raw_text) + + self.assertEqual(parsed.get("renderType"), "quote") + self.assertEqual(parsed.get("content"), "一松一紧") + self.assertEqual(parsed.get("quoteType"), "57") + self.assertEqual(parsed.get("quoteContent"), "那里紧?哪里张?") + + def test_quote_type_57_plain_text_refermsg_keeps_text(self): + raw_text = ( + '' + '回复57' + '57' + '' + ) + + parsed = _parse_app_message(raw_text) + + self.assertEqual(parsed.get("renderType"), "quote") + self.assertEqual(parsed.get("quoteContent"), "普通文本引用") + + def test_quote_type_49_nested_xml_refermsg_uses_inner_title(self): + raw_text = ( + '' + '这种傻逼公众号怎么还在看57' + '49' + '' + '为自己的美丽漂亮善良知性发声😊' + '5https://mp.weixin.qq.com/s/example' + 'https://mmbiz.qpic.cn/some-thumb.jpg' + ']]>' + ) + + parsed = _parse_app_message(raw_text) + + self.assertEqual(parsed.get("renderType"), "quote") + self.assertEqual(parsed.get("quoteType"), "49") + self.assertEqual(parsed.get("quoteTitle"), "水豚喧喧") + self.assertEqual(parsed.get("quoteContent"), "[链接] 为自己的美丽漂亮善良知性发声😊") + self.assertEqual(parsed.get("quoteThumbUrl"), "https://mmbiz.qpic.cn/some-thumb.jpg") + + def test_public_account_link_exposes_link_type_and_style(self): + raw_text = ( + '' + '为自己的美丽漂亮善良知性发声😊' + '#日常穿搭灵感 #白色蕾丝裙穿搭 #知性美女' + '5' + 'http://mp.weixin.qq.com/s?__biz=xx&mid=1' + 'http://mmbiz.qpic.cn/abc/640?wx_fmt=jpeg' + 
'gh_0cef8eaa987d' + '草莓不甜芒果甜' + '' + ) + + parsed = _parse_app_message(raw_text) + + self.assertEqual(parsed.get("renderType"), "link") + self.assertEqual(parsed.get("linkType"), "official_article") + self.assertEqual(parsed.get("linkStyle"), "cover") + + def test_quote_type_5_nested_xml_refermsg_uses_inner_title(self): + raw_text = ( + '' + '这个年龄有点大啊57' + '5' + '\n' + '谁说冬天不能穿裙子?5' + 'https://mmbiz.qpic.cn/some-thumb2.jpg' + 'https://mp.weixin.qq.com/s/example2' + ']]>' + ) + + parsed = _parse_app_message(raw_text) + + self.assertEqual(parsed.get("renderType"), "quote") + self.assertEqual(parsed.get("quoteType"), "5") + self.assertEqual(parsed.get("quoteTitle"), "水豚噜噜") + self.assertEqual(parsed.get("quoteContent"), "[链接] 谁说冬天不能穿裙子?") + self.assertEqual(parsed.get("quoteThumbUrl"), "https://mmbiz.qpic.cn/some-thumb2.jpg") + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_realtime_sync_table_creation.py b/tests/test_realtime_sync_table_creation.py new file mode 100644 index 0000000..3c33836 --- /dev/null +++ b/tests/test_realtime_sync_table_creation.py @@ -0,0 +1,102 @@ +import hashlib +import sqlite3 +import sys +import unittest +from pathlib import Path +from tempfile import TemporaryDirectory + + +# Ensure "src/" is importable when running tests from repo root. +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class TestRealtimeSyncTableCreation(unittest.TestCase): + def _touch_sqlite(self, path: Path) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + conn = sqlite3.connect(str(path)) + try: + # Ensure a valid sqlite file is created. 
+ conn.execute("PRAGMA user_version = 1") + conn.commit() + finally: + conn.close() + + def test_ensure_creates_msg_table_and_indexes_in_message_db(self): + from wechat_decrypt_tool.routers import chat as chat_router + + with TemporaryDirectory() as td: + account_dir = Path(td) + self._touch_sqlite(account_dir / "message_0.db") + + username = "wxid_foo" + md5_hex = hashlib.md5(username.encode("utf-8")).hexdigest() + expected_table = f"Msg_{md5_hex}" + + db_path, table_name = chat_router._ensure_decrypted_message_table(account_dir, username) + self.assertEqual(table_name, expected_table) + self.assertEqual(db_path.name, "message_0.db") + + conn = sqlite3.connect(str(db_path)) + try: + r = conn.execute( + "SELECT 1 FROM sqlite_master WHERE type='table' AND lower(name)=lower(?)", + (expected_table,), + ).fetchone() + self.assertIsNotNone(r, "Msg_ table should be created") + + idx_names = [ + f"{expected_table}_SENDERID", + f"{expected_table}_SERVERID", + f"{expected_table}_SORTSEQ", + f"{expected_table}_TYPE_SEQ", + ] + for idx in idx_names: + r = conn.execute( + "SELECT 1 FROM sqlite_master WHERE type='index' AND lower(name)=lower(?)", + (idx,), + ).fetchone() + self.assertIsNotNone(r, f"Index {idx} should be created") + finally: + conn.close() + + def test_ensure_prefers_biz_message_for_official_accounts(self): + from wechat_decrypt_tool.routers import chat as chat_router + + with TemporaryDirectory() as td: + account_dir = Path(td) + self._touch_sqlite(account_dir / "message_0.db") + self._touch_sqlite(account_dir / "biz_message_0.db") + + username = "gh_12345" + db_path, _ = chat_router._ensure_decrypted_message_table(account_dir, username) + self.assertEqual(db_path.name, "biz_message_0.db") + + def test_bulk_ensure_creates_missing_tables(self): + from wechat_decrypt_tool.routers import chat as chat_router + + with TemporaryDirectory() as td: + account_dir = Path(td) + self._touch_sqlite(account_dir / "message_0.db") + + usernames = ["wxid_a", "wxid_b"] + 
table_map = chat_router._ensure_decrypted_message_tables(account_dir, usernames) + self.assertEqual(set(table_map.keys()), set(usernames)) + + conn = sqlite3.connect(str(account_dir / "message_0.db")) + try: + for u in usernames: + md5_hex = hashlib.md5(u.encode("utf-8")).hexdigest() + expected_table = f"Msg_{md5_hex}" + r = conn.execute( + "SELECT 1 FROM sqlite_master WHERE type='table' AND lower(name)=lower(?)", + (expected_table,), + ).fetchone() + self.assertIsNotNone(r, f"{expected_table} should be created for {u}") + finally: + conn.close() + + +if __name__ == "__main__": + unittest.main() + diff --git a/tests/test_sns_media.py b/tests/test_sns_media.py new file mode 100644 index 0000000..6bc2d38 --- /dev/null +++ b/tests/test_sns_media.py @@ -0,0 +1,180 @@ +import asyncio +import hashlib +import sys +import unittest +from pathlib import Path +from tempfile import TemporaryDirectory +from unittest import mock + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +from wechat_decrypt_tool import sns_media # noqa: E402 pylint: disable=wrong-import-position + + +class TestSnsMedia(unittest.TestCase): + def test_fix_sns_cdn_url_image_rewrites_150_and_appends_token(self): + u = "http://mmsns.qpic.cn/sns/abc/150" + out = sns_media.fix_sns_cdn_url(u, token="tkn", is_video=False) + self.assertEqual(out, "https://mmsns.qpic.cn/sns/abc/0?token=tkn&idx=1") + + u2 = "https://mmsns.qpic.cn/sns/abc/150?foo=bar" + out2 = sns_media.fix_sns_cdn_url(u2, token="tkn", is_video=False) + self.assertEqual(out2, "https://mmsns.qpic.cn/sns/abc/0?foo=bar&token=tkn&idx=1") + + def test_fix_sns_cdn_url_video_places_token_first(self): + u = "https://snsvideodownload.video.qq.com/abc.mp4?foo=1&bar=2" + out = sns_media.fix_sns_cdn_url(u, token="tkn", is_video=True) + self.assertEqual(out, "https://snsvideodownload.video.qq.com/abc.mp4?token=tkn&idx=1&foo=1&bar=2") + + def test_fix_sns_cdn_url_non_tencent_host_passthrough(self): + u = 
"http://example.com/a/150?x=1" + out = sns_media.fix_sns_cdn_url(u, token="tkn", is_video=False) + self.assertEqual(out, u) + + def test_maybe_decrypt_sns_video_file_xors_inplace(self): + # Build a fake MP4 header (ftyp at offset 4) and encrypt it by XORing with a keystream. + plain = b"\x00\x00\x00\x20ftypisom" + b"\x00" * 48 + ks = bytes(range(len(plain))) + enc = bytes([plain[i] ^ ks[i] for i in range(len(plain))]) + + with TemporaryDirectory() as td: + p = Path(td) / "v.mp4" + p.write_bytes(enc) + + with mock.patch("wechat_decrypt_tool.sns_media.weflow_wxisaac64_keystream", return_value=ks): + did = sns_media.maybe_decrypt_sns_video_file(p, key="1") + self.assertTrue(did) + self.assertEqual(p.read_bytes(), plain) + + # Second run should be a no-op because it already looks like a MP4. + did2 = sns_media.maybe_decrypt_sns_video_file(p, key="1") + self.assertFalse(did2) + + def test_try_fetch_and_decrypt_sns_image_remote_cache_hit(self): + with TemporaryDirectory() as td: + account_dir = Path(td) / "acc" + account_dir.mkdir(parents=True, exist_ok=True) + + url = "https://mmsns.qpic.cn/sns/test/0?token=tkn&idx=1" + key = "123" + fixed = sns_media.fix_sns_cdn_url(url, token="tkn", is_video=False) + digest = hashlib.md5(f"{fixed}|{key}".encode("utf-8", errors="ignore")).hexdigest() + + cache_dir = account_dir / "sns_remote_cache" / digest[:2] + cache_dir.mkdir(parents=True, exist_ok=True) + cache_path = cache_dir / f"{digest}.jpg" + + payload = b"\xff\xd8\xff\x00fakejpeg" + cache_path.write_bytes(payload) + + res = asyncio.run( + sns_media.try_fetch_and_decrypt_sns_image_remote( + account_dir=account_dir, + url=url, + key=key, + token="tkn", + use_cache=True, + ) + ) + self.assertIsNotNone(res) + assert res is not None + self.assertEqual(res.source, "remote-cache") + self.assertEqual(res.media_type, "image/jpeg") + self.assertEqual(res.payload, payload) + self.assertTrue(res.cache_path and res.cache_path.exists()) + + def 
test_try_fetch_and_decrypt_sns_image_remote_cache_upgrades_bin_extension(self): + with TemporaryDirectory() as td: + account_dir = Path(td) / "acc" + account_dir.mkdir(parents=True, exist_ok=True) + + url = "https://mmsns.qpic.cn/sns/test/0?token=tkn&idx=1" + key = "123" + fixed = sns_media.fix_sns_cdn_url(url, token="tkn", is_video=False) + digest = hashlib.md5(f"{fixed}|{key}".encode("utf-8", errors="ignore")).hexdigest() + + cache_dir = account_dir / "sns_remote_cache" / digest[:2] + cache_dir.mkdir(parents=True, exist_ok=True) + bin_path = cache_dir / f"{digest}.bin" + png_payload = b"\x89PNG\r\n\x1a\n" + b"fakepng" + bin_path.write_bytes(png_payload) + + res = asyncio.run( + sns_media.try_fetch_and_decrypt_sns_image_remote( + account_dir=account_dir, + url=url, + key=key, + token="tkn", + use_cache=True, + ) + ) + self.assertIsNotNone(res) + assert res is not None + self.assertEqual(res.source, "remote-cache") + self.assertEqual(res.media_type, "image/png") + self.assertTrue(res.cache_path and res.cache_path.suffix.lower() == ".png") + self.assertTrue(res.cache_path and res.cache_path.exists()) + self.assertFalse(bin_path.exists()) + + def test_try_fetch_and_decrypt_sns_image_remote_decrypts_when_needed(self): + raw = b"\x01\x02\x03\x04not_an_image" + decoded = b"\x89PNG\r\n\x1a\n" + b"decoded" + + async def fake_download(_url: str): + return raw, "image/jpeg", "1" + + with TemporaryDirectory() as td: + account_dir = Path(td) / "acc" + account_dir.mkdir(parents=True, exist_ok=True) + + with mock.patch("wechat_decrypt_tool.sns_media._download_sns_remote_bytes", side_effect=fake_download): + with mock.patch("wechat_decrypt_tool.sns_media._wcdb_decrypt_sns_image", return_value=decoded): + res = asyncio.run( + sns_media.try_fetch_and_decrypt_sns_image_remote( + account_dir=account_dir, + url="https://mmsns.qpic.cn/sns/test/0", + key="123", + token="tkn", + use_cache=False, + ) + ) + + self.assertIsNotNone(res) + assert res is not None + 
self.assertEqual(res.media_type, "image/png") + self.assertEqual(res.source, "remote-decrypt") + self.assertEqual(res.x_enc, "1") + self.assertEqual(res.payload, decoded) + + def test_try_fetch_and_decrypt_sns_image_remote_decrypt_failure_returns_none(self): + raw = b"\x01\x02\x03\x04not_an_image" + decoded_bad = b"\x00\x00\x00\x00still_bad" + + async def fake_download(_url: str): + return raw, "image/jpeg", "1" + + with TemporaryDirectory() as td: + account_dir = Path(td) / "acc" + account_dir.mkdir(parents=True, exist_ok=True) + + with mock.patch("wechat_decrypt_tool.sns_media._download_sns_remote_bytes", side_effect=fake_download): + with mock.patch("wechat_decrypt_tool.sns_media._wcdb_decrypt_sns_image", return_value=decoded_bad): + res = asyncio.run( + sns_media.try_fetch_and_decrypt_sns_image_remote( + account_dir=account_dir, + url="https://mmsns.qpic.cn/sns/test/0", + key="123", + token="tkn", + use_cache=False, + ) + ) + + self.assertIsNone(res) + + +if __name__ == "__main__": + unittest.main() + diff --git a/tests/test_sns_parse_timeline_xml_sanitization.py b/tests/test_sns_parse_timeline_xml_sanitization.py new file mode 100644 index 0000000..40e5b48 --- /dev/null +++ b/tests/test_sns_parse_timeline_xml_sanitization.py @@ -0,0 +1,72 @@ +import sys +import unittest +from pathlib import Path + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +from wechat_decrypt_tool.routers.sns import _parse_timeline_xml # noqa: E402 pylint: disable=wrong-import-position + + +class TestSnsParseTimelineXmlSanitization(unittest.TestCase): + def test_external_share_type5_parses_with_raw_ampersands(self): + xml = ( + "" + "wxid_2az0agby0baa22" + "1771500773" + "让我看看它和suno有什么区别" + "" + "5" + "Google Gemini 上线了AI音乐生成功能" + "https://b23.tv/lVa1lpm?share_medium=android&share_source=weixin_moments" + "" + "哔哩哔哩" + "" + "4m1" + "https://b23.tv/lVa1lpm?share_medium=android&share_source=weixin_moments" + "http://shmmsns.qpic.cn/mmsns/test/150" + 
"" + "" + ) + + out = _parse_timeline_xml(xml, "fallback") + self.assertEqual(out.get("type"), 5) + self.assertEqual(out.get("title"), "Google Gemini 上线了AI音乐生成功能") + self.assertEqual(out.get("sourceName"), "哔哩哔哩") + self.assertIn("&share_source=weixin_moments", str(out.get("contentUrl") or "")) + self.assertTrue(isinstance(out.get("media"), list) and len(out.get("media") or []) == 1) + + def test_external_share_type42_parses_with_raw_ampersands(self): + xml = ( + "" + "wxid_all914izz7w222" + "1771504315" + "2026 恭喜自己 也恭喜你" + "" + "42" + "恭喜自己" + "成龙/周华健" + "https://i.y.qq.com/v8/playsong.html?platform=11&appshare=android_qq" + "" + "QQ音乐" + "" + "5m2" + "http://c6.y.qq.com/rsc/fcgi-bin/fcg_pyq_play.fcg?songmid=002kNnX90keHGW&fromtag=46" + "http://szmmsns.qpic.cn/mmsns/test/0" + "" + "" + ) + + out = _parse_timeline_xml(xml, "fallback") + self.assertEqual(out.get("type"), 42) + self.assertEqual(out.get("title"), "恭喜自己") + self.assertEqual(out.get("sourceName"), "QQ音乐") + self.assertIn("&appshare=android_qq", str(out.get("contentUrl") or "")) + self.assertTrue(isinstance(out.get("media"), list) and len(out.get("media") or []) == 1) + + +if __name__ == "__main__": + unittest.main() + diff --git a/tests/test_sns_stage_server_timing.py b/tests/test_sns_stage_server_timing.py new file mode 100644 index 0000000..1fd2a13 --- /dev/null +++ b/tests/test_sns_stage_server_timing.py @@ -0,0 +1,40 @@ +import sys +import unittest +from pathlib import Path + +from starlette.responses import Response + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +from wechat_decrypt_tool.sns_stage_timing import add_sns_stage_timing_headers # noqa: E402 pylint: disable=wrong-import-position + + +class TestSnsStageServerTiming(unittest.TestCase): + def test_injects_server_timing_when_missing(self): + resp = Response(content=b"ok") + add_sns_stage_timing_headers(resp.headers, source="proxy") + st = str(resp.headers.get("Server-Timing") or "") + 
self.assertIn("sns_source_", st) + self.assertIn("proxy", st) + + def test_appends_when_upstream_server_timing_exists(self): + resp = Response(content=b"ok") + resp.headers["Server-Timing"] = "edge;dur=1" + add_sns_stage_timing_headers(resp.headers, source="proxy") + st = str(resp.headers.get("Server-Timing") or "") + self.assertIn("edge;dur=1", st) + self.assertIn("sns_source_", st) + + def test_does_not_duplicate_existing_sns_source_metric(self): + resp = Response(content=b"ok") + resp.headers["Server-Timing"] = 'sns_source_proxy;dur=0;desc="proxy"' + add_sns_stage_timing_headers(resp.headers, source="proxy") + st = str(resp.headers.get("Server-Timing") or "") + self.assertEqual(st.count("sns_source_"), 1) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_transfer_postprocess.py b/tests/test_transfer_postprocess.py new file mode 100644 index 0000000..3f6fea4 --- /dev/null +++ b/tests/test_transfer_postprocess.py @@ -0,0 +1,129 @@ +import sys +import unittest +from pathlib import Path + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +from wechat_decrypt_tool.routers import chat as chat_router + + +class TestTransferPostprocess(unittest.TestCase): + def test_backfilled_pending_and_received_confirmation_have_expected_titles(self): + transfer_id = "1000050001202601152035503031545" + merged = [ + { + "id": "message_0:Msg_x:60", + "renderType": "transfer", + "paySubType": "1", + "transferId": transfer_id, + "amount": "¥100.00", + "createTime": 1768463200, + "isSent": False, + "transferStatus": "", + }, + { + "id": "message_0:Msg_x:65", + "renderType": "transfer", + "paySubType": "3", + "transferId": transfer_id, + "amount": "¥100.00", + "createTime": 1768463246, + "isSent": True, + # Pre-inferred value (may be "已被接收") should be corrected by postprocess. 
+ "transferStatus": "已被接收", + }, + ] + + chat_router._postprocess_transfer_messages(merged) + + self.assertEqual(merged[0].get("paySubType"), "3") + self.assertEqual(merged[0].get("transferStatus"), "已被接收") + self.assertEqual(merged[1].get("paySubType"), "3") + self.assertEqual(merged[1].get("transferStatus"), "已收款") + + def test_received_message_without_pending_is_left_unchanged(self): + merged = [ + { + "id": "message_0:Msg_x:65", + "renderType": "transfer", + "paySubType": "3", + "transferId": "t1", + "amount": "¥100.00", + "createTime": 1, + "isSent": True, + "transferStatus": "已被接收", + } + ] + + chat_router._postprocess_transfer_messages(merged) + + self.assertEqual(merged[0].get("transferStatus"), "已被接收") + + def test_pending_transfer_marked_expired_by_system_message(self): + merged = [ + { + "id": "message_0:Msg_x:100", + "renderType": "transfer", + "paySubType": "1", + "transferId": "t-expired-1", + "amount": "¥500.00", + "createTime": 1770742598, + "isSent": True, + "transferStatus": "转账", + }, + { + "id": "message_0:Msg_x:101", + "renderType": "system", + "type": 10000, + "createTime": 1770829000, + "content": "收款方24小时内未接收你的转账,已过期", + }, + ] + + chat_router._postprocess_transfer_messages(merged) + + self.assertEqual(merged[0].get("paySubType"), "10") + self.assertEqual(merged[0].get("transferStatus"), "已过期") + + def test_expired_matching_wins_over_amount_time_received_fallback(self): + merged = [ + { + "id": "message_0:Msg_x:200", + "renderType": "transfer", + "paySubType": "1", + "transferId": "t-expired-2", + "amount": "¥500.00", + "createTime": 1770742598, + "isSent": True, + "transferStatus": "", + }, + { + "id": "message_0:Msg_x:201", + "renderType": "transfer", + "paySubType": "3", + "transferId": "t-other", + "amount": "¥500.00", + "createTime": 1770828800, + "isSent": False, + "transferStatus": "已收款", + }, + { + "id": "message_0:Msg_x:202", + "renderType": "system", + "type": 10000, + "createTime": 1770829000, + "content": "收款方24小时内未接收你的转账,已过期", + 
}, + ] + + chat_router._postprocess_transfer_messages(merged) + + self.assertEqual(merged[0].get("paySubType"), "10") + self.assertEqual(merged[0].get("transferStatus"), "已过期") + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_transfer_status_text.py b/tests/test_transfer_status_text.py new file mode 100644 index 0000000..477ac03 --- /dev/null +++ b/tests/test_transfer_status_text.py @@ -0,0 +1,63 @@ +import sys +import unittest +from pathlib import Path + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + +from wechat_decrypt_tool.chat_helpers import _infer_transfer_status_text + + +class TestTransferStatusText(unittest.TestCase): + def test_paysubtype_3_sent_side(self): + status = _infer_transfer_status_text( + is_sent=True, + paysubtype="3", + receivestatus="", + sendertitle="", + receivertitle="", + senderdes="", + receiverdes="", + ) + self.assertEqual(status, "已被接收") + + def test_paysubtype_3_received_side(self): + status = _infer_transfer_status_text( + is_sent=False, + paysubtype="3", + receivestatus="", + sendertitle="", + receivertitle="", + senderdes="", + receiverdes="", + ) + self.assertEqual(status, "已收款") + + def test_receivestatus_1_sent_side(self): + status = _infer_transfer_status_text( + is_sent=True, + paysubtype="1", + receivestatus="1", + sendertitle="", + receivertitle="", + senderdes="", + receiverdes="", + ) + self.assertEqual(status, "已被接收") + + def test_receivestatus_1_received_side(self): + status = _infer_transfer_status_text( + is_sent=False, + paysubtype="1", + receivestatus="1", + sendertitle="", + receivertitle="", + senderdes="", + receiverdes="", + ) + self.assertEqual(status, "已收款") + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_wrapped_bento_summary_top_emoji.py b/tests/test_wrapped_bento_summary_top_emoji.py new file mode 100644 index 0000000..8876bc3 --- /dev/null +++ b/tests/test_wrapped_bento_summary_top_emoji.py @@ -0,0 +1,116 @@ +import 
sys +import unittest +from pathlib import Path + +# Ensure "src/" is importable when running tests from repo root. +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class TestWrappedBentoSummaryTopEmoji(unittest.TestCase): + def _build_sources(self, *, emoji_data): + # Keep sources minimal: card_07_bento_summary only needs a handful of keys. + overview = {"data": {"totalMessages": 100, "addedFriends": 0}} + heatmap = {"data": {"totalMessages": 100, "weekdayLabels": [], "hourLabels": [], "matrix": []}} + message_chars = {"data": {"sentChars": 0}} + reply_speed = {"data": {}} + monthly = {"data": {"months": []}} + emoji = {"data": emoji_data} + return overview, heatmap, message_chars, reply_speed, monthly, emoji + + def test_top_emoji_prefers_wechat_when_count_higher(self): + from wechat_decrypt_tool.wrapped.cards.card_07_bento_summary import build_card_07_bento_summary_from_sources + + overview, heatmap, message_chars, reply_speed, monthly, emoji = self._build_sources( + emoji_data={ + "topWechatEmojis": [{"key": "[微笑]", "count": 5, "assetPath": "/wxemoji/Expression_1@2x.png"}], + "topTextEmojis": [], + "topUnicodeEmojis": [{"emoji": "🙂", "count": 2}], + } + ) + card = build_card_07_bento_summary_from_sources( + year=2025, + overview=overview, + heatmap=heatmap, + message_chars=message_chars, + reply_speed=reply_speed, + monthly=monthly, + emoji=emoji, + ) + snap = card["data"]["snapshot"] + self.assertEqual(snap["topEmoji"]["kind"], "wechat") + self.assertEqual(snap["topEmoji"]["key"], "[微笑]") + self.assertEqual(snap["topEmoji"]["count"], 5) + self.assertTrue(str(snap["topEmoji"]["assetPath"]).startswith("/wxemoji/")) + + def test_top_emoji_prefers_unicode_when_count_higher(self): + from wechat_decrypt_tool.wrapped.cards.card_07_bento_summary import build_card_07_bento_summary_from_sources + + overview, heatmap, message_chars, reply_speed, monthly, emoji = self._build_sources( + emoji_data={ + "topWechatEmojis": [{"key": 
"[微笑]", "count": 5, "assetPath": "/wxemoji/Expression_1@2x.png"}], + "topTextEmojis": [], + "topUnicodeEmojis": [{"emoji": "🙂", "count": 9}], + } + ) + card = build_card_07_bento_summary_from_sources( + year=2025, + overview=overview, + heatmap=heatmap, + message_chars=message_chars, + reply_speed=reply_speed, + monthly=monthly, + emoji=emoji, + ) + snap = card["data"]["snapshot"] + self.assertEqual(snap["topEmoji"]["kind"], "unicode") + self.assertEqual(snap["topEmoji"]["emoji"], "🙂") + self.assertEqual(snap["topEmoji"]["count"], 9) + + def test_top_emoji_includes_top_text_emojis(self): + from wechat_decrypt_tool.wrapped.cards.card_07_bento_summary import build_card_07_bento_summary_from_sources + + overview, heatmap, message_chars, reply_speed, monthly, emoji = self._build_sources( + emoji_data={ + "topWechatEmojis": [{"key": "[表情1]", "count": 2, "assetPath": "/wxemoji/Expression_1@2x.png"}], + "topTextEmojis": [{"key": "[嘿哈]", "count": 4, "assetPath": "/wxemoji/Expression_99@2x.png"}], + "topUnicodeEmojis": [{"emoji": "🙂", "count": 3}], + } + ) + card = build_card_07_bento_summary_from_sources( + year=2025, + overview=overview, + heatmap=heatmap, + message_chars=message_chars, + reply_speed=reply_speed, + monthly=monthly, + emoji=emoji, + ) + snap = card["data"]["snapshot"] + self.assertEqual(snap["topEmoji"]["kind"], "wechat") + self.assertEqual(snap["topEmoji"]["key"], "[嘿哈]") + self.assertEqual(snap["topEmoji"]["count"], 4) + self.assertTrue(str(snap["topEmoji"]["assetPath"]).endswith("Expression_99@2x.png")) + + def test_top_emoji_none_when_no_emoji_stats(self): + from wechat_decrypt_tool.wrapped.cards.card_07_bento_summary import build_card_07_bento_summary_from_sources + + overview, heatmap, message_chars, reply_speed, monthly, emoji = self._build_sources( + emoji_data={"topWechatEmojis": [], "topTextEmojis": [], "topUnicodeEmojis": []} + ) + card = build_card_07_bento_summary_from_sources( + year=2025, + overview=overview, + heatmap=heatmap, + 
message_chars=message_chars, + reply_speed=reply_speed, + monthly=monthly, + emoji=emoji, + ) + snap = card["data"]["snapshot"] + self.assertIsNone(snap.get("topEmoji")) + + +if __name__ == "__main__": + unittest.main() + diff --git a/tests/test_wrapped_emoji_universe.py b/tests/test_wrapped_emoji_universe.py new file mode 100644 index 0000000..867f0f6 --- /dev/null +++ b/tests/test_wrapped_emoji_universe.py @@ -0,0 +1,773 @@ +import hashlib +import sqlite3 +import sys +import unittest +from datetime import datetime +from pathlib import Path +from tempfile import TemporaryDirectory + + +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class TestWrappedEmojiUniverse(unittest.TestCase): + def _ts(self, y: int, m: int, d: int, h: int = 0, mi: int = 0, s: int = 0) -> int: + return int(datetime(y, m, d, h, mi, s).timestamp()) + + def _seed_contact_db(self, path: Path, *, account: str, usernames: list[str]) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE contact ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + """ + CREATE TABLE stranger ( + username TEXT, + remark TEXT, + nick_name TEXT, + alias TEXT, + local_type INTEGER, + verify_flag INTEGER, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (account, "", "我", "", 1, 0, "", ""), + ) + for idx, username in enumerate(usernames): + conn.execute( + "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (username, "", f"好友{idx + 1}", "", 1, 0, "", ""), + ) + conn.commit() + finally: + conn.close() + + def _seed_session_db(self, path: Path, *, usernames: list[str]) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE SessionTable ( + username TEXT, + is_hidden INTEGER, + sort_timestamp INTEGER 
+ ) + """ + ) + for username in usernames: + conn.execute("INSERT INTO SessionTable VALUES (?, ?, ?)", (username, 0, 1735689600)) + conn.commit() + finally: + conn.close() + + def _seed_message_db( + self, + path: Path, + *, + account: str, + username: str, + rows: list[dict[str, object]], + ) -> None: + table_name = f"msg_{hashlib.md5(username.encode('utf-8')).hexdigest()}" + conn = sqlite3.connect(str(path)) + try: + conn.execute("CREATE TABLE Name2Id (rowid INTEGER PRIMARY KEY, user_name TEXT)") + conn.execute("INSERT INTO Name2Id(rowid, user_name) VALUES (?, ?)", (1, account)) + conn.execute("INSERT INTO Name2Id(rowid, user_name) VALUES (?, ?)", (2, username)) + conn.execute( + f""" + CREATE TABLE {table_name} ( + local_id INTEGER, + server_id INTEGER, + local_type INTEGER, + sort_seq INTEGER, + real_sender_id INTEGER, + create_time INTEGER, + message_content TEXT, + compress_content BLOB, + packed_info_data BLOB + ) + """ + ) + for row in rows: + conn.execute( + f""" + INSERT INTO {table_name} + (local_id, server_id, local_type, sort_seq, real_sender_id, create_time, message_content, compress_content, packed_info_data) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ """, + ( + int(row.get("local_id", 0)), + int(row.get("server_id", 0)), + int(row.get("local_type", 0)), + int(row.get("sort_seq", row.get("local_id", 0))), + int(row.get("real_sender_id", 1)), + int(row.get("create_time", 0)), + str(row.get("message_content", "")), + row.get("compress_content"), + row.get("packed_info_data"), + ), + ) + conn.commit() + finally: + conn.close() + + def _seed_index_db(self, path: Path, *, rows: list[dict[str, object]]) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE message_fts ( + text TEXT, + username TEXT, + render_type TEXT, + create_time INTEGER, + sort_seq INTEGER, + local_id INTEGER, + server_id INTEGER, + local_type INTEGER, + db_stem TEXT, + table_name TEXT, + sender_username TEXT, + is_hidden INTEGER, + is_official INTEGER + ) + """ + ) + for row in rows: + conn.execute( + """ + INSERT INTO message_fts ( + text, username, render_type, create_time, sort_seq, local_id, server_id, local_type, + db_stem, table_name, sender_username, is_hidden, is_official + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ """, + ( + str(row.get("text", "")), + str(row.get("username", "")), + str(row.get("render_type", "")), + int(row.get("create_time", 0)), + int(row.get("sort_seq", 0)), + int(row.get("local_id", 0)), + int(row.get("server_id", 0)), + int(row.get("local_type", 0)), + str(row.get("db_stem", "message_0")), + str(row.get("table_name", "")), + str(row.get("sender_username", "")), + int(row.get("is_hidden", 0)), + int(row.get("is_official", 0)), + ), + ) + conn.commit() + finally: + conn.close() + + def _seed_resource_db( + self, + path: Path, + *, + username: str, + md5: str, + server_id: int, + local_id: int, + create_time: int, + ) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute("CREATE TABLE ChatName2Id (user_name TEXT)") + conn.execute("INSERT INTO ChatName2Id (rowid, user_name) VALUES (?, ?)", (7, username)) + conn.execute( + """ + CREATE TABLE MessageResourceInfo ( + message_id INTEGER PRIMARY KEY AUTOINCREMENT, + message_svr_id INTEGER, + chat_id INTEGER, + message_local_type INTEGER, + packed_info BLOB, + message_local_id INTEGER, + message_create_time INTEGER + ) + """ + ) + packed = f"/tmp/{md5}.dat".encode("utf-8") + conn.execute( + """ + INSERT INTO MessageResourceInfo + (message_svr_id, chat_id, message_local_type, packed_info, message_local_id, message_create_time) + VALUES (?, ?, ?, ?, ?, ?) 
+ """, + (int(server_id), 7, 47, packed, int(local_id), int(create_time)), + ) + conn.commit() + finally: + conn.close() + + def test_only_sticker_messages_outputs_core_stats(self): + from wechat_decrypt_tool.wrapped.cards.card_04_emoji_universe import compute_emoji_universe_stats + + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_me" + friend = "wxid_friend_a" + account_dir = root / account + account_dir.mkdir(parents=True, exist_ok=True) + + self._seed_contact_db(account_dir / "contact.db", account=account, usernames=[friend]) + self._seed_session_db(account_dir / "session.db", usernames=[friend]) + + md5_a = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + md5_b = "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" + rows = [ + { + "local_id": 1, + "server_id": 1001, + "local_type": 47, + "create_time": self._ts(2025, 1, 1, 10, 5, 0), + "message_content": f'', + }, + { + "local_id": 2, + "server_id": 1002, + "local_type": 47, + "create_time": self._ts(2025, 1, 1, 10, 30, 0), + "message_content": f'', + }, + { + "local_id": 3, + "server_id": 1003, + "local_type": 47, + "create_time": self._ts(2025, 1, 2, 22, 10, 0), + "message_content": f'', + }, + ] + self._seed_message_db(account_dir / "message_0.db", account=account, username=friend, rows=rows) + + table_name = f"msg_{hashlib.md5(friend.encode('utf-8')).hexdigest()}" + fts_rows = [] + for row in rows: + fts_rows.append( + { + "text": "[表情]", + "username": friend, + "render_type": "emoji", + "create_time": row["create_time"], + "sort_seq": row["local_id"], + "local_id": row["local_id"], + "server_id": row["server_id"], + "local_type": 47, + "db_stem": "message_0", + "table_name": table_name, + "sender_username": account, + } + ) + self._seed_index_db(account_dir / "chat_search_index.db", rows=fts_rows) + + data = compute_emoji_universe_stats(account_dir=account_dir, year=2025) + + self.assertTrue(data["settings"]["usedIndex"]) + self.assertEqual(data["sentStickerCount"], 3) + self.assertEqual(data["peakHour"], 10) + 
self.assertIsNotNone(data["peakWeekday"]) + self.assertEqual(data["topBattlePartner"]["username"], friend) + self.assertEqual(data["topBattlePartner"]["stickerCount"], 3) + self.assertEqual(data["topBattlePartner"]["maskedName"], data["topBattlePartner"]["displayName"]) + self.assertEqual(data["topStickers"][0]["md5"], md5_a) + self.assertEqual(data["topStickers"][0]["count"], 2) + self.assertTrue(str(data["topStickers"][0].get("sampleDisplayName") or "").strip()) + self.assertTrue(str(data["topStickers"][0].get("sampleAvatarUrl") or "").startswith("/api/chat/avatar")) + + def test_fallback_to_resource_md5_when_xml_missing(self): + from wechat_decrypt_tool.wrapped.cards.card_04_emoji_universe import compute_emoji_universe_stats + + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_me" + friend = "wxid_friend_b" + account_dir = root / account + account_dir.mkdir(parents=True, exist_ok=True) + + self._seed_contact_db(account_dir / "contact.db", account=account, usernames=[friend]) + self._seed_session_db(account_dir / "session.db", usernames=[friend]) + + ts = self._ts(2025, 3, 8, 21, 0, 0) + rows = [ + { + "local_id": 11, + "server_id": 220011, + "local_type": 47, + "create_time": ts, + "message_content": '', + } + ] + self._seed_message_db(account_dir / "message_0.db", account=account, username=friend, rows=rows) + + md5_fallback = "cccccccccccccccccccccccccccccccc" + self._seed_resource_db( + account_dir / "message_resource.db", + username=friend, + md5=md5_fallback, + server_id=220011, + local_id=11, + create_time=ts, + ) + + data = compute_emoji_universe_stats(account_dir=account_dir, year=2025) + + self.assertFalse(data["settings"]["usedIndex"]) + self.assertEqual(data["sentStickerCount"], 1) + self.assertEqual(data["topStickers"][0]["md5"], md5_fallback) + self.assertEqual(data["topStickers"][0]["count"], 1) + + def test_text_emoji_mapping_from_wechat_emojis_ts(self): + from wechat_decrypt_tool.wrapped.cards.card_04_emoji_universe import 
compute_emoji_universe_stats + + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_me" + friend = "wxid_friend_c" + account_dir = root / account + account_dir.mkdir(parents=True, exist_ok=True) + + self._seed_contact_db(account_dir / "contact.db", account=account, usernames=[friend]) + self._seed_session_db(account_dir / "session.db", usernames=[friend]) + + table_name = f"msg_{hashlib.md5(friend.encode('utf-8')).hexdigest()}" + fts_rows = [ + { + "text": "早上好[微笑][微笑]🙂🙂", + "username": friend, + "render_type": "text", + "create_time": self._ts(2025, 4, 1, 9, 0, 0), + "local_id": 1, + "server_id": 901, + "local_type": 1, + "db_stem": "message_0", + "table_name": table_name, + "sender_username": account, + }, + { + "text": "晚上见[微笑][发呆]😂", + "username": friend, + "render_type": "text", + "create_time": self._ts(2025, 4, 1, 22, 0, 0), + "local_id": 2, + "server_id": 902, + "local_type": 1, + "db_stem": "message_0", + "table_name": table_name, + "sender_username": account, + }, + ] + self._seed_index_db(account_dir / "chat_search_index.db", rows=fts_rows) + + data = compute_emoji_universe_stats(account_dir=account_dir, year=2025) + self.assertTrue(data["settings"]["usedIndex"]) + self.assertGreaterEqual(len(data["topTextEmojis"]), 1) + self.assertEqual(data["topTextEmojis"][0]["key"], "[微笑]") + self.assertEqual(data["topTextEmojis"][0]["count"], 3) + self.assertTrue(data["topTextEmojis"][0]["assetPath"].endswith("Expression_1@2x.png")) + self.assertGreaterEqual(len(data["topUnicodeEmojis"]), 1) + self.assertEqual(data["topUnicodeEmojis"][0]["emoji"], "🙂") + self.assertEqual(data["topUnicodeEmojis"][0]["count"], 2) + + def test_wechat_builtin_emoji_from_packed_info_data(self): + from wechat_decrypt_tool.wrapped.cards.card_04_emoji_universe import compute_emoji_universe_stats + + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_me" + friend = "wxid_friend_e" + account_dir = root / account + account_dir.mkdir(parents=True, 
exist_ok=True) + + self._seed_contact_db(account_dir / "contact.db", account=account, usernames=[friend]) + self._seed_session_db(account_dir / "session.db", usernames=[friend]) + + # packed_info_data protobuf varints: + # 08 04 => field#1=4 + # 10 33 => field#2=51 (Expression_51@2x) + rows = [ + { + "local_id": 1, + "server_id": 501, + "local_type": 47, + "create_time": self._ts(2025, 7, 1, 10, 0, 0), + "message_content": "binary_emoji_payload_a", + "packed_info_data": bytes.fromhex("08041033"), + }, + { + "local_id": 2, + "server_id": 502, + "local_type": 47, + "create_time": self._ts(2025, 7, 1, 10, 1, 0), + "message_content": "binary_emoji_payload_b", + "packed_info_data": bytes.fromhex("08041033"), + }, + { + "local_id": 3, + "server_id": 503, + "local_type": 47, + "create_time": self._ts(2025, 7, 1, 11, 0, 0), + "message_content": "binary_emoji_payload_c", + "packed_info_data": bytes.fromhex("0804104a"), + }, + ] + self._seed_message_db(account_dir / "message_0.db", account=account, username=friend, rows=rows) + + data = compute_emoji_universe_stats(account_dir=account_dir, year=2025) + + self.assertFalse(data["settings"]["usedIndex"]) + self.assertEqual(data["sentStickerCount"], 3) + self.assertGreaterEqual(len(data["topWechatEmojis"]), 1) + self.assertEqual(data["topWechatEmojis"][0]["id"], 51) + self.assertEqual(data["topWechatEmojis"][0]["count"], 2) + self.assertTrue(data["topWechatEmojis"][0]["assetPath"].endswith("Expression_51@2x.png")) + self.assertGreaterEqual(len(data["topStickers"]), 1) + self.assertEqual(data["topStickers"][0]["emojiId"], 51) + self.assertEqual(data["topStickers"][0]["count"], 2) + self.assertTrue(str(data["topStickers"][0].get("emojiAssetPath") or "").endswith("Expression_51@2x.png")) + + def test_index_counts_only_sent_messages(self): + from wechat_decrypt_tool.wrapped.cards.card_04_emoji_universe import compute_emoji_universe_stats + + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_me" + friend = 
"wxid_friend_sent_only" + account_dir = root / account + account_dir.mkdir(parents=True, exist_ok=True) + + self._seed_contact_db(account_dir / "contact.db", account=account, usernames=[friend]) + self._seed_session_db(account_dir / "session.db", usernames=[friend]) + + rows = [ + { + "text": "[ 微 笑 ]", + "username": friend, + "render_type": "text", + "create_time": self._ts(2025, 6, 2, 9, 0, 0), + "local_id": 101, + "server_id": 4001, + "local_type": 1, + "table_name": "msg_dummy", + "sender_username": account, + }, + { + "text": "[ 发 呆 ]", + "username": friend, + "render_type": "text", + "create_time": self._ts(2025, 6, 2, 9, 1, 0), + "local_id": 102, + "server_id": 4002, + "local_type": 1, + "table_name": "msg_dummy", + "sender_username": friend, + }, + { + "text": "[表情]", + "username": friend, + "render_type": "emoji", + "create_time": self._ts(2025, 6, 2, 9, 2, 0), + "local_id": 201, + "server_id": 5001, + "local_type": 47, + "table_name": "msg_dummy", + "sender_username": account, + }, + { + "text": "[表情]", + "username": friend, + "render_type": "emoji", + "create_time": self._ts(2025, 6, 2, 9, 3, 0), + "local_id": 202, + "server_id": 5002, + "local_type": 47, + "table_name": "msg_dummy", + "sender_username": friend, + }, + ] + self._seed_index_db(account_dir / "chat_search_index.db", rows=rows) + + data = compute_emoji_universe_stats(account_dir=account_dir, year=2025) + self.assertTrue(data["settings"]["usedIndex"]) + + self.assertEqual(data["sentStickerCount"], 1) + + keys = {x.get("key") for x in data.get("topTextEmojis") or []} + self.assertIn("[微笑]", keys) + self.assertNotIn("[发呆]", keys) + + def test_raw_db_counts_only_sent_messages(self): + from wechat_decrypt_tool.wrapped.cards.card_04_emoji_universe import compute_emoji_universe_stats + + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_me" + friend = "wxid_friend_raw_dir" + account_dir = root / account + account_dir.mkdir(parents=True, exist_ok=True) + + 
self._seed_contact_db(account_dir / "contact.db", account=account, usernames=[friend]) + self._seed_session_db(account_dir / "session.db", usernames=[friend]) + + rows = [ + { + "local_id": 1, + "server_id": 1001, + "local_type": 1, + "real_sender_id": 1, + "create_time": self._ts(2025, 7, 1, 8, 0, 0), + "message_content": "/::B", + }, + { + "local_id": 2, + "server_id": 1002, + "local_type": 1, + "real_sender_id": 2, + "create_time": self._ts(2025, 7, 1, 8, 1, 0), + "message_content": "/::B", + }, + { + "local_id": 3, + "server_id": 1101, + "local_type": 47, + "real_sender_id": 1, + "create_time": self._ts(2025, 7, 1, 9, 0, 0), + "message_content": "binary_emoji_payload_a", + "packed_info_data": bytes.fromhex("08031033"), + }, + { + "local_id": 4, + "server_id": 1102, + "local_type": 47, + "real_sender_id": 2, + "create_time": self._ts(2025, 7, 1, 9, 1, 0), + "message_content": "binary_emoji_payload_b", + "packed_info_data": bytes.fromhex("08031033"), + }, + ] + self._seed_message_db(account_dir / "message_0.db", account=account, username=friend, rows=rows) + + data = compute_emoji_universe_stats(account_dir=account_dir, year=2025) + + self.assertFalse(data["settings"]["usedIndex"]) + self.assertEqual(data["sentStickerCount"], 1) + self.assertEqual(data["topWechatEmojis"][0]["id"], 51) + self.assertEqual(data["topWechatEmojis"][0]["count"], 1) + + self.assertGreaterEqual(len(data["topTextEmojis"]), 1) + self.assertEqual(data["topTextEmojis"][0]["key"], "[色]") + self.assertEqual(data["topTextEmojis"][0]["count"], 1) + self.assertTrue(data["topTextEmojis"][0]["assetPath"].endswith("Expression_3@2x.png")) + + def test_new_and_revived_sticker_metrics(self): + from wechat_decrypt_tool.wrapped.cards.card_04_emoji_universe import compute_emoji_universe_stats + + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_me" + friend = "wxid_friend_new_revived" + account_dir = root / account + account_dir.mkdir(parents=True, exist_ok=True) + + 
self._seed_contact_db(account_dir / "contact.db", account=account, usernames=[friend]) + self._seed_session_db(account_dir / "session.db", usernames=[friend]) + + md5_revived = "dddddddddddddddddddddddddddddddd" + md5_recent = "eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee" + md5_new = "ffffffffffffffffffffffffffffffff" + rows = [ + { + "local_id": 1, + "server_id": 5001, + "local_type": 47, + "create_time": self._ts(2024, 1, 1, 9, 0, 0), + "message_content": f'', + }, + { + "local_id": 2, + "server_id": 5002, + "local_type": 47, + "create_time": self._ts(2024, 12, 28, 10, 0, 0), + "message_content": f'', + }, + { + "local_id": 3, + "server_id": 5003, + "local_type": 47, + "create_time": self._ts(2025, 1, 5, 11, 0, 0), + "message_content": f'', + }, + { + "local_id": 4, + "server_id": 5004, + "local_type": 47, + "create_time": self._ts(2025, 3, 15, 12, 0, 0), + "message_content": f'', + }, + { + "local_id": 5, + "server_id": 5005, + "local_type": 47, + "create_time": self._ts(2025, 5, 10, 13, 0, 0), + "message_content": f'', + }, + ] + self._seed_message_db(account_dir / "message_0.db", account=account, username=friend, rows=rows) + + data = compute_emoji_universe_stats(account_dir=account_dir, year=2025) + + self.assertEqual(data["sentStickerCount"], 3) + self.assertEqual(data["uniqueStickerTypeCount"], 3) + self.assertEqual(data["newStickerCountThisYear"], 1) + self.assertEqual(data["revivedStickerCount"], 1) + self.assertEqual(data["revivedMinGapDays"], 60) + self.assertGreaterEqual(int(data.get("revivedMaxGapDays") or 0), 400) + new_samples = list(data.get("newStickerSamples") or []) + revived_samples = list(data.get("revivedStickerSamples") or []) + self.assertTrue(any(str(x.get("md5") or "") == md5_new for x in new_samples)) + self.assertTrue(any(str(x.get("md5") or "") == md5_revived for x in revived_samples)) + revived_item = next((x for x in revived_samples if str(x.get("md5") or "") == md5_revived), {}) + self.assertGreaterEqual(int(revived_item.get("gapDays") or 0), 
400) + + def test_empty_year_returns_safe_empty_state(self): + from wechat_decrypt_tool.wrapped.cards.card_04_emoji_universe import build_card_04_emoji_universe + + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_me" + account_dir = root / account + account_dir.mkdir(parents=True, exist_ok=True) + self._seed_contact_db(account_dir / "contact.db", account=account, usernames=[]) + self._seed_session_db(account_dir / "session.db", usernames=[]) + + card = build_card_04_emoji_universe(account_dir=account_dir, year=2025) + self.assertEqual(card["id"], 5) + self.assertEqual(card["status"], "ok") + self.assertEqual(card["data"]["sentStickerCount"], 0) + self.assertIn("几乎没用表情表达", card["narrative"]) + self.assertIsInstance(card["data"]["lines"], list) + self.assertGreaterEqual(len(card["data"]["lines"]), 1) + self.assertEqual(card["data"].get("topUnicodeEmojis"), []) + + def test_tie_break_is_stable_by_key(self): + from wechat_decrypt_tool.wrapped.cards.card_04_emoji_universe import compute_emoji_universe_stats + + with TemporaryDirectory() as td: + root = Path(td) + account = "wxid_me" + friend = "wxid_friend_d" + account_dir = root / account + account_dir.mkdir(parents=True, exist_ok=True) + + self._seed_contact_db(account_dir / "contact.db", account=account, usernames=[friend]) + self._seed_session_db(account_dir / "session.db", usernames=[friend]) + + md5_a = "11111111111111111111111111111111" + md5_b = "22222222222222222222222222222222" + rows = [ + { + "local_id": 1, + "server_id": 301, + "local_type": 47, + "create_time": self._ts(2025, 6, 1, 8, 0, 0), + "message_content": f'', + }, + { + "local_id": 2, + "server_id": 302, + "local_type": 47, + "create_time": self._ts(2025, 6, 1, 8, 1, 0), + "message_content": f'', + }, + { + "local_id": 3, + "server_id": 303, + "local_type": 47, + "create_time": self._ts(2025, 6, 1, 8, 2, 0), + "message_content": f'', + }, + { + "local_id": 4, + "server_id": 304, + "local_type": 47, + "create_time": 
self._ts(2025, 6, 1, 8, 3, 0), + "message_content": f'', + }, + ] + self._seed_message_db(account_dir / "message_0.db", account=account, username=friend, rows=rows) + + table_name = f"msg_{hashlib.md5(friend.encode('utf-8')).hexdigest()}" + fts_rows = [] + for row in rows: + fts_rows.append( + { + "text": "[表情]", + "username": friend, + "render_type": "emoji", + "create_time": row["create_time"], + "local_id": row["local_id"], + "server_id": row["server_id"], + "local_type": 47, + "db_stem": "message_0", + "table_name": table_name, + "sender_username": account, + } + ) + fts_rows.extend( + [ + { + # `chat_search_index` stores text as char-tokens: "[微笑][发呆]" -> "[ 微 笑 ] [ 发 呆 ]" + "text": "[ 微 笑 ] [ 发 呆 ]", + "username": friend, + "render_type": "text", + "create_time": self._ts(2025, 6, 2, 9, 0, 0), + "local_id": 101, + "server_id": 4001, + "local_type": 1, + "db_stem": "message_0", + "table_name": table_name, + "sender_username": account, + }, + { + "text": "[ 发 呆 ] [ 微 笑 ]", + "username": friend, + "render_type": "text", + "create_time": self._ts(2025, 6, 2, 9, 1, 0), + "local_id": 102, + "server_id": 4002, + "local_type": 1, + "db_stem": "message_0", + "table_name": table_name, + "sender_username": account, + }, + ] + ) + self._seed_index_db(account_dir / "chat_search_index.db", rows=fts_rows) + + data = compute_emoji_universe_stats(account_dir=account_dir, year=2025) + + self.assertEqual(data["topStickers"][0]["md5"], md5_a) + expected_emoji_key = sorted(["[微笑]", "[发呆]"])[0] + self.assertEqual(data["topTextEmojis"][0]["key"], expected_emoji_key) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_wrapped_keywords_wordcloud.py b/tests/test_wrapped_keywords_wordcloud.py new file mode 100644 index 0000000..72fa62c --- /dev/null +++ b/tests/test_wrapped_keywords_wordcloud.py @@ -0,0 +1,203 @@ +import unittest +from pathlib import Path +import sys + +# Ensure "src/" is importable when running tests from repo root. 
+ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class TestWrappedKeywordsWordCloud(unittest.TestCase): + def test_weflow_common_phrase_filter(self): + from wechat_decrypt_tool.wrapped.cards.card_05_keywords_wordcloud import _weflow_common_phrase_or_empty + + self.assertEqual(_weflow_common_phrase_or_empty(" 在吗 "), "在吗") + self.assertEqual(_weflow_common_phrase_or_empty("ok"), "ok") + self.assertEqual(_weflow_common_phrase_or_empty("a"), "") # too short + self.assertEqual(_weflow_common_phrase_or_empty("x" * 21), "") # too long + self.assertEqual(_weflow_common_phrase_or_empty("看看 http://x.com"), "") # contains http + self.assertEqual(_weflow_common_phrase_or_empty("xml"), "") # contains "<" + self.assertEqual(_weflow_common_phrase_or_empty("[捂脸]"), "") # bracketed payload + self.assertEqual(_weflow_common_phrase_or_empty(""), "") # xml payload + + def test_build_common_phrases_payload_structure(self): + from collections import Counter + + from wechat_decrypt_tool.wrapped.cards.card_05_keywords_wordcloud import build_common_phrases_payload + + phrase_counts = Counter({"好的": 5, "在吗": 2, "movie": 2, "单次": 1}) + example_texts = [ + "好的收到", + "好的好的,明白了", + "你好的呀", + "在吗宝贝", + "movie night is fun", + "MOVIE time now", + ] + payload = build_common_phrases_payload( + phrase_counts=phrase_counts, + seed=123456, + top_n=32, + bubble_limit=50, + example_texts=example_texts, + examples_per_word=3, + ) + + self.assertIn("keywords", payload) + self.assertIn("bubbleMessages", payload) + self.assertIn("examples", payload) + self.assertIn("topKeyword", payload) + + self.assertEqual(payload["topKeyword"]["word"], "好的") + self.assertEqual(int(payload["topKeyword"]["count"]), 5) + + self.assertTrue(all(int(x.get("count") or 0) >= 2 for x in payload["keywords"])) + self.assertTrue(all(isinstance(x.get("word"), str) and x.get("word") for x in payload["keywords"])) + + # Examples should contain real message samples with an upper bound. 
+ for ex in payload["examples"]: + msgs = ex.get("messages") or [] + self.assertGreaterEqual(len(msgs), 1) + self.assertLessEqual(len(msgs), 3) + word = str(ex.get("word") or "") + if any("\u4e00" <= ch <= "\u9fff" for ch in word): + self.assertTrue(any(word in str(m) for m in msgs)) + else: + self.assertTrue(any(word.lower() in str(m).lower() for m in msgs)) + + def test_extract_keywords_jieba_basic(self): + from wechat_decrypt_tool.wrapped.cards.card_05_keywords_wordcloud import extract_keywords_jieba + + texts = [ + "火锅 火锅", + "火锅太好吃了!!!", + "movie night movie", + "2024-01-01 12:30", + "哈哈哈哈", + ] + + out = extract_keywords_jieba(texts, top_n=40) + self.assertIsInstance(out, list) + self.assertLessEqual(len(out), 40) + + # Must be sorted by count descending (tie-break by word). + counts = [int(x.get("count") or 0) for x in out] + self.assertEqual(counts, sorted(counts, reverse=True)) + + # Weights are normalized into [0.2, 1.0] when multiple items exist. + for x in out: + w = float(x.get("weight") or 0) + self.assertGreaterEqual(w, 0.0) + self.assertLessEqual(w, 1.0) + + words = [str(x.get("word") or "") for x in out] + self.assertTrue(any("火锅" == w for w in words)) + self.assertTrue(any("movie" == w for w in words)) + self.assertTrue(all(not w.isdigit() for w in words if w)) + + def test_extract_keywords_jieba_short_phrases(self): + from wechat_decrypt_tool.wrapped.cards.card_05_keywords_wordcloud import extract_keywords_jieba + + # Jieba may split short chat phrases into single characters ("在吗" -> ["在","吗"]), + # which would be filtered out by our tokenizer. Ensure we still extract meaningful + # 2-char phrases as a fallback. 
+ texts = ["在吗"] * 30 + ["好的"] * 25 + ["嗯"] * 40 + ["哈哈"] * 40 + out = extract_keywords_jieba(texts, top_n=10) + + words = [str(x.get("word") or "") for x in out] + self.assertIn("在吗", words) + self.assertIn("好的", words) + + def test_list_message_tables_decodes_bytes(self): + import sqlite3 + + from wechat_decrypt_tool.wrapped.cards.card_05_keywords_wordcloud import _list_message_tables + + conn = sqlite3.connect(":memory:") + try: + conn.text_factory = bytes + conn.execute("CREATE TABLE Msg_abc (id INTEGER)") + conn.execute("CREATE TABLE Chat_def (id INTEGER)") + conn.execute("CREATE TABLE Other (id INTEGER)") + tables = _list_message_tables(conn) + finally: + conn.close() + + self.assertIn("Msg_abc", tables) + self.assertIn("Chat_def", tables) + self.assertTrue(all(isinstance(x, str) for x in tables)) + + def test_pick_examples_contains_word(self): + from wechat_decrypt_tool.wrapped.cards.card_05_keywords_wordcloud import pick_examples + + keywords = [ + {"word": "火锅", "count": 3, "weight": 1.0}, + {"word": "movie", "count": 2, "weight": 0.6}, + ] + pool = [ + "今晚火锅走起", + "火锅太好吃了", + "no", + "xml", + "Movie night is fun", + "MOVIE time", + "https://example.com/movie", + ] + + out = pick_examples(keywords, pool, per_word=3) + self.assertEqual(len(out), 2) + + m_hotpot = next(x for x in out if x["word"] == "火锅") + self.assertTrue(all("火锅" in m for m in m_hotpot["messages"])) + + m_movie = next(x for x in out if x["word"] == "movie") + self.assertTrue(all("movie" in m.lower() for m in m_movie["messages"])) + + def test_pick_examples_short_phrase_can_fill_three(self): + from wechat_decrypt_tool.wrapped.cards.card_05_keywords_wordcloud import pick_examples + + keywords = [{"word": "在吗", "count": 9, "weight": 1.0}] + pool = [ + "在吗", + "在吗", + "在吗", + "在吗?", + "ok", + ] + + out = pick_examples(keywords, pool, per_word=3) + self.assertEqual(len(out), 1) + msgs = out[0]["messages"] + self.assertEqual(len(msgs), 3) + self.assertTrue(all("在吗" in m for m in msgs)) + + def 
test_build_keywords_payload_structure(self): + from wechat_decrypt_tool.wrapped.cards.card_05_keywords_wordcloud import build_keywords_payload + + texts = [ + "今晚吃火锅吗?", + "火锅太好吃了!!!", + "一起去看电影吧", + "一起一起", + "movie night movie", + ] + + payload = build_keywords_payload(texts=texts, seed=123456) + self.assertIn("keywords", payload) + self.assertIn("bubbleMessages", payload) + self.assertIn("examples", payload) + self.assertIn("topKeyword", payload) + + self.assertIsInstance(payload["keywords"], list) + self.assertIsInstance(payload["bubbleMessages"], list) + self.assertIsInstance(payload["examples"], list) + self.assertTrue(payload["topKeyword"] is None or isinstance(payload["topKeyword"], dict)) + + # bubble messages are unique and within limit + b = payload["bubbleMessages"] + self.assertLessEqual(len(b), 180) + self.assertEqual(len(b), len(list(dict.fromkeys(b)))) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_wrapped_manifest_bento_summary.py b/tests/test_wrapped_manifest_bento_summary.py new file mode 100644 index 0000000..ef6d8c9 --- /dev/null +++ b/tests/test_wrapped_manifest_bento_summary.py @@ -0,0 +1,28 @@ +import sys +import unittest +from pathlib import Path + +# Ensure "src/" is importable when running tests from repo root. +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class TestWrappedManifestBentoSummary(unittest.TestCase): + def test_manifest_appends_bento_summary(self): + try: + from wechat_decrypt_tool.wrapped.service import _WRAPPED_CARD_MANIFEST + except ModuleNotFoundError as e: + # Some dev/test environments may not have optional deps installed (e.g. pypinyin). + # The manifest itself doesn't depend on them, but importing the service module does. 
+ if getattr(e, "name", "") == "pypinyin": + self.skipTest("pypinyin is not installed") + raise + + self.assertTrue(len(_WRAPPED_CARD_MANIFEST) > 0) + last = _WRAPPED_CARD_MANIFEST[-1] + self.assertEqual(int(last.get("id")), 7) + self.assertEqual(str(last.get("kind")), "global/bento_summary") + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_wrapped_monthly_best_friends.py b/tests/test_wrapped_monthly_best_friends.py new file mode 100644 index 0000000..82067d6 --- /dev/null +++ b/tests/test_wrapped_monthly_best_friends.py @@ -0,0 +1,271 @@ +import sqlite3 +import unittest +from datetime import datetime +from pathlib import Path +from tempfile import TemporaryDirectory +import sys + +# Ensure "src/" is importable when running tests from repo root. +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class TestWrappedMonthlyBestFriends(unittest.TestCase): + def _ts(self, y: int, m: int, d: int, hh: int, mm: int, ss: int) -> int: + return int(datetime(y, m, d, hh, mm, ss).timestamp()) + + def _seed_contact_db(self, path: Path, usernames: list[str]) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE IF NOT EXISTS contact ( + username TEXT PRIMARY KEY, + remark TEXT, + nick_name TEXT, + alias TEXT, + big_head_url TEXT, + small_head_url TEXT + ) + """ + ) + for u in usernames: + conn.execute( + "INSERT INTO contact(username, nick_name) VALUES(?, ?)", + (u, f"Nick_{u}"), + ) + conn.commit() + finally: + conn.close() + + def _seed_index_db(self, path: Path, rows: list[dict]) -> None: + conn = sqlite3.connect(str(path)) + try: + conn.execute( + """ + CREATE TABLE IF NOT EXISTS message_fts ( + username TEXT, + sender_username TEXT, + create_time INTEGER, + sort_seq INTEGER, + local_id INTEGER, + local_type INTEGER, + db_stem TEXT + ) + """ + ) + for r in rows: + conn.execute( + """ + INSERT INTO message_fts( + username, sender_username, create_time, sort_seq, local_id, 
local_type, db_stem + ) VALUES(?, ?, ?, ?, ?, ?, ?) + """, + ( + r["username"], + r["sender_username"], + int(r["create_time"]), + int(r["sort_seq"]), + int(r["local_id"]), + int(r.get("local_type", 1)), + str(r.get("db_stem", "message_0")), + ), + ) + conn.commit() + finally: + conn.close() + + def test_balanced_profile_can_beat_higher_volume(self): + from wechat_decrypt_tool.wrapped.cards.card_04_monthly_best_friends_wall import ( + compute_monthly_best_friends_wall_stats, + ) + + with TemporaryDirectory() as td: + account = "wxid_me" + account_dir = Path(td) / account + account_dir.mkdir(parents=True, exist_ok=True) + + user_volume = "wxid_volume" + user_balanced = "wxid_balanced" + self._seed_contact_db(account_dir / "contact.db", [user_volume, user_balanced]) + + rows: list[dict] = [] + lid = 1 + # High-volume user: more messages but consistently slow replies and low continuity. + for d in [3, 18]: + for i in range(6): + t = self._ts(2025, 1, d, 21, i * 3, 0) + rows.append( + { + "username": user_volume, + "sender_username": user_volume, + "create_time": t, + "sort_seq": lid, + "local_id": lid, + } + ) + lid += 1 + rows.append( + { + "username": user_volume, + "sender_username": account, + "create_time": t + 7200, + "sort_seq": lid, + "local_id": lid, + } + ) + lid += 1 + + # Balanced user: slightly fewer interactions, but much faster and spread over more days/hours. 
+ day_hour = [ + (2, 1), + (6, 8), + (9, 13), + (13, 19), + (20, 10), + (24, 22), + (27, 7), + (29, 16), + (30, 12), + (31, 20), + ] + for d, hh in day_hour: + t = self._ts(2025, 1, d, hh, 10, 0) + rows.append( + { + "username": user_balanced, + "sender_username": user_balanced, + "create_time": t, + "sort_seq": lid, + "local_id": lid, + } + ) + lid += 1 + rows.append( + { + "username": user_balanced, + "sender_username": account, + "create_time": t + 20, + "sort_seq": lid, + "local_id": lid, + } + ) + lid += 1 + + self._seed_index_db(account_dir / "chat_search_index.db", rows) + data = compute_monthly_best_friends_wall_stats(account_dir=account_dir, year=2025) + jan = data["months"][0] + self.assertIsNotNone(jan["winner"]) + self.assertEqual(jan["winner"]["username"], user_balanced) + + def test_allows_consecutive_month_wins(self): + from wechat_decrypt_tool.wrapped.cards.card_04_monthly_best_friends_wall import ( + compute_monthly_best_friends_wall_stats, + ) + + with TemporaryDirectory() as td: + account = "wxid_me" + account_dir = Path(td) / account + account_dir.mkdir(parents=True, exist_ok=True) + + buddy = "wxid_best" + self._seed_contact_db(account_dir / "contact.db", [buddy]) + + rows: list[dict] = [] + lid = 1 + for month in [1, 2]: + for d in [3, 8, 12, 18]: + t = self._ts(2025, month, d, 12, 0, 0) + rows.append( + { + "username": buddy, + "sender_username": buddy, + "create_time": t, + "sort_seq": lid, + "local_id": lid, + } + ) + lid += 1 + rows.append( + { + "username": buddy, + "sender_username": account, + "create_time": t + 30, + "sort_seq": lid, + "local_id": lid, + } + ) + lid += 1 + + self._seed_index_db(account_dir / "chat_search_index.db", rows) + data = compute_monthly_best_friends_wall_stats(account_dir=account_dir, year=2025) + jan = data["months"][0] + feb = data["months"][1] + self.assertEqual(jan["winner"]["username"], buddy) + self.assertEqual(feb["winner"]["username"], buddy) + + def test_month_without_enough_activity_is_empty(self): + 
from wechat_decrypt_tool.wrapped.cards.card_04_monthly_best_friends_wall import ( + compute_monthly_best_friends_wall_stats, + ) + + with TemporaryDirectory() as td: + account = "wxid_me" + account_dir = Path(td) / account + account_dir.mkdir(parents=True, exist_ok=True) + + user = "wxid_low" + self._seed_contact_db(account_dir / "contact.db", [user]) + + rows = [] + lid = 1 + # Only 3 reply pairs in March -> total 6 messages, below minTotalMessages=8. + for d in [5, 11, 25]: + t = self._ts(2025, 3, d, 10, 0, 0) + rows.append( + { + "username": user, + "sender_username": user, + "create_time": t, + "sort_seq": lid, + "local_id": lid, + } + ) + lid += 1 + rows.append( + { + "username": user, + "sender_username": account, + "create_time": t + 40, + "sort_seq": lid, + "local_id": lid, + } + ) + lid += 1 + + self._seed_index_db(account_dir / "chat_search_index.db", rows) + data = compute_monthly_best_friends_wall_stats(account_dir=account_dir, year=2025) + march = data["months"][2] + self.assertIsNone(march["winner"]) + self.assertEqual(march["reason"], "insufficient_data") + + def test_card_shape_and_kind(self): + from wechat_decrypt_tool.wrapped.cards.card_04_monthly_best_friends_wall import ( + build_card_04_monthly_best_friends_wall, + ) + + with TemporaryDirectory() as td: + account = "wxid_me" + account_dir = Path(td) / account + account_dir.mkdir(parents=True, exist_ok=True) + self._seed_contact_db(account_dir / "contact.db", []) + self._seed_index_db(account_dir / "chat_search_index.db", []) + + card = build_card_04_monthly_best_friends_wall(account_dir=account_dir, year=2025) + self.assertEqual(card["id"], 4) + self.assertEqual(card["kind"], "chat/monthly_best_friends_wall") + self.assertEqual(card["status"], "ok") + self.assertEqual(len(card["data"]["months"]), 12) + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_wrapped_reply_speed.py b/tests/test_wrapped_reply_speed.py new file mode 100644 index 0000000..9da41ec --- /dev/null +++ 
b/tests/test_wrapped_reply_speed.py @@ -0,0 +1,73 @@ +import unittest +from pathlib import Path +import sys + +# Ensure "src/" is importable when running tests from repo root. +ROOT = Path(__file__).resolve().parents[1] +sys.path.insert(0, str(ROOT / "src")) + + +class TestWrappedReplySpeedScoring(unittest.TestCase): + def test_score_prefers_more_chat_when_speed_similar(self): + from wechat_decrypt_tool.wrapped.cards.card_03_reply_speed import _ConvAgg, _score_conv + + tau = 30 * 60 # 30min, keep in sync with production default + + # A: 秒回,但聊天很少 + a = _ConvAgg( + username="wxid_a", + incoming=3, + outgoing=3, + replies=3, + sum_gap=30, + sum_gap_capped=30, + min_gap=5, + max_gap=15, + ) + + # B: 稍慢,但聊天明显更多 + b = _ConvAgg( + username="wxid_b", + incoming=50, + outgoing=50, + replies=50, + sum_gap=3000, # avg 60s + sum_gap_capped=3000, + min_gap=10, + max_gap=120, + ) + + self.assertGreater(_score_conv(agg=b, tau_seconds=tau), _score_conv(agg=a, tau_seconds=tau)) + + def test_score_penalizes_extremely_slow_reply(self): + from wechat_decrypt_tool.wrapped.cards.card_03_reply_speed import _ConvAgg, _score_conv + + tau = 30 * 60 + + fast_few = _ConvAgg( + username="wxid_fast", + incoming=5, + outgoing=5, + replies=5, + sum_gap=50, # avg 10s + sum_gap_capped=50, + min_gap=1, + max_gap=20, + ) + + slow_many = _ConvAgg( + username="wxid_slow", + incoming=80, + outgoing=80, + replies=80, + sum_gap=80 * 7200, # avg 2h + sum_gap_capped=80 * 7200, + min_gap=60, + max_gap=100000, + ) + + self.assertGreater(_score_conv(agg=fast_few, tau_seconds=tau), _score_conv(agg=slow_many, tau_seconds=tau)) + + +if __name__ == "__main__": + unittest.main() diff --git a/tools/debug_message_types.py b/tools/debug_message_types.py index 5002a57..c426f29 100644 --- a/tools/debug_message_types.py +++ b/tools/debug_message_types.py @@ -1,9 +1,11 @@ #!/usr/bin/env python3 """调试消息类型返回值""" +import os import requests -resp = requests.get('http://localhost:8000/api/chat/messages', params={ +PORT = 
os.environ.get("WECHAT_TOOL_PORT", "10392") +resp = requests.get(f'http://localhost:{PORT}/api/chat/messages', params={ 'account': 'wxid_v4mbduwqtzpt22', 'username': 'wxid_qmzc7q0xfm0j22', 'limit': 100 diff --git a/tools/export_database_schema_markdown.py b/tools/export_database_schema_markdown.py new file mode 100644 index 0000000..d6043c7 --- /dev/null +++ b/tools/export_database_schema_markdown.py @@ -0,0 +1,530 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +""" +导出微信数据库字段配置为一份 Markdown 文档(单文件): + +- 输入:wechat_db_config.json(由 tools/generate_wechat_db_config.py 生成) +- 输出:Markdown(包含:数据库 → 表/表组 → 字段与含义) + +说明: +- 本脚本只基于“配置文件中的结构与字段含义”生成文档,不会读取真实数据内容; +- 会对类似 Msg_ 这类用户相关的哈希表名做脱敏显示。 +- 会将“同结构但表名仅数字不同”的重复表自动折叠为一个表组(常见于 FTS 分片/内部表)。 + +用法示例: + python tools/export_database_schema_markdown.py \ + --config wechat_db_config.json \ + --output docs/wechat_database_schema.md +""" + +from __future__ import annotations + +import argparse +import json +import re +from datetime import datetime +from pathlib import Path +from typing import Any + +ROOT = Path(__file__).resolve().parents[1] + + +_HASH_TABLE_RE = re.compile(r"^([A-Za-z0-9]+)_([0-9a-fA-F]{16,})$") + + +def _md_escape_cell(v: Any) -> str: + """Escape Markdown table cell content.""" + if v is None: + return "-" + s = str(v) + # Keep it one-line for tables. + s = s.replace("\r", " ").replace("\n", " ").strip() + # Escape pipe + s = s.replace("|", r"\|") + return s if s else "-" + + +def _mask_hash_table_name(name: str) -> str: + """ + Mask user-specific hash suffix table names: + Msg_00140f... -> Msg_ + """ + m = _HASH_TABLE_RE.match(name) + if not m: + return name + return f"{m.group(1)}_" + + +def _db_sort_key(db_name: str) -> tuple[int, int, str]: + """ + Roughly sort DBs by importance for readers. 
+ """ + # Core + if db_name == "contact": + return (10, 0, db_name) + if db_name == "session": + return (20, 0, db_name) + m = re.match(r"^message_(\d+)$", db_name) + if m: + return (30, int(m.group(1)), db_name) + if re.match(r"^biz_message_(\d+)$", db_name): + n = int(re.match(r"^biz_message_(\d+)$", db_name).group(1)) # type: ignore[union-attr] + return (31, n, db_name) + if db_name == "message_resource": + return (40, 0, db_name) + if db_name == "media_0": + return (41, 0, db_name) + if db_name == "hardlink": + return (42, 0, db_name) + if db_name == "head_image": + return (43, 0, db_name) + + # Social / content + if db_name == "sns": + return (50, 0, db_name) + if db_name == "favorite": + return (60, 0, db_name) + if db_name == "emoticon": + return (70, 0, db_name) + + # System / misc + if db_name in {"general", "unspportmsg"}: + return (80, 0, db_name) + + # Search / index + if db_name in {"chat_search_index", "message_fts"} or db_name.endswith("_fts"): + return (90, 0, db_name) + + # Others + return (100, 0, db_name) + + +def _render_message_type_map(message_types: dict[str, Any]) -> str: + # In Windows WeChat v4, `local_type` is commonly a 64-bit integer: + # raw = (sub_type << 32) | type + # Some configs may still store explicit (type, sub_type) pairs; handle both. + items: list[tuple[int, int, int, str]] = [] + for k, v in message_types.items(): + if k in {"_instructions", "examples"}: + continue + if not isinstance(k, str) or "," not in k: + continue + a, b = k.split(",", 1) + try: + a_i = int(a) + b_i = int(b) + except Exception: + continue + desc = str(v) + + if b_i != 0: + msg_type = a_i + msg_sub = b_i + raw = (msg_sub << 32) | (msg_type & 0xFFFFFFFF) + else: + raw = a_i + msg_type = raw & 0xFFFFFFFF + msg_sub = (raw >> 32) & 0xFFFFFFFF + + items.append((raw, msg_type, msg_sub, desc)) + + if not items: + return "" + + # Sort by decoded (type, sub_type), then raw value. 
+ items.sort(key=lambda x: (x[1], x[2], x[0])) + + out = "## 消息类型(local_type)速查\n\n" + out += "说明:Windows 微信 v4 的 `local_type` 常见为 64 位整型:`raw = (sub_type<<32) | type`。\n\n" + out += "| local_type(raw) | type(low32) | sub_type(high32) | 含义 |\n|---:|---:|---:|---|\n" + for raw, t, st, desc in items: + out += f"| {raw} | {t} | {st} | {_md_escape_cell(desc)} |\n" + return out + "\n" + + +def _table_schema_signature(table: dict[str, Any]) -> tuple[str, str, tuple[tuple[str, str, str, str], ...]]: + """ + Build a stable signature for a table schema in config. + + Used to fold tables which are structurally identical but only differ in name + (e.g. message_fts_v4_aux_0..3). + """ + t_type = str(table.get("type", "table")) + desc = str(table.get("description", "")) + fields = table.get("fields") or {} + + items: list[tuple[str, str, str, str]] = [] + if isinstance(fields, dict): + for field_name, fm in fields.items(): + if not isinstance(fm, dict): + fm = {} + items.append( + ( + str(field_name), + str(fm.get("type", "")), + str(fm.get("meaning", "")), + str(fm.get("notes", "")), + ) + ) + items.sort(key=lambda x: x[0]) + return (t_type, desc, tuple(items)) + + +def _name_family_key(name: str) -> str: + """Normalize a table name into a family key by replacing digit runs with {n}.""" + return re.sub(r"\d+", "{n}", name) + + +def _make_group_pattern(table_names: list[str]) -> str: + """ + Make a readable pattern for a group of similar table names: + + - Only varying numeric segments become `{n}` + - Constant numeric segments are kept as-is + + Example: + message_fts_v4_0/message_fts_v4_1 -> message_fts_v4_{n} + ImgFts0V0/ImgFts1V0 -> ImgFts{n}V0 + """ + if not table_names: + return "" + + tokenized = [re.split(r"(\d+)", n) for n in table_names] + base = tokenized[0] + + # Ensure token structures match; otherwise fall back to a simple normalization. 
+ for t in tokenized[1:]: + if len(t) != len(base): + return _name_family_key(table_names[0]) + for i in range(0, len(base), 2): + if t[i] != base[i]: + return _name_family_key(table_names[0]) + + out_parts: list[str] = [] + for i, part in enumerate(base): + if i % 2 == 0: + out_parts.append(part) + continue + nums = {t[i] for t in tokenized if i < len(t)} + out_parts.append(part if len(nums) == 1 else "{n}") + return "".join(out_parts) + + +def _fold_same_schema_tables_for_display( + tables: dict[str, Any], +) -> list[tuple[str, dict[str, Any]]]: + """ + Fold duplicated tables that share the same schema/signature but only differ in name. + + This is common in FTS shards, e.g.: + message_fts_v4_aux_0..3 + message_fts_v4_0..3 and their internal *_content/*_data/*_idx tables + ImgFts0V0..3 and their internal tables + + Returns a list of (display_name, table_dict) items sorted by the original table name order. + """ + if not tables: + return [] + + # (family_key, schema_sig) -> [table_name, ...] + groups: dict[tuple[str, tuple[str, str, tuple[tuple[str, str, str, str], ...]]], list[str]] = {} + for table_name, table in tables.items(): + if not isinstance(table, dict): + continue + if str(table.get("type", "table")) == "similar_group": + continue + family = _name_family_key(str(table_name)) + sig = _table_schema_signature(table) + groups.setdefault((family, sig), []).append(str(table_name)) + + consumed: set[str] = set() + items: list[tuple[str, str, dict[str, Any]]] = [] # (sort_key, display_name, table) + used_display_names: set[str] = set() + + # Create auto "similar_group" entries for groups > 1. 
+ for (_, _), names in sorted(groups.items(), key=lambda x: x[0][0]): + if len(names) <= 1: + continue + names_sorted = sorted(names) + rep = names_sorted[0] + rep_table = tables.get(rep) + if not isinstance(rep_table, dict): + continue + pattern = _make_group_pattern(names_sorted) + if not pattern: + pattern = _name_family_key(rep) + + display_name = pattern + if display_name in used_display_names: + # Rare: same name pattern but different schema signatures. Disambiguate. + n = 2 + while f"{pattern} (var{n})" in used_display_names: + n += 1 + display_name = f"{pattern} (var{n})" + + group_entry = dict(rep_table) + group_entry.update( + { + "type": "similar_group", + "pattern": pattern, + "table_count": len(names_sorted), + "representative_table": rep, + "table_names": names_sorted, + } + ) + items.append((rep, display_name, group_entry)) + used_display_names.add(display_name) + consumed.update(names_sorted) + + # Keep non-grouped tables (and existing similar_group) as-is. + for table_name, table in tables.items(): + if not isinstance(table, dict): + continue + if str(table_name) in consumed: + continue + items.append((str(table_name), str(table_name), table)) + + items.sort(key=lambda x: (x[0], x[1])) + return [(display_name, table) for _, display_name, table in items] + + +def export_markdown(config_path: Path, output_path: Path) -> None: + cfg = json.loads(config_path.read_text(encoding="utf-8")) + meta = cfg.get("_metadata") or {} + databases: dict[str, Any] = cfg.get("databases") or {} + + # message_{n}.db are typically shards with identical schema. Keep only the last shard for detailed sections. 
+ message_shards: list[tuple[int, str]] = [] + for name in databases.keys(): + m = re.match(r"^message_(\d+)$", str(name)) + if not m: + continue + try: + message_shards.append((int(m.group(1)), str(name))) + except Exception: + continue + message_shards.sort(key=lambda x: x[0]) + rep_message_db: str | None = message_shards[-1][1] if message_shards else None + all_message_db_names = [n for _, n in message_shards] + + now = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + gen_time = meta.get("generated_time") or now + + lines: list[str] = [] + lines.append("# Windows 微信数据库结构文档(自动生成)") + lines.append("") + lines.append(f"> 生成时间:{_md_escape_cell(gen_time)}") + lines.append(f"> 本次导出:{now}") + lines.append(f"> 配置来源:`{config_path.as_posix()}`(由 `tools/generate_wechat_db_config.py` 生成)") + lines.append("") + lines.append("参考资料:") + lines.append("- `万字长文带你了解Windows微信.md`(目录结构/部分表结构与含义)") + lines.append("- 本项目前端页面与后端解析逻辑(字段命名与用途)") + lines.append("") + lines.append("注意:") + lines.append("- 本文档尽量覆盖“库/表/字段”,字段含义部分来自启发式与公开资料,可能存在不准确之处。") + lines.append("- 为避免泄露个人数据,类似 `Msg_` 的哈希表名会脱敏显示。") + lines.append("- 部分 FTS 虚表可能依赖微信自定义 tokenizer(如 `MMFtsTokenizer`),普通 sqlite 环境下查询会报错;本文档字段来自建表 SQL/模板解析。") + lines.append("") + + # Overview + lines.append("## 数据库总览") + lines.append("") + lines.append("| 数据库 | 描述 | 表数量 |") + lines.append("|---|---|---:|") + + for db_name in sorted(databases.keys(), key=_db_sort_key): + db = databases.get(db_name) or {} + if not isinstance(db, dict): + continue + desc = db.get("description", "") + tables = db.get("tables") or {} + lines.append( + f"| `{db_name}.db` | {_md_escape_cell(desc)} | {len(tables) if isinstance(tables, dict) else 0} |" + ) + lines.append("") + + lines.append("## 本项目(前端)功能与数据库大致对应") + lines.append("") + lines.append("- 联系人/群聊:`contact.db`(contact/chat_room/chatroom_member/label 等)") + lines.append("- 会话列表/未读:`session.db`(通常为 SessionTable/ChatInfo 等)") + lines.append("- 聊天记录:`message_*.db`(`Msg_*` 表组 + `Name2Id` 映射等)") + lines.append("- 
消息资源/媒体:`message_resource.db` / `hardlink.db` / `media_0.db` / `head_image.db`") + lines.append("- 朋友圈:`sns.db`") + lines.append("- 收藏:`favorite.db`") + lines.append("- 表情包:`emoticon.db`") + lines.append("- 搜索:`chat_search_index.db` / `message_fts.db` / `*_fts.db`(不同版本/实现可能不同)") + lines.append("") + + # Per DB + for db_name in sorted(databases.keys(), key=_db_sort_key): + # Skip duplicated details for message shards; only keep the last shard as representative. + if rep_message_db and re.match(r"^message_\d+$", str(db_name)) and str(db_name) != rep_message_db: + continue + + db = databases.get(db_name) or {} + if not isinstance(db, dict): + continue + + desc = db.get("description", "") + tables = db.get("tables") or {} + if not isinstance(tables, dict): + tables = {} + + display_table_items = _fold_same_schema_tables_for_display(tables) + display_table_count = len(display_table_items) + + lines.append(f"## {db_name}.db") + lines.append("") + lines.append(f"- 描述:{_md_escape_cell(desc)}") + if display_table_count != len(tables): + lines.append(f"- 表数量:{len(tables)}(同结构表折叠后展示 {display_table_count})") + else: + lines.append(f"- 表数量:{len(tables)}") + lines.append("") + + # Extra note for message shards + if re.match(r"^message_\d+$", db_name): + if rep_message_db and db_name == rep_message_db and len(all_message_db_names) > 1: + others = [n for n in all_message_db_names if n != rep_message_db] + # Keep it short; avoid blowing up the doc with too many names if there are lots of shards. 
+ if len(others) <= 10: + lines.append(f"本节仅展示最后一个分片 `{rep_message_db}.db` 的结构;其它分片结构通常一致:{', '.join([f'`{n}.db`' for n in others])}。") + else: + lines.append( + f"本节仅展示最后一个分片 `{rep_message_db}.db` 的结构;其它分片({len(others)} 个)结构通常一致。" + ) + lines.append("说明:") + lines.append("- `Msg_*` 表组通常对应“每个联系人/会话一个表”,常见命名为 `Msg_{md5(wxid)}`。") + lines.append("- 可通过对 wxid 做 md5 计算定位具体会话表;或结合 `Name2Id`/`name2id` 映射表进行解析。") + lines.append("") + lines.append("示例(Python):") + lines.append("") + lines.append("```python") + lines.append("import hashlib") + lines.append("") + lines.append("wxid = \"wxid_xxx\"") + lines.append("md5_hex = hashlib.md5(wxid.encode(\"utf-8\")).hexdigest()") + lines.append("table = f\"Msg_{md5_hex}\"") + lines.append("print(table)") + lines.append("```") + lines.append("") + + # Tables + for table_name, table in display_table_items: + if not isinstance(table, dict): + continue + + t_type = table.get("type", "table") + t_desc = table.get("description", "") + + # Table header + display_table_name = _mask_hash_table_name(table_name) + lines.append(f"### {display_table_name}") + lines.append("") + if t_desc: + lines.append(f"- 描述:{_md_escape_cell(t_desc)}") + if t_type == "similar_group": + pat = table.get("pattern") or display_table_name + rep = table.get("representative_table") + table_count = table.get("table_count") + lines.append(f"- 类型:相似表组(pattern: `{_md_escape_cell(pat)}`)") + if table_count is not None: + lines.append(f"- 表数量:{_md_escape_cell(table_count)}") + if rep: + rep_s = str(rep) + rep_masked = _mask_hash_table_name(rep_s) + rep_note = "(已脱敏)" if rep_masked != rep_s else "" + lines.append(f"- 代表表:`{_md_escape_cell(rep_masked)}`{rep_note}") + + members = table.get("table_names") or table.get("tables") + if isinstance(members, list) and members: + member_names = [str(x) for x in members] + member_names = [_mask_hash_table_name(n) for n in member_names] + if len(member_names) <= 20: + show = member_names + suffix = "" + else: + show = 
member_names[:10] + ["..."] + member_names[-5:] + suffix = f"(共 {len(member_names)} 个)" + parts = [f"`{_md_escape_cell(n)}`" if n != "..." else "..." for n in show] + lines.append(f"- 包含表:{', '.join(parts)}{suffix}") + lines.append("") + + fields = table.get("fields") or {} + if not isinstance(fields, dict) or not fields: + lines.append("_无字段信息_\n") + continue + + lines.append("| 字段 | 类型 | 含义 | 备注 |") + lines.append("|---|---|---|---|") + for field_name in sorted(fields.keys()): + fm = fields.get(field_name) or {} + if not isinstance(fm, dict): + fm = {} + f_type = fm.get("type", "") + meaning = fm.get("meaning", "") + notes = fm.get("notes", "") + lines.append( + f"| `{_md_escape_cell(field_name)}` | `{_md_escape_cell(f_type)}` | {_md_escape_cell(meaning)} | {_md_escape_cell(notes)} |" + ) + lines.append("") + + # Appendices + message_types = cfg.get("message_types") or {} + if isinstance(message_types, dict) and message_types: + mt = _render_message_type_map(message_types) + if mt: + lines.append(mt) + + friend_types = cfg.get("friend_types") or {} + if isinstance(friend_types, dict) and friend_types: + # friend_types in config usually uses string keys + items: list[tuple[int, str]] = [] + for k, v in friend_types.items(): + if k in {"_instructions", "examples"}: + continue + try: + items.append((int(str(k)), str(v))) + except Exception: + continue + items.sort(key=lambda x: x[0]) + + if items: + lines.append("## 联系人类型(friend_type)速查\n") + lines.append("| 值 | 含义 |\n|---:|---|\n") + for code, desc in items: + lines.append(f"| {code} | {_md_escape_cell(desc)} |") + lines.append("") + + output_path.parent.mkdir(parents=True, exist_ok=True) + output_path.write_text("\n".join(lines) + "\n", encoding="utf-8") + + +def main() -> int: + parser = argparse.ArgumentParser(description="导出微信数据库字段配置为 Markdown 文档(单文件)") + parser.add_argument( + "--config", + default=str(ROOT / "wechat_db_config.json"), + help="wechat_db_config.json 路径(由 tools/generate_wechat_db_config.py 生成)", 
+ ) + parser.add_argument( + "--output", + default=str(ROOT / "docs" / "wechat_database_schema.md"), + help="Markdown 输出路径", + ) + args = parser.parse_args() + + cfg = Path(args.config) + if not cfg.exists(): + raise FileNotFoundError(f"未找到配置文件: {cfg},请先运行 tools/generate_wechat_db_config.py") + + out = Path(args.output) + export_markdown(cfg, out) + print(f"[OK] 写出 Markdown: {out}") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/tools/generate_wechat_db_config.py b/tools/generate_wechat_db_config.py index d622368..4d8fde3 100644 --- a/tools/generate_wechat_db_config.py +++ b/tools/generate_wechat_db_config.py @@ -14,6 +14,7 @@ import re from pathlib import Path from datetime import datetime +import sys ROOT = Path(__file__).resolve().parents[1] TEMPLATE_PATH = ROOT / "wechat_db_config_template.json" @@ -21,6 +22,10 @@ OUTPUT_DIR = ROOT / "output" / "configs" OUTPUT_COPY = OUTPUT_DIR / "wechat_db_config.generated.json" +# 允许从 tools/ 目录运行时仍能 import 根目录模块 +if str(ROOT) not in sys.path: + sys.path.insert(0, str(ROOT)) + # 尝试导入分析器以复用其启发式 AnalyzerCls = None try: @@ -33,19 +38,24 @@ def build_db_descriptions() -> dict[str, str]: return { "message": "聊天记录核心数据库", - "message_3": "聊天消息分表数据库(示例或分片)", + # message_{n}.db 会在 fill_config 里按正则单独处理(分片/分表) "message_fts": "聊天消息全文索引数据库(FTS)", "message_resource": "消息资源索引数据库(图片/文件/视频等)", "contact": "联系人数据库(好友/群/公众号基础信息)", "session": "会话数据库(会话列表与未读统计)", "sns": "朋友圈数据库(动态与互动)", "favorite": "收藏数据库", + "favorite_fts": "收藏全文索引数据库(FTS)", "emoticon": "表情包数据库", "head_image": "头像数据数据库", "hardlink": "硬链接索引数据库(资源去重/快速定位)", "media_0": "媒体数据数据库(含语音SILK等)", "unspportmsg": "不支持消息数据库(客户端不支持的消息类型)", "general": "通用/系统数据库(新消息通知/支付等)", + "contact_fts": "联系人全文索引数据库(FTS)", + "chat_search_index": "(本项目生成)聊天记录全文检索索引库(FTS5,用于搜索)", + "bizchat": "公众号/企业微信相关数据库(会话/联系人等)", + "digital_twin": "(本项目生成)数字分身数据库(派生数据,非微信原始库)", } @@ -172,6 +182,12 @@ def build_message_types_from_ohmywechat() -> dict[str, str]: "c4": "FTS列c4(内部结构)", "c5": 
"FTS列c5(内部结构)", "c6": "FTS列c6(内部结构)", + "c7": "FTS列c7(内部结构)", + "c8": "FTS列c8(内部结构)", + "c9": "FTS列c9(内部结构)", + "c10": "FTS列c10(内部结构)", + "c11": "FTS列c11(内部结构)", + "c12": "FTS列c12(内部结构)", "sz": "FTS文档大小信息", "_rowid_": "SQLite内部行ID", @@ -199,12 +215,483 @@ def build_message_types_from_ohmywechat() -> dict[str, str]: "last_sender_display_name": "最后一条消息发送者显示名", "last_msg_ext_type": "最后一条消息扩展类型", + # 常见“Key-Value”配置表(多库复用) + "key": "键(Key-Value配置表)", + "valueint64": "整数值(int64)", + "valuedouble": "浮点值(double)", + "valuestdstr": "字符串值(std::string)", + "valueblob": "二进制值(blob)", + "k": "配置键(k)", + "v": "配置值(v)", + + # 常见保留字段 + "reserved0": "保留字段(reserved0)", + "reserved1": "保留字段(reserved1)", + "reserved2": "保留字段(reserved2)", + "reserved3": "保留字段(reserved3)", + + # 版本/位标志 + "version": "版本号(记录/结构版本,具体含义依表而定)", + "bit_flag": "位标志/开关(bit flags)", + + # 本项目索引/缓存库常见字段 + "render_type": "渲染类型(本项目定义:text/image/system/...)", + "db_stem": "来源数据库分片名(如 message_0)", + "table_name": "来源表名(如 Msg_xxx)", + "sender_username": "发送者username(解码后)", + "preview": "会话预览文本(用于会话列表展示)", + "built_at": "构建时间(Unix时间戳,秒)", + "tablename": "表名(tableName)", + "value": "值(value)", + "brand_user_name": "品牌/公众号username(brand_user_name)", + + # 常见业务字段(命名自解释) + "ticket": "票据/验证ticket(ticket)", + "delete_table_name": "删除记录关联的消息表名(delete_table_name)", + "res_path": "资源路径(res_path)", + "biz_username": "公众号username(biz_username)", + "search_key": "搜索键/索引字段(search_key)", + "click_type": "点击/热词类型(click_type)", + "a_group_remark": "群备注(FTS检索字段:a_group_remark)", + "op_code": "操作码(op_code)", + "query": "查询关键词(query)", + "score": "评分/权重(score)", + "keyword": "关键词(keyword)", + "pay_load_": "payload/扩展数据(pay_load_)", + "bill_no": "账单号(bill_no)", + "session_title": "会话标题(session_title)", + "unread_stat": "未读统计字段(unread_stat)", + "ui_type": "UI类型/发布类型(ui_type)", + "error_type": "错误类型(error_type)", + "tips_content": "提示内容(tips_content)", + "record_content": "记录内容(record_content)", + "business_type": "业务类型(business_type)", + 
"access_content_key": "访问内容key(access_content_key)", + "access_content_type": "访问内容类型(access_content_type)", + "range_type": "范围类型(range_type)", + "message_local_type": "消息类型(message_local_type)", + "message_origin_source": "消息来源标识(message_origin_source)", + + # 朋友圈(sns)常见拆分字段 + "tid_heigh_bit": "tid 高位拆分字段(heigh_bit,字段名原样保留)", + "tid_low_bit": "tid 低位拆分字段(low_bit)", + "break_flag": "断点/分页标志(0/1;用于分页/增量拉取水位)", + # WCDB 压缩控制 "WCDB_CT_message_content": "WCDB压缩标记(message_content列)", "WCDB_CT_source": "WCDB压缩标记(source列)", } +# 表级字段含义覆盖(优先级高于 KNOWN_FIELD_MEANINGS) +# key: table_name.lower() ; value: { field_name.lower(): meaning } +KNOWN_FIELD_MEANINGS_BY_TABLE: dict[str, dict[str, str]] = { + # contact.db + "contact": { + "id": "序号(通常与 name2id.rowid 对应)", + "username": "联系人的 wxid / 群聊 username(可唯一确定联系人)", + "local_type": "联系人类型:1=通讯录好友/公众号/已添加群聊;2=未添加到通讯录的群聊;3=群中的陌生人;5=企业微信好友;6=群聊中的陌生企业微信好友", + "alias": "微信号(微信里显示的微信号)", + "flag": "联系人标志位(需转二进制;常见:第7位星标,第12位置顶,第17位屏蔽朋友圈,第24位仅聊天)", + "head_img_md5": "头像md5(可通过 head_image.db 查询对应头像)", + "verify_flag": "认证标志(公众号/企业等;非0常表示公众号)", + "description": "描述字段(样本为空;用途待确认)", + "extra_buffer": "好友扩展信息(protobuf;包含性别/地区/签名等,本项目解析 gender/signature/country/province/city/source_scene)", + "chat_room_notify": "群消息通知相关设置(样本为0/1;疑似免打扰/通知开关,待确认)", + "is_in_chat_room": "群聊状态标记(样本为1/2;具体含义待确认)", + "chat_room_type": "群聊类型/标志(样本为0/2;具体含义待确认)", + }, + "stranger": { + "id": "序号(通常与 name2id.rowid 对应)", + "username": "联系人的 wxid / 群聊 username", + "local_type": "联系人类型:1=通讯录好友/公众号/已添加群聊;2=未添加到通讯录的群聊;3=群中的陌生人;5=企业微信好友;6=群聊中的陌生企业微信好友", + "alias": "微信号(微信里显示的微信号)", + "flag": "联系人标志位(需转二进制;常见:第7位星标,第12位置顶,第17位屏蔽朋友圈,第24位仅聊天)", + "head_img_md5": "头像md5(可通过 head_image.db 查询对应头像)", + "verify_flag": "认证标志(公众号/企业等;非0常表示公众号)", + "description": "描述字段(样本为空;用途待确认)", + "extra_buffer": "好友扩展信息(protobuf;包含性别/地区/签名等,本项目解析 gender/signature/country/province/city/source_scene)", + "chat_room_notify": "群消息通知相关设置(样本为0/1;疑似免打扰/通知开关,待确认)", + "is_in_chat_room": 
"群聊状态标记(样本为1/2;具体含义待确认)", + "chat_room_type": "群聊类型/标志(样本为0/2;具体含义待确认)", + }, + "biz_info": { + "id": "序号(与 name2id.rowid 对应,可唯一确定一个公众号)", + "username": "公众号username(原始 wxid/gh_xxx)", + "type": "公众号类型:1=公众号,0=订阅号(资料来源:万字长文)", + "accept_type": "接收类型(accept_type;含义待确认,样本常为0)", + "child_type": "子类型(child_type;含义待确认,样本常为0)", + "version": "版本号(含义待确认,样本常为0)", + "external_info": "公众号详细信息(常见 JSON;含底部菜单/交互配置等)", + "brand_info": "公众号品牌/菜单信息(常见 JSON:urls 等)", + "brand_list": "品牌列表/关联列表(格式待确认,可能为 JSON)", + "brand_flag": "品牌/能力标志位(含义待确认)", + "belong": "归属字段(含义待确认)", + "home_url": "主页链接(含义待确认)", + }, + "chat_room": { + "id": "序号(与 name2id.rowid 对应)", + "username": "群聊的username(xxx@chatroom)", + "owner": "群主username", + "ext_buffer": "群成员username与群昵称(protobuf:ChatRoomData.members 等)", + }, + "chat_room_info_detail": { + "room_id_": "序号(与 name2id.rowid 对应)", + "username_": "群聊的username(xxx@chatroom)", + "announcement_": "群公告(文本)", + "announcement_editor_": "群公告编辑者username", + "announcement_publish_time_": "群公告发布时间(时间戳)", + "chat_room_status_": "群状态/标志位(bitmask;样本常见 0x80000 等,具体位含义待确认)", + "xml_announcement_": "群公告(XML,可解析更多信息:图片/文件等)", + "ext_buffer_": "扩展信息(protobuf-like;样本长度较小,具体结构待确认)", + }, + "chatroom_member": { + "room_id": "群聊ID(对应 name2id.rowid)", + "member_id": "群成员ID(对应 name2id.rowid)", + }, + "contact_label": { + "label_id_": "标签ID", + "label_name_": "标签名称", + "sort_order_": "排序", + }, + + # message_*.db / biz_message_*.db + "msg_*": { + "local_id": "自增id(本地)", + "server_id": "服务端id(每条消息唯一)", + "local_type": "消息类型(local_type;低32位=type,高32位=sub_type;可用 (local_type & 0xFFFFFFFF) 与 (local_type >> 32) 拆分)", + "sort_seq": "排序字段(单会话内消息排序;样本≈create_time*1000)", + "real_sender_id": "发送者id(可通过 Name2Id.rowid 映射到 username)", + "create_time": "秒级时间戳", + "server_seq": "服务端接收顺序id(server_seq)", + "message_content": "消息内容:local_type=1 时为文本,其它类型多为 Zstandard 压缩后的XML/二进制", + "compress_content": "压缩后的内容(多见 Zstandard)", + "packed_info_data": "protobuf扩展信息(图片文件名/语音转文字/合并转发文件夹名等)", + }, + 
"name2id": { + "is_session": "是否会话名标记(1=会话/聊天对象;0=其它映射,如群成员ID)", + }, + + # session.db + "sessiontable": { + "type": "会话类型(样本为0;枚举待确认)", + "status": "会话状态(样本为0;枚举待确认)", + "unread_first_pat_msg_local_id": "未读拍一拍消息的本地ID(样本为0;含义待确认)", + "unread_first_pat_msg_sort_seq": "未读拍一拍消息的排序序号(样本为0;含义待确认)", + }, + "session_last_message": { + "username": "会话username", + "sort_seq": "最后一条消息sort_seq", + "local_id": "最后一条消息local_id", + "create_time": "最后一条消息create_time(秒级时间戳)", + "local_type": "最后一条消息local_type", + "sender_username": "最后一条消息发送者username", + "preview": "最后一条消息预览文本(用于会话列表)", + "db_stem": "来源消息库分片名(如 message_0)", + "table_name": "来源消息表名(如 Msg_xxx)", + "built_at": "构建时间(Unix时间戳,秒)", + }, + + # 本项目 chat_search_index.db + "message_fts": { + "text": "可检索文本(索引内容)", + "render_type": "渲染类型(text/system/image/voice/video/emoji/...,本项目定义)", + "db_stem": "来源消息库分片名(如 message_0)", + "table_name": "来源消息表名(如 Msg_xxx)", + "sender_username": "发送者username(解码后)", + }, + + # emoticon.db + "knonstoreemoticontable": { + "type": "表情类型(样本均为3;枚举含义待确认)", + "caption": "表情说明/标题(caption)", + "product_id": "表情包/产品ID(product_id)", + "aes_key": "AES密钥(用于CDN下载解密)", + "auth_key": "鉴权key(CDN下载)", + "extern_md5": "外部资源md5(extern_md5)", + }, + "kstoreemoticonpackagetable": { + "package_id_": "表情包ID(package_id)", + "package_name_": "表情包名称", + "payment_status_": "支付状态(payment_status)", + "download_status_": "下载状态(download_status)", + "install_time_": "安装时间(时间戳)", + "remove_time_": "移除时间(时间戳)", + "sort_order_": "排序", + "introduction_": "简介(introduction)", + "full_description_": "完整描述(full_description)", + "copyright_": "版权信息", + "author_": "作者信息", + "store_icon_url_": "商店图标URL", + "panel_url_": "面板/详情页URL", + }, + "kstoreemoticonfilestable": { + "package_id_": "表情包ID(package_id)", + "md5_": "表情md5", + "type_": "表情类型(type)", + "sort_order_": "排序", + "emoticon_size_": "表情文件大小(字节)", + "emoticon_offset_": "表情文件偏移(用于包内定位)", + "thumb_size_": "缩略图大小(字节)", + "thumb_offset_": "缩略图偏移(用于包内定位)", + }, + + # favorite.db + 
"fav_db_item": { + "version": "版本号(收藏条目结构/内容版本;样本为87)", + "fromusr": "来源用户username(收藏来源)", + "realchatname": "来源群聊username(若收藏来源于群聊)", + "upload_error_code": "上传错误码", + "trans_res_error_code": "资源转换错误码(trans_res_error_code)", + }, + + # general.db + "ilink_voip": { + "wx_chatroom_": "群聊username(xxx@chatroom)", + "millsecond_": "毫秒时间戳/时间标记(字段名推断)", + "group_id_": "ILink group_id(字段名推断)", + "room_id_": "房间ID(字段名推断)", + "room_key_": "房间key(字段名推断)", + "route_id_": "路由ID(字段名推断)", + "voice_status_": "通话状态(字段名推断)", + "talker_create_user_": "发起者username(字段名推断)", + "not_friend_user_list_": "非好友成员列表(字段名推断)", + "members_": "成员列表(字段名推断)", + "is_ilink_": "是否ilink通话(字段名推断)", + "ever_quit_chatroom_": "是否曾退出群聊(字段名推断)", + }, + "fmessagetable": { + "user_name_": "用户名(好友验证/陌生人会话用户名)", + "type_": "消息类型(好友验证/系统消息;样本为37)", + "timestamp_": "时间戳", + "encrypt_user_name_": "加密用户名", + "content_": "内容(验证消息/系统提示等)", + "is_sender_": "是否发送方(is_sender)", + "ticket_": "票据/验证ticket", + "scene_": "来源场景码(scene)", + "fmessage_detail_buf_": "详细信息(protobuf-like;包含验证文案/来源等信息)", + }, + "handoff_remind_v0": { + "item_id": "条目ID(item_id)", + "head_icon": "图标(URL/资源标识)", + "title": "标题", + "desc_type": "描述类型(desc_type)", + "create_time": "创建时间(时间戳)", + "start_time": "开始时间(时间戳)", + "expire_time": "过期时间(时间戳)", + "biz_type": "业务类型(biz_type)", + "version": "版本号(version)", + "url": "跳转URL", + "extra_info": "扩展信息(extra_info)", + }, + "transfertable": { + "transfer_id": "转账ID(transfer_id)", + "transcation_id": "交易ID(transaction_id,原字段拼写保留)", + "message_server_id": "关联消息server_id", + "second_message_server_id": "关联第二条转账消息server_id(可在 message_*.db::Msg_* 表的 server_id 对应到)", + "session_name": "会话username", + "pay_sub_type": "支付子类型(pay_sub_type)", + "pay_receiver": "收款方username", + "pay_payer": "付款方username", + "begin_transfer_time": "转账开始时间(时间戳)", + "last_modified_time": "最后修改时间(时间戳)", + "invalid_time": "失效时间(时间戳)", + "last_update_time": "最后更新时间(时间戳)", + "delay_confirm_flag": "延迟确认标志(delay_confirm_flag)", + 
"bubble_clicked_flag": "气泡点击标志(bubble_clicked_flag)", + }, + + # bizchat.db + "chat_group": { + "brand_user_name": "品牌/公众号username(brand_user_name)", + "bit_flag": "位标志/开关(bit_flag)", + "chat_name": "群组名称(chat_name)", + "user_list": "成员列表(常见为 ; 分隔的 user_id/username 列表;待确认)", + "reserved0": "保留字段(reserved0)", + "reserved1": "保留字段(reserved1)", + "reserved2": "保留字段(reserved2)", + "reserved3": "保留字段(reserved3)", + }, + "user_info": { + "brand_user_name": "品牌/公众号username(brand_user_name)", + "bit_flag": "位标志/开关(bit_flag)", + "reserved0": "保留字段(reserved0)", + "reserved1": "保留字段(reserved1)", + "reserved2": "保留字段(reserved2)", + "reserved3": "保留字段(reserved3)", + }, + + # sns.db + "snsmessage_tmp3": { + "from_username": "来源用户username(评论/点赞发起者)", + "from_nickname": "来源用户昵称(评论/点赞发起者)", + "to_username": "目标用户username(被回复/被@的人)", + "to_nickname": "目标用户昵称(被回复/被@的人)", + "comment_flag": "评论标志位(样本为0;具体 bit 含义待确认)", + }, + "snsadtimeline": { + "ad_content": "广告内容(ad_content,格式待确认)", + "remind_source_info": "提醒来源信息(remind_source_info,格式待确认)", + "remind_self_info": "提醒自身信息(remind_self_info,格式待确认)", + "extra_data": "扩展数据(extra_data,格式待确认)", + }, + + # unspportmsg.db + "unsupportmessage": { + "from_user": "发送者username", + "to_user": "接收者username", + "msg_source": "消息来源附加信息(msg_source)", + }, + + # contact.db + "openim_wording": { + "wording": "文案/提示语(wording)", + "pinyin": "拼音(pinyin)", + }, + + # message_*.db / biz_message_*.db (WCDB) + "wcdb_builtin_compression_record": { + "tablename": "表名(tableName)", + "columns": "被WCDB压缩的列列表(columns)", + }, + + # general.db + "revokemessage": { + "to_user_name": "会话username(撤回消息所在会话)", + "message_type": "消息类型(local_type)", + "at_user_list": "@用户列表(字段名推断)", + }, + "wcfinderlivestatus": { + "finder_username": "视频号作者username(finder_username)", + "charge_flag": "是否付费/收费标志(charge_flag)", + }, + "new_tips": { + "disable": "禁用标志(disable)", + "new_tips_content": "提示内容(new_tips_content)", + }, + "redenvelopetable": { + "sender_user_name": "红包发送者username", + 
"hb_type": "红包类型(hb_type)", + }, + "wacontact": { + "external_info": "外部信息(JSON;常见包含 BindWxaInfo/RegisterSource/WxaAppDynamic 等)", + "contact_pack_data": "联系人打包数据(protobuf-like;常含昵称/品牌名等)", + "wx_app_opt": "小程序/应用选项(wx_app_opt;位标志/开关;样本为0)", + }, + + # emoticon.db + "kstoreemoticoncaptionstable": { + "package_id_": "表情包ID(package_id)", + "md5_": "表情md5", + "language_": "语言(language)", + "caption_": "文案/标题(caption)", + }, +} + + +KNOWN_TABLE_DESCRIPTIONS: dict[str, str] = { + # contact.db + "biz_info": "公众号信息表(公众号类型/菜单/品牌信息等)", + "chat_room": "群聊基础信息表(群主/成员列表等扩展在 ext_buffer)", + "chat_room_info_detail": "群聊详细信息表(群公告/群状态等)", + "chatroom_member": "群聊成员映射表(room_id ↔ member_id)", + "contact": "联系人核心表(好友/群/公众号等基础信息)", + "contact_label": "联系人标签表(标签ID与名称)", + "name2id": "用户名(wxid/群id@chatroom 等)到内部数值ID映射表", + "encrypt_name2id": "加密用户名到内部数值ID映射表", + "stranger": "陌生人/临时会话信息表", + "ticket_info": "票据/会话票据信息表(用途待进一步确认)", + "stranger_ticket_info": "陌生人票据信息表(用途待进一步确认)", + "oplog": "操作/同步日志表(增量同步相关)", + "openim_appid": "OpenIM 应用ID表(企业微信/互通相关)", + "openim_acct_type": "OpenIM 账号类型表", + "openim_wording": "OpenIM 文案/提示语表", + + # session.db + "sessiontable": "会话列表表(会话展示/未读/置顶/隐藏等)", + "sessiondeletetable": "会话删除记录表", + "sessionunreadlisttable_1": "未读会话列表表(分表)", + "sessionunreadstattable_1": "未读统计表(分表)", + "sessionnocontactinfotable": "会话表(无联系人信息的会话)", + "session_last_message": "会话最后一条消息缓存/索引表(版本/实现差异)", + + # message_*.db / biz_message_*.db + "timestamp": "时间戳/增量同步辅助表", + "deleteinfo": "删除消息记录表(删除/撤回相关)", + "deleteresinfo": "删除资源记录表(资源删除相关)", + "sendinfo": "发送相关信息表(发送状态/队列等)", + "historysysmsginfo": "历史系统消息表", + "historyaddmsginfo": "历史新增消息表", + + # message_resource.db + "chatname2id": "会话名 → 会话ID 映射表(资源库维度)", + "sendername2id": "发送者名 → 发送者ID 映射表(资源库维度)", + "messageresourceinfo": "消息资源索引表(按消息/会话定位资源)", + "messageresourcedetail": "消息资源明细表(md5/路径/大小等)", + "ftsrange": "FTS 范围信息表(搜索/索引辅助)", + "ftsdeleteinfo": "FTS 删除记录表(索引维护)", + + # media_0.db + "voiceinfo": "语音数据表(voice_data 等)", + + # 
hardlink.db + "db_info": "WCDB Key-Value 元信息表(FTS构建状态/版本/扫描时间等)", + "dir2id": "目录 → ID 映射表(硬链接索引)", + "image_hardlink_info_v4": "图片硬链接索引表(v4)", + "file_hardlink_info_v4": "文件硬链接索引表(v4)", + "video_hardlink_info_v4": "视频硬链接索引表(v4)", + "file_checkpoint_v4": "文件索引检查点(增量)", + "video_checkpoint_v4": "视频索引检查点(增量)", + "talker_checkpoint_v4": "会话索引检查点(增量)", + + # *_fts.db / message_fts.db + "table_info": "WCDB Key-Value 元信息表(索引范围/水位/时间戳等)", + + # head_image.db + "head_image": "头像缓存表(头像 md5/二进制缩略图等)", + + # favorite.db + "buff": "WCDB Key-Value 缓冲/配置表(收藏等模块的缓存)", + "fav_db_item": "收藏条目表", + "fav_tag_db_item": "收藏标签表", + "fav_bind_tag_db_item": "收藏条目与标签绑定表", + + # emoticon.db + "kcustomemoticonordertable": "自定义表情排序表(md5 列表)", + "kexpressrecentuseeemoticontable": "最近使用表情记录(Key-Value)", + "knonstoreemoticontable": "非商店表情表(用户收藏/外部表情资源;含CDN下载信息)", + "kstoreemoticonpackagetable": "商店表情包信息表(package 元数据)", + "kstoreemoticoncaptionstable": "商店表情文案表(多语言 caption)", + + # unspportmsg.db + "unsupportmessage": "不支持消息表(PC端无法直接展示的消息类型)", + + # bizchat.db + "chat_group": "BizChat 群组表(企业微信/公众号群组信息)", + "user_info": "BizChat 用户表(企业微信/公众号用户信息)", + "my_user_info": "BizChat 当前账号映射表(brand_user_name ↔ user_id)", + + # general.db + "forwardrecent": "最近转发会话记录表(username/时间)", + "transfertable": "转账记录表(转账ID/关联消息/状态等)", + "redenvelopetable": "红包记录表(关联消息/状态等)", + "ilink_voip": "iLink/群通话相关表(房间ID/成员/状态等)", + "fmessagetable": "好友验证/陌生人消息表(FMessage)", + "handoff_remind_v0": "跨设备接力/提醒项表(handoff_remind_v0)", + "biz_pay_status": "公众号文章付费状态表(url_id/is_paid 等)", + "biz_subscribe_status": "公众号订阅模板状态表(template_id/is_subscribe)", + "new_tips": "新提示/新功能提示表", + "reddot": "小红点提示表", + "reddot_record": "小红点记录表", + "wcfinderlivestatus": "视频号直播状态表", + "teenager_apply_access_agree_info": "青少年模式访问同意记录表", + + # chat_search_index.db(本项目生成) + "meta": "索引元数据表(schema_version/构建时间等)", + "message_fts": "全文索引表(fts5,用于搜索)", +} + + def simple_heuristic(field_name: str, table_name: str) -> str: """简易兜底启发式,避免完全空白""" f = 
field_name.lower() @@ -243,10 +730,17 @@ def simple_heuristic(field_name: str, table_name: str) -> str: def compute_field_meaning(analyzer, table_name: str, field_name: str) -> str: - # 优先精确已知映射 + lt = table_name.lower() + lf = field_name.lower() + + # 1) 表级覆盖优先 + tmap = KNOWN_FIELD_MEANINGS_BY_TABLE.get(lt) + if tmap and lf in tmap: + return tmap[lf] + + # 2) 全局精确映射 if field_name in KNOWN_FIELD_MEANINGS: return KNOWN_FIELD_MEANINGS[field_name] - lf = field_name.lower() if lf in KNOWN_FIELD_MEANINGS: return KNOWN_FIELD_MEANINGS[lf] @@ -266,13 +760,44 @@ def compute_field_meaning(analyzer, table_name: str, field_name: str) -> str: def guess_table_desc(analyzer, table_name: str) -> str: + # 简易猜测(优先命中已知表名) + tl = table_name.lower() + + # 已知表名(大小写不敏感) + if tl in KNOWN_TABLE_DESCRIPTIONS: + return KNOWN_TABLE_DESCRIPTIONS[tl] + + # SQLite / WCDB 内置 + if tl == "sqlite_sequence": + return "SQLite 自增序列表" + if tl.startswith("wcdb"): + return "WCDB 内置表(压缩/元数据等)" + + # FTS 内部表(多为 *_data/_idx/_config/_content/_docsize/_aux) + if "fts" in tl: + if tl.endswith("_data"): + return "全文检索(FTS)内部数据表" + if tl.endswith("_idx"): + return "全文检索(FTS)内部索引表" + if tl.endswith("_config"): + return "全文检索(FTS)内部配置表" + if tl.endswith("_content"): + return "全文检索(FTS)内部内容表" + if tl.endswith("_docsize"): + return "全文检索(FTS)内部文档长度表" + if tl.endswith("_aux") or "_aux_" in tl: + return "全文检索(FTS)辅助表" + return "全文检索(FTS)表/索引表" + + # 借助分析器的启发式(如果可用,且不是“未知功能表”) if analyzer is not None: try: - return analyzer.guess_table_function(table_name) + guessed = analyzer.guess_table_function(table_name) + if isinstance(guessed, str) and guessed.strip() and guessed.strip() != "未知功能表": + return guessed.strip() except Exception: pass - # 简易猜测 - tl = table_name.lower() + if tl == "msg" or tl.startswith("msg_"): return "某会话的消息表(聊天消息数据)" if "name2id" in tl: @@ -281,10 +806,18 @@ def guess_table_desc(analyzer, table_name: str) -> str: return "联系人/群聊信息表" if "session" in tl: return "会话信息/未读统计表" - if "fts" in tl: - return 
"全文检索(FTS)内部表" if "resource" in tl: return "消息资源/附件索引表" + if "voice" in tl: + return "语音相关数据表" + if "image" in tl or "img" in tl: + return "图片相关数据表" + if "video" in tl: + return "视频相关数据表" + if "file" in tl: + return "文件相关数据表" + if "sns" in tl: + return "朋友圈相关数据表" return "未知功能表" @@ -301,13 +834,38 @@ def fill_config(template: dict) -> dict: # 数据库描述补齐 db_desc_map = build_db_descriptions() + def guess_db_desc(db_name: str) -> str: + # 1) 精确映射优先 + if db_name in db_desc_map: + return db_desc_map[db_name] + + # 2) 常见分片/变体:message_{n}.db + m = re.match(r"^message_(\d+)$", db_name) + if m: + return f"聊天记录数据库分片(message_{m.group(1)}.db)" + + # 3) 公众号/企业微信消息库:biz_message_{n}.db(结构通常同 message_{n}.db) + m = re.match(r"^biz_message_(\d+)$", db_name) + if m: + return f"公众号消息记录数据库(biz_message_{m.group(1)}.db,结构通常同 message_{m.group(1)}.db)" + + # 4) FTS/索引类库:*_fts.db + if db_name.endswith("_fts"): + return "全文索引数据库(FTS)" + + # 5) 退化到 base 前缀 + base = db_name.split("_", 1)[0] + if base in db_desc_map: + return db_desc_map[base] + + return "未知用途数据库" + databases = template.get("databases", {}) for db_name, db in databases.items(): if isinstance(db, dict): # 数据库级描述 if not db.get("description"): - # 用已知映射或尝试推断 - db["description"] = db_desc_map.get(db_name, db.get("description", "")) or "未知用途数据库" + db["description"] = guess_db_desc(db_name) # 遍历表 tables = db.get("tables", {}) @@ -378,4 +936,4 @@ def main(): if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/tools/key_wheels/README.md b/tools/key_wheels/README.md new file mode 100644 index 0000000..fe0194f --- /dev/null +++ b/tools/key_wheels/README.md @@ -0,0 +1,2 @@ +> 这里放wx_key模块的python预编译wheel:https://github.com/H3CoF6/py_wx_key/releases/ +> 解压放入即可 \ No newline at end of file diff --git a/tools/key_wheels/wx_key-1.1.0-cp310-cp310-win_amd64.whl b/tools/key_wheels/wx_key-1.1.0-cp310-cp310-win_amd64.whl new file mode 100644 index 0000000..3d8ae63 Binary files /dev/null and 
b/tools/key_wheels/wx_key-1.1.0-cp310-cp310-win_amd64.whl differ diff --git a/tools/key_wheels/wx_key-1.1.0-cp311-cp311-win_amd64.whl b/tools/key_wheels/wx_key-1.1.0-cp311-cp311-win_amd64.whl new file mode 100644 index 0000000..c628766 Binary files /dev/null and b/tools/key_wheels/wx_key-1.1.0-cp311-cp311-win_amd64.whl differ diff --git a/tools/key_wheels/wx_key-1.1.0-cp312-cp312-win_amd64.whl b/tools/key_wheels/wx_key-1.1.0-cp312-cp312-win_amd64.whl new file mode 100644 index 0000000..2a39ad4 Binary files /dev/null and b/tools/key_wheels/wx_key-1.1.0-cp312-cp312-win_amd64.whl differ diff --git a/tools/key_wheels/wx_key-1.1.0-cp313-cp313-win_amd64.whl b/tools/key_wheels/wx_key-1.1.0-cp313-cp313-win_amd64.whl new file mode 100644 index 0000000..f00eccc Binary files /dev/null and b/tools/key_wheels/wx_key-1.1.0-cp313-cp313-win_amd64.whl differ diff --git a/tools/key_wheels/wx_key-1.1.0-cp314-cp314-win_amd64.whl b/tools/key_wheels/wx_key-1.1.0-cp314-cp314-win_amd64.whl new file mode 100644 index 0000000..26cffc4 Binary files /dev/null and b/tools/key_wheels/wx_key-1.1.0-cp314-cp314-win_amd64.whl differ diff --git a/tools/test_image_api.py b/tools/test_image_api.py index cac32d9..bdd6e72 100644 --- a/tools/test_image_api.py +++ b/tools/test_image_api.py @@ -1,9 +1,10 @@ #!/usr/bin/env python3 """测试图片 API""" +import os import requests r = requests.get( - 'http://localhost:8000/api/chat/media/image', + f'http://localhost:{os.environ.get("WECHAT_TOOL_PORT", "10392")}/api/chat/media/image', params={ 'account': 'wxid_v4mbduwqtzpt22', 'md5': '8753fcd3b1f8c4470b53551e13c5fbc1', diff --git a/tools/weflow_wasm_keystream.js b/tools/weflow_wasm_keystream.js new file mode 100644 index 0000000..9125bb0 --- /dev/null +++ b/tools/weflow_wasm_keystream.js @@ -0,0 +1,122 @@ +// Generate WeChat/WeFlow WxIsaac64 keystream via WeFlow's WASM module. +// +// Usage: +// node tools/weflow_wasm_keystream.js <key> <size> +// +// Prints a base64-encoded keystream to stdout (no extra logs). 
+ +const fs = require('fs') +const path = require('path') +const vm = require('vm') + +function usageAndExit() { + process.stderr.write('Usage: node tools/weflow_wasm_keystream.js <key> <size>\\n') + process.exit(2) +} + +const key = String(process.argv[2] || '').trim() +const size = Number(process.argv[3] || 0) + +if (!key || !Number.isFinite(size) || size <= 0) usageAndExit() + +const basePath = path.join(__dirname, '..', 'WeFlow', 'electron', 'assets', 'wasm') +const wasmPath = path.join(basePath, 'wasm_video_decode.wasm') +const jsPath = path.join(basePath, 'wasm_video_decode.js') + +if (!fs.existsSync(wasmPath) || !fs.existsSync(jsPath)) { + process.stderr.write(`WeFlow WASM assets not found: ${basePath}\\n`) + process.exit(1) +} + +const wasmBinary = fs.readFileSync(wasmPath) +const jsContent = fs.readFileSync(jsPath, 'utf8') + +let capturedKeystream = null +let resolveInit +let rejectInit +const initPromise = new Promise((res, rej) => { + resolveInit = res + rejectInit = rej +}) + +const mockGlobal = { + console: { log: () => {}, error: () => {} }, // keep stdout clean + Buffer, + Uint8Array, + Int8Array, + Uint16Array, + Int16Array, + Uint32Array, + Int32Array, + Float32Array, + Float64Array, + BigInt64Array, + BigUint64Array, + Array, + Object, + Function, + String, + Number, + Boolean, + Error, + Promise, + require, + process, + setTimeout, + clearTimeout, + setInterval, + clearInterval, +} + +mockGlobal.Module = { + onRuntimeInitialized: () => resolveInit(), + wasmBinary, + print: () => {}, + printErr: () => {}, +} + +mockGlobal.self = mockGlobal +mockGlobal.self.location = { href: jsPath } +mockGlobal.WorkerGlobalScope = function () {} +mockGlobal.VTS_WASM_URL = `file://${wasmPath}` + +mockGlobal.wasm_isaac_generate = (ptr, n) => { + const buf = new Uint8Array(mockGlobal.Module.HEAPU8.buffer, ptr, n) + capturedKeystream = new Uint8Array(buf) // copy view +} + +try { + const context = vm.createContext(mockGlobal) + new vm.Script(jsContent, { filename: jsPath 
}).runInContext(context) +} catch (e) { + rejectInit(e) +} + +;(async () => { + try { + await initPromise + + if (!mockGlobal.Module.WxIsaac64 && mockGlobal.Module.asm && mockGlobal.Module.asm.WxIsaac64) { + mockGlobal.Module.WxIsaac64 = mockGlobal.Module.asm.WxIsaac64 + } + if (!mockGlobal.Module.WxIsaac64) { + throw new Error('WxIsaac64 not found in WASM module') + } + + capturedKeystream = null + const isaac = new mockGlobal.Module.WxIsaac64(key) + isaac.generate(size) + if (isaac.delete) isaac.delete() + + if (!capturedKeystream) throw new Error('Failed to capture keystream') + + const out = Buffer.from(capturedKeystream) + // Match WeFlow worker logic: reverse the captured Uint8Array. + out.reverse() + process.stdout.write(out.toString('base64')) + } catch (e) { + process.stderr.write(String(e && e.stack ? e.stack : e) + '\\n') + process.exit(1) + } +})() + diff --git a/uv.lock b/uv.lock index fd3b9b3..3dc6aee 100644 --- a/uv.lock +++ b/uv.lock @@ -230,6 +230,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, ] +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, 
upload-time = "2025-04-24T22:06:20.566Z" }, +] + [[package]] name = "httptools" version = "0.6.4" @@ -259,6 +272,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8", size = 87682, upload-time = "2024-10-16T19:44:46.46Z" }, ] +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + [[package]] name = "idna" version = "3.10" @@ -268,6 +296,12 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, ] +[[package]] +name = "jieba" +version = "0.42.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/cb/18eeb235f833b726522d7ebed54f2278ce28ba9438e3135ab0278d9792a2/jieba-0.42.1.tar.gz", hash = "sha256:055ca12f62674fafed09427f176506079bc135638a14e23e25be909131928db2", size = 19214172, upload-time = "2020-01-20T14:27:23.5Z" } + 
[[package]] name = "loguru" version = "0.7.3" @@ -498,6 +532,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a7/c4/3a096c6e701832443b957b9dac18a163103360d0c7f5842ca41695371148/pyinstaller_hooks_contrib-2025.11-py3-none-any.whl", hash = "sha256:777e163e2942474aa41a8e6d31ac1635292d63422c3646c176d584d04d971c34", size = 449478, upload-time = "2025-12-23T12:59:35.987Z" }, ] +[[package]] +name = "pypinyin" +version = "0.55.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b4/a4/784cf98c09e0dc22776b0d7d8a4a5b761218bcae4608c2416ce1e167c8af/pypinyin-0.55.0.tar.gz", hash = "sha256:b5711b3a0c6f76e67408ec6b2e3c4987a3a806b7c528076e7c7b86fcf0eaa66b", size = 839836, upload-time = "2025-07-20T12:01:50.657Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/7b/4cabc76fcc21c3c7d5c671d8783984d30ac9d3bb387c4ba784fca3cdfa3a/pypinyin-0.55.0-py2.py3-none-any.whl", hash = "sha256:d53b1e8ad2cdb815fb2cb604ed3123372f5a28c6f447571244aca36fc62a286f", size = 840203, upload-time = "2025-07-20T12:01:48.535Z" }, +] + [[package]] name = "python-dotenv" version = "1.1.0" @@ -829,21 +872,26 @@ wheels = [ [[package]] name = "wechat-decrypt-tool" -version = "0.1.0" +version = "1.3.0" source = { editable = "." 
} dependencies = [ { name = "aiofiles" }, { name = "cryptography" }, { name = "fastapi" }, + { name = "httpx" }, + { name = "jieba" }, { name = "loguru" }, + { name = "packaging" }, { name = "pilk" }, { name = "psutil" }, { name = "pycryptodome" }, + { name = "pypinyin" }, { name = "python-multipart" }, { name = "pywin32", marker = "sys_platform == 'win32'" }, { name = "requests" }, { name = "typing-extensions" }, { name = "uvicorn", extra = ["standard"] }, + { name = "wx-key" }, { name = "zstandard" }, ] @@ -857,16 +905,21 @@ requires-dist = [ { name = "aiofiles", specifier = ">=23.2.1" }, { name = "cryptography", specifier = ">=41.0.0" }, { name = "fastapi", specifier = ">=0.104.0" }, + { name = "httpx" }, + { name = "jieba", specifier = ">=0.42.1" }, { name = "loguru", specifier = ">=0.7.0" }, + { name = "packaging" }, { name = "pilk", specifier = ">=0.2.4" }, { name = "psutil", specifier = ">=7.0.0" }, { name = "pycryptodome", specifier = ">=3.23.0" }, { name = "pyinstaller", marker = "extra == 'build'", specifier = ">=6.0.0" }, + { name = "pypinyin", specifier = ">=0.53.0" }, { name = "python-multipart", specifier = ">=0.0.6" }, { name = "pywin32", marker = "sys_platform == 'win32'", specifier = ">=310" }, { name = "requests", specifier = ">=2.32.4" }, { name = "typing-extensions", specifier = ">=4.8.0" }, { name = "uvicorn", extras = ["standard"], specifier = ">=0.24.0" }, + { name = "wx-key", specifier = ">=1.1.0" }, { name = "zstandard", specifier = ">=0.23.0" }, ] provides-extras = ["build"] @@ -880,6 +933,17 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083, upload-time = "2024-12-07T15:28:26.465Z" }, ] +[[package]] +name = "wx-key" +version = "1.1.0" +source = { registry = "tools/key_wheels" } +wheels = [ + { path = 
"wx_key-1.1.0-cp311-cp311-win_amd64.whl" }, + { path = "wx_key-1.1.0-cp312-cp312-win_amd64.whl" }, + { path = "wx_key-1.1.0-cp313-cp313-win_amd64.whl" }, + { path = "wx_key-1.1.0-cp314-cp314-win_amd64.whl" }, +] + [[package]] name = "zstandard" version = "0.25.0"