Mirror of https://github.com/KnugiHK/WhatsApp-Chat-Exporter.git, synced 2026-01-29 05:40:42 +00:00
Compare commits
462 Commits
37  .github/ISSUE_TEMPLATE/bug_report.md  vendored  Normal file
@@ -0,0 +1,37 @@
---
name: Bug report
about: Create a report to help us improve
title: "[BUG]"
labels: ''
assignees: ''

---

# Must have
- WhatsApp version: [WhatsApp version]
- OS: [Android/iOS] - [version]
- Platform: [Linux/Windows/MacOS]
- Exporter's branch and version: [main/dev] - [exporter version]

**Describe the bug**
A clear and concise description of what the bug is.

If it is an error raised by Python, please also provide the traceback
```
[traceback here]
```

# Nice to have

**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Additional context**
Add any other context about the problem here.
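One way to capture the traceback the template asks for is to keep a copy of the exporter's full output. A minimal sketch, assuming the `wtsexporter` command from this project is installed and an Android export is being run:

```sh
# Keep everything the exporter prints, including any Python traceback,
# in error.log so it can be pasted into the bug report.
wtsexporter -a 2>&1 | tee error.log
```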
17  .github/ISSUE_TEMPLATE/feature_request.md  vendored  Normal file
@@ -0,0 +1,17 @@
---
name: Feature request
about: Suggest an idea for this project
title: "[FEATURE]"
labels: ''
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Additional context**
Add any other context or screenshots about the feature request here.
20  .github/docs.html  vendored  Normal file
@@ -0,0 +1,20 @@
<!DOCTYPE html>
<html>
<head>
    <script type="text/javascript">
        destination = {
            "filter": "Filter",
            "date": "Filters#date-filters",
            "chat": "Filters#chat-filter",
            "osl": "Open-Source-Licenses",
            "iose2e": "iOS-Usage#encrypted-iosipados-backup",
            null: ""
        };
        const dest = new URLSearchParams(window.location.search).get('dest');
        window.location.href = `https://github.com/KnugiHK/WhatsApp-Chat-Exporter/wiki/${destination[dest]}`;
    </script>
</head>
<body>
    <p>If the redirection doesn't work, you can find the documentation at <a href="https://github.com/KnugiHK/WhatsApp-Chat-Exporter/wiki">https://github.com/KnugiHK/WhatsApp-Chat-Exporter/wiki</a>.</p>
</body>
</html>
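The page above is a small client-side redirector: the `?dest=` query parameter is looked up in the `destination` table and the browser is sent to the matching wiki section. A sketch of what that looks like in practice, assuming the page is published on the project website that the workflow below deploys:

```sh
# Opening this URL in a browser (xdg-open on Linux) redirects, via the mapping
# above, to .../wiki/iOS-Usage#encrypted-iosipados-backup
xdg-open "https://wts.knugi.dev/docs.html?dest=iose2e"
```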
489  .github/generate-website.js  vendored  Normal file
@@ -0,0 +1,489 @@
const fs = require('fs-extra');
const marked = require('marked');
const path = require('path');
const markedAlert = require('marked-alert');

fs.ensureDirSync('docs');
fs.ensureDirSync('docs/imgs');

if (fs.existsSync('imgs')) {
    fs.copySync('imgs', 'docs/imgs');
}
if (fs.existsSync('.github/docs.html')) {
    fs.copySync('.github/docs.html', 'docs/docs.html');
}

const readmeContent = fs.readFileSync('README.md', 'utf8');

const toc = `<div class="table-of-contents">
<h3>Table of Contents</h3>
<ul>
<li><a href="#intro">Introduction</a></li>
<li><a href="#usage">Usage</a></li>
<li><a href="#todo">To Do</a></li>
<li><a href="#legal">Legal Stuff & Disclaimer</a></li>
</ul>
</div>
`

const generateHTML = (content) =>
`<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="title" content="WhatsApp Chat Exporter">
<meta name="description" content="Export your WhatsApp conversations from Android and iOS/iPadOS devices to HTML, JSON, or text formats. Supports encrypted backups (Crypt12, Crypt14, Crypt15) and customizable templates.">
<meta name="keywords" content="WhatsApp, WhatsApp Chat Exporter, WhatsApp export tool, WhatsApp backup decryption, Crypt12, Crypt14, Crypt15, WhatsApp database parser, WhatsApp chat history, HTML export, JSON export, text export, customizable templates, media handling, vCard import, Python tool, open source, MIT license">
<meta name="robots" content="index, follow">
<meta name="author" content="KnugiHK">
<meta name="license" content="MIT">
<meta name="generator" content="Python">
<title>WhatsApp Chat Exporter</title>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
<style>
:root {
    --primary-color: #128C7E;
    --secondary-color: #25D366;
    --dark-color: #075E54;
    --light-color: #DCF8C6;
    --text-color: #333;
    --light-text: #777;
    --code-bg: #f6f8fa;
    --border-color: #e1e4e8;
}

* {
    margin: 0;
    padding: 0;
    box-sizing: border-box;
}

body {
    font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif;
    line-height: 1.6;
    color: var(--text-color);
    background-color: #f9f9f9;
}

.container {
    max-width: 1200px;
    margin: 0 auto;
    padding: 0 20px;
}

header {
    background-color: var(--primary-color);
    color: white;
    padding: 60px 0 40px;
    text-align: center;
    box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
}

header h1 {
    font-size: 2.8rem;
    margin-bottom: 16px;
}

.badges {
    margin: 20px 0;
    display: flex;
    justify-content: center;
    flex-wrap: wrap;
    gap: 10px;
}

.badge {
    display: inline-block;
    margin: 5px;
}

.tagline {
    font-size: 1.2rem;
    max-width: 800px;
    margin: 0 auto;
    padding: 0 20px;
}

.main-content {
    background: white;
    padding: 40px 0;
    margin: 0;
}

.inner-content {
    padding: 0 30px;
    max-width: 900px;
    margin: 0 auto;
}

h2 {
    color: var(--dark-color);
    margin: 30px 0 15px;
    padding-bottom: 8px;
    border-bottom: 2px solid var(--light-color);
    font-size: 1.8rem;
}

h3 {
    color: var(--dark-color);
    margin: 25px 0 15px;
    font-size: 1.4rem;
}

h4 {
    color: var(--dark-color);
    margin: 20px 0 10px;
    font-size: 1.2rem;
}

p, ul, ol {
    margin-bottom: 16px;
}

ul, ol {
    padding-left: 25px;
}

a {
    color: var(--primary-color);
    text-decoration: none;
}

a:hover {
    text-decoration: underline;
}

.alert {
    background-color: #f8f9fa;
    border-left: 4px solid #f0ad4e;
    padding: 15px;
    margin-bottom: 20px;
    border-radius: 3px;
}

.alert--tip {
    border-color: var(--secondary-color);
    background-color: rgba(37, 211, 102, 0.1);
}

.alert--note {
    border-color: #0088cc;
    background-color: rgba(0, 136, 204, 0.1);
}
.markdown-alert {
    background-color: #f8f9fa;
    border-left: 4px solid #f0ad4e;
    padding: 15px;
    margin-bottom: 20px;
    border-radius: 3px;
}

.markdown-alert-note {
    border-color: #0088cc;
    background-color: rgba(0, 136, 204, 0.1);
}

.markdown-alert-tip {
    border-color: var(--secondary-color);
    background-color: rgba(37, 211, 102, 0.1);
}

.markdown-alert-important {
    border-color: #d9534f;
    background-color: rgba(217, 83, 79, 0.1);
}

.markdown-alert-warning {
    border-color: #f0ad4e;
    background-color: rgba(240, 173, 78, 0.1);
}

.markdown-alert-caution {
    border-color: #ff9800;
    background-color: rgba(255, 152, 0, 0.1);
}

.markdown-alert p {
    margin: 0;
}

.markdown-alert-title {
    font-weight: 600;
    margin-bottom: 8px;
    display: flex;
    align-items: center;
    gap: 8px;
}
pre {
    background-color: var(--code-bg);
    border-radius: 6px;
    padding: 16px;
    overflow-x: auto;
    margin: 16px 0;
    border: 1px solid var(--border-color);
}

code {
    font-family: SFMono-Regular, Consolas, Liberation Mono, Menlo, monospace;
    font-size: 85%;
    background-color: var(--code-bg);
    padding: 0.2em 0.4em;
    border-radius: 3px;
}

pre code {
    padding: 0;
    background-color: transparent;
}

.screenshot {
    max-width: 100%;
    border-radius: 8px;
    box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1);
    margin: 20px 0;
    border: 1px solid var(--border-color);
}

.feature-grid {
    display: grid;
    grid-template-columns: repeat(auto-fill, minmax(280px, 1fr));
    gap: 20px;
    margin: 30px 0;
}

.feature-card {
    background: white;
    border-radius: 8px;
    box-shadow: 0 2px 5px rgba(0, 0, 0, 0.1);
    padding: 20px;
    border: 1px solid var(--border-color);
    transition: transform 0.3s ease;
}

.feature-card:hover {
    transform: translateY(-5px);
    box-shadow: 0 5px 15px rgba(0, 0, 0, 0.1);
}

.feature-icon {
    font-size: 2rem;
    color: var(--primary-color);
    margin-bottom: 15px;
}

.feature-title {
    font-weight: 600;
    margin-bottom: 10px;
}

footer {
    background-color: var(--dark-color);
    color: white;
    text-align: center;
    padding: 30px 0;
    margin-top: 50px;
}

.btn {
    display: inline-block;
    background-color: var(--primary-color);
    color: white;
    padding: 10px 20px;
    border-radius: 4px;
    text-decoration: none;
    font-weight: 500;
    transition: background-color 0.3s ease;
    margin: 5px;
}

.btn:hover {
    background-color: var(--dark-color);
    text-decoration: none;
}

.btn-secondary {
    background-color: white;
    color: var(--primary-color);
    border: 1px solid var(--primary-color);
}

.btn-secondary:hover {
    background-color: var(--light-color);
    color: var(--dark-color);
}

.action-buttons {
    margin: 30px 0;
    text-align: center;
}

.table-of-contents {
    background-color: #f8f9fa;
    border: 1px solid var(--border-color);
    border-radius: 6px;
    padding: 15px 25px;
    margin: 30px 0;
}

.table-of-contents h3 {
    margin-top: 0;
    margin-bottom: 10px;
}

.table-of-contents ul {
    margin-bottom: 0;
}

.help-text {
    color: var(--light-text);
    font-size: 0.9rem;
}

.device-section {
    padding: 15px;
    border: 1px solid var(--border-color);
    border-radius: 6px;
    margin-bottom: 20px;
    background-color: #fff;
}

@media (max-width: 768px) {
    header {
        padding: 40px 0 30px;
    }

    header h1 {
        font-size: 2.2rem;
    }

    .tagline {
        font-size: 1.1rem;
    }

    .feature-grid {
        grid-template-columns: 1fr;
    }
}
</style>
</head>
<body>
<header>
<div class="container">
<h1>WhatsApp Chat Exporter</h1>
<div class="badges">
<a href="https://pypi.org/project/whatsapp-chat-exporter/" class="badge"><img src="https://img.shields.io/pypi/v/whatsapp-chat-exporter?label=Latest%20in%20PyPI" alt="Latest in PyPI"></a>
<a href="https://github.com/KnugiHK/WhatsApp-Chat-Exporter/blob/main/LICENSE" class="badge"><img src="https://img.shields.io/pypi/l/whatsapp-chat-exporter?color=427B93" alt="License MIT"></a>
<a href="https://pypi.org/project/Whatsapp-Chat-Exporter/" class="badge"><img src="https://img.shields.io/pypi/pyversions/Whatsapp-Chat-Exporter" alt="Python"></a>
<a href="https://matrix.to/#/#wtsexporter:matrix.org" class="badge"><img src="https://img.shields.io/matrix/wtsexporter:matrix.org.svg?label=Matrix%20Chat%20Room" alt="Matrix Chat Room"></a>
</div>
<p class="tagline">A customizable Android and iPhone Whatsapp database parser that will give you the history of your Whatsapp conversations in HTML and JSON</p>
<div class="action-buttons">
<a href="https://github.com/KnugiHK/WhatsApp-Chat-Exporter" class="btn"><i class="fab fa-github"></i> GitHub</a>
<a href="https://pypi.org/project/whatsapp-chat-exporter/" class="btn btn-secondary"><i class="fab fa-python"></i> PyPI</a>
</div>
</div>
</header>

<div class="main-content">
<div class="inner-content">
<section id="features">
<h2>Key Features</h2>

<div class="feature-grid">
<div class="feature-card">
<div class="feature-icon"><i class="fas fa-mobile-alt"></i></div>
<h3 class="feature-title">Cross-Platform</h3>
<p>Support for both Android and iOS/iPadOS WhatsApp databases</p>
</div>

<div class="feature-card">
<div class="feature-icon"><i class="fas fa-lock"></i></div>
<h3 class="feature-title">Backup Decryption</h3>
<p>Support for Crypt12, Crypt14, and Crypt15 (End-to-End) encrypted backups</p>
</div>

<div class="feature-card">
<div class="feature-icon"><i class="fas fa-file-export"></i></div>
<h3 class="feature-title">Multiple Formats</h3>
<p>Export your chats in HTML, JSON, and text formats</p>
</div>

<div class="feature-card">
<div class="feature-icon"><i class="fas fa-paint-brush"></i></div>
<h3 class="feature-title">Customizable</h3>
<p>Use custom HTML templates and styling for your chat exports</p>
</div>

<div class="feature-card">
<div class="feature-icon"><i class="fas fa-images"></i></div>
<h3 class="feature-title">Media Support</h3>
<p>Properly handles and organizes your media files in the exports</p>
</div>

<div class="feature-card">
<div class="feature-icon"><i class="fas fa-filter"></i></div>
<h3 class="feature-title">Filtering Options</h3>
<p>Filter chats by date, phone number, and more</p>
</div>
</div>
</section>

<div class="readme-content">
${content}
</div>

<div class="action-buttons">
<a href="https://github.com/KnugiHK/WhatsApp-Chat-Exporter" class="btn"><i class="fab fa-github"></i> View on GitHub</a>
<a href="https://pypi.org/project/whatsapp-chat-exporter/" class="btn btn-secondary"><i class="fab fa-python"></i> PyPI Package</a>
</div>
</div>
</div>

<footer>
<div class="container">
<p>© 2021-${new Date().getFullYear()} WhatsApp Chat Exporter</p>
<p>Licensed under MIT License</p>
<p>
<a href="https://github.com/KnugiHK/WhatsApp-Chat-Exporter" style="color: white; margin: 0 10px;"><i class="fab fa-github fa-lg"></i></a>
<a href="https://matrix.to/#/#wtsexporter:matrix.org" style="color: white; margin: 0 10px;"><i class="fas fa-comments fa-lg"></i></a>
</p>
<p><small>Last updated: ${new Date().toLocaleDateString()}</small></p>
</div>
</footer>

<script>
// Simple script to handle smooth scrolling for anchor links
document.querySelectorAll('a[href^="#"]').forEach(anchor => {
    anchor.addEventListener('click', function(e) {
        e.preventDefault();

        const targetId = this.getAttribute('href');
        const targetElement = document.querySelector(targetId);

        if (targetElement) {
            window.scrollTo({
                top: targetElement.offsetTop - 20,
                behavior: 'smooth'
            });
        }
    });
});
</script>
</body>
</html>
`;

const processedContent = readmeContent.replace(/\[!\[.*?\]\(.*?\)\]\(.*?\)/g, '')

const htmlContent = marked.use(markedAlert()).parse(processedContent, {
    gfm: true,
    breaks: true,
    renderer: new marked.Renderer()
});

const finalHTML = generateHTML(htmlContent);
fs.writeFileSync('docs/index.html', finalHTML);

console.log('Website generated successfully!');
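The Pages workflow further down runs this script to build the site. A minimal sketch of doing the same locally, assuming Node.js is installed and the commands are run from the repository root:

```sh
# Install the same packages the workflow installs, then run the generator;
# the generated site is written into docs/
npm install marked fs-extra marked-alert
node .github/generate-website.js
```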
11  .github/pull_request_template.md  vendored  Normal file
@@ -0,0 +1,11 @@
# Important Note

**All PRs (except for changes unrelated to source files) should target and start from the `dev` branch.**

## Related Issue

- Please put a reference to the related issue here (e.g., `Fixes #123` or `Closes #456`), if there are any.

## Description of Changes

- Briefly describe the changes made in this PR. Explain the purpose, the implementation details, and any important information that reviewers should be aware of.
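A minimal sketch of starting a branch from `dev` for a pull request, assuming `origin` points at your fork and the branch name `my-fix` is just a placeholder:

```sh
git remote add upstream https://github.com/KnugiHK/WhatsApp-Chat-Exporter.git
git fetch upstream
git checkout -b my-fix upstream/dev   # start from dev, not main
git push -u origin my-fix             # then open the PR against the dev branch
```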
50  .github/workflows/ci.yml  vendored  Normal file
@@ -0,0 +1,50 @@
name: Run Pytest on Dev Branch Push

on:
  push:
    branches:
      - dev
  pull_request:
jobs:
  ci:
    runs-on: ${{ matrix.os }}
    permissions:
      contents: read
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest]
        python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]
        include:
          - os: windows-latest
            python-version: "3.13"
            python_utf8: "1"
          - os: macos-latest
            python-version: "3.13"
          - os: windows-11-arm
            python-version: "3.13"
            python_utf8: "1"
          - os: macos-15-intel
            python-version: "3.13"
          - os: windows-latest
            python-version: "3.14"
            python_utf8: "1"

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Python ${{ matrix.python-version }} on ${{ matrix.os }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install .[all] pytest nuitka

      - name: Run pytest
        env:
          PYTHONUTF8: ${{ matrix.python_utf8 || '0' }}
        run: pytest
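A minimal sketch of running the same checks locally, assuming a supported Python version (3.10 or newer) and the repository root as the working directory:

```sh
python -m pip install --upgrade pip
pip install ".[all]" pytest nuitka   # same dependency set the workflow installs
pytest
```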
100  .github/workflows/codeql.yml  vendored  Normal file
@@ -0,0 +1,100 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL Advanced"

on:
  push:
    branches: [ "main", "dev" ]
  pull_request:
    branches: [ "main", "dev" ]
  schedule:
    - cron: '25 21 * * 5'

jobs:
  analyze:
    name: Analyze (${{ matrix.language }})
    # Runner size impacts CodeQL analysis time. To learn more, please see:
    #   - https://gh.io/recommended-hardware-resources-for-running-codeql
    #   - https://gh.io/supported-runners-and-hardware-resources
    #   - https://gh.io/using-larger-runners (GitHub.com only)
    # Consider using larger runners or machines with greater resources for possible analysis time improvements.
    runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
    permissions:
      # required for all workflows
      security-events: write

      # required to fetch internal or private CodeQL packs
      packages: read

      # only required for workflows in private repositories
      actions: read
      contents: read

    strategy:
      fail-fast: false
      matrix:
        include:
          - language: actions
            build-mode: none
          - language: python
            build-mode: none
        # CodeQL supports the following values keywords for 'language': 'actions', 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
        # Use `c-cpp` to analyze code written in C, C++ or both
        # Use 'java-kotlin' to analyze code written in Java, Kotlin or both
        # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
        # To learn more about changing the languages that are analyzed or customizing the build mode for your analysis,
        # see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning.
        # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
        # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Add any setup steps before running the `github/codeql-action/init` action.
      # This includes steps like installing compilers or runtimes (`actions/setup-node`
      # or others). This is typically only required for manual builds.
      # - name: Setup runtime (example)
      #   uses: actions/setup-example@v1

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v4
        with:
          languages: ${{ matrix.language }}
          build-mode: ${{ matrix.build-mode }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.

          # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
          # queries: security-extended,security-and-quality

      # If the analyze step fails for one of the languages you are analyzing with
      # "We were unable to automatically build your code", modify the matrix above
      # to set the build mode to "manual" for that language. Then modify this step
      # to build your code.
      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
      - if: matrix.build-mode == 'manual'
        shell: bash
        run: |
          echo 'If you are using a "manual" build mode for one or more of the' \
            'languages you are analyzing, replace this with the commands to build' \
            'your code, for example:'
          echo '  make bootstrap'
          echo '  make release'
          exit 1

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v4
        with:
          category: "/language:${{matrix.language}}"
138  .github/workflows/compile-binary.yml  vendored
@@ -7,76 +7,146 @@ on:

permissions:
  contents: read
  id-token: write
  attestations: write

jobs:
  linux:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v6
      - name: Set up Python
        uses: actions/setup-python@v4
        uses: actions/setup-python@v6
        with:
          python-version: '3.10'
          python-version: '3.13'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install pycryptodome javaobj-py3 ordered-set zstandard nuitka
          pip install pycryptodome javaobj-py3 ordered-set zstandard nuitka==2.8.9
          pip install .
      - name: Build binary with Nuitka
        run: |
          python -m nuitka --onefile --include-data-file=./Whatsapp_Chat_Exporter/whatsapp.html=./Whatsapp_Chat_Exporter/whatsapp.html --follow-imports Whatsapp_Chat_Exporter/__main__.py
          cp __main__.bin wtsexporter_linux_x64
          python -m nuitka --onefile \
            --include-data-file=./Whatsapp_Chat_Exporter/whatsapp.html=./Whatsapp_Chat_Exporter/whatsapp.html \
            --assume-yes-for-downloads Whatsapp_Chat_Exporter --output-filename=wtsexporter_linux_x64
          sha256sum wtsexporter_linux_x64
      - uses: actions/upload-artifact@v3
      - name: Generate artifact attestation
        uses: actions/attest-build-provenance@v3
        with:
          name: binary-linux
          path: |
            ./wtsexporter_linux_x64
          subject-path: ./wtsexporter_linux_x64
      - uses: actions/upload-artifact@v6
        with:
          name: binary-linux-x64
          path: ./wtsexporter_linux_x64

  windows:
  windows-x64:
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v6
      - name: Set up Python
        uses: actions/setup-python@v4
        uses: actions/setup-python@v6
        with:
          python-version: '3.10'
          python-version: '3.13'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install pycryptodome javaobj-py3 ordered-set zstandard nuitka
          pip install pycryptodome javaobj-py3 ordered-set zstandard nuitka==2.8.9
          pip install .
      - name: Build binary with Nuitka
        run: |
          python -m nuitka --onefile --include-data-file=./Whatsapp_Chat_Exporter/whatsapp.html=./Whatsapp_Chat_Exporter/whatsapp.html --assume-yes-for-downloads --follow-imports Whatsapp_Chat_Exporter\__main__.py
          copy __main__.exe wtsexporter_x64.exe
          Get-FileHash wtsexporter_x64.exe
      - uses: actions/upload-artifact@v3
          python -m nuitka --onefile --include-data-file=./Whatsapp_Chat_Exporter/whatsapp.html=./Whatsapp_Chat_Exporter/whatsapp.html --assume-yes-for-downloads Whatsapp_Chat_Exporter --output-filename=wtsexporter
          Rename-Item -Path "wtsexporter.exe" -NewName "wtsexporter_win_x64.exe"
          Get-FileHash wtsexporter_win_x64.exe
      - name: Generate artifact attestation
        uses: actions/attest-build-provenance@v3
        with:
          name: binary-windows
          path: |
            .\wtsexporter_x64.exe
          subject-path: .\wtsexporter_win_x64.exe
      - uses: actions/upload-artifact@v6
        with:
          name: binary-windows-x64
          path: .\wtsexporter_win_x64.exe

  macos:
  windows-arm:
    runs-on: windows-11-arm
    steps:
      - uses: actions/checkout@v6
      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.13'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install pycryptodome javaobj-py3 ordered-set zstandard nuitka==2.8.9
          pip install .
      - name: Build binary with Nuitka
        run: |
          python -m nuitka --onefile --include-data-file=./Whatsapp_Chat_Exporter/whatsapp.html=./Whatsapp_Chat_Exporter/whatsapp.html --assume-yes-for-downloads Whatsapp_Chat_Exporter --output-filename=wtsexporter
          Rename-Item -Path "wtsexporter.exe" -NewName "wtsexporter_win_arm64.exe"
          Get-FileHash wtsexporter_win_arm64.exe
      - name: Generate artifact attestation
        uses: actions/attest-build-provenance@v3
        with:
          subject-path: .\wtsexporter_win_arm64.exe
      - uses: actions/upload-artifact@v6
        with:
          name: binary-windows-arm64
          path: .\wtsexporter_win_arm64.exe

  macos-arm:
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v6
      - name: Set up Python
        uses: actions/setup-python@v4
        uses: actions/setup-python@v6
        with:
          python-version: '3.10'
          python-version: '3.13'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install pycryptodome javaobj-py3 ordered-set zstandard nuitka
          pip install pycryptodome javaobj-py3 ordered-set zstandard nuitka==2.8.9
          pip install .
      - name: Build binary with Nuitka
        run: |
          python -m nuitka --onefile --include-data-file=./Whatsapp_Chat_Exporter/whatsapp.html=./Whatsapp_Chat_Exporter/whatsapp.html --follow-imports Whatsapp_Chat_Exporter/__main__.py
          cp __main__.bin wtsexporter_macos_x64
          shasum -a 256 wtsexporter_macos_x64
      - uses: actions/upload-artifact@v3
          python -m nuitka --onefile \
            --include-data-file=./Whatsapp_Chat_Exporter/whatsapp.html=./Whatsapp_Chat_Exporter/whatsapp.html \
            --assume-yes-for-downloads Whatsapp_Chat_Exporter --output-filename=wtsexporter
          mv wtsexporter wtsexporter_macos_arm64
          shasum -a 256 wtsexporter_macos_arm64
      - name: Generate artifact attestation
        uses: actions/attest-build-provenance@v3
        with:
          name: binary-macos
          path: |
            ./wtsexporter_macos_x64
          subject-path: ./wtsexporter_macos_arm64
      - uses: actions/upload-artifact@v6
        with:
          name: binary-macos-arm64
          path: ./wtsexporter_macos_arm64

  macos-intel:
    runs-on: macos-15-intel
    steps:
      - uses: actions/checkout@v6
      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.13'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install pycryptodome javaobj-py3 ordered-set zstandard nuitka==2.8.9
          pip install .
      - name: Build binary with Nuitka
        run: |
          python -m nuitka --onefile \
            --include-data-file=./Whatsapp_Chat_Exporter/whatsapp.html=./Whatsapp_Chat_Exporter/whatsapp.html \
            --assume-yes-for-downloads Whatsapp_Chat_Exporter --output-filename=wtsexporter
          mv wtsexporter wtsexporter_macos_x64
          shasum -a 256 wtsexporter_macos_x64
      - name: Generate artifact attestation
        uses: actions/attest-build-provenance@v3
        with:
          subject-path: ./wtsexporter_macos_x64
      - uses: actions/upload-artifact@v6
        with:
          name: binary-macos-x64
          path: ./wtsexporter_macos_x64
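To produce a standalone binary locally in the same way, a sketch assuming a Linux or macOS shell at the repository root (the Windows jobs use the equivalent PowerShell commands):

```sh
pip install pycryptodome javaobj-py3 ordered-set zstandard nuitka==2.8.9
pip install .
python -m nuitka --onefile \
    --include-data-file=./Whatsapp_Chat_Exporter/whatsapp.html=./Whatsapp_Chat_Exporter/whatsapp.html \
    --assume-yes-for-downloads Whatsapp_Chat_Exporter --output-filename=wtsexporter
```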
43  .github/workflows/generate-website.yml  vendored  Normal file
@@ -0,0 +1,43 @@
name: Generate Website from README

on:
  push:
    branches:
      - main
    paths:
      - 'README.md'
      - '.github/workflows/generate-website.yml'
      - '.github/generate-website.js'
      - '.github/docs.html'
  workflow_dispatch:
permissions:
  contents: write
  pages: write
jobs:
  build-and-deploy:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v6

      - name: Set up Node.js
        uses: actions/setup-node@v6
        with:
          node-version: '24'

      - name: Install dependencies
        run: npm install marked fs-extra marked-alert

      - name: Generate website from README
        run: |
          node .github/generate-website.js
          echo 'wts.knugi.dev' > ./docs/CNAME

      - name: Deploy to gh-pages
        if: github.ref == 'refs/heads/main' # Ensure deployment only happens from main
        uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./docs
          publish_branch: gh-pages
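After the generator has run (locally or in this workflow), the static site lives in `docs/`. A minimal sketch of previewing it before it is deployed, using Python's built-in web server:

```sh
# Serve the generated site at http://localhost:8000/
python -m http.server 8000 --directory docs
```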
11  .github/workflows/python-publish.yml  vendored
@@ -11,6 +11,10 @@ name: Upload Python Package
on:
  release:
    types: [published]
  workflow_dispatch:

permissions:
  id-token: write

jobs:
  deploy:
@@ -18,9 +22,9 @@ jobs:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v4
        uses: actions/setup-python@v5
        with:
          python-version: '3.x'
      - name: Install dependencies
@@ -31,6 +35,3 @@ jobs:
        run: python -m build
      - name: Publish package
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          user: __token__
          password: ${{ secrets.PYPI_API_TOKEN }}
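The publish job builds the distribution with `python -m build`. A minimal sketch of producing the same artifacts locally for inspection, assuming the `build` package from PyPI:

```sh
pip install build
python -m build   # writes the sdist and wheel into dist/
```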
20  .gitignore  vendored
@@ -127,3 +127,23 @@ dmypy.json

# Pyre type checker
.pyre/

# Nuitka
*.build/
*.dist/
*.onefile-build/
*.exe
__main__


# Dev time intermediates & temp files
result/
output/
WhatsApp/
AppDomainGroup-group.net.whatsapp.WhatsApp.shared/
/*.db
/*.db-*
/myout
/msgstore.db
/myout-json
.vscode/
63  CONTRIBUTING.md  Normal file
@@ -0,0 +1,63 @@
# Contributing Guidelines

*Pull requests, bug reports, and all other forms of contribution are welcomed and highly encouraged!*

> **This guide serves to set clear expectations for everyone involved with the project so that we can improve it together while also creating a welcoming space for everyone to participate. Following these guidelines will help ensure a positive experience for contributors and maintainers.**

<sub> Maintainer's note: I aim to keep things simple and flexible, without imposing too many restrictions, while still ensuring it’s useful for the project. </sub>

## :book: Code of Conduct

There isn't an official code of conduct at the moment, and we hope it won't be necessary. The rule is simple: be reasonable and treat others with respect!

## :bulb: Asking Questions

While there is no formal support from the maintainer, they are happy to help if you provide enough information. However, please note:

If you feel the questions or difficulties you're encountering aren't related to the software itself, please [open a discussion thread](https://github.com/KnugiHK/WhatsApp-Chat-Exporter/discussions/new/choose). Do not open an issue just to ask a question. While asking questions in the project issues is not strictly prohibited, any issues that don't qualify as genuine problems will be converted into discussion threads.

Hopefully, the community will be able to offer assistance as well. You can check out the article [How do I ask a good question?](https://stackoverflow.com/help/how-to-ask) on StackOverflow to learn how to craft questions that encourage more people to respond.

## :inbox_tray: Opening an Issue

Before [creating an issue](https://help.github.com/en/github/managing-your-work-on-github/creating-an-issue), check if you are using the latest version of the project. If you are not up-to-date, see if updating fixes your issue first.

### :lock: Reporting Security Issues

Please report any vulnerability to [GitHub Security Advisory](https://github.com/KnugiHK/WhatsApp-Chat-Exporter/security/advisories/new). **Do not** file a public issue for security vulnerabilities.

### :beetle: Bug Reports and Feature Requests

- **Do not open a duplicate issue!** Search through existing issues to see if your issue or request has previously been reported. If your issue exists, comment with any additional information you have. You may simply note "I have this problem too/I want this feature too", which helps prioritize the most common problems and requests.

- **Fully complete the provided issue template.** The issue templates request all the information we need to quickly and efficiently address your issue. Be clear, concise, and descriptive. Provide as much information as you can, including steps to reproduce, stack traces, compiler errors, library versions, OS versions, and screenshots (if applicable). This will assist the maintainer in efficiently triaging your issues and isolating the problems.

- For feature requests, be specific about the proposed outcome and how it fits with the existing features. If possible, include implementation details.

Note that feature requests may be out of scope for the project, and if accepted, we cannot commit to a specific timeline for implementation.

## :repeat: Submitting Pull Requests

- **Smaller is better.** Submit **one** pull request per bug fix or feature. A pull request should contain isolated changes pertaining to a single bug fix or feature implementation. **Do not** refactor or reformat code that is unrelated to your change. It is better to **submit many small pull requests** rather than a single large one. Enormous pull requests will take enormous amounts of time to review, or may be rejected altogether.

- **Coordinate bigger changes.** For large and non-trivial changes, open an issue to discuss a strategy with the maintainers. Otherwise, you risk doing a lot of work for nothing!

- **Follow PEP8.** Python code should follow PEP8 formatting and styling guidelines. Consider using automated tools like [autopep8](https://github.com/hhatto/autopep8) or [flake8](https://github.com/PyCQA/flake8) to ensure your code adheres to these standards (a quick local sketch follows this list).

- **[Resolve any merge conflicts](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/resolving-a-merge-conflict-on-github)** that occur.

- Use spaces, not tabs.

- Make sure all commits work with the new template — the old one is being deprecated.
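A minimal sketch of running those style checks locally before opening a PR, assuming the tools are installed from PyPI and run against the package directory:

```sh
pip install flake8 autopep8
flake8 Whatsapp_Chat_Exporter/                           # report PEP8 violations
autopep8 --in-place --recursive Whatsapp_Chat_Exporter/  # optionally auto-fix them
```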
## :memo: Copyright

This repository is licensed under the MIT License. **Any contributions you submit will be licensed under the same terms.**

By contributing, you confirm that your contributions do not infringe on the rights of others.

If your contribution includes code from other open-source projects, ensure that their licenses are compatible with this one. For example, code licensed under the GPL cannot be included in this project.

## :pray: Credit

These contribution guidelines are remixed from [jessesquires/.github:CONTRIBUTING.md](https://github.com/jessesquires/.github/blob/main/CONTRIBUTING.md), which also incorporates other works. *We commend them for their efforts to facilitate collaboration in their projects.*
2  LICENSE
@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2021-2023 Knugi
Copyright (c) 2021-2026 Knugi

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
258  README.md
@@ -1,15 +1,24 @@
# Whatsapp-Chat-Exporter
[](https://pypi.org/project/whatsapp-chat-exporter/)

[](https://pypi.org/project/whatsapp-chat-exporter/)
[](https://github.com/KnugiHK/WhatsApp-Chat-Exporter/blob/main/LICENSE)
[](https://pypi.org/project/Whatsapp-Chat-Exporter/)
[](https://matrix.to/#/#wtsexporter:matrix.org)
[](https://wts.knugi.dev)

A customizable Android and iPhone Whatsapp database parser that will give you the history of your Whatsapp conversations in HTML and JSON. Inspired by [Telegram Chat Export Tool](https://telegram.org/blog/export-and-more).
**If you plan to uninstall WhatsApp or delete your WhatsApp account, please make a backup of your WhatsApp database. You may want to use this exporter again on the same database in the future as the exporter develops**
> [!TIP]
> If you plan to uninstall WhatsApp or delete your WhatsApp account, please make a backup of your WhatsApp database. You may want to use this exporter again on the same database in the future as the exporter develops.

If you would like to support this project, all you need to do is to contribute or share this project! If you think otherwise and want to make a donation, please refer to the [Donation Guide](https://blog.knugi.com/DONATE.html).

To contribute, see the [Contributing Guidelines](https://github.com/KnugiHK/WhatsApp-Chat-Exporter/blob/main/CONTRIBUTING.md).

# Usage
**Usage in README may be removed in the future. Check the usage in [Wiki](https://github.com/KnugiHK/Whatsapp-Chat-Exporter/wiki)**.

**If you want to use the old release (< 0.5) of the exporter, please follow the [old usage guide](https://github.com/KnugiHK/Whatsapp-Chat-Exporter/wiki/Old-Usage#usage)**.
> [!NOTE]
> Usage in README may be removed in the future. Check the usage in [Wiki](https://github.com/KnugiHK/Whatsapp-Chat-Exporter/wiki)
>
> Click [here](https://github.com/KnugiHK/WhatsApp-Chat-Exporter/wiki/Android-Usage#crypt15-end-to-end-encrypted-backup) for the most trivial way for exporting from Android

First, install the exporter by:
```shell
@@ -21,9 +30,13 @@ Then, create a working directory in somewhere you want
mkdir working_wts
cd working_wts
```

> [!TIP]
> macOS users should grant *Full Disk Access* to Terminal in the *Security & Privacy* settings before using the exporter.

## Working with Android
### Unencrypted WhatsApp database
Extract the WhatsApp database with whatever means; one possible means is to use the [WhatsApp-Key-DB-Extractor](https://github.com/KnugiHK/WhatsApp-Key-DB-Extractor)
Extract the WhatsApp database with whatever means; one possible means is to use the [WhatsApp-Key-DB-Extractor](https://github.com/KnugiHK/WhatsApp-Key-DB-Extractor). Note that the extractor only works on Android 4.0 to 13.

After you obtain your WhatsApp database, copy the WhatsApp database and media folder to the working directory. The database is called msgstore.db. If you also want the name of your contacts, get the contact database, which is called wa.db. And copy the WhatsApp (Media) directory from your phone directly.

@@ -35,6 +48,12 @@ Simply invoke the following command from shell.
```sh
wtsexporter -a
```
#### Enriching Contact from vCard
The default WhatsApp contact database typically contains contact names extracted from your phone, which the exporter uses to map your chats. However, in some reported cases, the database may have never been populated. In such a case, you can export your contacts to a vCard file from your phone or a cloud provider like Google Contacts. Then, install the necessary dependency and run the following command from the shell:
```sh
pip install whatsapp-chat-exporter["vcards"]
wtsexporter -a --enrich-from-vcards contacts.vcf --default-country-code 852
```

### Encrypted Android WhatsApp Backup
In order to support the decryption, install pycryptodome if it is not installed
@@ -42,7 +61,10 @@ In order to support the decryption, install pycryptodome if it is not installed
pip install pycryptodome # Or
pip install whatsapp-chat-exporter["android_backup"] # install along with this software
```
### Crypt15 is now the easiest way to decrypt a backup. If you have the 32 bytes hex key generated when you enable End-to-End encrypted backup, you can use it to decrypt the backup. If you do not have the 32 bytes hex key, you can still use the key file extracted just like extracting the key file for Crypt12 and Crypt14 to decrypt the backup.

> [!TIP]
> Crypt15 is now the easiest way to decrypt a backup. If you have the 32 bytes hex key generated when you enable End-to-End encrypted backup, you can use it to decrypt the backup. If you do not have the 32 bytes hex key, you can still use the key file extracted just like extracting the key file for Crypt12 and Crypt14 to decrypt the backup.

#### Crypt12 or Crypt14
You will need the decryption key file from your phone. If you have root access, you can find it as `/data/data/com.whatsapp/files/key`. Otherwise, if you used WhatsApp-Key-DB-Extractor before, it will appear in the WhatsApp backup directory as `WhatsApp/Databases/.nomedia`.
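The exact Crypt12/Crypt14 command is outside this diff hunk; by analogy with the Crypt15 invocation shown further down, a run would look roughly like the sketch below (the file names are placeholders):

```sh
# "key" is the decryption key file pulled from the phone,
# "msgstore.db.crypt14" is the encrypted backup copied into the working directory.
wtsexporter -a -k key -b msgstore.db.crypt14
```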
@@ -63,7 +85,10 @@ To support Crypt15 backup, install javaobj-py3 if it is not installed
|
||||
pip install javaobj-py3 # Or
|
||||
pip install whatsapp-chat-exporter["crypt15"] # install along with this software
|
||||
```
|
||||
Place the encrypted WhatsApp Backup (msgstore.db.crypt15) in the working directory. If you also want the name of your contacts, get the contact database, which is called wa.db. And copy the WhatsApp (Media) directory from your phone directly.
|
||||
Before proceeding with this method, you must first create an end-to-end encrypted backup. For detailed instructions, refer to [WhatsApp's help center](https://faq.whatsapp.com/490592613091019).
|
||||
|
||||
Once you have copied the backup files to your computer, place the encrypted WhatsApp backup file (msgstore.db.crypt15) into the working directory. If you also wish to include your contacts' names, obtain the contact database file, named wa.db. Additionally, copy the WhatsApp Media folder directly from your phone.
|
||||
|
||||
If you do not have the 32 bytes hex key (64 hexdigits), place the decryption key file (encrypted_backup.key) extracted from Android. If you gave the 32 bytes hex key, simply put the key in the shell.
|
||||
|
||||
Now, you should have something like this in the working directory (if you do not have 32 bytes hex key).
|
||||
```sh
wtsexporter -a -k encrypted_backup.key -b msgstore.db.crypt15
```

If you have the 32-byte hex key, pass it to the `-k` option and invoke the command from the shell like this:

```sh
wtsexporter -a -k 133735053b5204b08e5c3823423399aa30ff061435ab89bc4e6713969cda1337 -b msgstore.db.crypt15
```

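If your contact database was also backed up in crypt15 format, it can be supplied with the `--wab` option listed under "More options" below (file name illustrative):

```sh
wtsexporter -a -k encrypted_backup.key -b msgstore.db.crypt15 --wab wa.db.crypt15
```
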
## Working with iOS/iPadOS (iPhone or iPad)

Do an iPhone/iPad Backup with iTunes/Finder first.
* iPhone backup on Mac: https://support.apple.com/HT211229
* iPhone backup on Windows: https://support.apple.com/HT212156
* iPad backup: https://support.apple.com/guide/ipad/ipad9a74df05xx/ipados

### Encrypted iOS/iPadOS Backup

> [!NOTE]
> If you are working on an unencrypted iOS/iPadOS backup, skip this.

If you want to work with an encrypted iOS/iPadOS backup, install iphone_backup_decrypt from [KnugiHK/iphone_backup_decrypt](https://github.com/KnugiHK/iphone_backup_decrypt) before running extract_iphone_media.py.

```sh
pip install git+https://github.com/KnugiHK/iphone_backup_decrypt
```

> [!NOTE]
> You will need to disable the built-in end-to-end encryption for WhatsApp backups. See [WhatsApp's FAQ](https://faq.whatsapp.com/490592613091019#turn-off-end-to-end-encrypted-backup) for how to do it.

### Extracting

To extract messages from iOS/iPadOS backups, run the following command in the shell, making sure to replace the username and device ID with the correct values. Keep in mind that there are at least two possible paths for the backups on Windows.

#### Windows
```powershell
# Possible path one
wtsexporter -i -b "C:\Users\[Username]\AppData\Roaming\Apple Computer\MobileSync\Backup\[device id]"

# Possible path two
wtsexporter -i -b "C:\Users\[Username]\Apple\MobileSync\Backup\[device id]"
```

#### Mac
```sh
wtsexporter -i -b ~/Library/Application\ Support/MobileSync/Backup/[device id]
```

## Results

After extracting, you will get these:

#### Private Message

![Private](https://i.imgur.com/Oko9BDy.png)

#### Group Message

![Group](https://i.imgur.com/gL2eZAU.png)

*The above screenshots were taken a long time ago. I will update them when possible.*

## Working with Business

If you are working with WhatsApp Business, add the `--business` flag to the command:

```sh
wtsexporter -a --business ...other flags
wtsexporter -i --business ...other flags
```

## More options

Invoking `wtsexporter` with the `--help` option will show all available options.

```sh
|
||||
> wtsexporter --help
|
||||
usage: wtsexporter [-h] [--debug] [-a] [-i] [-e EXPORTED] [-w WA] [-m MEDIA] [-b BACKUP] [-d DB]
|
||||
[-k [KEY]] [--call-db [CALL_DB_IOS]] [--wab WAB] [-o OUTPUT] [-j [JSON]]
|
||||
[--txt [TEXT_FORMAT]] [--no-html] [--size [SIZE]] [--no-reply] [--avoid-encoding-json]
|
||||
[--pretty-print-json [PRETTY_PRINT_JSON]] [--tg] [--per-chat] [--import] [-t TEMPLATE]
|
||||
[--offline OFFLINE] [--no-avatar] [--old-theme] [--headline HEADLINE] [-c]
|
||||
[--create-separated-media] [--time-offset {-12 to 14}] [--date DATE]
|
||||
[--date-format FORMAT] [--include [phone number ...]] [--exclude [phone number ...]]
|
||||
[--dont-filter-empty] [--enrich-from-vcards ENRICH_FROM_VCARDS]
|
||||
[--default-country-code DEFAULT_COUNTRY_CODE] [--incremental-merge]
|
||||
[--source-dir SOURCE_DIR] [--target-dir TARGET_DIR] [-s] [--check-update]
|
||||
[--check-update-pre] [--assume-first-as-me] [--business]
|
||||
[--decrypt-chunk-size DECRYPT_CHUNK_SIZE]
|
||||
[--max-bruteforce-worker MAX_BRUTEFORCE_WORKER] [--no-banner] [--fix-dot-files]
|
||||
|
||||
A customizable Android and iOS/iPadOS WhatsApp database parser that will give you the history of your
|
||||
WhatsApp conversations in HTML and JSON. Android Backup Crypt12, Crypt14 and Crypt15 supported.
|
||||
|
||||
options:
|
||||
-h, --help show this help message and exit
|
||||
--debug Enable debug mode
|
||||
|
||||
Device Type:
|
||||
-a, --android Define the target as Android
|
||||
-i, --ios Define the target as iPhone/iPad
|
||||
-e, --exported EXPORTED
|
||||
Define the target as exported chat file and specify the path to the file
|
||||
|
||||
Input Files:
|
||||
-w, --wa WA Path to contact database (default: wa.db/ContactsV2.sqlite)
|
||||
-m, --media MEDIA Path to WhatsApp media folder (default: WhatsApp)
|
||||
-b, --backup BACKUP Path to Android (must be used together with -k)/iOS WhatsApp backup
|
||||
-d, --db DB Path to database file (default:
|
||||
msgstore.db/7c7fba66680ef796b916b067077cc246adacf01d)
|
||||
-k, --key [KEY] Path to key file. If this option is set for crypt15 backup but nothing is
|
||||
specified, you will be prompted to enter the key.
|
||||
--call-db [CALL_DB_IOS]
|
||||
Path to call database (default: 1b432994e958845fffe8e2f190f26d1511534088) iOS only
|
||||
--wab, --wa-backup WAB
|
||||
Path to contact database in crypt15 format
|
||||
|
||||
Output Options:
|
||||
-o, --output OUTPUT Output to specific directory (default: result)
|
||||
-j, --json [JSON] Save the result to a single JSON file (default if present: result.json)
|
||||
--txt [TEXT_FORMAT] Export chats in text format similar to what WhatsApp officially provided (default
|
||||
if present: result/)
|
||||
--no-html Do not output html files
|
||||
--size, --output-size, --split [SIZE]
|
||||
Maximum (rough) size of a single output file in bytes, 0 for auto
|
||||
--no-reply Do not process replies (iOS only) (default: handle replies)
|
||||
|
||||
JSON Options:
|
||||
--avoid-encoding-json
|
||||
Don't encode non-ascii characters in the output JSON files
|
||||
--pretty-print-json [PRETTY_PRINT_JSON]
|
||||
Pretty print the output JSON.
|
||||
--tg, --telegram Output the JSON in a format compatible with Telegram export (implies json-per-
|
||||
chat)
|
||||
--per-chat Output the JSON file per chat
|
||||
--import Import JSON file and convert to HTML output
|
||||
|
||||
HTML Options:
|
||||
-t, --template TEMPLATE
|
||||
Path to custom HTML template
|
||||
--offline OFFLINE Relative path to offline static files
|
||||
--no-avatar Do not render avatar in HTML output
|
||||
--old-theme Use the old Telegram-alike theme
|
||||
--headline HEADLINE The custom headline for the HTML output. Use '??' as a placeholder for the chat
|
||||
name
|
||||
|
||||
Media Handling:
|
||||
-c, --move-media Move the media directory to output directory if the flag is set, otherwise copy it
|
||||
--create-separated-media
|
||||
Create a copy of the media seperated per chat in <MEDIA>/separated/ directory
|
||||
|
||||
Filtering Options:
|
||||
--time-offset {-12 to 14}
|
||||
Offset in hours (-12 to 14) for time displayed in the output
|
||||
--date DATE The date filter in specific format (inclusive)
|
||||
--date-format FORMAT The date format for the date filter
|
||||
--include [phone number ...]
|
||||
Include chats that match the supplied phone number
|
||||
--exclude [phone number ...]
|
||||
Exclude chats that match the supplied phone number
|
||||
--dont-filter-empty By default, the exporter will not render chats with no valid message. Setting this
|
||||
flag will cause the exporter to render those. This is useful if chat(s) are
|
||||
missing from the output
|
||||
|
||||
Contact Enrichment:
|
||||
--enrich-from-vcards ENRICH_FROM_VCARDS
|
||||
Path to an exported vcf file from Google contacts export. Add names missing from
|
||||
WhatsApp's default database
|
||||
--default-country-code DEFAULT_COUNTRY_CODE
|
||||
Use with --enrich-from-vcards. When numbers in the vcf file does not have a
|
||||
country code, this will be used. 1 is for US, 66 for Thailand etc. Most likely use
|
||||
the number of your own country
|
||||
|
||||
Incremental Merging:
|
||||
--incremental-merge Performs an incremental merge of two exports. Requires setting both --source-dir
|
||||
and --target-dir. The chats (JSON files only) and media from the source directory
|
||||
will be merged into the target directory. No chat messages or media will be
|
||||
deleted from the target directory; only new chat messages and media will be added
|
||||
to it. This enables chat messages and media to be deleted from the device to free
|
||||
up space, while ensuring they are preserved in the exported backups.
|
||||
--source-dir SOURCE_DIR
|
||||
Sets the source directory. Used for performing incremental merges.
|
||||
--target-dir TARGET_DIR
|
||||
Sets the target directory. Used for performing incremental merges.
|
||||
|
||||
Miscellaneous:
|
||||
-s, --showkey Show the HEX key used to decrypt the database
|
||||
--check-update Check for updates (require Internet access)
|
||||
--check-update-pre Check for updates including pre-releases (require Internet access)
|
||||
--assume-first-as-me Assume the first message in a chat as sent by me (must be used together with -e)
|
||||
--business Use Whatsapp Business default files (iOS only)
|
||||
--decrypt-chunk-size DECRYPT_CHUNK_SIZE
|
||||
Specify the chunk size for decrypting iOS backup, which may affect the decryption
|
||||
speed.
|
||||
--max-bruteforce-worker MAX_BRUTEFORCE_WORKER
|
||||
Specify the maximum number of worker for bruteforce decryption.
|
||||
--no-banner Do not show the banner
|
||||
--fix-dot-files Fix files with a dot at the end of their name (allowing the outputs be stored in
|
||||
FAT filesystems)
|
||||
|
||||
WhatsApp Chat Exporter: 0.13.0 Licensed with MIT. See https://wts.knugi.dev/docs?dest=osl for all open
|
||||
source licenses.
|
||||
```
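A few illustrative combinations of these options (file names and numbers are placeholders):

```sh
# Per-chat JSON output only, skipping HTML
wtsexporter -a -k key -b msgstore.db.crypt14 --no-html -j --per-chat

# Only include specific chats and shift displayed times by +8 hours
wtsexporter -a -k key -b msgstore.db.crypt14 --include 85212345678 --time-offset 8

# Merge a newer export into an existing archive without deleting anything from the archive
wtsexporter --incremental-merge --source-dir new_export --target-dir archive
```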

# To do

See [issues](https://github.com/KnugiHK/Whatsapp-Chat-Exporter/issues).

# Verifying Build Integrity

To ensure that the binaries provided in the releases were built directly from this source code via GitHub Actions and have not been tampered with, GitHub Artifact Attestations are used. You can verify the authenticity of any pre-built binary using the GitHub CLI.

> [!NOTE]
> Requires version 0.13.0 or newer. Legacy binaries are unsupported.

### Using Bash (Linux/WSL/macOS)

```bash
for file in wtsexporter*; do gh attestation verify "$file" -R KnugiHK/WhatsApp-Chat-Exporter; done
```

### Using PowerShell (Windows)

```powershell
gci "wtsexporter*" | % { gh attestation verify $_.FullName -R KnugiHK/WhatsApp-Chat-Exporter }
```

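To check a single downloaded binary, the same verification can be run directly against that file (file name illustrative):

```sh
gh attestation verify wtsexporter.exe -R KnugiHK/WhatsApp-Chat-Exporter
```
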
# Python Support Policy

This project officially supports all non-EOL (End-of-Life) versions of Python. Once a Python version reaches EOL, it is dropped in the next release. See [Python's EOL Schedule](https://devguide.python.org/versions/).

# Legal Stuff & Disclaimer

# Copyright

This is an MIT licensed project.

The Telegram Desktop's export is the reference for whatsapp.html in this repo.

`bplist.py` was released by Vladimir "Farcaller" Pouzanov under the MIT license.

Please also refer to any files prefixed with `LICENSE` to obtain copies of the various licenses.

WhatsApp Chat Exporter is not affiliated, associated, authorized, endorsed by, or in any way officially connected with the WhatsApp LLC, or any of its subsidiaries or its affiliates. The official WhatsApp LLC website can be found at https://www.whatsapp.com/.

@@ -1,3 +0,0 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
__version__ = "0.9.5"
|
||||
|
||||
[File diff suppressed because it is too large]

Whatsapp_Chat_Exporter/android_crypt.py (367 lines, new file)
@@ -0,0 +1,367 @@
|
||||
import hmac
|
||||
import io
|
||||
import logging
|
||||
import zlib
|
||||
import concurrent.futures
|
||||
from tqdm import tqdm
|
||||
from typing import Tuple, Union
|
||||
from hashlib import sha256
|
||||
from functools import partial
|
||||
from Whatsapp_Chat_Exporter.utility import CRYPT14_OFFSETS, Crypt, DbType
|
||||
|
||||
try:
|
||||
import zlib
|
||||
from Crypto.Cipher import AES
|
||||
except ModuleNotFoundError:
|
||||
support_backup = False
|
||||
else:
|
||||
support_backup = True
|
||||
|
||||
try:
|
||||
import javaobj
|
||||
except ModuleNotFoundError:
|
||||
support_crypt15 = False
|
||||
else:
|
||||
support_crypt15 = True
|
||||
|
||||
|
||||
|
||||
|
||||
class DecryptionError(Exception):
|
||||
"""Base class for decryption-related exceptions."""
|
||||
pass
|
||||
|
||||
|
||||
class InvalidKeyError(DecryptionError):
|
||||
"""Raised when the provided key is invalid."""
|
||||
pass
|
||||
|
||||
|
||||
class InvalidFileFormatError(DecryptionError):
|
||||
"""Raised when the input file format is invalid."""
|
||||
pass
|
||||
|
||||
|
||||
class OffsetNotFoundError(DecryptionError):
|
||||
"""Raised when the correct offsets for decryption cannot be found."""
|
||||
pass
|
||||
|
||||
|
||||
def _derive_main_enc_key(key_stream: bytes) -> Tuple[bytes, bytes]:
|
||||
"""
|
||||
Derive the main encryption key for the given key stream.
|
||||
|
||||
Args:
|
||||
key_stream (bytes): The key stream to generate HMAC of HMAC.
|
||||
|
||||
Returns:
|
||||
Tuple[bytes, bytes]: A tuple containing the main encryption key and the original key stream.
|
||||
"""
|
||||
intermediate_hmac = hmac.new(b'\x00' * 32, key_stream, sha256).digest()
|
||||
key = hmac.new(intermediate_hmac, b"backup encryption\x01", sha256).digest()
|
||||
return key, key_stream
|
||||
|
||||
|
||||
def _extract_enc_key(keyfile: bytes) -> Tuple[bytes, bytes]:
|
||||
"""
|
||||
Extract the encryption key from the keyfile.
|
||||
|
||||
Args:
|
||||
keyfile (bytes): The keyfile containing the encrypted key.
|
||||
|
||||
Returns:
|
||||
Tuple[bytes, bytes]: values from _derive_main_enc_key()
|
||||
"""
|
||||
key_stream = b''.join([byte.to_bytes(1, "big", signed=True) for byte in javaobj.loads(keyfile)])
|
||||
return _derive_main_enc_key(key_stream)
|
||||
|
||||
|
||||
def brute_force_offset(max_iv: int = 200, max_db: int = 200):
|
||||
"""
|
||||
Brute force the offsets for IV and database start position in WhatsApp backup files.
|
||||
|
||||
Args:
|
||||
max_iv (int, optional): Maximum value to try for IV offset. Defaults to 200.
|
||||
max_db (int, optional): Maximum value to try for database start offset. Defaults to 200.
|
||||
|
||||
Yields:
|
||||
tuple: A tuple containing:
|
||||
- int: Start position of IV
|
||||
- int: End position of IV (start + 16)
|
||||
- int: Start position of database
|
||||
"""
|
||||
for iv in range(0, max_iv):
|
||||
for db in range(0, max_db):
|
||||
yield iv, iv + 16, db
|
||||
|
||||
|
||||
def _decrypt_database(db_ciphertext: bytes, main_key: bytes, iv: bytes) -> bytes:
|
||||
"""Decrypt and decompress a database chunk.
|
||||
|
||||
Args:
|
||||
db_ciphertext (bytes): The encrypted chunk of the database.
|
||||
main_key (bytes): The main decryption key.
|
||||
iv (bytes): The initialization vector.
|
||||
|
||||
Returns:
|
||||
bytes: The decrypted and decompressed database.
|
||||
|
||||
Raises:
|
||||
zlib.error: If decompression fails.
|
||||
ValueError: if the plaintext is not a SQLite database.
|
||||
"""
|
||||
FOOTER_SIZE = 32
|
||||
if len(db_ciphertext) <= FOOTER_SIZE:
|
||||
raise ValueError("Input data too short to contain a valid GCM tag.")
|
||||
|
||||
actual_ciphertext = db_ciphertext[:-FOOTER_SIZE]
|
||||
tag = db_ciphertext[-FOOTER_SIZE: -FOOTER_SIZE + 16]
|
||||
|
||||
cipher = AES.new(main_key, AES.MODE_GCM, iv)
|
||||
try:
|
||||
db_compressed = cipher.decrypt_and_verify(actual_ciphertext, tag)
|
||||
except ValueError:
|
||||
# The key, IV, or tag could be wrong, but most likely the key is wrong.
|
||||
raise ValueError("Decryption/Authentication failed. Ensure you are using the correct key.")
|
||||
|
||||
if len(db_compressed) < 2 or db_compressed[0] != 0x78:
|
||||
logging.debug(f"Data passes GCM but is not Zlib. Header: {db_compressed[:2].hex()}")
|
||||
raise ValueError(
|
||||
"Key is correct, but decrypted data is not a valid compressed stream. "
|
||||
"Is this even a valid WhatsApp database backup?"
|
||||
)
|
||||
|
||||
try:
|
||||
db = zlib.decompress(db_compressed)
|
||||
except zlib.error as e:
|
||||
raise zlib.error(f"Decompression failed (the backup file is likely corrupted at the source): {e}")
|
||||
|
||||
if not db.startswith(b"SQLite"):
|
||||
raise ValueError(
|
||||
"Data is valid and decompressed, but it is not a SQLite database. "
|
||||
"Is this even a valid WhatsApp database backup?")
|
||||
return db
|
||||
|
||||
|
||||
def _decrypt_crypt14(database: bytes, main_key: bytes, max_worker: int = 10) -> bytes:
|
||||
"""Decrypt a crypt14 database using multithreading for brute-force offset detection.
|
||||
|
||||
Args:
|
||||
database (bytes): The encrypted database.
|
||||
main_key (bytes): The decryption key.
|
||||
max_worker (int, optional): The maximum number of threads to use for brute force. Defaults to 10.
|
||||
|
||||
Returns:
|
||||
bytes: The decrypted database.
|
||||
|
||||
Raises:
|
||||
InvalidFileFormatError: If the file is too small.
|
||||
OffsetNotFoundError: If no valid offsets are found.
|
||||
"""
|
||||
if len(database) < 191:
|
||||
raise InvalidFileFormatError("The crypt14 file must be at least 191 bytes")
|
||||
|
||||
# Attempt known offsets first
|
||||
for offsets in CRYPT14_OFFSETS:
|
||||
iv = offsets["iv"]
|
||||
db = offsets["db"]
|
||||
try:
|
||||
decrypted_db = _attempt_decrypt_task((iv, iv + 16, db), database, main_key)
|
||||
except (zlib.error, ValueError):
|
||||
continue
|
||||
else:
|
||||
logging.debug(
|
||||
f"Decryption successful with known offsets: IV {iv}, DB {db}"
|
||||
)
|
||||
return decrypted_db # Successful decryption
|
||||
|
||||
logging.info(f"Common offsets failed. Will attempt to brute-force")
|
||||
offset_max = 200
|
||||
workers = max_worker
|
||||
check_offset = partial(_attempt_decrypt_task, database=database, main_key=main_key)
|
||||
all_offsets = list(brute_force_offset(offset_max, offset_max))
|
||||
executor = concurrent.futures.ProcessPoolExecutor(max_workers=workers)
|
||||
try:
|
||||
with tqdm(total=len(all_offsets), desc="Brute-forcing offsets", unit="trial", leave=False) as pbar:
|
||||
results = executor.map(check_offset, all_offsets, chunksize=8)
|
||||
found = False
|
||||
for offset_info, result in zip(all_offsets, results):
|
||||
pbar.update(1)
|
||||
if result:
|
||||
start_iv, _, start_db = offset_info
|
||||
# Clean shutdown on success
|
||||
executor.shutdown(wait=False, cancel_futures=True)
|
||||
found = True
|
||||
break
|
||||
if found:
|
||||
logging.info(
|
||||
f"The offsets of your IV and database are {start_iv} and {start_db}, respectively."
|
||||
)
|
||||
logging.info(
f"To include your offsets in the exporter, please report it in the discussion thread on GitHub:"
|
||||
)
|
||||
logging.info(f"https://github.com/KnugiHK/Whatsapp-Chat-Exporter/discussions/47")
|
||||
return result
|
||||
|
||||
except KeyboardInterrupt:
|
||||
executor.shutdown(wait=False, cancel_futures=True)
|
||||
logging.info("")
|
||||
raise KeyboardInterrupt(
|
||||
f"Brute force interrupted by user (Ctrl+C). Shutting down gracefully..."
|
||||
)
|
||||
|
||||
finally:
|
||||
executor.shutdown(wait=False)
|
||||
|
||||
raise OffsetNotFoundError("Could not find the correct offsets for decryption.")
|
||||
|
||||
def _attempt_decrypt_task(offset_tuple, database, main_key):
|
||||
"""Attempt decryption with the given offsets."""
|
||||
start_iv, end_iv, start_db = offset_tuple
|
||||
iv = database[start_iv:end_iv]
|
||||
db_ciphertext = database[start_db:]
|
||||
|
||||
try:
|
||||
return _decrypt_database(db_ciphertext, main_key, iv)
|
||||
except (zlib.error, ValueError):
|
||||
return None
|
||||
|
||||
|
||||
def _decrypt_crypt12(database: bytes, main_key: bytes) -> bytes:
|
||||
"""Decrypt a crypt12 database.
|
||||
|
||||
Args:
|
||||
database (bytes): The encrypted database.
|
||||
main_key (bytes): The decryption key.
|
||||
|
||||
Returns:
|
||||
bytes: The decrypted database.
|
||||
|
||||
Raises:
|
||||
ValueError: If the file format is invalid or the signature mismatches.
|
||||
"""
|
||||
if len(database) < 67:
|
||||
raise InvalidFileFormatError("The crypt12 file must be at least 67 bytes")
|
||||
|
||||
t2 = database[3:35]
|
||||
iv = database[51:67]
|
||||
db_ciphertext = database[67:-20]
|
||||
return _decrypt_database(db_ciphertext, main_key, iv)
|
||||
|
||||
|
||||
def _decrypt_crypt15(database: bytes, main_key: bytes, db_type: DbType) -> bytes:
|
||||
"""Decrypt a crypt15 database.
|
||||
|
||||
Args:
|
||||
database (bytes): The encrypted database.
|
||||
main_key (bytes): The decryption key.
|
||||
db_type (DbType): The type of database.
|
||||
|
||||
Returns:
|
||||
bytes: The decrypted database.
|
||||
|
||||
Raises:
|
||||
ValueError: If the file format is invalid or the signature mismatches.
|
||||
"""
|
||||
if not support_crypt15:
|
||||
raise RuntimeError("Crypt15 is not supported")
|
||||
if len(database) < 131:
|
||||
raise InvalidFileFormatError("The crypt15 file must be at least 131 bytes")
|
||||
|
||||
if db_type == DbType.MESSAGE:
|
||||
iv = database[8:24]
|
||||
db_offset = database[0] + 2
|
||||
elif db_type == DbType.CONTACT:
|
||||
iv = database[7:23]
|
||||
db_offset = database[0] + 1
|
||||
else:
|
||||
raise ValueError(f"Invalid db_type: {db_type}")
|
||||
|
||||
db_ciphertext = database[db_offset:]
|
||||
return _decrypt_database(db_ciphertext, main_key, iv)
|
||||
|
||||
|
||||
def decrypt_backup(
|
||||
database: bytes,
|
||||
key: Union[str, io.IOBase],
|
||||
output: str = None,
|
||||
crypt: Crypt = Crypt.CRYPT14,
|
||||
show_crypt15: bool = False,
|
||||
db_type: DbType = DbType.MESSAGE,
|
||||
*,
|
||||
dry_run: bool = False,
|
||||
keyfile_stream: bool = False,
|
||||
max_worker: int = 10
|
||||
) -> int:
|
||||
"""
|
||||
Decrypt the WhatsApp backup database.
|
||||
|
||||
Args:
|
||||
database (bytes): The encrypted database file.
|
||||
key (str or io.IOBase): The key to decrypt the database.
|
||||
output (str, optional): The path to save the decrypted database. Defaults to None.
|
||||
crypt (Crypt, optional): The encryption version of the database. Defaults to Crypt.CRYPT14.
|
||||
show_crypt15 (bool, optional): Whether to show the HEX key of the crypt15 backup. Defaults to False.
|
||||
db_type (DbType, optional): The type of database (MESSAGE or CONTACT). Defaults to DbType.MESSAGE.
|
||||
dry_run (bool, optional): Whether to perform a dry run. Defaults to False.
|
||||
keyfile_stream (bool, optional): Whether the key is a key stream. Defaults to False.
|
||||
|
||||
Returns:
|
||||
int: The status code of the decryption process (0 for success).
|
||||
|
||||
Raises:
|
||||
ValueError: If the key is invalid or output file not provided when dry_run is False.
|
||||
DecryptionError: for errors during decryption
|
||||
RuntimeError: for dependency errors
|
||||
"""
|
||||
if not support_backup:
|
||||
raise RuntimeError("Dependencies for backup decryption are not available.")
|
||||
|
||||
if not dry_run and output is None:
|
||||
raise ValueError(
|
||||
"The path to the decrypted database must be specified unless dry_run is true."
|
||||
)
|
||||
|
||||
if isinstance(key, io.IOBase):
|
||||
key = key.read()
|
||||
|
||||
if crypt is not Crypt.CRYPT15 and len(key) != 158:
|
||||
raise InvalidKeyError("The key file must be 158 bytes")
|
||||
|
||||
# Signature check; this check is used in crypt12 and crypt14
|
||||
if crypt != Crypt.CRYPT15:
|
||||
t1 = key[30:62]
|
||||
|
||||
if t1 != database[15:47] and crypt == Crypt.CRYPT14:
|
||||
raise ValueError("The signature of key file and backup file mismatch")
|
||||
|
||||
if t1 != database[3:35] and crypt == Crypt.CRYPT12:
|
||||
raise ValueError("The signature of key file and backup file mismatch")
|
||||
|
||||
if crypt == Crypt.CRYPT15:
|
||||
if keyfile_stream:
|
||||
main_key, hex_key = _extract_enc_key(key)
|
||||
else:
|
||||
main_key, hex_key = _derive_main_enc_key(key)
|
||||
if show_crypt15:
|
||||
hex_key_str = ' '.join([hex_key.hex()[c:c+4] for c in range(0, len(hex_key.hex()), 4)])
|
||||
logging.info(f"The HEX key of the crypt15 backup is: {hex_key_str}")
|
||||
else:
|
||||
main_key = key[126:]
|
||||
|
||||
try:
|
||||
if crypt == Crypt.CRYPT14:
|
||||
db = _decrypt_crypt14(database, main_key, max_worker)
|
||||
elif crypt == Crypt.CRYPT12:
|
||||
db = _decrypt_crypt12(database, main_key)
|
||||
elif crypt == Crypt.CRYPT15:
|
||||
db = _decrypt_crypt15(database, main_key, db_type)
|
||||
else:
|
||||
raise ValueError(f"Unsupported crypt type: {crypt}")
|
||||
except (InvalidFileFormatError, OffsetNotFoundError, ValueError) as e:
|
||||
raise DecryptionError(f"Decryption failed: {e}") from e
|
||||
|
||||
if not dry_run:
|
||||
with open(output, "wb") as f:
|
||||
f.write(db)
|
||||
return 0
|
||||
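For reference, a minimal sketch of calling `decrypt_backup()` directly for a crypt15 backup, based only on the signature above; the file names and the key value are placeholders:

```python
from Whatsapp_Chat_Exporter.android_crypt import decrypt_backup
from Whatsapp_Chat_Exporter.utility import Crypt, DbType

with open("msgstore.db.crypt15", "rb") as f:  # placeholder path
    encrypted = f.read()

hex_key = "00" * 32  # placeholder for the 64-hex-digit end-to-end backup key
decrypt_backup(
    encrypted,
    bytes.fromhex(hex_key),   # raw 32-byte key stream
    output="msgstore.db",     # the decrypted SQLite database is written here
    crypt=Crypt.CRYPT15,
    db_type=DbType.MESSAGE,
)
```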
Whatsapp_Chat_Exporter/android_handler.py (1265 lines, new file; file diff suppressed because it is too large)

Whatsapp_Chat_Exporter/bplist.py (245 lines, new file)
@@ -0,0 +1,245 @@
|
||||
#################################################################################
|
||||
# Copyright (C) 2009-2011 Vladimir "Farcaller" Pouzanov <farcaller@gmail.com> #
|
||||
# #
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy #
|
||||
# of this software and associated documentation files (the "Software"), to deal #
|
||||
# in the Software without restriction, including without limitation the rights #
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #
|
||||
# copies of the Software, and to permit persons to whom the Software is #
|
||||
# furnished to do so, subject to the following conditions: #
|
||||
# #
|
||||
# The above copyright notice and this permission notice shall be included in #
|
||||
# all copies or substantial portions of the Software. #
|
||||
# #
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN #
|
||||
# THE SOFTWARE. #
|
||||
#################################################################################
|
||||
|
||||
import struct
|
||||
import codecs
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
|
||||
class BPListReader(object):
|
||||
def __init__(self, s):
|
||||
self.data = s
|
||||
self.objects = []
|
||||
self.resolved = {}
|
||||
|
||||
def __unpackIntStruct(self, sz, s):
|
||||
'''__unpackIntStruct(size, string) -> int
|
||||
|
||||
Unpacks the integer of given size (1, 2 or 4 bytes) from string
|
||||
'''
|
||||
if sz == 1:
|
||||
ot = '!B'
|
||||
elif sz == 2:
|
||||
ot = '!H'
|
||||
elif sz == 4:
|
||||
ot = '!I'
|
||||
elif sz == 8:
|
||||
ot = '!Q'
|
||||
else:
|
||||
raise Exception('int unpack size '+str(sz)+' unsupported')
|
||||
return struct.unpack(ot, s)[0]
|
||||
|
||||
def __unpackInt(self, offset):
|
||||
'''__unpackInt(offset) -> int
|
||||
|
||||
Unpacks int field from plist at given offset
|
||||
'''
|
||||
return self.__unpackIntMeta(offset)[1]
|
||||
|
||||
def __unpackIntMeta(self, offset):
|
||||
'''__unpackIntMeta(offset) -> (size, int)
|
||||
|
||||
Unpacks int field from plist at given offset and returns its size and value
|
||||
'''
|
||||
obj_header = self.data[offset]
|
||||
obj_type, obj_info = (obj_header & 0xF0), (obj_header & 0x0F)
|
||||
int_sz = 2**obj_info
|
||||
return int_sz, self.__unpackIntStruct(int_sz, self.data[offset+1:offset+1+int_sz])
|
||||
|
||||
def __resolveIntSize(self, obj_info, offset):
|
||||
'''__resolveIntSize(obj_info, offset) -> (count, offset)
|
||||
|
||||
Calculates count of objref* array entries and returns count and offset to first element
|
||||
'''
|
||||
if obj_info == 0x0F:
|
||||
ofs, obj_count = self.__unpackIntMeta(offset+1)
|
||||
objref = offset+2+ofs
|
||||
else:
|
||||
obj_count = obj_info
|
||||
objref = offset+1
|
||||
return obj_count, objref
|
||||
|
||||
def __unpackFloatStruct(self, sz, s):
|
||||
'''__unpackFloatStruct(size, string) -> float
|
||||
|
||||
Unpacks the float of given size (4 or 8 bytes) from string
|
||||
'''
|
||||
if sz == 4:
|
||||
ot = '!f'
|
||||
elif sz == 8:
|
||||
ot = '!d'
|
||||
else:
|
||||
raise Exception('float unpack size '+str(sz)+' unsupported')
|
||||
return struct.unpack(ot, s)[0]
|
||||
|
||||
def __unpackFloat(self, offset):
|
||||
'''__unpackFloat(offset) -> float
|
||||
|
||||
Unpacks float field from plist at given offset
|
||||
'''
|
||||
obj_header = self.data[offset]
|
||||
obj_type, obj_info = (obj_header & 0xF0), (obj_header & 0x0F)
|
||||
int_sz = 2**obj_info
|
||||
return int_sz, self.__unpackFloatStruct(int_sz, self.data[offset+1:offset+1+int_sz])
|
||||
|
||||
def __unpackDate(self, offset):
|
||||
td = int(struct.unpack(">d", self.data[offset+1:offset+9])[0])
|
||||
return datetime(year=2001, month=1, day=1) + timedelta(seconds=td)
|
||||
|
||||
def __unpackItem(self, offset):
|
||||
'''__unpackItem(offset)
|
||||
|
||||
Unpacks and returns an item from plist
|
||||
'''
|
||||
obj_header = self.data[offset]
|
||||
obj_type, obj_info = (obj_header & 0xF0), (obj_header & 0x0F)
|
||||
if obj_type == 0x00:
|
||||
if obj_info == 0x00: # null 0000 0000
|
||||
return None
|
||||
elif obj_info == 0x08: # bool 0000 1000 // false
|
||||
return False
|
||||
elif obj_info == 0x09: # bool 0000 1001 // true
|
||||
return True
|
||||
elif obj_info == 0x0F: # fill 0000 1111 // fill byte
|
||||
raise Exception("0x0F Not Implemented") # this is really pad byte, FIXME
|
||||
else:
|
||||
raise Exception('unpack item type '+str(obj_header)+' at '+str(offset) + 'failed')
|
||||
elif obj_type == 0x10: # int 0001 nnnn ... // # of bytes is 2^nnnn, big-endian bytes
|
||||
return self.__unpackInt(offset)
|
||||
elif obj_type == 0x20: # real 0010 nnnn ... // # of bytes is 2^nnnn, big-endian bytes
|
||||
return self.__unpackFloat(offset)
|
||||
elif obj_type == 0x30: # date 0011 0011 ... // 8 byte float follows, big-endian bytes
|
||||
return self.__unpackDate(offset)
|
||||
# data 0100 nnnn [int] ... // nnnn is number of bytes unless 1111 then int count follows, followed by bytes
|
||||
elif obj_type == 0x40:
|
||||
obj_count, objref = self.__resolveIntSize(obj_info, offset)
|
||||
return self.data[objref:objref+obj_count] # XXX: we return data as str
|
||||
# string 0101 nnnn [int] ... // ASCII string, nnnn is # of chars, else 1111 then int count, then bytes
|
||||
elif obj_type == 0x50:
|
||||
obj_count, objref = self.__resolveIntSize(obj_info, offset)
|
||||
return self.data[objref:objref+obj_count]
|
||||
# string 0110 nnnn [int] ... // Unicode string, nnnn is # of chars, else 1111 then int count, then big-endian 2-byte uint16_t
|
||||
elif obj_type == 0x60:
|
||||
obj_count, objref = self.__resolveIntSize(obj_info, offset)
|
||||
return self.data[objref:objref+obj_count*2].decode('utf-16be')
|
||||
elif obj_type == 0x80: # uid 1000 nnnn ... // nnnn+1 is # of bytes
|
||||
# FIXME: Accept as a string for now
|
||||
obj_count, objref = self.__resolveIntSize(obj_info, offset)
|
||||
return self.data[objref:objref+obj_count]
|
||||
# array 1010 nnnn [int] objref* // nnnn is count, unless '1111', then int count follows
|
||||
elif obj_type == 0xA0:
|
||||
obj_count, objref = self.__resolveIntSize(obj_info, offset)
|
||||
arr = []
|
||||
for i in range(obj_count):
|
||||
arr.append(self.__unpackIntStruct(
|
||||
self.object_ref_size, self.data[objref+i*self.object_ref_size:objref+i*self.object_ref_size+self.object_ref_size]))
|
||||
return arr
|
||||
# set 1100 nnnn [int] objref* // nnnn is count, unless '1111', then int count follows
|
||||
elif obj_type == 0xC0:
|
||||
# XXX: not serializable via apple implementation
|
||||
raise Exception("0xC0 Not Implemented") # FIXME: implement
|
||||
# dict 1101 nnnn [int] keyref* objref* // nnnn is count, unless '1111', then int count follows
|
||||
elif obj_type == 0xD0:
|
||||
obj_count, objref = self.__resolveIntSize(obj_info, offset)
|
||||
keys = []
|
||||
for i in range(obj_count):
|
||||
keys.append(self.__unpackIntStruct(
|
||||
self.object_ref_size, self.data[objref+i*self.object_ref_size:objref+i*self.object_ref_size+self.object_ref_size]))
|
||||
values = []
|
||||
objref += obj_count*self.object_ref_size
|
||||
for i in range(obj_count):
|
||||
values.append(self.__unpackIntStruct(
|
||||
self.object_ref_size, self.data[objref+i*self.object_ref_size:objref+i*self.object_ref_size+self.object_ref_size]))
|
||||
dic = {}
|
||||
for i in range(obj_count):
|
||||
dic[keys[i]] = values[i]
|
||||
return dic
|
||||
else:
|
||||
raise Exception('don\'t know how to unpack obj type '+hex(obj_type)+' at '+str(offset))
|
||||
|
||||
def __resolveObject(self, idx):
|
||||
try:
|
||||
return self.resolved[idx]
|
||||
except KeyError:
|
||||
obj = self.objects[idx]
|
||||
if type(obj) == list:
|
||||
newArr = []
|
||||
for i in obj:
|
||||
newArr.append(self.__resolveObject(i))
|
||||
self.resolved[idx] = newArr
|
||||
return newArr
|
||||
if type(obj) == dict:
|
||||
newDic = {}
|
||||
for k, v in obj.items():
|
||||
key_resolved = self.__resolveObject(k)
|
||||
if isinstance(key_resolved, str):
|
||||
rk = key_resolved
|
||||
else:
|
||||
rk = codecs.decode(key_resolved, "utf-8")
|
||||
rv = self.__resolveObject(v)
|
||||
newDic[rk] = rv
|
||||
self.resolved[idx] = newDic
|
||||
return newDic
|
||||
else:
|
||||
self.resolved[idx] = obj
|
||||
return obj
|
||||
|
||||
def parse(self):
|
||||
# read header
|
||||
if self.data[:8] != b'bplist00':
|
||||
raise Exception('Bad magic')
|
||||
|
||||
# read trailer
|
||||
self.offset_size, self.object_ref_size, self.number_of_objects, self.top_object, self.table_offset = struct.unpack(
|
||||
'!6xBB4xI4xI4xI', self.data[-32:])
|
||||
# print "** plist offset_size:",self.offset_size,"objref_size:",self.object_ref_size,"num_objs:",self.number_of_objects,"top:",self.top_object,"table_ofs:",self.table_offset
|
||||
|
||||
# read offset table
|
||||
self.offset_table = self.data[self.table_offset:-32]
|
||||
self.offsets = []
|
||||
ot = self.offset_table
|
||||
for i in range(self.number_of_objects):
|
||||
offset_entry = ot[:self.offset_size]
|
||||
ot = ot[self.offset_size:]
|
||||
self.offsets.append(self.__unpackIntStruct(self.offset_size, offset_entry))
|
||||
# print "** plist offsets:",self.offsets
|
||||
|
||||
# read object table
|
||||
self.objects = []
|
||||
k = 0
|
||||
for i in self.offsets:
|
||||
obj = self.__unpackItem(i)
|
||||
# print "** plist unpacked",k,type(obj),obj,"at",i
|
||||
k += 1
|
||||
self.objects.append(obj)
|
||||
|
||||
# rebuild object tree
|
||||
# for i in range(len(self.objects)):
|
||||
# self.__resolveObject(i)
|
||||
|
||||
# return root object
|
||||
return self.__resolveObject(self.top_object)
|
||||
|
||||
@classmethod
|
||||
def plistWithString(cls, s):
|
||||
parser = cls(s)
|
||||
return parser.parse()
|
||||
@@ -1,16 +1,198 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import os
|
||||
from datetime import datetime
|
||||
from typing import Union
|
||||
from datetime import datetime, tzinfo, timedelta
|
||||
from typing import MutableMapping, Union, Optional, Dict, Any
|
||||
|
||||
|
||||
class ChatStore():
|
||||
def __init__(self, type, name=None, media=None):
|
||||
class Timing:
|
||||
"""
|
||||
Handles timestamp formatting with timezone support.
|
||||
"""
|
||||
|
||||
def __init__(self, timezone_offset: Optional[int]) -> None:
|
||||
"""
|
||||
Initialize Timing object.
|
||||
|
||||
Args:
|
||||
timezone_offset (Optional[int]): Hours offset from UTC
|
||||
"""
|
||||
self.timezone_offset = timezone_offset
|
||||
|
||||
def format_timestamp(self, timestamp: Optional[Union[int, float]], format: str) -> Optional[str]:
|
||||
"""
|
||||
Format a timestamp with the specified format string.
|
||||
|
||||
Args:
|
||||
timestamp (Optional[Union[int, float]]): Unix timestamp to format
|
||||
format (str): strftime format string
|
||||
|
||||
Returns:
|
||||
Optional[str]: Formatted timestamp string, or None if timestamp is None
|
||||
"""
|
||||
if timestamp is not None:
|
||||
timestamp = timestamp / 1000 if timestamp > 9999999999 else timestamp
|
||||
return datetime.fromtimestamp(timestamp, TimeZone(self.timezone_offset)).strftime(format)
|
||||
return None
|
||||
|
||||
|
||||
class TimeZone(tzinfo):
|
||||
"""
|
||||
Custom timezone class with fixed offset.
|
||||
"""
|
||||
|
||||
def __init__(self, offset: int) -> None:
|
||||
"""
|
||||
Initialize TimeZone object.
|
||||
|
||||
Args:
|
||||
offset (int): Hours offset from UTC
|
||||
"""
|
||||
self.offset = offset
|
||||
|
||||
def utcoffset(self, dt: Optional[datetime]) -> timedelta:
|
||||
"""Get UTC offset."""
|
||||
return timedelta(hours=self.offset)
|
||||
|
||||
def dst(self, dt: Optional[datetime]) -> timedelta:
|
||||
"""Get DST offset (always 0)."""
|
||||
return timedelta(0)
|
||||
|
||||
|
||||
class ChatCollection(MutableMapping):
|
||||
"""
|
||||
A collection of chats that provides dictionary-like access with additional chat management methods.
|
||||
Inherits from MutableMapping to implement a custom dictionary-like behavior.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize an empty chat collection."""
|
||||
self._chats: Dict[str, ChatStore] = {}
|
||||
self._system: Dict[str, Any] = {}
|
||||
|
||||
def __getitem__(self, key: str) -> 'ChatStore':
|
||||
"""Get a chat by its ID. Required for dict-like access."""
|
||||
return self._chats[key]
|
||||
|
||||
def __setitem__(self, key: str, value: 'ChatStore') -> None:
|
||||
"""Set a chat by its ID. Required for dict-like access."""
|
||||
if not isinstance(value, ChatStore):
|
||||
raise TypeError("Value must be a ChatStore object")
|
||||
self._chats[key] = value
|
||||
|
||||
def __delitem__(self, key: str) -> None:
|
||||
"""Delete a chat by its ID. Required for dict-like access."""
|
||||
del self._chats[key]
|
||||
|
||||
def __iter__(self):
|
||||
"""Iterate over chat IDs. Required for dict-like access."""
|
||||
return iter(self._chats)
|
||||
|
||||
def __len__(self) -> int:
|
||||
"""Get number of chats. Required for dict-like access."""
|
||||
return len(self._chats)
|
||||
|
||||
def get_chat(self, chat_id: str) -> Optional['ChatStore']:
|
||||
"""
|
||||
Get a chat by its ID.
|
||||
|
||||
Args:
|
||||
chat_id (str): The ID of the chat to retrieve
|
||||
|
||||
Returns:
|
||||
Optional['ChatStore']: The chat if found, None otherwise
|
||||
"""
|
||||
return self._chats.get(chat_id)
|
||||
|
||||
def add_chat(self, chat_id: str, chat: 'ChatStore') -> None:
|
||||
"""
|
||||
Add a new chat to the collection.
|
||||
|
||||
Args:
|
||||
chat_id (str): The ID for the chat
|
||||
chat (ChatStore): The chat to add
|
||||
|
||||
Raises:
|
||||
TypeError: If chat is not a ChatStore object
|
||||
"""
|
||||
if not isinstance(chat, ChatStore):
|
||||
raise TypeError("Chat must be a ChatStore object")
|
||||
self._chats[chat_id] = chat
|
||||
return self._chats[chat_id]
|
||||
|
||||
def remove_chat(self, chat_id: str) -> None:
|
||||
"""
|
||||
Remove a chat from the collection.
|
||||
|
||||
Args:
|
||||
chat_id (str): The ID of the chat to remove
|
||||
"""
|
||||
if chat_id in self._chats:
|
||||
del self._chats[chat_id]
|
||||
|
||||
def items(self):
|
||||
"""Get chat items (id, chat) pairs."""
|
||||
return self._chats.items()
|
||||
|
||||
def values(self):
|
||||
"""Get all chats."""
|
||||
return self._chats.values()
|
||||
|
||||
def keys(self):
|
||||
"""Get all chat IDs."""
|
||||
return self._chats.keys()
|
||||
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
"""
|
||||
Convert the collection to a dictionary.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Dictionary representation of all chats
|
||||
"""
|
||||
return {chat_id: chat.to_json() for chat_id, chat in self._chats.items()}
|
||||
|
||||
def get_system(self, key: str) -> Any:
|
||||
"""
|
||||
Get a system value by its key.
|
||||
|
||||
Args:
|
||||
key (str): The key of the system value to retrieve
|
||||
|
||||
Returns:
|
||||
Any: The system value if found, None otherwise
|
||||
"""
|
||||
return self._system.get(key)
|
||||
|
||||
def set_system(self, key: str, value: Any) -> None:
|
||||
"""
|
||||
Set a system value by its key.
|
||||
|
||||
Args:
|
||||
key (str): The key of the system value to set
|
||||
value (Any): The value to set
|
||||
"""
|
||||
self._system[key] = value
|
||||
|
||||
|
||||
class ChatStore:
|
||||
"""
|
||||
Stores chat information and messages.
|
||||
"""
|
||||
|
||||
def __init__(self, type: str, name: Optional[str] = None, media: Optional[str] = None) -> None:
|
||||
"""
|
||||
Initialize ChatStore object.
|
||||
|
||||
Args:
|
||||
type (str): Device type (IOS or ANDROID)
|
||||
name (Optional[str]): Chat name
|
||||
media (Optional[str]): Path to media folder
|
||||
|
||||
Raises:
|
||||
TypeError: If name is not a string or None
|
||||
"""
|
||||
if name is not None and not isinstance(name, str):
|
||||
raise TypeError("Name must be a string or None")
|
||||
self.name = name
|
||||
self.messages = {}
|
||||
self._messages: Dict[str, 'Message'] = {}
|
||||
self.type = type
|
||||
if media is not None:
|
||||
from Whatsapp_Chat_Exporter.utility import Device
|
||||
@@ -25,45 +207,130 @@ class ChatStore():
|
||||
self.their_avatar = None
|
||||
self.their_avatar_thumb = None
|
||||
self.status = None
|
||||
|
||||
def add_message(self, id, message):
|
||||
self.media_base = ""
|
||||
|
||||
def __len__(self) -> int:
"""Get number of messages in the chat."""
|
||||
return len(self._messages)
|
||||
|
||||
def add_message(self, id: str, message: 'Message') -> None:
|
||||
"""Add a message to the chat store."""
|
||||
if not isinstance(message, Message):
|
||||
raise TypeError("message must be a Message object")
|
||||
self.messages[id] = message
|
||||
self._messages[id] = message
|
||||
|
||||
def delete_message(self, id):
|
||||
if id in self.messages:
|
||||
del self.messages[id]
|
||||
def get_message(self, id: str) -> 'Message':
|
||||
"""Get a message from the chat store."""
|
||||
return self._messages.get(id)
|
||||
|
||||
def to_json(self):
|
||||
serialized_msgs = {id: msg.to_json() for id, msg in self.messages.items()}
|
||||
return {
|
||||
'name': self.name,
|
||||
'type': self.type,
|
||||
'my_avatar': self.my_avatar,
|
||||
'their_avatar': self.their_avatar,
|
||||
'their_avatar_thumb': self.their_avatar_thumb,
|
||||
'status': self.status,
|
||||
'messages': serialized_msgs
|
||||
def delete_message(self, id: str) -> None:
|
||||
"""Delete a message from the chat store."""
|
||||
if id in self._messages:
|
||||
del self._messages[id]
|
||||
|
||||
def to_json(self) -> Dict[str, Any]:
|
||||
"""Convert chat store to JSON-serializable dict."""
|
||||
json_dict = {
|
||||
key: value
|
||||
for key, value in self.__dict__.items()
|
||||
if key != '_messages'
|
||||
}
|
||||
json_dict['messages'] = {id: msg.to_json() for id, msg in self._messages.items()}
|
||||
return json_dict
|
||||
|
||||
def get_last_message(self):
|
||||
return tuple(self.messages.values())[-1]
|
||||
@classmethod
|
||||
def from_json(cls, data: Dict) -> 'ChatStore':
|
||||
"""Create a chat store from JSON data."""
|
||||
chat = cls(data.get("type"), data.get("name"))
|
||||
for key, value in data.items():
|
||||
if hasattr(chat, key) and key not in ("messages", "type", "name"):
|
||||
setattr(chat, key, value)
|
||||
for id, msg_data in data.get("messages", {}).items():
|
||||
message = Message.from_json(msg_data)
|
||||
chat.add_message(id, message)
|
||||
return chat
|
||||
|
||||
def get_messages(self):
|
||||
return self.messages.values()
|
||||
def get_last_message(self) -> 'Message':
|
||||
"""Get the most recent message in the chat."""
|
||||
return tuple(self._messages.values())[-1]
|
||||
|
||||
def items(self):
|
||||
"""Get message items pairs."""
|
||||
return self._messages.items()
|
||||
|
||||
def values(self):
|
||||
"""Get all messages in the chat."""
|
||||
return self._messages.values()
|
||||
|
||||
def keys(self):
|
||||
"""Get all message keys in the chat."""
|
||||
return self._messages.keys()
|
||||
|
||||
def merge_with(self, other: 'ChatStore'):
|
||||
"""Merge another ChatStore into this one.
|
||||
|
||||
Args:
|
||||
other (ChatStore): The ChatStore to merge with
|
||||
|
||||
"""
|
||||
if not isinstance(other, ChatStore):
|
||||
raise TypeError("Can only merge with another ChatStore object")
|
||||
|
||||
# Update fields if they are not None in the other ChatStore
|
||||
self.name = other.name or self.name
|
||||
self.type = other.type or self.type
|
||||
self.my_avatar = other.my_avatar or self.my_avatar
|
||||
self.their_avatar = other.their_avatar or self.their_avatar
|
||||
self.their_avatar_thumb = other.their_avatar_thumb or self.their_avatar_thumb
|
||||
self.status = other.status or self.status
|
||||
|
||||
# Merge messages
|
||||
self._messages.update(other._messages)
|
||||
|
||||
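An illustrative sketch of how these containers fit together; the chat ID, type string and timestamps are placeholders, and the import assumes these classes live in Whatsapp_Chat_Exporter.data_model, as the imports elsewhere in this changeset suggest (real callers pass the Device constants from utility):

```python
from Whatsapp_Chat_Exporter.data_model import ChatCollection, ChatStore, Message

collection = ChatCollection()
chat = collection.add_chat("85212345678@s.whatsapp.net", ChatStore("android"))
chat.name = "Alice"
chat.add_message("MSG-1", Message(
    from_me=True,
    timestamp=1700000000,
    time=1700000000,
    key_id="MSG-1",
))
print(len(chat), list(collection.keys()))
```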
|
||||
class Message():
|
||||
def __init__(self, from_me: Union[bool,int], timestamp: int, time: Union[int,float,str], key_id: int):
|
||||
class Message:
|
||||
"""
|
||||
Represents a single message in a chat.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
from_me: Union[bool, int],
|
||||
timestamp: int,
|
||||
time: Union[int, float, str],
|
||||
key_id: Union[int, str],
|
||||
received_timestamp: int = None,
|
||||
read_timestamp: int = None,
|
||||
timezone_offset: Optional[Timing] = Timing(0),
|
||||
message_type: Optional[int] = None
|
||||
) -> None:
|
||||
"""
|
||||
Initialize Message object.
|
||||
|
||||
Args:
|
||||
from_me (Union[bool, int]): Whether message was sent by the user
|
||||
timestamp (int): Message timestamp
|
||||
time (Union[int, float, str]): Message time
|
||||
key_id (int): Message unique identifier
|
||||
received_timestamp (int, optional): When message was received. Defaults to None
|
||||
read_timestamp (int, optional): When message was read. Defaults to None
|
||||
timezone_offset (int, optional): Hours offset from UTC. Defaults to 0
|
||||
message_type (Optional[int], optional): Type of message. Defaults to None
|
||||
|
||||
Raises:
|
||||
TypeError: If time is not a string or number
|
||||
"""
|
||||
self.from_me = bool(from_me)
|
||||
self.timestamp = timestamp / 1000 if timestamp > 9999999999 else timestamp
|
||||
if isinstance(time, int) or isinstance(time, float):
|
||||
self.time = datetime.fromtimestamp(time/1000).strftime("%H:%M")
|
||||
|
||||
if isinstance(time, (int, float)):
|
||||
self.time = timezone_offset.format_timestamp(self.timestamp, "%H:%M")
|
||||
elif isinstance(time, str):
|
||||
self.time = time
|
||||
else:
|
||||
raise TypeError("Time must be a string or integer")
|
||||
raise TypeError("Time must be a string or number")
|
||||
|
||||
self.media = False
|
||||
self.key_id = key_id
|
||||
self.meta = False
|
||||
@@ -71,28 +338,51 @@ class Message():
|
||||
self.sender = None
|
||||
self.safe = False
|
||||
self.mime = None
|
||||
# Extra
|
||||
self.message_type = message_type
|
||||
if isinstance(received_timestamp, (int, float)):
|
||||
self.received_timestamp = timezone_offset.format_timestamp(
|
||||
received_timestamp, "%Y/%m/%d %H:%M")
|
||||
elif isinstance(received_timestamp, str):
|
||||
self.received_timestamp = received_timestamp
|
||||
else:
|
||||
self.received_timestamp = None
|
||||
if isinstance(read_timestamp, (int, float)):
|
||||
self.read_timestamp = timezone_offset.format_timestamp(
|
||||
read_timestamp, "%Y/%m/%d %H:%M")
|
||||
elif isinstance(read_timestamp, str):
|
||||
self.read_timestamp = read_timestamp
|
||||
else:
|
||||
self.read_timestamp = None
|
||||
|
||||
# Extra attributes
|
||||
self.reply = None
|
||||
self.quoted_data = None
|
||||
self.caption = None
|
||||
self.thumb = None # Android specific
|
||||
self.thumb = None # Android specific
|
||||
self.sticker = False
|
||||
|
||||
def to_json(self):
|
||||
self.reactions = {}
|
||||
|
||||
def to_json(self) -> Dict[str, Any]:
|
||||
"""Convert message to JSON-serializable dict."""
|
||||
return {
|
||||
'from_me' : self.from_me,
|
||||
'timestamp' : self.timestamp,
|
||||
'time' : self.time,
|
||||
'media' : self.media,
|
||||
'key_id' : self.key_id,
|
||||
'meta' : self.meta,
|
||||
'data' : self.data,
|
||||
'sender' : self.sender,
|
||||
'safe' : self.safe,
|
||||
'mime' : self.mime,
|
||||
'reply' : self.reply,
|
||||
'quoted_data' : self.quoted_data,
|
||||
'caption' : self.caption,
|
||||
'thumb' : self.thumb,
|
||||
'sticker' : self.sticker
|
||||
key: value
|
||||
for key, value in self.__dict__.items()
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, data: Dict) -> 'Message':
|
||||
message = cls(
|
||||
from_me=data["from_me"],
|
||||
timestamp=data["timestamp"],
|
||||
time=data["time"],
|
||||
key_id=data["key_id"],
|
||||
message_type=data.get("message_type"),
|
||||
received_timestamp=data.get("received_timestamp"),
|
||||
read_timestamp=data.get("read_timestamp")
|
||||
)
|
||||
added = ("from_me", "timestamp", "time", "key_id", "message_type",
|
||||
"received_timestamp", "read_timestamp")
|
||||
for key, value in data.items():
|
||||
if hasattr(message, key) and key not in added:
|
||||
setattr(message, key, value)
|
||||
return message
|
||||
|
||||
Whatsapp_Chat_Exporter/exported_handler.py (184 lines, new file)
@@ -0,0 +1,184 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import os
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from mimetypes import MimeTypes
|
||||
from tqdm import tqdm
|
||||
from Whatsapp_Chat_Exporter.data_model import ChatStore, Message
|
||||
from Whatsapp_Chat_Exporter.utility import Device, convert_time_unit
|
||||
|
||||
|
||||
|
||||
|
||||
def messages(path, data, assume_first_as_me=False):
|
||||
"""
|
||||
Extracts messages from an exported WhatsApp chat file.
|
||||
|
||||
Args:
|
||||
path: Path to the exported chat file
|
||||
data: Data container object to store the parsed chat
|
||||
assume_first_as_me: If True, assumes the first message is sent from the user without asking
|
||||
|
||||
Returns:
|
||||
Updated data container with extracted messages
|
||||
"""
|
||||
# Create a new chat in the data container
|
||||
chat = data.add_chat("ExportedChat", ChatStore(Device.EXPORTED))
|
||||
you = "" # Will store the username of the current user
|
||||
user_identification_done = False # Flag to track if user identification has been done
|
||||
|
||||
# First pass: count total lines for progress reporting
|
||||
with open(path, "r", encoding="utf8") as file:
|
||||
total_row_number = sum(1 for _ in file)
|
||||
|
||||
# Second pass: process the messages
|
||||
with open(path, "r", encoding="utf8") as file:
|
||||
with tqdm(total=total_row_number, desc="Processing messages & media", unit="msg&media", leave=False) as pbar:
|
||||
for index, line in enumerate(file):
|
||||
you, user_identification_done = process_line(
|
||||
line, index, chat, path, you,
|
||||
assume_first_as_me, user_identification_done
|
||||
)
|
||||
pbar.update(1)
|
||||
total_time = pbar.format_dict['elapsed']
|
||||
logging.info(f"Processed {total_row_number} messages & media in {convert_time_unit(total_time)}")
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def process_line(line, index, chat, file_path, you, assume_first_as_me, user_identification_done):
|
||||
"""
|
||||
Process a single line from the chat file
|
||||
|
||||
Returns:
|
||||
Tuple of (updated_you_value, updated_user_identification_done_flag)
|
||||
"""
|
||||
parts = line.split(" - ", 1)
|
||||
|
||||
# Check if this is a new message (has timestamp format)
|
||||
if len(parts) > 1:
|
||||
time = parts[0]
|
||||
you, user_identification_done = process_new_message(
|
||||
time, parts[1], index, chat, you, file_path,
|
||||
assume_first_as_me, user_identification_done
|
||||
)
|
||||
else:
|
||||
# This is a continuation of the previous message
|
||||
process_message_continuation(line, index, chat)
|
||||
|
||||
return you, user_identification_done
|
||||
|
||||
|
||||
def process_new_message(time, content, index, chat, you, file_path,
|
||||
assume_first_as_me, user_identification_done):
|
||||
"""
|
||||
Process a line that contains a new message
|
||||
|
||||
Returns:
|
||||
Tuple of (updated_you_value, updated_user_identification_done_flag)
|
||||
"""
|
||||
# Create a new message
|
||||
msg = Message(
|
||||
from_me=False, # Will be updated later if needed
|
||||
timestamp=datetime.strptime(time, "%d/%m/%Y, %H:%M").timestamp(),
|
||||
time=time.split(", ")[1].strip(),
|
||||
key_id=index,
|
||||
received_timestamp=None,
|
||||
read_timestamp=None
|
||||
)
|
||||
|
||||
# Check if this is a system message (no name:message format)
|
||||
if ":" not in content:
|
||||
msg.data = content
|
||||
msg.meta = True
|
||||
else:
|
||||
# Process user message
|
||||
name, message = content.strip().split(":", 1)
|
||||
|
||||
# Handle user identification
|
||||
if you == "":
|
||||
if chat.name is None:
|
||||
# First sender identification
|
||||
if not user_identification_done:
|
||||
if not assume_first_as_me:
|
||||
# Ask only once if this is the user
|
||||
you = prompt_for_user_identification(name)
|
||||
user_identification_done = True
|
||||
else:
|
||||
you = name
|
||||
user_identification_done = True
|
||||
else:
|
||||
# If we know the chat name, anyone else must be "you"
|
||||
if name != chat.name:
|
||||
you = name
|
||||
|
||||
# Set the chat name if needed
|
||||
if chat.name is None and name != you:
|
||||
chat.name = name
|
||||
|
||||
# Determine if this message is from the current user
|
||||
msg.from_me = (name == you)
|
||||
|
||||
# Process message content
|
||||
process_message_content(msg, message, file_path)
|
||||
|
||||
chat.add_message(index, msg)
|
||||
return you, user_identification_done
|
||||
|
||||
|
||||
def process_message_content(msg, message, file_path):
|
||||
"""Process and set the content of a message based on its type"""
|
||||
if "<Media omitted>" in message:
|
||||
msg.data = "The media is omitted in the chat"
|
||||
msg.mime = "media"
|
||||
msg.meta = True
|
||||
elif "(file attached)" in message:
|
||||
process_attached_file(msg, message, file_path)
|
||||
else:
|
||||
msg.data = message.replace("\r\n", "<br>").replace("\n", "<br>")
|
||||
|
||||
|
||||
def process_attached_file(msg, message, file_path):
|
||||
"""Process an attached file in a message"""
|
||||
mime = MimeTypes()
|
||||
msg.media = True
|
||||
|
||||
# Extract file path and check if it exists
|
||||
file_name = message.split("(file attached)")[0].strip()
|
||||
attached_file_path = os.path.join(os.path.dirname(file_path), file_name)
|
||||
|
||||
if os.path.isfile(attached_file_path):
|
||||
msg.data = attached_file_path
|
||||
guess = mime.guess_type(attached_file_path)[0]
|
||||
msg.mime = guess if guess is not None else "application/octet-stream"
|
||||
else:
|
||||
msg.data = "The media is missing"
|
||||
msg.mime = "media"
|
||||
msg.meta = True
|
||||
|
||||
|
||||
def process_message_continuation(line, index, chat):
|
||||
"""Process a line that continues a previous message"""
|
||||
# Find the previous message
|
||||
lookback = index - 1
|
||||
while lookback not in chat.keys():
|
||||
lookback -= 1
|
||||
|
||||
msg = chat.get_message(lookback)
|
||||
|
||||
# Add the continuation line to the message
|
||||
if msg.media:
|
||||
msg.caption = line.strip()
|
||||
else:
|
||||
msg.data += "<br>" + line.strip()
|
||||
|
||||
|
||||
def prompt_for_user_identification(name):
|
||||
"""Ask the user if the given name is their username"""
|
||||
while True:
|
||||
ans = input(f"Is '{name}' you? (Y/N)").lower()
|
||||
if ans == "y":
|
||||
return name
|
||||
elif ans == "n":
|
||||
return ""
|
||||
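For reference, a minimal sketch of how a single exported line is split by the parsing above, using a hypothetical sample line (the date format and the " - " separator match the logic in process_line and process_new_message):

from datetime import datetime

sample = "01/02/2023, 14:05 - Alice: Hello there"  # hypothetical exported line
prefix, rest = sample.split(" - ", 1)
timestamp = datetime.strptime(prefix, "%d/%m/%Y, %H:%M").timestamp()
name, body = rest.strip().split(":", 1)
print(timestamp, name, body.strip())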
@@ -1,721 +0,0 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import sqlite3
|
||||
import json
|
||||
import jinja2
|
||||
import os
|
||||
import shutil
|
||||
import re
|
||||
import io
|
||||
import hmac
|
||||
from pathlib import Path
|
||||
from mimetypes import MimeTypes
|
||||
from hashlib import sha256
|
||||
from base64 import b64decode, b64encode
|
||||
from Whatsapp_Chat_Exporter.data_model import ChatStore, Message
|
||||
from Whatsapp_Chat_Exporter.utility import MAX_SIZE, ROW_SIZE, determine_metadata, get_status_location
|
||||
from Whatsapp_Chat_Exporter.utility import rendering, Crypt, Device, get_file_name, setup_template
|
||||
from Whatsapp_Chat_Exporter.utility import brute_force_offset, CRYPT14_OFFSETS
|
||||
|
||||
try:
|
||||
import zlib
|
||||
from Crypto.Cipher import AES
|
||||
except ModuleNotFoundError:
|
||||
support_backup = False
|
||||
else:
|
||||
support_backup = True
|
||||
try:
|
||||
import javaobj
|
||||
except ModuleNotFoundError:
|
||||
support_crypt15 = False
|
||||
else:
|
||||
support_crypt15 = True
|
||||
|
||||
|
||||
def _generate_hmac_of_hmac(key_stream):
|
||||
key = hmac.new(
|
||||
hmac.new(
|
||||
b'\x00' * 32,
|
||||
key_stream,
|
||||
sha256
|
||||
).digest(),
|
||||
b"backup encryption\x01",
|
||||
sha256
|
||||
)
|
||||
return key.digest(), key_stream
|
||||
|
||||
|
||||
def _extract_encrypted_key(keyfile):
|
||||
key_stream = b""
|
||||
for byte in javaobj.loads(keyfile):
|
||||
key_stream += byte.to_bytes(1, "big", signed=True)
|
||||
|
||||
return _generate_hmac_of_hmac(key_stream)
|
||||
|
||||
|
||||
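As a standalone illustration of the key expansion above, the 32-byte root key is run through two rounds of HMAC-SHA256 with the label b"backup encryption\x01" to obtain the AES-GCM key; the root key below is a dummy value used only for the sketch:

import hmac
from hashlib import sha256

root_key = bytes(32)  # dummy all-zero root key, illustration only
intermediate = hmac.new(b"\x00" * 32, root_key, sha256).digest()
main_key = hmac.new(intermediate, b"backup encryption\x01", sha256).digest()
print(main_key.hex())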
def decrypt_backup(database, key, output, crypt=Crypt.CRYPT14, show_crypt15=False):
|
||||
if not support_backup:
|
||||
return 1
|
||||
if isinstance(key, io.IOBase):
|
||||
key = key.read()
|
||||
if crypt is not Crypt.CRYPT15:
|
||||
t1 = key[30:62]
|
||||
if crypt is not Crypt.CRYPT15 and len(key) != 158:
|
||||
raise ValueError("The key file must be 158 bytes")
|
||||
# Determine the IV and database offsets
|
||||
if crypt == Crypt.CRYPT14:
|
||||
if len(database) < 191:
|
||||
raise ValueError("The crypt14 file must be at least 191 bytes")
|
||||
current_try = 0
|
||||
offsets = CRYPT14_OFFSETS[current_try]
|
||||
t2 = database[15:47]
|
||||
iv = database[offsets["iv"]:offsets["iv"] + 16]
|
||||
db_ciphertext = database[offsets["db"]:]
|
||||
elif crypt == Crypt.CRYPT12:
|
||||
if len(database) < 67:
|
||||
raise ValueError("The crypt12 file must be at least 67 bytes")
|
||||
t2 = database[3:35]
|
||||
iv = database[51:67]
|
||||
db_ciphertext = database[67:-20]
|
||||
elif crypt == Crypt.CRYPT15:
|
||||
if not support_crypt15:
|
||||
return 1
|
||||
if len(database) < 131:
|
||||
raise ValueError("The crypt15 file must be at least 131 bytes")
|
||||
t1 = t2 = None
|
||||
iv = database[8:24]
|
||||
db_offset = database[0] + 2 # Skip protobuf + protobuf size and backup type
|
||||
db_ciphertext = database[db_offset:]
|
||||
|
||||
if t1 != t2:
|
||||
raise ValueError("The signature of key file and backup file mismatch")
|
||||
|
||||
if crypt == Crypt.CRYPT15:
|
||||
if len(key) == 32:
|
||||
main_key, hex_key = _generate_hmac_of_hmac(key)
|
||||
else:
|
||||
main_key, hex_key = _extract_encrypted_key(key)
|
||||
if show_crypt15:
|
||||
hex_key = [hex_key.hex()[c:c+4] for c in range(0, len(hex_key.hex()), 4)]
|
||||
print("The HEX key of the crypt15 backup is: " + ' '.join(hex_key))
|
||||
else:
|
||||
main_key = key[126:]
|
||||
decompressed = False
|
||||
while not decompressed:
|
||||
cipher = AES.new(main_key, AES.MODE_GCM, iv)
|
||||
db_compressed = cipher.decrypt(db_ciphertext)
|
||||
try:
|
||||
db = zlib.decompress(db_compressed)
|
||||
except zlib.error:
|
||||
if crypt == Crypt.CRYPT14:
|
||||
current_try += 1
|
||||
if current_try < len(CRYPT14_OFFSETS):
|
||||
offsets = CRYPT14_OFFSETS[current_try]
|
||||
iv = database[offsets["iv"]:offsets["iv"] + 16]
|
||||
db_ciphertext = database[offsets["db"]:]
|
||||
continue
|
||||
else:
|
||||
print("Common offsets are not applicable to "
|
||||
"your backup. Trying to brute force it...")
|
||||
for start_iv, end_iv, start_db in brute_force_offset():
|
||||
iv = database[start_iv:end_iv]
|
||||
db_ciphertext = database[start_db:]
|
||||
cipher = AES.new(main_key, AES.MODE_GCM, iv)
|
||||
db_compressed = cipher.decrypt(db_ciphertext)
|
||||
try:
|
||||
db = zlib.decompress(db_compressed)
|
||||
except zlib.error:
|
||||
continue
|
||||
else:
|
||||
decompressed = True
|
||||
print(
|
||||
f"The offsets of your IV and database are {start_iv} and "
|
||||
f"{start_db}, respectively. To include your offsets in the "
|
||||
"program, please report it by creating an issue on GitHub: "
|
||||
"https://github.com/KnugiHK/Whatsapp-Chat-Exporter/discussions/47"
|
||||
)
|
||||
break
|
||||
if not decompressed:
|
||||
return 2
|
||||
else:
|
||||
return 3
|
||||
else:
|
||||
decompressed = True
|
||||
if db[0:6].upper() == b"SQLITE":
|
||||
with open(output, "wb") as f:
|
||||
f.write(db)
|
||||
return 0
|
||||
else:
|
||||
raise ValueError("The plaintext is not a SQLite database. Did you use the key to encrypt something...")
|
||||
|
||||
|
||||
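A minimal usage sketch for decrypt_backup as defined above; the file names are placeholders and a crypt14 backup is assumed:

with open("key", "rb") as key_file, open("msgstore.db.crypt14", "rb") as backup_file:
    result = decrypt_backup(backup_file.read(), key_file.read(), "msgstore.db", crypt=Crypt.CRYPT14)
if result != 0:
    print(f"Decryption failed with error code {result}")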
def contacts(db, data):
|
||||
# Get contacts
|
||||
c = db.cursor()
|
||||
c.execute("""SELECT count() FROM wa_contacts""")
|
||||
total_row_number = c.fetchone()[0]
|
||||
print(f"Processing contacts...({total_row_number})")
|
||||
|
||||
c.execute("""SELECT jid, COALESCE(display_name, wa_name) as display_name, status FROM wa_contacts; """)
|
||||
row = c.fetchone()
|
||||
while row is not None:
|
||||
data[row["jid"]] = ChatStore(Device.ANDROID, row["display_name"])
|
||||
if row["status"] is not None:
|
||||
data[row["jid"]].status = row["status"]
|
||||
row = c.fetchone()
|
||||
|
||||
|
||||
def messages(db, data, media_folder):
|
||||
# Get message history
|
||||
c = db.cursor()
|
||||
try:
|
||||
c.execute("""SELECT count() FROM messages""")
|
||||
except sqlite3.OperationalError:
|
||||
c.execute("""SELECT count() FROM message""")
|
||||
total_row_number = c.fetchone()[0]
|
||||
print(f"Processing messages...(0/{total_row_number})", end="\r")
|
||||
|
||||
phone_number_re = re.compile(r"[0-9]+@s.whatsapp.net")
|
||||
try:
|
||||
c.execute("""SELECT messages.key_remote_jid,
|
||||
messages._id,
|
||||
messages.key_from_me,
|
||||
messages.timestamp,
|
||||
messages.data,
|
||||
messages.status,
|
||||
messages.edit_version,
|
||||
messages.thumb_image,
|
||||
messages.remote_resource,
|
||||
CAST(messages.media_wa_type as INTEGER) as media_wa_type,
|
||||
messages.latitude,
|
||||
messages.longitude,
|
||||
messages_quotes.key_id as quoted,
|
||||
messages.key_id,
|
||||
messages_quotes.data as quoted_data,
|
||||
messages.media_caption,
|
||||
missed_call_logs.video_call,
|
||||
chat.subject as chat_subject,
|
||||
message_system.action_type,
|
||||
message_system_group.is_me_joined,
|
||||
jid_old.raw_string as old_jid,
|
||||
jid_new.raw_string as new_jid
|
||||
FROM messages
|
||||
LEFT JOIN messages_quotes
|
||||
ON messages.quoted_row_id = messages_quotes._id
|
||||
LEFT JOIN missed_call_logs
|
||||
ON messages._id = missed_call_logs.message_row_id
|
||||
INNER JOIN jid jid_global
|
||||
ON messages.key_remote_jid = jid_global.raw_string
|
||||
LEFT JOIN chat
|
||||
ON chat.jid_row_id = jid_global._id
|
||||
LEFT JOIN message_system
|
||||
ON message_system.message_row_id = messages._id
|
||||
LEFT JOIN message_system_group
|
||||
ON message_system_group.message_row_id = messages._id
|
||||
LEFT JOIN message_system_number_change
|
||||
ON message_system_number_change.message_row_id = messages._id
|
||||
LEFT JOIN jid jid_old
|
||||
ON jid_old._id = message_system_number_change.old_jid_row_id
|
||||
LEFT JOIN jid jid_new
|
||||
ON jid_new._id = message_system_number_change.new_jid_row_id
|
||||
WHERE messages.key_remote_jid <> '-1';"""
|
||||
)
|
||||
except sqlite3.OperationalError:
|
||||
try:
|
||||
c.execute("""SELECT jid_global.raw_string as key_remote_jid,
|
||||
message._id,
|
||||
message.from_me as key_from_me,
|
||||
message.timestamp,
|
||||
message.text_data as data,
|
||||
message.status,
|
||||
message_future.version as edit_version,
|
||||
message_thumbnail.thumbnail as thumb_image,
|
||||
message_media.file_path as remote_resource,
|
||||
message_location.latitude,
|
||||
message_location.longitude,
|
||||
message_quoted.key_id as quoted,
|
||||
message.key_id,
|
||||
message_quoted.text_data as quoted_data,
|
||||
message.message_type as media_wa_type,
|
||||
jid_group.raw_string as group_sender_jid,
|
||||
chat.subject as chat_subject,
|
||||
missed_call_logs.video_call,
|
||||
message.sender_jid_row_id,
|
||||
message_system.action_type,
|
||||
message_system_group.is_me_joined,
|
||||
jid_old.raw_string as old_jid,
|
||||
jid_new.raw_string as new_jid
|
||||
FROM message
|
||||
LEFT JOIN message_quoted
|
||||
ON message_quoted.message_row_id = message._id
|
||||
LEFT JOIN message_location
|
||||
ON message_location.message_row_id = message._id
|
||||
LEFT JOIN message_media
|
||||
ON message_media.message_row_id = message._id
|
||||
LEFT JOIN message_thumbnail
|
||||
ON message_thumbnail.message_row_id = message._id
|
||||
LEFT JOIN message_future
|
||||
ON message_future.message_row_id = message._id
|
||||
LEFT JOIN chat
|
||||
ON chat._id = message.chat_row_id
|
||||
INNER JOIN jid jid_global
|
||||
ON jid_global._id = chat.jid_row_id
|
||||
LEFT JOIN jid jid_group
|
||||
ON jid_group._id = message.sender_jid_row_id
|
||||
LEFT JOIN missed_call_logs
|
||||
ON message._id = missed_call_logs.message_row_id
|
||||
LEFT JOIN message_system
|
||||
ON message_system.message_row_id = message._id
|
||||
LEFT JOIN message_system_group
|
||||
ON message_system_group.message_row_id = message._id
|
||||
LEFT JOIN message_system_number_change
|
||||
ON message_system_number_change.message_row_id = message._id
|
||||
LEFT JOIN jid jid_old
|
||||
ON jid_old._id = message_system_number_change.old_jid_row_id
|
||||
LEFT JOIN jid jid_new
|
||||
ON jid_new._id = message_system_number_change.new_jid_row_id
|
||||
WHERE key_remote_jid <> '-1';"""
|
||||
)
|
||||
except Exception as e:
|
||||
raise e
|
||||
else:
|
||||
table_message = True
|
||||
else:
|
||||
table_message = False
|
||||
i = 0
|
||||
while True:
|
||||
try:
|
||||
content = c.fetchone()
|
||||
except sqlite3.OperationalError:
|
||||
continue
|
||||
else:
|
||||
break
|
||||
while content is not None:
|
||||
if content["key_remote_jid"] not in data:
|
||||
data[content["key_remote_jid"]] = ChatStore(Device.ANDROID, content["chat_subject"])
|
||||
if content["key_remote_jid"] is None:
|
||||
content = c.fetchone()
continue  # Skip rows without a chat JID
|
||||
if "sender_jid_row_id" in content:
|
||||
sender_jid_row_id = content["sender_jid_row_id"]
|
||||
else:
|
||||
sender_jid_row_id = None
|
||||
message = Message(
|
||||
from_me=not sender_jid_row_id and content["key_from_me"],
|
||||
timestamp=content["timestamp"],
|
||||
time=content["timestamp"],
|
||||
key_id=content["key_id"],
|
||||
)
|
||||
if isinstance(content["data"], bytes):
|
||||
message.data = ("The message is binary data and its base64 is "
|
||||
'<a href="https://gchq.github.io/CyberChef/#recipe=From_Base64'
|
||||
"('A-Za-z0-9%2B/%3D',true,false)Text_Encoding_Brute_Force"
|
||||
f"""('Decode')&input={b64encode(b64encode(content["data"])).decode()}">""")
|
||||
message.data += b64encode(content["data"]).decode("utf-8") + "</a>"
|
||||
message.safe = message.meta = True
|
||||
data[content["key_remote_jid"]].add_message(content["_id"], message)
|
||||
i += 1
|
||||
content = c.fetchone()
|
||||
continue
|
||||
if "-" in content["key_remote_jid"] and content["key_from_me"] == 0:
|
||||
name = fallback = None
|
||||
if table_message:
|
||||
if content["sender_jid_row_id"] > 0:
|
||||
_jid = content["group_sender_jid"]
|
||||
if _jid in data:
|
||||
name = data[_jid].name
|
||||
if "@" in _jid:
|
||||
fallback = _jid.split('@')[0]
|
||||
else:
|
||||
if content["remote_resource"] is not None:
|
||||
if content["remote_resource"] in data:
|
||||
name = data[content["remote_resource"]].name
|
||||
if "@" in content["remote_resource"]:
|
||||
fallback = content["remote_resource"].split('@')[0]
|
||||
|
||||
message.sender = name or fallback
|
||||
else:
|
||||
message.sender = None
|
||||
|
||||
if content["quoted"] is not None:
|
||||
message.reply = content["quoted"]
|
||||
if content["quoted_data"] is not None and len(content["quoted_data"]) > 200:
|
||||
message.quoted_data = content["quoted_data"][:201] + "..."
|
||||
else:
|
||||
message.quoted_data = content["quoted_data"]
|
||||
else:
|
||||
message.reply = None
|
||||
|
||||
if not table_message and content["media_caption"] is not None:
|
||||
# Old schema
|
||||
message.caption = content["media_caption"]
|
||||
elif table_message and content["media_wa_type"] == 1 and content["data"] is not None:
|
||||
# New schema
|
||||
message.caption = content["data"]
|
||||
else:
|
||||
message.caption = None
|
||||
|
||||
if content["status"] == 6: # 6 = Metadata, otherwise assume a message
|
||||
message.meta = True
|
||||
name = fallback = None
|
||||
if table_message:
|
||||
if content["sender_jid_row_id"] > 0:
|
||||
_jid = content["group_sender_jid"]
|
||||
if _jid in data:
|
||||
name = data[_jid].name
|
||||
if "@" in _jid:
|
||||
fallback = _jid.split('@')[0]
|
||||
else:
|
||||
name = "You"
|
||||
else:
|
||||
_jid = content["remote_resource"]
|
||||
if _jid is not None:
|
||||
if _jid in data:
|
||||
name = data[_jid].name
|
||||
if "@" in _jid:
|
||||
fallback = _jid.split('@')[0]
|
||||
else:
|
||||
name = "You"
|
||||
message.data = determine_metadata(content, name or fallback)
|
||||
if isinstance(message.data, str) and "<br>" in message.data:
|
||||
message.safe = True
|
||||
if message.data is None:
|
||||
if content["video_call"] is not None: # Missed call
|
||||
message.meta = True
|
||||
if content["video_call"] == 1:
|
||||
message.data = "A video call was missed"
|
||||
elif content["video_call"] == 0:
|
||||
message.data = "A voice call was missed"
|
||||
elif content["data"] is None and content["thumb_image"] is None:
|
||||
message.meta = True
|
||||
message.data = None
|
||||
else:
|
||||
# Real message
|
||||
if content["media_wa_type"] == 20: # Sticker is a message
|
||||
message.sticker = True
|
||||
if content["key_from_me"] == 1:
|
||||
if content["status"] == 5 and content["edit_version"] == 7 or table_message and content["media_wa_type"] == 15:
|
||||
msg = "Message deleted"
|
||||
message.meta = True
|
||||
else:
|
||||
if content["media_wa_type"] == 5:
|
||||
msg = f"Location shared: {content['latitude'], content['longitude']}"
|
||||
message.meta = True
|
||||
else:
|
||||
msg = content["data"]
|
||||
if msg is not None:
|
||||
if "\r\n" in msg:
|
||||
msg = msg.replace("\r\n", "<br>")
|
||||
if "\n" in msg:
|
||||
msg = msg.replace("\n", "<br>")
|
||||
else:
|
||||
if content["status"] == 0 and content["edit_version"] == 7 or table_message and content["media_wa_type"] == 15:
|
||||
msg = "Message deleted"
|
||||
message.meta = True
|
||||
else:
|
||||
if content["media_wa_type"] == 5:
|
||||
msg = f"Location shared: {content['latitude'], content['longitude']}"
|
||||
message.meta = True
|
||||
else:
|
||||
msg = content["data"]
|
||||
if msg is not None:
|
||||
if "\r\n" in msg:
|
||||
msg = msg.replace("\r\n", "<br>")
|
||||
if "\n" in msg:
|
||||
msg = msg.replace("\n", "<br>")
|
||||
message.data = msg
|
||||
|
||||
data[content["key_remote_jid"]].add_message(content["_id"], message)
|
||||
i += 1
|
||||
if i % 1000 == 0:
|
||||
print(f"Processing messages...({i}/{total_row_number})", end="\r")
|
||||
while True:
|
||||
try:
|
||||
content = c.fetchone()
|
||||
except sqlite3.OperationalError:
|
||||
continue
|
||||
else:
|
||||
break
|
||||
print(f"Processing messages...({total_row_number}/{total_row_number})", end="\r")
|
||||
|
||||
|
||||
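The try/except above probes the legacy messages table first and falls back to the newer message table; the same probe in isolation, assuming an already-open sqlite3 connection:

import sqlite3

def count_android_messages(db):
    # Legacy databases expose a "messages" table; newer ones use "message".
    c = db.cursor()
    try:
        c.execute("SELECT count() FROM messages")
    except sqlite3.OperationalError:
        c.execute("SELECT count() FROM message")
    return c.fetchone()[0]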
def media(db, data, media_folder):
|
||||
# Get media
|
||||
c = db.cursor()
|
||||
c.execute("""SELECT count() FROM message_media""")
|
||||
total_row_number = c.fetchone()[0]
|
||||
print(f"\nProcessing media...(0/{total_row_number})", end="\r")
|
||||
i = 0
|
||||
try:
|
||||
c.execute("""SELECT messages.key_remote_jid,
|
||||
message_row_id,
|
||||
file_path,
|
||||
message_url,
|
||||
mime_type,
|
||||
media_key,
|
||||
file_hash,
|
||||
thumbnail
|
||||
FROM message_media
|
||||
INNER JOIN messages
|
||||
ON message_media.message_row_id = messages._id
|
||||
LEFT JOIN media_hash_thumbnail
|
||||
ON message_media.file_hash = media_hash_thumbnail.media_hash
|
||||
ORDER BY messages.key_remote_jid ASC"""
|
||||
)
|
||||
except sqlite3.OperationalError:
|
||||
c.execute("""SELECT jid.raw_string as key_remote_jid,
|
||||
message_row_id,
|
||||
file_path,
|
||||
message_url,
|
||||
mime_type,
|
||||
media_key,
|
||||
file_hash,
|
||||
thumbnail
|
||||
FROM message_media
|
||||
INNER JOIN message
|
||||
ON message_media.message_row_id = message._id
|
||||
LEFT JOIN chat
|
||||
ON chat._id = message.chat_row_id
|
||||
INNER JOIN jid
|
||||
ON jid._id = chat.jid_row_id
|
||||
LEFT JOIN media_hash_thumbnail
|
||||
ON message_media.file_hash = media_hash_thumbnail.media_hash
|
||||
ORDER BY jid.raw_string ASC"""
|
||||
)
|
||||
content = c.fetchone()
|
||||
mime = MimeTypes()
|
||||
if not os.path.isdir(f"{media_folder}/thumbnails"):
|
||||
Path(f"{media_folder}/thumbnails").mkdir(parents=True, exist_ok=True)
|
||||
while content is not None:
|
||||
file_path = f"{media_folder}/{content['file_path']}"
|
||||
message = data[content["key_remote_jid"]].messages[content["message_row_id"]]
|
||||
message.media = True
|
||||
if os.path.isfile(file_path):
|
||||
message.data = file_path
|
||||
if content["mime_type"] is None:
|
||||
guess = mime.guess_type(file_path)[0]
|
||||
if guess is not None:
|
||||
message.mime = guess
|
||||
else:
|
||||
message.mime = "application/octet-stream"
|
||||
else:
|
||||
message.mime = content["mime_type"]
|
||||
else:
|
||||
if False:  # Disabled: re-downloading missing media from message_url is not supported
|
||||
try:
|
||||
r = requests.get(content["message_url"])
|
||||
if r.status_code != 200:
|
||||
raise RuntimeError()
|
||||
except:
|
||||
message.data = "The media is missing"
|
||||
message.mime = "media"
|
||||
message.meta = True
|
||||
else:
|
||||
...
|
||||
message.data = "The media is missing"
|
||||
message.mime = "media"
|
||||
message.meta = True
|
||||
if content["thumbnail"] is not None:
|
||||
thumb_path = f"{media_folder}/thumbnails/{b64decode(content['file_hash']).hex()}.png"
|
||||
if not os.path.isfile(thumb_path):
|
||||
with open(thumb_path, "wb") as f:
|
||||
f.write(content["thumbnail"])
|
||||
message.thumb = thumb_path
|
||||
i += 1
|
||||
if i % 100 == 0:
|
||||
print(f"Processing media...({i}/{total_row_number})", end="\r")
|
||||
content = c.fetchone()
|
||||
print(
|
||||
f"Processing media...({total_row_number}/{total_row_number})", end="\r")
|
||||
|
||||
|
||||
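The thumbnail handling above writes each blob once, named by the hex of the base64-decoded file hash; a compact sketch of that step (the thumbnails directory is assumed to exist, as it does in media above):

import os
from base64 import b64decode

def write_thumbnail(media_folder, file_hash_b64, blob):
    # One PNG per media hash; skip if another message already wrote it.
    thumb_path = f"{media_folder}/thumbnails/{b64decode(file_hash_b64).hex()}.png"
    if not os.path.isfile(thumb_path):
        with open(thumb_path, "wb") as f:
            f.write(blob)
    return thumb_path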
def vcard(db, data):
|
||||
c = db.cursor()
|
||||
try:
|
||||
c.execute("""SELECT message_row_id,
|
||||
messages.key_remote_jid,
|
||||
vcard,
|
||||
messages.media_name
|
||||
FROM messages_vcards
|
||||
INNER JOIN messages
|
||||
ON messages_vcards.message_row_id = messages._id
|
||||
ORDER BY messages.key_remote_jid ASC;"""
|
||||
)
|
||||
except sqlite3.OperationalError:
|
||||
c.execute("""SELECT message_row_id,
|
||||
jid.raw_string as key_remote_jid,
|
||||
vcard,
|
||||
message.text_data as media_name
|
||||
FROM message_vcard
|
||||
INNER JOIN message
|
||||
ON message_vcard.message_row_id = message._id
|
||||
LEFT JOIN chat
|
||||
ON chat._id = message.chat_row_id
|
||||
INNER JOIN jid
|
||||
ON jid._id = chat.jid_row_id
|
||||
ORDER BY message.chat_row_id ASC;"""
|
||||
)
|
||||
|
||||
rows = c.fetchall()
|
||||
total_row_number = len(rows)
|
||||
print(f"\nProcessing vCards...(0/{total_row_number})", end="\r")
|
||||
base = "WhatsApp/vCards"
|
||||
if not os.path.isdir(base):
|
||||
Path(base).mkdir(parents=True, exist_ok=True)
|
||||
for index, row in enumerate(rows):
|
||||
media_name = row["media_name"] if row["media_name"] is not None else ""
|
||||
file_name = "".join(x for x in media_name if x.isalnum())
|
||||
file_name = file_name.encode('utf-8')[:230].decode('utf-8', 'ignore')
|
||||
file_path = os.path.join(base, f"{file_name}.vcf")
|
||||
if not os.path.isfile(file_path):
|
||||
with open(file_path, "w", encoding="utf-8") as f:
|
||||
f.write(row["vcard"])
|
||||
message = data[row["key_remote_jid"]].messages[row["message_row_id"]]
|
||||
message.data = media_name + \
|
||||
"The vCard file cannot be displayed here, " \
|
||||
f"however it should be located at {file_path}"
|
||||
message.mime = "text/x-vcard"
|
||||
message.meta = True
|
||||
print(f"Processing vCards...({index + 1}/{total_row_number})", end="\r")
|
||||
|
||||
|
||||
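The vCard export above keeps only alphanumeric characters and trims the name to a UTF-8-safe 230 bytes before writing; as a standalone sketch:

def safe_vcard_filename(media_name):
    # Keep alphanumerics, then trim to at most 230 bytes of UTF-8 without
    # splitting a multi-byte character.
    name = "".join(x for x in (media_name or "") if x.isalnum())
    return name.encode("utf-8")[:230].decode("utf-8", "ignore") + ".vcf"

print(safe_vcard_filename("John Doe (work)"))  # JohnDoework.vcf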
def calls(db, data):
|
||||
c = db.cursor()
|
||||
c.execute("""SELECT count() FROM call_log""")
|
||||
total_row_number = c.fetchone()[0]
|
||||
if total_row_number == 0:
|
||||
return
|
||||
print(f"\nProcessing calls...({total_row_number})", end="\r")
|
||||
c.execute("""SELECT call_log._id,
|
||||
jid.raw_string,
|
||||
from_me,
|
||||
call_id,
|
||||
timestamp,
|
||||
video_call,
|
||||
duration,
|
||||
call_result,
|
||||
bytes_transferred
|
||||
FROM call_log
|
||||
INNER JOIN jid
|
||||
ON call_log.jid_row_id = jid._id"""
|
||||
)
|
||||
chat = ChatStore(Device.ANDROID, "WhatsApp Calls")
|
||||
content = c.fetchone()
|
||||
while content is not None:
|
||||
call = Message(
|
||||
from_me=content["from_me"],
|
||||
timestamp=content["timestamp"],
|
||||
time=content["timestamp"],
|
||||
key_id=content["call_id"],
|
||||
)
|
||||
_jid = content["raw_string"]
|
||||
if _jid in data:
|
||||
name = data[_jid].name
|
||||
fallback = _jid.split('@')[0] if "@" in _jid else None
|
||||
call.sender = name or fallback
|
||||
|
||||
call.meta = True
|
||||
call.data = (
|
||||
f"A {'video' if content['video_call'] else 'voice'} "
|
||||
f"call {'to' if call.from_me else 'from'} "
|
||||
f"{name or fallback} was "
|
||||
)
|
||||
if content['call_result'] == 2:
|
||||
call.data += "not answered." if call.from_me else "missed."
|
||||
elif content['call_result'] == 3:
|
||||
call.data += "unavailable."
|
||||
elif content['call_result'] == 5:
|
||||
call.data += (
|
||||
f"initiated and lasted for {content['duration']} second(s) "
|
||||
f"with {content['bytes_transferred']} bytes transferred."
|
||||
)
|
||||
chat.add_message(content["_id"], call)
|
||||
content = c.fetchone()
|
||||
data["000000000000000"] = chat
|
||||
|
||||
|
||||
def create_html(
|
||||
data,
|
||||
output_folder,
|
||||
template=None,
|
||||
embedded=False,
|
||||
offline_static=False,
|
||||
maximum_size=None,
|
||||
no_avatar=False
|
||||
):
|
||||
template = setup_template(template, no_avatar)
|
||||
|
||||
total_row_number = len(data)
|
||||
print(f"\nGenerating chats...(0/{total_row_number})", end="\r")
|
||||
|
||||
if not os.path.isdir(output_folder):
|
||||
os.mkdir(output_folder)
|
||||
|
||||
w3css = get_status_location(output_folder, offline_static)
|
||||
|
||||
for current, contact in enumerate(data):
|
||||
chat = data[contact]
|
||||
if len(chat.messages) == 0:
|
||||
continue
|
||||
safe_file_name, name = get_file_name(contact, chat)
|
||||
|
||||
if maximum_size is not None:
|
||||
current_size = 0
|
||||
current_page = 1
|
||||
render_box = []
|
||||
if maximum_size == 0:
|
||||
maximum_size = MAX_SIZE
|
||||
last_msg = chat.get_last_message().key_id
|
||||
for message in chat.get_messages():
|
||||
if message.data is not None and not message.meta and not message.media:
|
||||
current_size += len(message.data) + ROW_SIZE
|
||||
else:
|
||||
current_size += ROW_SIZE + 100 # Assume media and meta HTML are 100 bytes
|
||||
if current_size > maximum_size:
|
||||
output_file_name = f"{output_folder}/{safe_file_name}-{current_page}.html"
|
||||
rendering(
|
||||
output_file_name,
|
||||
template,
|
||||
name,
|
||||
render_box,
|
||||
contact,
|
||||
w3css,
|
||||
f"{safe_file_name}-{current_page + 1}.html",
|
||||
chat
|
||||
)
|
||||
render_box = [message]
|
||||
current_size = 0
|
||||
current_page += 1
|
||||
else:
|
||||
if message.key_id == last_msg:
|
||||
if current_page == 1:
|
||||
output_file_name = f"{output_folder}/{safe_file_name}.html"
|
||||
else:
|
||||
output_file_name = f"{output_folder}/{safe_file_name}-{current_page}.html"
|
||||
rendering(
|
||||
output_file_name,
|
||||
template,
|
||||
name,
|
||||
render_box,
|
||||
contact,
|
||||
w3css,
|
||||
False,
|
||||
chat
|
||||
)
|
||||
else:
|
||||
render_box.append(message)
|
||||
else:
|
||||
output_file_name = f"{output_folder}/{safe_file_name}.html"
|
||||
rendering(
|
||||
output_file_name,
|
||||
template,
|
||||
name,
|
||||
chat.get_messages(),
|
||||
contact,
|
||||
w3css,
|
||||
False,
|
||||
chat
|
||||
)
|
||||
if current % 10 == 0:
|
||||
print(f"Generating chats...({current}/{total_row_number})", end="\r")
|
||||
|
||||
print(f"Generating chats...({total_row_number}/{total_row_number})", end="\r")
|
||||
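The size-based pagination above flushes a page whenever an estimated byte count exceeds maximum_size; the per-message estimate can be summarized as below (row_size stands in for the ROW_SIZE constant, and the 100-byte media/meta overhead mirrors the assumption above):

def estimate_row_size(message, row_size=100, media_overhead=100):
    # Plain text rows cost their length plus a fixed per-row overhead;
    # media and meta rows are assumed to render to roughly 100 bytes of HTML.
    if message.data is not None and not message.meta and not message.media:
        return len(message.data) + row_size
    return row_size + media_overhead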
@@ -1,92 +0,0 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import os
|
||||
from datetime import datetime
|
||||
from mimetypes import MimeTypes
|
||||
from Whatsapp_Chat_Exporter.data_model import ChatStore, Message
|
||||
from Whatsapp_Chat_Exporter.utility import Device
|
||||
|
||||
|
||||
def messages(path, data, assume_first_as_me=False):
|
||||
"""Extracts messages from the exported file"""
|
||||
with open(path, "r", encoding="utf8") as file:
|
||||
you = ""
|
||||
data["ExportedChat"] = ChatStore(Device.EXPORTED)
|
||||
chat = data["ExportedChat"]
|
||||
total_row_number = len(file.readlines())
|
||||
file.seek(0)
|
||||
for index, line in enumerate(file):
|
||||
if len(line.split(" - ")) > 1:
|
||||
time = line.split(" - ")[0]
|
||||
if ":" not in line.split(time)[1]:
|
||||
msg.data = line.split(time)[1][3:]
|
||||
msg.meta = True
|
||||
else:
|
||||
name = line.split(time)[1].split(":")[0]
|
||||
message = line.split(time)[1].split(name + ":")[1].strip()
|
||||
name = name[3:]
|
||||
if you == "":
|
||||
if chat.name is None:
|
||||
if not assume_first_as_me:
|
||||
while True:
|
||||
ans = input(f"Is '{name}' you? (Y/N)").lower()
|
||||
if ans == "y":
|
||||
you = name
|
||||
break
|
||||
elif ans == "n":
|
||||
chat.name = name
|
||||
break
|
||||
else:
|
||||
you = name
|
||||
else:
|
||||
if name != chat.name:
|
||||
you = name
|
||||
elif chat.name is None:
|
||||
if name != you:
|
||||
chat.name = name
|
||||
msg = Message(
|
||||
you == name,
|
||||
datetime.strptime(time, "%d/%m/%Y, %H:%M").timestamp(),
|
||||
time.split(", ")[1].strip(),
|
||||
index
|
||||
)
|
||||
if "<Media omitted>" in message:
|
||||
msg.data = "The media is omitted in the chat"
|
||||
msg.mime = "media"
|
||||
msg.meta = True
|
||||
elif "(file attached)" in message:
|
||||
mime = MimeTypes()
|
||||
msg.media = True
|
||||
file_path = os.path.join(os.path.dirname(path), message.split("(file attached)")[0].strip())
|
||||
if os.path.isfile(file_path):
|
||||
msg.data = file_path
|
||||
guess = mime.guess_type(file_path)[0]
|
||||
if guess is not None:
|
||||
msg.mime = guess
|
||||
else:
|
||||
msg.mime = "application/octet-stream"
|
||||
else:
|
||||
msg.data = "The media is missing"
|
||||
msg.mime = "media"
|
||||
msg.meta = True
|
||||
else:
|
||||
msg.data = message
|
||||
if "\r\n" in message:
|
||||
msg.data = message.replace("\r\n", "<br>")
|
||||
if "\n" in message:
|
||||
msg.data = message.replace("\n", "<br>")
|
||||
chat.add_message(index, msg)
|
||||
else:
|
||||
lookback = index - 1
|
||||
while lookback not in chat.messages:
|
||||
lookback -= 1
|
||||
msg = chat.messages[lookback]
|
||||
if msg.media:
|
||||
msg.caption = line.strip()
|
||||
else:
|
||||
msg.data += "<br>" + line.strip()
|
||||
|
||||
if index % 1000 == 0:
|
||||
print(f"Processing messages & media...({index}/{total_row_number})", end="\r")
|
||||
print(f"Processing messages & media...({total_row_number}/{total_row_number})", end="\r")
|
||||
return data
|
||||
@@ -1,280 +0,0 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import os
|
||||
from glob import glob
|
||||
from pathlib import Path
|
||||
from mimetypes import MimeTypes
|
||||
from Whatsapp_Chat_Exporter.data_model import ChatStore, Message
|
||||
from Whatsapp_Chat_Exporter.utility import APPLE_TIME, Device
|
||||
|
||||
|
||||
def contacts(db, data):
|
||||
c = db.cursor()
|
||||
# Only the contact status text is fetched here
|
||||
c.execute("""SELECT count() FROM ZWAADDRESSBOOKCONTACT WHERE ZABOUTTEXT IS NOT NULL""")
|
||||
total_row_number = c.fetchone()[0]
|
||||
print(f"Pre-processing contacts...({total_row_number})")
|
||||
c.execute("""SELECT ZWHATSAPPID, ZABOUTTEXT FROM ZWAADDRESSBOOKCONTACT WHERE ZABOUTTEXT IS NOT NULL""")
|
||||
content = c.fetchone()
|
||||
while content is not None:
|
||||
if not content["ZWHATSAPPID"].endswith("@s.whatsapp.net"):
|
||||
_id = content["ZWHATSAPPID"] + "@s.whatsapp.net"
|
||||
data[_id] = ChatStore(Device.IOS)
|
||||
data[_id].status = content["ZABOUTTEXT"]
|
||||
content = c.fetchone()
|
||||
|
||||
|
||||
def messages(db, data, media_folder):
|
||||
c = db.cursor()
|
||||
# Get contacts
|
||||
c.execute("""SELECT count() FROM ZWACHATSESSION""")
|
||||
total_row_number = c.fetchone()[0]
|
||||
print(f"Processing contacts...({total_row_number})")
|
||||
|
||||
c.execute(
|
||||
"""SELECT ZCONTACTJID,
|
||||
ZPARTNERNAME,
|
||||
ZPUSHNAME
|
||||
FROM ZWACHATSESSION
|
||||
LEFT JOIN ZWAPROFILEPUSHNAME
|
||||
ON ZWACHATSESSION.ZCONTACTJID = ZWAPROFILEPUSHNAME.ZJID;"""
|
||||
)
|
||||
content = c.fetchone()
|
||||
while content is not None:
|
||||
is_phone = content["ZPARTNERNAME"].replace("+", "").replace(" ", "").isdigit()
|
||||
if content["ZPUSHNAME"] is None or (content["ZPUSHNAME"] and not is_phone):
|
||||
contact_name = content["ZPARTNERNAME"]
|
||||
else:
|
||||
contact_name = content["ZPUSHNAME"]
|
||||
contact_id = content["ZCONTACTJID"]
|
||||
if contact_id not in data:
|
||||
data[contact_id] = ChatStore(Device.IOS, contact_name, media_folder)
|
||||
else:
|
||||
data[contact_id].name = contact_name
|
||||
data[contact_id].my_avatar = os.path.join(media_folder, "Media/Profile/Photo.jpg")
|
||||
path = f'{media_folder}/Media/Profile/{contact_id.split("@")[0]}'
|
||||
avatars = glob(f"{path}*")
|
||||
if 0 < len(avatars) <= 1:
|
||||
data[contact_id].their_avatar = avatars[0]
|
||||
else:
|
||||
for avatar in avatars:
|
||||
if avatar.endswith(".thumb") and data[content["ZCONTACTJID"]].their_avatar_thumb is None:
|
||||
data[contact_id].their_avatar_thumb = avatar
|
||||
elif avatar.endswith(".jpg") and data[content["ZCONTACTJID"]].their_avatar is None:
|
||||
data[contact_id].their_avatar = avatar
|
||||
content = c.fetchone()
|
||||
|
||||
# Get message history
|
||||
c.execute("""SELECT count() FROM ZWAMESSAGE""")
|
||||
total_row_number = c.fetchone()[0]
|
||||
print(f"Processing messages...(0/{total_row_number})", end="\r")
|
||||
|
||||
c.execute("""SELECT COALESCE(ZFROMJID, ZTOJID) as _id,
|
||||
ZWAMESSAGE.Z_PK,
|
||||
ZISFROMME,
|
||||
ZMESSAGEDATE,
|
||||
ZTEXT,
|
||||
ZMESSAGETYPE,
|
||||
ZWAGROUPMEMBER.ZMEMBERJID,
|
||||
ZMETADATA,
|
||||
ZSTANZAID
|
||||
FROM ZWAMESSAGE
|
||||
LEFT JOIN ZWAGROUPMEMBER
|
||||
ON ZWAMESSAGE.ZGROUPMEMBER = ZWAGROUPMEMBER.Z_PK
|
||||
LEFT JOIN ZWAMEDIAITEM
|
||||
ON ZWAMESSAGE.Z_PK = ZWAMEDIAITEM.ZMESSAGE;""")
|
||||
i = 0
|
||||
content = c.fetchone()
|
||||
while content is not None:
|
||||
_id = content["_id"]
|
||||
Z_PK = content["Z_PK"]
|
||||
if _id not in data:
|
||||
data[_id] = ChatStore(Device.IOS)
|
||||
path = f'{media_folder}/Media/Profile/{_id.split("@")[0]}'
|
||||
avatars = glob(f"{path}*")
|
||||
if 0 < len(avatars) <= 1:
|
||||
data[_id].their_avatar = avatars[0]
|
||||
else:
|
||||
for avatar in avatars:
|
||||
if avatar.endswith(".thumb"):
|
||||
data[_id].their_avatar_thumb = avatar
|
||||
elif avatar.endswith(".jpg"):
|
||||
data[_id].their_avatar = avatar
|
||||
ts = APPLE_TIME + content["ZMESSAGEDATE"]
|
||||
message = Message(
|
||||
from_me=content["ZISFROMME"],
|
||||
timestamp=ts,
|
||||
time=ts, # TODO: Could be bug
|
||||
key_id=content["ZSTANZAID"][:17],
|
||||
)
|
||||
invalid = False
|
||||
if "-" in _id and content["ZISFROMME"] == 0:
|
||||
name = None
|
||||
if content["ZMEMBERJID"] is not None:
|
||||
if content["ZMEMBERJID"] in data:
|
||||
name = data[content["ZMEMBERJID"]].name
|
||||
if "@" in content["ZMEMBERJID"]:
|
||||
fallback = content["ZMEMBERJID"].split('@')[0]
|
||||
else:
|
||||
fallback = None
|
||||
else:
|
||||
fallback = None
|
||||
message.sender = name or fallback
|
||||
else:
|
||||
message.sender = None
|
||||
if content["ZMESSAGETYPE"] == 6:
|
||||
# Metadata
|
||||
if "-" in _id:
|
||||
# Group
|
||||
if content["ZTEXT"] is not None:
|
||||
# Changed name
|
||||
try:
|
||||
int(content["ZTEXT"])
|
||||
except ValueError:
|
||||
msg = f"The group name changed to {content['ZTEXT']}"
|
||||
message.data = msg
|
||||
message.meta = True
|
||||
else:
|
||||
invalid = True
|
||||
else:
|
||||
message.data = None
|
||||
else:
|
||||
message.data = None
|
||||
else:
|
||||
# real message
|
||||
if content["ZMETADATA"] is not None and content["ZMETADATA"].startswith(b"\x2a\x14"):
|
||||
quoted = content["ZMETADATA"][2:19]
|
||||
message.reply = quoted.decode()
|
||||
message.quoted_data = None # TODO
|
||||
if content["ZMESSAGETYPE"] == 15: # Sticker
|
||||
message.sticker = True
|
||||
|
||||
if content["ZISFROMME"] == 1:
|
||||
if content["ZMESSAGETYPE"] == 14:
|
||||
msg = "Message deleted"
|
||||
message.meta = True
|
||||
else:
|
||||
msg = content["ZTEXT"]
|
||||
if msg is not None:
|
||||
if "\r\n" in msg:
|
||||
msg = msg.replace("\r\n", "<br>")
|
||||
if "\n" in msg:
|
||||
msg = msg.replace("\n", "<br>")
|
||||
else:
|
||||
if content["ZMESSAGETYPE"] == 14:
|
||||
msg = "Message deleted"
|
||||
message.meta = True
|
||||
else:
|
||||
msg = content["ZTEXT"]
|
||||
if msg is not None:
|
||||
if "\r\n" in msg:
|
||||
msg = msg.replace("\r\n", "<br>")
|
||||
if "\n" in msg:
|
||||
msg = msg.replace("\n", "<br>")
|
||||
message.data = msg
|
||||
if not invalid:
|
||||
data[_id].add_message(Z_PK, message)
|
||||
i += 1
|
||||
if i % 1000 == 0:
|
||||
print(f"Processing messages...({i}/{total_row_number})", end="\r")
|
||||
content = c.fetchone()
|
||||
print(
|
||||
f"Processing messages...({total_row_number}/{total_row_number})", end="\r")
|
||||
|
||||
|
||||
def media(db, data, media_folder):
|
||||
c = db.cursor()
|
||||
# Get media
|
||||
c.execute("""SELECT count() FROM ZWAMEDIAITEM""")
|
||||
total_row_number = c.fetchone()[0]
|
||||
print(f"\nProcessing media...(0/{total_row_number})", end="\r")
|
||||
i = 0
|
||||
c.execute("""SELECT COALESCE(ZWAMESSAGE.ZFROMJID, ZWAMESSAGE.ZTOJID) as _id,
|
||||
ZMESSAGE,
|
||||
ZMEDIALOCALPATH,
|
||||
ZMEDIAURL,
|
||||
ZVCARDSTRING,
|
||||
ZMEDIAKEY,
|
||||
ZTITLE
|
||||
FROM ZWAMEDIAITEM
|
||||
INNER JOIN ZWAMESSAGE
|
||||
ON ZWAMEDIAITEM.ZMESSAGE = ZWAMESSAGE.Z_PK
|
||||
WHERE ZMEDIALOCALPATH IS NOT NULL
|
||||
ORDER BY _id ASC""")
|
||||
content = c.fetchone()
|
||||
mime = MimeTypes()
|
||||
while content is not None:
|
||||
file_path = f"{media_folder}/Message/{content['ZMEDIALOCALPATH']}"
|
||||
_id = content["_id"]
|
||||
ZMESSAGE = content["ZMESSAGE"]
|
||||
message = data[_id].messages[ZMESSAGE]
|
||||
message.media = True
|
||||
if os.path.isfile(file_path):
|
||||
message.data = file_path
|
||||
if content["ZVCARDSTRING"] is None:
|
||||
guess = mime.guess_type(file_path)[0]
|
||||
if guess is not None:
|
||||
message.mime = guess
|
||||
else:
|
||||
message.mime = "application/octet-stream"
|
||||
else:
|
||||
message.mime = content["ZVCARDSTRING"]
|
||||
else:
|
||||
if False: # Block execution
|
||||
try:
|
||||
r = requests.get(content["ZMEDIAURL"])
|
||||
if r.status_code != 200:
|
||||
raise RuntimeError()
|
||||
except:
|
||||
message.data = "The media is missing"
|
||||
message.mime = "media"
|
||||
message.meta = True
|
||||
else:
|
||||
...
|
||||
message.data = "The media is missing"
|
||||
message.mime = "media"
|
||||
message.meta = True
|
||||
if content["ZTITLE"] is not None:
|
||||
message.caption = content["ZTITLE"]
|
||||
i += 1
|
||||
if i % 100 == 0:
|
||||
print(f"Processing media...({i}/{total_row_number})", end="\r")
|
||||
content = c.fetchone()
|
||||
print(
|
||||
f"Processing media...({total_row_number}/{total_row_number})", end="\r")
|
||||
|
||||
|
||||
def vcard(db, data):
|
||||
c = db.cursor()
|
||||
c.execute("""SELECT DISTINCT ZWAVCARDMENTION.ZMEDIAITEM,
|
||||
ZWAMEDIAITEM.ZMESSAGE,
|
||||
COALESCE(ZWAMESSAGE.ZFROMJID,
|
||||
ZWAMESSAGE.ZTOJID) as _id,
|
||||
ZVCARDNAME,
|
||||
ZVCARDSTRING
|
||||
FROM ZWAVCARDMENTION
|
||||
INNER JOIN ZWAMEDIAITEM
|
||||
ON ZWAVCARDMENTION.ZMEDIAITEM = ZWAMEDIAITEM.Z_PK
|
||||
INNER JOIN ZWAMESSAGE
|
||||
ON ZWAMEDIAITEM.ZMESSAGE = ZWAMESSAGE.Z_PK""")
|
||||
contents = c.fetchall()
|
||||
total_row_number = len(contents)
|
||||
print(f"\nProcessing vCards...(0/{total_row_number})", end="\r")
|
||||
base = "AppDomainGroup-group.net.whatsapp.WhatsApp.shared/Message/vCards"
|
||||
if not os.path.isdir(base):
|
||||
Path(base).mkdir(parents=True, exist_ok=True)
|
||||
for index, content in enumerate(contents):
|
||||
file_name = "".join(x for x in content["ZVCARDNAME"] if x.isalnum())
|
||||
file_name = file_name.encode('utf-8')[:230].decode('utf-8', 'ignore')
|
||||
file_path = os.path.join(base, f"{file_name}.vcf")
|
||||
if not os.path.isfile(file_path):
|
||||
with open(file_path, "w", encoding="utf-8") as f:
|
||||
f.write(content["ZVCARDSTRING"])
|
||||
message = data[content["_id"]].messages[content["ZMESSAGE"]]
|
||||
message.data = content["ZVCARDNAME"] + \
|
||||
"The vCard file cannot be displayed here, " \
|
||||
f"however it should be located at {file_path}"
|
||||
message.mime = "text/x-vcard"
|
||||
message.media = True
|
||||
message.meta = True
|
||||
print(f"Processing vCards...({index + 1}/{total_row_number})", end="\r")
|
||||
@@ -1,126 +0,0 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import shutil
|
||||
import sqlite3
|
||||
import os
|
||||
import time
|
||||
import getpass
|
||||
import threading
|
||||
try:
|
||||
from iphone_backup_decrypt import EncryptedBackup, RelativePath
|
||||
from iphone_backup_decrypt import FailedToDecryptError, Domain
|
||||
except ModuleNotFoundError:
|
||||
support_encrypted = False
|
||||
else:
|
||||
support_encrypted = True
|
||||
|
||||
|
||||
def extract_encrypted(base_dir, password):
|
||||
backup = EncryptedBackup(backup_directory=base_dir, passphrase=password, cleanup=False, check_same_thread=False)
|
||||
print("Decrypting WhatsApp database...")
|
||||
try:
|
||||
backup.extract_file(relative_path=RelativePath.WHATSAPP_MESSAGES,
|
||||
output_filename="7c7fba66680ef796b916b067077cc246adacf01d")
|
||||
backup.extract_file(relative_path=RelativePath.WHATSAPP_CONTACTS,
|
||||
output_filename="b8548dc30aa1030df0ce18ef08b882cf7ab5212f")
|
||||
except FailedToDecryptError:
|
||||
print("Failed to decrypt backup: incorrect password?")
|
||||
exit()
|
||||
extract_thread = threading.Thread(
|
||||
target=backup.extract_files_by_domain,
|
||||
args=(Domain.WHATSAPP, Domain.WHATSAPP)
|
||||
)
|
||||
extract_thread.daemon = True
|
||||
extract_thread.start()
|
||||
dot = 0
|
||||
while extract_thread.is_alive():
|
||||
print(f"Decrypting and extracting files{'.' * dot}{' ' * (3 - dot)}", end="\r")
|
||||
if dot < 3:
|
||||
dot += 1
|
||||
time.sleep(0.5)
|
||||
else:
|
||||
dot = 0
|
||||
time.sleep(0.4)
|
||||
print(f"All required files decrypted and extracted.", end="\n")
|
||||
extract_thread.handled = True
|
||||
return backup
|
||||
|
||||
|
||||
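A minimal usage sketch of the helper above; the backup directory is a placeholder and the password is read interactively, as in extract_media below:

import getpass

backup_dir = "path/to/itunes/backup"  # placeholder backup directory
password = getpass.getpass("Enter the password for the backup:")
backup = extract_encrypted(backup_dir, password)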
def is_encrypted(base_dir):
|
||||
with sqlite3.connect(os.path.join(base_dir, "Manifest.db")) as f:
|
||||
c = f.cursor()
|
||||
try:
|
||||
c.execute("""SELECT count()
|
||||
FROM Files
|
||||
""")
|
||||
except sqlite3.OperationalError as e:
|
||||
raise e  # This error cannot be used to determine whether the backup is encrypted
|
||||
except sqlite3.DatabaseError:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
def extract_media(base_dir):
|
||||
if is_encrypted(base_dir):
|
||||
if not support_encrypted:
|
||||
print("You don't have the dependencies to handle encrypted backup.")
|
||||
print("Read more on how to deal with encrypted backup:")
|
||||
print("https://github.com/KnugiHK/Whatsapp-Chat-Exporter/blob/main/README.md#usage")
|
||||
return False
|
||||
print("Encryption detected on the backup!")
|
||||
password = getpass.getpass("Enter the password for the backup:")
|
||||
extract_encrypted(base_dir, password)
|
||||
else:
|
||||
wts_db = os.path.join(base_dir, "7c/7c7fba66680ef796b916b067077cc246adacf01d")
|
||||
contact_db = os.path.join(base_dir, "b8/b8548dc30aa1030df0ce18ef08b882cf7ab5212f")
|
||||
if not os.path.isfile(wts_db):
|
||||
print("WhatsApp database not found.")
|
||||
exit()
|
||||
else:
|
||||
shutil.copyfile(wts_db, "7c7fba66680ef796b916b067077cc246adacf01d")
|
||||
if not os.path.isfile(contact_db):
|
||||
print("Contact database not found.")
|
||||
exit()
|
||||
else:
|
||||
shutil.copyfile(contact_db, "b8548dc30aa1030df0ce18ef08b882cf7ab5212f")
|
||||
_wts_id = "AppDomainGroup-group.net.whatsapp.WhatsApp.shared"
|
||||
with sqlite3.connect(os.path.join(base_dir, "Manifest.db")) as manifest:
|
||||
manifest.row_factory = sqlite3.Row
|
||||
c = manifest.cursor()
|
||||
c.execute(
|
||||
f"""SELECT count()
|
||||
FROM Files
|
||||
WHERE domain = '{_wts_id}'"""
|
||||
)
|
||||
total_row_number = c.fetchone()[0]
|
||||
print(f"Extracting WhatsApp files...(0/{total_row_number})", end="\r")
|
||||
c.execute(f"""SELECT fileID,
|
||||
relativePath,
|
||||
flags,
|
||||
ROW_NUMBER() OVER(ORDER BY relativePath) AS _index
|
||||
FROM Files
|
||||
WHERE domain = '{_wts_id}'
|
||||
ORDER BY relativePath""")
|
||||
if not os.path.isdir(_wts_id):
|
||||
os.mkdir(_wts_id)
|
||||
row = c.fetchone()
|
||||
while row is not None:
|
||||
if row["relativePath"] == "":
|
||||
row = c.fetchone()
|
||||
continue
|
||||
destination = os.path.join(_wts_id, row["relativePath"])
|
||||
hashes = row["fileID"]
|
||||
folder = hashes[:2]
|
||||
flags = row["flags"]
|
||||
if flags == 2:
|
||||
try:
|
||||
os.mkdir(destination)
|
||||
except FileExistsError:
|
||||
pass
|
||||
elif flags == 1:
|
||||
shutil.copyfile(os.path.join(base_dir, folder, hashes), destination)
|
||||
if row["_index"] % 100 == 0:
|
||||
print(f"Extracting WhatsApp files...({row['_index']}/{total_row_number})", end="\r")
|
||||
row = c.fetchone()
|
||||
print(f"Extracting WhatsApp files...({total_row_number}/{total_row_number})", end="\n")
|
||||
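The extraction loop above resolves each relativePath in Manifest.db to an on-disk blob named by its fileID, stored under a two-character prefix folder; a condensed sketch of that copy step:

import os
import shutil

def copy_backup_file(base_dir, file_id, relative_path, destination_root):
    # iOS backups store each file under the first two characters of its
    # fileID hash, e.g. "7c/7c7fba66...".
    source = os.path.join(base_dir, file_id[:2], file_id)
    destination = os.path.join(destination_root, relative_path)
    os.makedirs(os.path.dirname(destination), exist_ok=True)
    shutil.copyfile(source, destination)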
624
Whatsapp_Chat_Exporter/ios_handler.py
Normal file
@@ -0,0 +1,624 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import os
|
||||
import logging
|
||||
import shutil
|
||||
from glob import glob
|
||||
from tqdm import tqdm
|
||||
from pathlib import Path
|
||||
from mimetypes import MimeTypes
|
||||
from markupsafe import escape as htmle
|
||||
from Whatsapp_Chat_Exporter.data_model import ChatStore, Message
|
||||
from Whatsapp_Chat_Exporter.utility import APPLE_TIME, get_chat_condition, Device
|
||||
from Whatsapp_Chat_Exporter.utility import bytes_to_readable, convert_time_unit, safe_name
|
||||
|
||||
|
||||
|
||||
|
||||
def contacts(db, data):
|
||||
"""Process WhatsApp contacts with status information."""
|
||||
c = db.cursor()
|
||||
c.execute("""SELECT count() FROM ZWAADDRESSBOOKCONTACT WHERE ZABOUTTEXT IS NOT NULL""")
|
||||
total_row_number = c.fetchone()[0]
|
||||
logging.info(f"Pre-processing contacts...({total_row_number})", extra={"clear": True})
|
||||
|
||||
c.execute("""SELECT ZWHATSAPPID, ZABOUTTEXT FROM ZWAADDRESSBOOKCONTACT WHERE ZABOUTTEXT IS NOT NULL""")
|
||||
with tqdm(total=total_row_number, desc="Processing contacts", unit="contact", leave=False) as pbar:
|
||||
while (content := c.fetchone()) is not None:
|
||||
zwhatsapp_id = content["ZWHATSAPPID"]
|
||||
if not zwhatsapp_id.endswith("@s.whatsapp.net"):
|
||||
zwhatsapp_id += "@s.whatsapp.net"
|
||||
|
||||
current_chat = ChatStore(Device.IOS)
|
||||
current_chat.status = content["ZABOUTTEXT"]
|
||||
data.add_chat(zwhatsapp_id, current_chat)
|
||||
pbar.update(1)
|
||||
total_time = pbar.format_dict['elapsed']
|
||||
logging.info(f"Pre-processed {total_row_number} contacts in {convert_time_unit(total_time)}")
|
||||
|
||||
|
||||
def process_contact_avatars(current_chat, media_folder, contact_id):
|
||||
"""Process and assign avatar images for a contact."""
|
||||
path = f'{media_folder}/Media/Profile/{contact_id.split("@")[0]}'
|
||||
avatars = glob(f"{path}*")
|
||||
|
||||
if 0 < len(avatars) <= 1:
|
||||
current_chat.their_avatar = avatars[0]
|
||||
else:
|
||||
for avatar in avatars:
|
||||
if avatar.endswith(".thumb") and current_chat.their_avatar_thumb is None:
|
||||
current_chat.their_avatar_thumb = avatar
|
||||
elif avatar.endswith(".jpg") and current_chat.their_avatar is None:
|
||||
current_chat.their_avatar = avatar
|
||||
|
||||
|
||||
def get_contact_name(content):
|
||||
"""Determine the appropriate contact name based on push name and partner name."""
|
||||
is_phone = content["ZPARTNERNAME"].replace("+", "").replace(" ", "").isdigit()
|
||||
if content["ZPUSHNAME"] is None or (content["ZPUSHNAME"] and not is_phone):
|
||||
return content["ZPARTNERNAME"]
|
||||
else:
|
||||
return content["ZPUSHNAME"]
|
||||
|
||||
|
||||
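A small usage sketch of get_contact_name above, with hypothetical rows: the push name is only preferred when the partner name looks like a bare phone number:

rows = [
    {"ZPARTNERNAME": "Alice", "ZPUSHNAME": "alice_push"},
    {"ZPARTNERNAME": "+1 555 0100", "ZPUSHNAME": "Bob"},
]
for row in rows:
    print(get_contact_name(row))  # -> "Alice", then "Bob"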
def messages(db, data, media_folder, timezone_offset, filter_date, filter_chat, filter_empty, no_reply):
|
||||
"""Process WhatsApp messages and contacts from the database."""
|
||||
c = db.cursor()
|
||||
cursor2 = db.cursor()
|
||||
|
||||
# Build the chat filter conditions
|
||||
chat_filter_include = get_chat_condition(
|
||||
filter_chat[0], True, ["ZWACHATSESSION.ZCONTACTJID", "ZMEMBERJID"], "ZGROUPINFO", "ios")
|
||||
chat_filter_exclude = get_chat_condition(
|
||||
filter_chat[1], False, ["ZWACHATSESSION.ZCONTACTJID", "ZMEMBERJID"], "ZGROUPINFO", "ios")
|
||||
date_filter = f'AND ZMESSAGEDATE {filter_date}' if filter_date is not None else ''
|
||||
|
||||
# Process contacts first
|
||||
contact_query = f"""
|
||||
SELECT count()
|
||||
FROM (SELECT DISTINCT ZCONTACTJID,
|
||||
ZPARTNERNAME,
|
||||
ZWAPROFILEPUSHNAME.ZPUSHNAME
|
||||
FROM ZWACHATSESSION
|
||||
INNER JOIN ZWAMESSAGE
|
||||
ON ZWAMESSAGE.ZCHATSESSION = ZWACHATSESSION.Z_PK
|
||||
LEFT JOIN ZWAPROFILEPUSHNAME
|
||||
ON ZWACHATSESSION.ZCONTACTJID = ZWAPROFILEPUSHNAME.ZJID
|
||||
LEFT JOIN ZWAGROUPMEMBER
|
||||
ON ZWAMESSAGE.ZGROUPMEMBER = ZWAGROUPMEMBER.Z_PK
|
||||
WHERE 1=1
|
||||
{chat_filter_include}
|
||||
{chat_filter_exclude}
|
||||
GROUP BY ZCONTACTJID);
|
||||
"""
|
||||
c.execute(contact_query)
|
||||
total_row_number = c.fetchone()[0]
|
||||
|
||||
# Get distinct contacts
|
||||
contacts_query = f"""
|
||||
SELECT DISTINCT ZCONTACTJID,
|
||||
ZPARTNERNAME,
|
||||
ZWAPROFILEPUSHNAME.ZPUSHNAME
|
||||
FROM ZWACHATSESSION
|
||||
INNER JOIN ZWAMESSAGE
|
||||
ON ZWAMESSAGE.ZCHATSESSION = ZWACHATSESSION.Z_PK
|
||||
LEFT JOIN ZWAPROFILEPUSHNAME
|
||||
ON ZWACHATSESSION.ZCONTACTJID = ZWAPROFILEPUSHNAME.ZJID
|
||||
LEFT JOIN ZWAGROUPMEMBER
|
||||
ON ZWAMESSAGE.ZGROUPMEMBER = ZWAGROUPMEMBER.Z_PK
|
||||
WHERE 1=1
|
||||
{chat_filter_include}
|
||||
{chat_filter_exclude}
|
||||
GROUP BY ZCONTACTJID;
|
||||
"""
|
||||
c.execute(contacts_query)
|
||||
|
||||
# Process each contact
|
||||
with tqdm(total=total_row_number, desc="Processing contacts", unit="contact", leave=False) as pbar:
|
||||
while (content := c.fetchone()) is not None:
|
||||
contact_name = get_contact_name(content)
|
||||
contact_id = content["ZCONTACTJID"]
|
||||
|
||||
# Add or update chat
|
||||
if contact_id not in data:
|
||||
current_chat = data.add_chat(contact_id, ChatStore(Device.IOS, contact_name, media_folder))
|
||||
else:
|
||||
current_chat = data.get_chat(contact_id)
|
||||
current_chat.name = contact_name
|
||||
current_chat.my_avatar = os.path.join(media_folder, "Media/Profile/Photo.jpg")
|
||||
|
||||
# Process avatar images
|
||||
process_contact_avatars(current_chat, media_folder, contact_id)
|
||||
pbar.update(1)
|
||||
total_time = pbar.format_dict['elapsed']
|
||||
logging.info(f"Processed {total_row_number} contacts in {convert_time_unit(total_time)}")
|
||||
|
||||
# Get message count
|
||||
message_count_query = f"""
|
||||
SELECT count()
|
||||
FROM ZWAMESSAGE
|
||||
INNER JOIN ZWACHATSESSION
|
||||
ON ZWAMESSAGE.ZCHATSESSION = ZWACHATSESSION.Z_PK
|
||||
LEFT JOIN ZWAGROUPMEMBER
|
||||
ON ZWAMESSAGE.ZGROUPMEMBER = ZWAGROUPMEMBER.Z_PK
|
||||
WHERE 1=1
|
||||
{date_filter}
|
||||
{chat_filter_include}
|
||||
{chat_filter_exclude}
|
||||
"""
|
||||
c.execute(message_count_query)
|
||||
total_row_number = c.fetchone()[0]
|
||||
logging.info(f"Processing messages...(0/{total_row_number})", extra={"clear": True})
|
||||
|
||||
# Fetch messages
|
||||
messages_query = f"""
|
||||
SELECT ZCONTACTJID,
|
||||
ZWAMESSAGE.Z_PK,
|
||||
ZISFROMME,
|
||||
ZMESSAGEDATE,
|
||||
ZTEXT,
|
||||
ZMESSAGETYPE,
|
||||
ZWAGROUPMEMBER.ZMEMBERJID,
|
||||
ZMETADATA,
|
||||
ZSTANZAID,
|
||||
ZGROUPINFO,
|
||||
ZSENTDATE
|
||||
FROM ZWAMESSAGE
|
||||
LEFT JOIN ZWAGROUPMEMBER
|
||||
ON ZWAMESSAGE.ZGROUPMEMBER = ZWAGROUPMEMBER.Z_PK
|
||||
LEFT JOIN ZWAMEDIAITEM
|
||||
ON ZWAMESSAGE.Z_PK = ZWAMEDIAITEM.ZMESSAGE
|
||||
INNER JOIN ZWACHATSESSION
|
||||
ON ZWAMESSAGE.ZCHATSESSION = ZWACHATSESSION.Z_PK
|
||||
WHERE 1=1
|
||||
{date_filter}
|
||||
{chat_filter_include}
|
||||
{chat_filter_exclude}
|
||||
ORDER BY ZMESSAGEDATE ASC;
|
||||
"""
|
||||
c.execute(messages_query)
|
||||
|
||||
reply_query = """SELECT ZSTANZAID,
|
||||
ZTEXT,
|
||||
ZTITLE
|
||||
FROM ZWAMESSAGE
|
||||
LEFT JOIN ZWAMEDIAITEM
|
||||
ON ZWAMESSAGE.Z_PK = ZWAMEDIAITEM.ZMESSAGE
|
||||
WHERE ZTEXT IS NOT NULL
|
||||
OR ZTITLE IS NOT NULL;"""
|
||||
cursor2.execute(reply_query)
|
||||
message_map = {row[0][:17]: row[1] or row[2] for row in cursor2.fetchall() if row[0]}
|
||||
|
||||
# Process each message
|
||||
with tqdm(total=total_row_number, desc="Processing messages", unit="msg", leave=False) as pbar:
|
||||
while (content := c.fetchone()) is not None:
|
||||
contact_id = content["ZCONTACTJID"]
|
||||
message_pk = content["Z_PK"]
|
||||
is_group_message = content["ZGROUPINFO"] is not None
|
||||
|
||||
# Ensure chat exists
|
||||
if contact_id not in data:
|
||||
current_chat = data.add_chat(contact_id, ChatStore(Device.IOS))
|
||||
process_contact_avatars(current_chat, media_folder, contact_id)
|
||||
else:
|
||||
current_chat = data.get_chat(contact_id)
|
||||
|
||||
# Create message object
|
||||
ts = APPLE_TIME + content["ZMESSAGEDATE"]
|
||||
message = Message(
|
||||
from_me=content["ZISFROMME"],
|
||||
timestamp=ts,
|
||||
time=ts,
|
||||
key_id=content["ZSTANZAID"][:17],
|
||||
timezone_offset=timezone_offset,
|
||||
message_type=content["ZMESSAGETYPE"],
|
||||
received_timestamp=APPLE_TIME + content["ZSENTDATE"] if content["ZSENTDATE"] else None,
|
||||
read_timestamp=None # TODO: Add timestamp
|
||||
)
|
||||
|
||||
# Process message data
|
||||
invalid = process_message_data(message, content, is_group_message, data, message_map, no_reply)
|
||||
|
||||
# Add valid messages to chat
|
||||
if not invalid:
|
||||
current_chat.add_message(message_pk, message)
|
||||
|
||||
pbar.update(1)
|
||||
total_time = pbar.format_dict['elapsed']
|
||||
logging.info(f"Processed {total_row_number} messages in {convert_time_unit(total_time)}")
|
||||
|
||||
|
||||
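The reply lookup above keys every message by the first 17 characters of its ZSTANZAID, which is also what the quoted-reply bytes in ZMETADATA carry; a small sketch with hypothetical rows:

rows = [
    ("3EB0ABCDEF0123456789", "Original text", None),  # hypothetical (ZSTANZAID, ZTEXT, ZTITLE)
    ("3EB0FEDCBA9876543210", None, "photo.jpg"),
]
message_map = {row[0][:17]: row[1] or row[2] for row in rows if row[0]}
quoted_key = "3EB0ABCDEF0123456"  # 17 chars, as taken from ZMETADATA[2:19]
print(message_map.get(quoted_key))  # -> "Original text"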
def process_message_data(message, content, is_group_message, data, message_map, no_reply):
|
||||
"""Process and set message data from content row."""
|
||||
# Handle group sender info
|
||||
if is_group_message and content["ZISFROMME"] == 0:
|
||||
name = None
|
||||
if content["ZMEMBERJID"] is not None:
|
||||
if content["ZMEMBERJID"] in data:
|
||||
name = data.get_chat(content["ZMEMBERJID"]).name
|
||||
if "@" in content["ZMEMBERJID"]:
|
||||
fallback = content["ZMEMBERJID"].split('@')[0]
|
||||
else:
|
||||
fallback = None
|
||||
else:
|
||||
fallback = None
|
||||
message.sender = name or fallback
|
||||
else:
|
||||
message.sender = None
|
||||
|
||||
# Handle metadata messages
|
||||
if content["ZMESSAGETYPE"] == 6:
|
||||
return process_metadata_message(message, content, is_group_message)
|
||||
|
||||
# Handle quoted replies
|
||||
if content["ZMETADATA"] is not None and content["ZMETADATA"].startswith(b"\x2a\x14") and not no_reply:
|
||||
quoted = content["ZMETADATA"][2:19]
|
||||
message.reply = quoted.decode()
|
||||
message.quoted_data = message_map.get(message.reply)
|
||||
|
||||
# Handle stickers
|
||||
if content["ZMESSAGETYPE"] == 15:
|
||||
message.sticker = True
|
||||
|
||||
# Process message text
|
||||
process_message_text(message, content)
|
||||
|
||||
return False # Message is valid
|
||||
|
||||
|
||||
def process_metadata_message(message, content, is_group_message):
|
||||
"""Process metadata messages (action_type 6)."""
|
||||
if is_group_message:
|
||||
# Group
|
||||
if content["ZTEXT"] is not None:
|
||||
# Changed name
|
||||
try:
|
||||
int(content["ZTEXT"])
|
||||
except ValueError:
|
||||
msg = f"The group name changed to {content['ZTEXT']}"
|
||||
message.data = msg
|
||||
message.meta = True
|
||||
return False # Valid message
|
||||
else:
|
||||
return True # Invalid message
|
||||
else:
|
||||
message.data = None
|
||||
return False
|
||||
else:
|
||||
message.data = None
|
||||
return False
|
||||
|
||||
|
||||
def process_message_text(message, content):
|
||||
"""Process and format message text content."""
|
||||
if content["ZISFROMME"] == 1:
|
||||
if content["ZMESSAGETYPE"] == 14:
|
||||
msg = "Message deleted"
|
||||
message.meta = True
|
||||
else:
|
||||
msg = content["ZTEXT"]
|
||||
if msg is not None:
|
||||
msg = msg.replace("\r\n", "<br>").replace("\n", "<br>")
|
||||
else:
|
||||
if content["ZMESSAGETYPE"] == 14:
|
||||
msg = "Message deleted"
|
||||
message.meta = True
|
||||
else:
|
||||
msg = content["ZTEXT"]
|
||||
if msg is not None:
|
||||
msg = msg.replace("\r\n", "<br>").replace("\n", "<br>")
|
||||
|
||||
message.data = msg
|
||||
|
||||
|
||||
def media(db, data, media_folder, filter_date, filter_chat, filter_empty, separate_media=False, fix_dot_files=False):
|
||||
"""Process media files from WhatsApp messages."""
|
||||
c = db.cursor()
|
||||
|
||||
# Build filter conditions
|
||||
chat_filter_include = get_chat_condition(
|
||||
filter_chat[0], True, ["ZWACHATSESSION.ZCONTACTJID", "ZMEMBERJID"], "ZGROUPINFO", "ios")
|
||||
chat_filter_exclude = get_chat_condition(
|
||||
filter_chat[1], False, ["ZWACHATSESSION.ZCONTACTJID", "ZMEMBERJID"], "ZGROUPINFO", "ios")
|
||||
date_filter = f'AND ZMESSAGEDATE {filter_date}' if filter_date is not None else ''
|
||||
|
||||
# Get media count
|
||||
media_count_query = f"""
|
||||
SELECT count()
|
||||
FROM ZWAMEDIAITEM
|
||||
INNER JOIN ZWAMESSAGE
|
||||
ON ZWAMEDIAITEM.ZMESSAGE = ZWAMESSAGE.Z_PK
|
||||
INNER JOIN ZWACHATSESSION
|
||||
ON ZWAMESSAGE.ZCHATSESSION = ZWACHATSESSION.Z_PK
|
||||
LEFT JOIN ZWAGROUPMEMBER
|
||||
ON ZWAMESSAGE.ZGROUPMEMBER = ZWAGROUPMEMBER.Z_PK
|
||||
WHERE 1=1
|
||||
{date_filter}
|
||||
{chat_filter_include}
|
||||
{chat_filter_exclude}
|
||||
"""
|
||||
c.execute(media_count_query)
|
||||
total_row_number = c.fetchone()[0]
|
||||
logging.info(f"Processing media...(0/{total_row_number})", extra={"clear": True})
|
||||
|
||||
# Fetch media items
|
||||
media_query = f"""
|
||||
SELECT ZCONTACTJID,
|
||||
ZMESSAGE,
|
||||
ZMEDIALOCALPATH,
|
||||
ZMEDIAURL,
|
||||
ZVCARDSTRING,
|
||||
ZMEDIAKEY,
|
||||
ZTITLE
|
||||
FROM ZWAMEDIAITEM
|
||||
INNER JOIN ZWAMESSAGE
|
||||
ON ZWAMEDIAITEM.ZMESSAGE = ZWAMESSAGE.Z_PK
|
||||
INNER JOIN ZWACHATSESSION
|
||||
ON ZWAMESSAGE.ZCHATSESSION = ZWACHATSESSION.Z_PK
|
||||
LEFT JOIN ZWAGROUPMEMBER
|
||||
ON ZWAMESSAGE.ZGROUPMEMBER = ZWAGROUPMEMBER.Z_PK
|
||||
WHERE ZMEDIALOCALPATH IS NOT NULL
|
||||
{date_filter}
|
||||
{chat_filter_include}
|
||||
{chat_filter_exclude}
|
||||
ORDER BY ZCONTACTJID ASC
|
||||
"""
|
||||
c.execute(media_query)
|
||||
|
||||
# Process each media item
|
||||
mime = MimeTypes()
|
||||
with tqdm(total=total_row_number, desc="Processing media", unit="media", leave=False) as pbar:
|
||||
while (content := c.fetchone()) is not None:
|
||||
process_media_item(content, data, media_folder, mime, separate_media, fix_dot_files)
|
||||
pbar.update(1)
|
||||
total_time = pbar.format_dict['elapsed']
|
||||
logging.info(f"Processed {total_row_number} media in {convert_time_unit(total_time)}")
|
||||
|
||||
|
||||
def process_media_item(content, data, media_folder, mime, separate_media, fix_dot_files=False):
|
||||
"""Process a single media item."""
|
||||
file_path = f"{media_folder}/Message/{content['ZMEDIALOCALPATH']}"
|
||||
current_chat = data.get_chat(content["ZCONTACTJID"])
|
||||
message = current_chat.get_message(content["ZMESSAGE"])
|
||||
message.media = True
|
||||
|
||||
if current_chat.media_base == "":
|
||||
current_chat.media_base = media_folder + "/"
|
||||
|
||||
if os.path.isfile(file_path):
|
||||
# Set MIME type
|
||||
if content["ZVCARDSTRING"] is None:
|
||||
guess = mime.guess_type(file_path)[0]
|
||||
message.mime = guess if guess is not None else "application/octet-stream"
|
||||
else:
|
||||
message.mime = content["ZVCARDSTRING"]
|
||||
|
||||
if fix_dot_files and file_path.endswith("."):
|
||||
extension = mime.guess_extension(message.mime)
|
||||
if message.mime == "application/octet-stream" or not extension:
|
||||
new_file_path = file_path[:-1]
|
||||
else:
new_file_path = file_path[:-1] + extension
|
||||
os.rename(file_path, new_file_path)
|
||||
file_path = new_file_path
|
||||
|
||||
# Handle separate media option
|
||||
if separate_media:
|
||||
chat_display_name = safe_name(
|
||||
current_chat.name or message.sender or content["ZCONTACTJID"].split('@')[0])
|
||||
current_filename = file_path.split("/")[-1]
|
||||
new_folder = os.path.join(media_folder, "separated", chat_display_name)
|
||||
Path(new_folder).mkdir(parents=True, exist_ok=True)
|
||||
new_path = os.path.join(new_folder, current_filename)
|
||||
shutil.copy2(file_path, new_path)
|
||||
message.data = '/'.join(new_path.split("/")[1:])
|
||||
else:
|
||||
message.data = '/'.join(file_path.split("/")[1:])
|
||||
else:
|
||||
# Handle missing media
|
||||
message.data = "The media is missing"
|
||||
message.mime = "media"
|
||||
message.meta = True
|
||||
|
||||
# Add caption if available
|
||||
if content["ZTITLE"] is not None:
|
||||
message.caption = content["ZTITLE"]
|
||||
|
||||
|
||||
def vcard(db, data, media_folder, filter_date, filter_chat, filter_empty):
|
||||
"""Process vCard contacts from WhatsApp messages."""
|
||||
c = db.cursor()
|
||||
|
||||
# Build filter conditions
|
||||
chat_filter_include = get_chat_condition(
|
||||
filter_chat[0], True, ["ZCONTACTJID", "ZMEMBERJID"], "ZGROUPINFO", "ios")
|
||||
chat_filter_exclude = get_chat_condition(
|
||||
filter_chat[1], False, ["ZCONTACTJID", "ZMEMBERJID"], "ZGROUPINFO", "ios")
|
||||
date_filter = f'AND ZWAMESSAGE.ZMESSAGEDATE {filter_date}' if filter_date is not None else ''
|
||||
|
||||
# Fetch vCard mentions
|
||||
vcard_query = f"""
|
||||
SELECT DISTINCT ZWAVCARDMENTION.ZMEDIAITEM,
|
||||
ZWAMEDIAITEM.ZMESSAGE,
|
||||
ZCONTACTJID,
|
||||
ZVCARDNAME,
|
||||
ZVCARDSTRING
|
||||
FROM ZWAVCARDMENTION
|
||||
INNER JOIN ZWAMEDIAITEM
|
||||
ON ZWAVCARDMENTION.ZMEDIAITEM = ZWAMEDIAITEM.Z_PK
|
||||
INNER JOIN ZWAMESSAGE
|
||||
ON ZWAMEDIAITEM.ZMESSAGE = ZWAMESSAGE.Z_PK
|
||||
INNER JOIN ZWACHATSESSION
|
||||
ON ZWAMESSAGE.ZCHATSESSION = ZWACHATSESSION.Z_PK
|
||||
LEFT JOIN ZWAGROUPMEMBER
|
||||
ON ZWAMESSAGE.ZGROUPMEMBER = ZWAGROUPMEMBER.Z_PK
|
||||
WHERE 1=1
|
||||
{date_filter}
|
||||
{chat_filter_include}
|
||||
{chat_filter_exclude}
|
||||
"""
|
||||
c.execute(vcard_query)
|
||||
contents = c.fetchall()
|
||||
total_row_number = len(contents)
|
||||
logging.info(f"Processing vCards...(0/{total_row_number})", extra={"clear": True})
|
||||
|
||||
# Create vCards directory
|
||||
path = f'{media_folder}/Message/vCards'
|
||||
Path(path).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Process each vCard
|
||||
with tqdm(total=total_row_number, desc="Processing vCards", unit="vcard", leave=False) as pbar:
|
||||
for content in contents:
|
||||
process_vcard_item(content, path, data)
|
||||
pbar.update(1)
|
||||
total_time = pbar.format_dict['elapsed']
|
||||
logging.info(f"Processed {total_row_number} vCards in {convert_time_unit(total_time)}")
|
||||
|
||||
|
||||
def process_vcard_item(content, path, data):
|
||||
"""Process a single vCard item."""
|
||||
file_paths = []
|
||||
vcard_names = content["ZVCARDNAME"].split("_$!<Name-Separator>!$_")
|
||||
vcard_strings = content["ZVCARDSTRING"].split("_$!<VCard-Separator>!$_")
|
||||
|
||||
# If this is a list of contacts
|
||||
if len(vcard_names) > len(vcard_strings):
|
||||
vcard_names.pop(0) # Dismiss the first element, which is the group name
|
||||
|
||||
# Save each vCard file
|
||||
for name, vcard_string in zip(vcard_names, vcard_strings):
|
||||
file_name = "".join(x for x in name if x.isalnum())
|
||||
file_name = file_name.encode('utf-8')[:230].decode('utf-8', 'ignore')
|
||||
file_path = os.path.join(path, f"{file_name}.vcf")
|
||||
file_paths.append(file_path)
|
||||
|
||||
if not os.path.isfile(file_path):
|
||||
with open(file_path, "w", encoding="utf-8") as f:
|
||||
f.write(vcard_string)
|
||||
|
||||
# Create vCard summary and update message
|
||||
vcard_summary = "This media include the following vCard file(s):<br>"
|
||||
vcard_summary += " | ".join([f'<a href="{htmle(fp)}">{htmle(name)}</a>' for name,
|
||||
fp in zip(vcard_names, file_paths)])
|
||||
|
||||
message = data.get_chat(content["ZCONTACTJID"]).get_message(content["ZMESSAGE"])
|
||||
message.data = vcard_summary
|
||||
message.mime = "text/x-vcard"
|
||||
message.media = True
|
||||
message.meta = True
|
||||
message.safe = True
|
||||
|
||||
|
||||
def calls(db, data, timezone_offset, filter_chat):
|
||||
"""Process WhatsApp call records."""
|
||||
c = db.cursor()
|
||||
|
||||
# Build filter conditions
|
||||
chat_filter_include = get_chat_condition(
|
||||
filter_chat[0], True, ["ZGROUPCALLCREATORUSERJIDSTRING"], None, "ios")
|
||||
chat_filter_exclude = get_chat_condition(
|
||||
filter_chat[1], False, ["ZGROUPCALLCREATORUSERJIDSTRING"], None, "ios")
|
||||
|
||||
# Get call count
|
||||
call_count_query = f"""
|
||||
SELECT count()
|
||||
FROM ZWACDCALLEVENT
|
||||
WHERE 1=1
|
||||
{chat_filter_include}
|
||||
{chat_filter_exclude}
|
||||
"""
|
||||
c.execute(call_count_query)
|
||||
total_row_number = c.fetchone()[0]
|
||||
if total_row_number == 0:
|
||||
return
|
||||
|
||||
# Fetch call records
|
||||
calls_query = f"""
|
||||
SELECT ZCALLIDSTRING,
|
||||
ZGROUPCALLCREATORUSERJIDSTRING,
|
||||
ZGROUPJIDSTRING,
|
||||
ZDATE,
|
||||
ZOUTCOME,
|
||||
ZBYTESRECEIVED + ZBYTESSENT AS bytes_transferred,
|
||||
ZDURATION,
|
||||
ZVIDEO,
|
||||
ZMISSED,
|
||||
ZINCOMING
|
||||
FROM ZWACDCALLEVENT
|
||||
INNER JOIN ZWAAGGREGATECALLEVENT
|
||||
ON ZWACDCALLEVENT.Z1CALLEVENTS = ZWAAGGREGATECALLEVENT.Z_PK
|
||||
WHERE 1=1
|
||||
{chat_filter_include}
|
||||
{chat_filter_exclude}
|
||||
"""
|
||||
c.execute(calls_query)
|
||||
|
||||
# Create calls chat
|
||||
chat = ChatStore(Device.ANDROID, "WhatsApp Calls")
|
||||
|
||||
with tqdm(total=total_row_number, desc="Processing calls", unit="call", leave=False) as pbar:
|
||||
while (content := c.fetchone()) is not None:
|
||||
process_call_record(content, chat, data, timezone_offset)
|
||||
pbar.update(1)
|
||||
total_time = pbar.format_dict['elapsed']
|
||||
|
||||
# Add calls chat to data
|
||||
data.add_chat("000000000000000", chat)
|
||||
logging.info(f"Processed {total_row_number} calls in {convert_time_unit(total_time)}")
|
||||
|
||||
|
||||
def process_call_record(content, chat, data, timezone_offset):
|
||||
"""Process a single call record."""
|
||||
ts = APPLE_TIME + int(content["ZDATE"])
|
||||
call = Message(
|
||||
from_me=content["ZINCOMING"] == 0,
|
||||
timestamp=ts,
|
||||
time=ts,
|
||||
key_id=content["ZCALLIDSTRING"],
|
||||
timezone_offset=timezone_offset
|
||||
)
|
||||
|
||||
# Set sender info
|
||||
_jid = content["ZGROUPCALLCREATORUSERJIDSTRING"]
|
||||
name = data.get_chat(_jid).name if _jid in data else None
|
||||
if _jid is not None and "@" in _jid:
|
||||
fallback = _jid.split('@')[0]
|
||||
else:
|
||||
fallback = None
|
||||
call.sender = name or fallback
|
||||
|
||||
# Set call metadata
|
||||
call.meta = True
|
||||
call.data = format_call_data(call, content)
|
||||
|
||||
# Add call to chat
|
||||
chat.add_message(call.key_id, call)
|
||||
|
||||
|
||||
def format_call_data(call, content):
|
||||
"""Format call data message based on call attributes."""
|
||||
# Basic call info
|
||||
call_data = (
|
||||
f"A {'group ' if content['ZGROUPJIDSTRING'] is not None else ''}"
|
||||
f"{'video' if content['ZVIDEO'] == 1 else 'voice'} "
|
||||
f"call {'to' if call.from_me else 'from'} "
|
||||
f"{call.sender} was "
|
||||
)
|
||||
|
||||
# Call outcome
|
||||
if content['ZOUTCOME'] in (1, 4):
|
||||
call_data += "not answered." if call.from_me else "missed."
|
||||
elif content['ZOUTCOME'] == 2:
|
||||
call_data += "failed."
|
||||
elif content['ZOUTCOME'] == 0:
|
||||
call_time = convert_time_unit(int(content['ZDURATION']))
|
||||
call_bytes = bytes_to_readable(content['bytes_transferred'])
|
||||
call_data += (
|
||||
f"initiated and lasted for {call_time} "
|
||||
f"with {call_bytes} data transferred."
|
||||
)
|
||||
else:
|
||||
call_data += "in an unknown state."
|
||||
|
||||
return call_data
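# Illustrative output only (name and figures made up by the editor, not from the diff): for a
# completed outgoing voice call with ZDURATION=125 and 1536 bytes transferred, this returns
# "A voice call to Alice was initiated and lasted for 2 minutes 5 seconds with 1.5 KB data transferred."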
|
||||
251 Whatsapp_Chat_Exporter/ios_media_handler.py Normal file
@@ -0,0 +1,251 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import logging
|
||||
import shutil
|
||||
import sqlite3
|
||||
import os
|
||||
import getpass
|
||||
from sys import exit, platform as osname
|
||||
import sys
|
||||
from tqdm import tqdm
|
||||
from Whatsapp_Chat_Exporter.utility import WhatsAppIdentifier, convert_time_unit
|
||||
from Whatsapp_Chat_Exporter.bplist import BPListReader
|
||||
try:
|
||||
from iphone_backup_decrypt import EncryptedBackup, RelativePath
|
||||
except ModuleNotFoundError:
|
||||
support_encrypted = False
|
||||
else:
|
||||
support_encrypted = True
|
||||
|
||||
|
||||
|
||||
|
||||
class BackupExtractor:
|
||||
"""
|
||||
A class to handle the extraction of WhatsApp data from iOS backups,
|
||||
including encrypted and unencrypted backups.
|
||||
"""
|
||||
|
||||
def __init__(self, base_dir, identifiers, decrypt_chunk_size):
|
||||
self.base_dir = base_dir
|
||||
self.identifiers = identifiers
|
||||
self.decrypt_chunk_size = decrypt_chunk_size
|
||||
|
||||
def extract(self):
|
||||
"""
|
||||
Extracts WhatsApp data from the backup based on whether it's encrypted or not.
|
||||
"""
|
||||
if self._is_encrypted():
|
||||
self._extract_encrypted_backup()
|
||||
else:
|
||||
self._extract_unencrypted_backup()
|
||||
|
||||
def _is_encrypted(self):
|
||||
"""
|
||||
Checks if the iOS backup is encrypted.
|
||||
|
||||
Returns:
|
||||
bool: True if encrypted, False otherwise.
|
||||
"""
|
||||
try:
|
||||
with sqlite3.connect(os.path.join(self.base_dir, "Manifest.db")) as db:
|
||||
c = db.cursor()
|
||||
try:
|
||||
c.execute("SELECT count() FROM Files")
|
||||
c.fetchone() # Execute and fetch to trigger potential errors
|
||||
except (sqlite3.OperationalError, sqlite3.DatabaseError):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
except sqlite3.DatabaseError as e:
|
||||
if str(e) == "authorization denied" and osname == "darwin":
|
||||
logging.error(
|
||||
"You don't have permission to access the backup database. Please"
|
||||
"check your permissions or try moving the backup to somewhere else."
|
||||
)
|
||||
exit(8)
|
||||
else:
|
||||
raise e
|
||||
|
||||
def _extract_encrypted_backup(self):
|
||||
"""
|
||||
Handles the extraction of data from an encrypted iOS backup.
|
||||
"""
|
||||
if not support_encrypted:
|
||||
logging.error("You don't have the dependencies to handle encrypted backup."
|
||||
"Read more on how to deal with encrypted backup:"
|
||||
"https://github.com/KnugiHK/Whatsapp-Chat-Exporter/blob/main/README.md#usage"
|
||||
)
|
||||
return
|
||||
|
||||
logging.info(f"Encryption detected on the backup!")
|
||||
password = getpass.getpass("Enter the password for the backup:")
|
||||
sys.stdout.write("\033[F\033[K")
|
||||
sys.stdout.flush()
|
||||
self._decrypt_backup(password)
|
||||
self._extract_decrypted_files()
|
||||
|
||||
def _decrypt_backup(self, password):
|
||||
"""
|
||||
Decrypts the iOS backup using the provided password.
|
||||
|
||||
Args:
|
||||
password (str): The password for the encrypted backup.
|
||||
"""
|
||||
logging.info(f"Trying to open the iOS backup...")
|
||||
self.backup = EncryptedBackup(
|
||||
backup_directory=self.base_dir,
|
||||
passphrase=password,
|
||||
cleanup=False,
|
||||
check_same_thread=False,
|
||||
decrypt_chunk_size=self.decrypt_chunk_size,
|
||||
)
|
||||
logging.info(f"iOS backup is opened successfully")
|
||||
logging.info("Decrypting WhatsApp database...", extra={"clear": True})
|
||||
try:
|
||||
self.backup.extract_file(
|
||||
relative_path=RelativePath.WHATSAPP_MESSAGES,
|
||||
domain_like=self.identifiers.DOMAIN,
|
||||
output_filename=self.identifiers.MESSAGE,
|
||||
)
|
||||
self.backup.extract_file(
|
||||
relative_path=RelativePath.WHATSAPP_CONTACTS,
|
||||
domain_like=self.identifiers.DOMAIN,
|
||||
output_filename=self.identifiers.CONTACT,
|
||||
)
|
||||
self.backup.extract_file(
|
||||
relative_path=RelativePath.WHATSAPP_CALLS,
|
||||
domain_like=self.identifiers.DOMAIN,
|
||||
output_filename=self.identifiers.CALL,
|
||||
)
|
||||
except ValueError:
|
||||
logging.error("Failed to decrypt backup: incorrect password?")
|
||||
exit(7)
|
||||
except FileNotFoundError:
|
||||
logging.error(
|
||||
"Essential WhatsApp files are missing from the iOS backup. "
|
||||
"Perhapse you enabled end-to-end encryption for the backup? "
|
||||
"See https://wts.knugi.dev/docs.html?dest=iose2e"
|
||||
)
|
||||
exit(6)
|
||||
else:
|
||||
logging.info(f"WhatsApp database decrypted successfully")
|
||||
|
||||
def _extract_decrypted_files(self):
|
||||
"""Extract all WhatsApp files after decryption"""
|
||||
pbar = tqdm(desc="Decrypting and extracting files", unit="file", leave=False)
|
||||
def extract_progress_handler(file_id, domain, relative_path, n, total_files):
|
||||
if pbar.total is None:
|
||||
pbar.total = total_files
|
||||
pbar.n = n
|
||||
pbar.refresh()
|
||||
return True
|
||||
|
||||
self.backup.extract_files(
|
||||
domain_like=self.identifiers.DOMAIN,
|
||||
output_folder=self.identifiers.DOMAIN,
|
||||
preserve_folders=True,
|
||||
filter_callback=extract_progress_handler
|
||||
)
|
||||
total_time = pbar.format_dict['elapsed']
|
||||
pbar.close()
|
||||
logging.info(f"All required files are decrypted and extracted in {convert_time_unit(total_time)}")
|
||||
|
||||
def _extract_unencrypted_backup(self):
|
||||
"""
|
||||
Handles the extraction of data from an unencrypted iOS backup.
|
||||
"""
|
||||
self._copy_whatsapp_databases()
|
||||
self._extract_media_files()
|
||||
|
||||
def _copy_whatsapp_databases(self):
|
||||
"""
|
||||
Copies the WhatsApp message, contact, and call databases to the working directory.
|
||||
"""
|
||||
wts_db_path = os.path.join(self.base_dir, self.identifiers.MESSAGE[:2], self.identifiers.MESSAGE)
|
||||
contact_db_path = os.path.join(self.base_dir, self.identifiers.CONTACT[:2], self.identifiers.CONTACT)
|
||||
call_db_path = os.path.join(self.base_dir, self.identifiers.CALL[:2], self.identifiers.CALL)
|
||||
|
||||
if not os.path.isfile(wts_db_path):
|
||||
if self.identifiers is WhatsAppIdentifier:
|
||||
logging.error("WhatsApp database not found.")
|
||||
else:
|
||||
logging.error("WhatsApp Business database not found.")
|
||||
logging.error(
|
||||
"Essential WhatsApp files are missing from the iOS backup. "
|
||||
"Perhapse you enabled end-to-end encryption for the backup? "
|
||||
"See https://wts.knugi.dev/docs.html?dest=iose2e"
|
||||
)
|
||||
exit(1)
|
||||
else:
|
||||
shutil.copyfile(wts_db_path, self.identifiers.MESSAGE)
|
||||
|
||||
if not os.path.isfile(contact_db_path):
|
||||
logging.warning(f"Contact database not found. Skipping...")
|
||||
else:
|
||||
shutil.copyfile(contact_db_path, self.identifiers.CONTACT)
|
||||
|
||||
if not os.path.isfile(call_db_path):
|
||||
logging.warning(f"Call database not found. Skipping...")
|
||||
else:
|
||||
shutil.copyfile(call_db_path, self.identifiers.CALL)
|
||||
|
||||
def _extract_media_files(self):
|
||||
"""
|
||||
Extracts media files from the unencrypted backup.
|
||||
"""
|
||||
_wts_id = self.identifiers.DOMAIN
|
||||
with sqlite3.connect(os.path.join(self.base_dir, "Manifest.db")) as manifest:
|
||||
manifest.row_factory = sqlite3.Row
|
||||
c = manifest.cursor()
|
||||
c.execute(f"SELECT count() FROM Files WHERE domain = '{_wts_id}'")
|
||||
total_row_number = c.fetchone()[0]
|
||||
c.execute(
|
||||
f"""
|
||||
SELECT fileID, relativePath, flags, file AS metadata,
|
||||
ROW_NUMBER() OVER(ORDER BY relativePath) AS _index
|
||||
FROM Files
|
||||
WHERE domain = '{_wts_id}'
|
||||
ORDER BY relativePath
|
||||
"""
|
||||
)
|
||||
if not os.path.isdir(_wts_id):
|
||||
os.mkdir(_wts_id)
|
||||
|
||||
with tqdm(total=total_row_number, desc="Extracting WhatsApp files", unit="file", leave=False) as pbar:
|
||||
while (row := c.fetchone()) is not None:
|
||||
if not row["relativePath"]: # Skip empty relative paths
|
||||
continue
|
||||
|
||||
destination = os.path.join(_wts_id, row["relativePath"])
|
||||
hashes = row["fileID"]
|
||||
folder = hashes[:2]
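# Note (added for clarity, not in the original diff): iOS backups shard file blobs into
# subdirectories named after the first two hex characters of the Manifest.db fileID,
# which is why the copy below looks under base_dir/<hashes[:2]>/<fileID>.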
|
||||
flags = row["flags"]
|
||||
|
||||
if flags == 2: # Directory
|
||||
try:
|
||||
os.mkdir(destination)
|
||||
except FileExistsError:
|
||||
pass
|
||||
elif flags == 1: # File
|
||||
shutil.copyfile(os.path.join(self.base_dir, folder, hashes), destination)
|
||||
metadata = BPListReader(row["metadata"]).parse()
|
||||
_creation = metadata["$objects"][1]["Birth"]
|
||||
modification = metadata["$objects"][1]["LastModified"]
|
||||
os.utime(destination, (modification, modification))
|
||||
pbar.update(1)
|
||||
total_time = pbar.format_dict['elapsed']
|
||||
logging.info(f"Extracted {total_row_number} WhatsApp files in {convert_time_unit(total_time)}")
|
||||
|
||||
|
||||
def extract_media(base_dir, identifiers, decrypt_chunk_size):
|
||||
"""
|
||||
Extracts WhatsApp data (media, messages, contacts, calls) from an iOS backup.
|
||||
|
||||
Args:
|
||||
base_dir (str): The path to the iOS backup directory.
|
||||
identifiers (WhatsAppIdentifier): An object containing WhatsApp file identifiers.
|
||||
decrypt_chunk_size (int): The chunk size for decryption.
|
||||
"""
|
||||
extractor = BackupExtractor(base_dir, identifiers, decrypt_chunk_size)
|
||||
extractor.extract()
|
||||
@@ -1,29 +1,154 @@
|
||||
import logging
|
||||
import sqlite3
|
||||
import jinja2
|
||||
import json
|
||||
import os
|
||||
import unicodedata
|
||||
import re
|
||||
import math
|
||||
import shutil
|
||||
from bleach import clean as sanitize
|
||||
from markupsafe import Markup
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timedelta
|
||||
from enum import IntEnum
|
||||
from Whatsapp_Chat_Exporter.data_model import ChatStore
|
||||
from tqdm import tqdm
|
||||
from Whatsapp_Chat_Exporter.data_model import ChatCollection, ChatStore, Timing
|
||||
from typing import Dict, List, Optional, Tuple, Union, Any
|
||||
try:
|
||||
from enum import StrEnum
|
||||
from enum import StrEnum, IntEnum
|
||||
except ImportError:
|
||||
# < Python 3.11
|
||||
# This should be removed when the support for Python 3.10 ends. (31 Oct 2026)
|
||||
from enum import Enum
|
||||
|
||||
class StrEnum(str, Enum):
|
||||
pass
|
||||
|
||||
class IntEnum(int, Enum):
|
||||
pass
|
||||
|
||||
MAX_SIZE = 4 * 1024 * 1024 # Default 4MB
|
||||
ROW_SIZE = 0x3D0
|
||||
CURRENT_TZ_OFFSET = datetime.now().astimezone().utcoffset().seconds / 3600
|
||||
|
||||
|
||||
def sanitize_except(html):
|
||||
|
||||
def convert_time_unit(time_second: int) -> str:
|
||||
"""Converts a time duration in seconds to a human-readable string.
|
||||
|
||||
Args:
|
||||
time_second: The time duration in seconds.
|
||||
|
||||
Returns:
|
||||
str: A human-readable string representing the time duration.
|
||||
"""
|
||||
if time_second < 1:
|
||||
return "less than a second"
|
||||
elif time_second == 1:
|
||||
return "a second"
|
||||
|
||||
delta = timedelta(seconds=time_second)
|
||||
parts = []
|
||||
|
||||
days = delta.days
|
||||
if days > 0:
|
||||
parts.append(f"{days} day{'s' if days > 1 else ''}")
|
||||
|
||||
hours = delta.seconds // 3600
|
||||
if hours > 0:
|
||||
parts.append(f"{hours} hour{'s' if hours > 1 else ''}")
|
||||
|
||||
minutes = (delta.seconds % 3600) // 60
|
||||
if minutes > 0:
|
||||
parts.append(f"{minutes} minute{'s' if minutes > 1 else ''}")
|
||||
|
||||
seconds = delta.seconds % 60
|
||||
if seconds > 0:
|
||||
parts.append(f"{seconds} second{'s' if seconds > 1 else ''}")
|
||||
|
||||
return " ".join(parts)
|
||||
|
||||
|
||||
def bytes_to_readable(size_bytes: int) -> str:
|
||||
"""Converts a file size in bytes to a human-readable string with units.
|
||||
|
||||
From https://stackoverflow.com/a/14822210/9478891
|
||||
Authors: james-sapam & other contributors
|
||||
Licensed under CC BY-SA 3.0
|
||||
See git commit logs for changes, if any.
|
||||
|
||||
Args:
|
||||
size_bytes: The file size in bytes.
|
||||
|
||||
Returns:
|
||||
A human-readable string representing the file size.
|
||||
"""
|
||||
if size_bytes < 1024:
|
||||
return f"{size_bytes} B"
|
||||
size_name = ("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB")
|
||||
i = int(math.floor(math.log(size_bytes, 1024)))
|
||||
p = math.pow(1024, i)
|
||||
s = round(size_bytes / p, 2)
|
||||
return "%s %s" % (s, size_name[i])
|
||||
|
||||
|
||||
def readable_to_bytes(size_str: str) -> int:
|
||||
"""Converts a human-readable file size string to bytes.
|
||||
|
||||
Args:
|
||||
size_str: The human-readable file size string (e.g., "1024KB", "1MB", "2GB").
|
||||
|
||||
Returns:
|
||||
The file size in bytes.
|
||||
|
||||
Raises:
|
||||
ValueError: If the input string is invalid.
|
||||
"""
|
||||
SIZE_UNITS = {
|
||||
'B': 1,
|
||||
'KB': 1024,
|
||||
'MB': 1024**2,
|
||||
'GB': 1024**3,
|
||||
'TB': 1024**4,
|
||||
'PB': 1024**5,
|
||||
'EB': 1024**6,
|
||||
'ZB': 1024**7,
|
||||
'YB': 1024**8
|
||||
}
|
||||
size_str = size_str.upper().strip()
|
||||
if size_str.isnumeric():
|
||||
# If the string is purely numeric, assume it's in bytes
|
||||
return int(size_str)
|
||||
match = re.fullmatch(r'^(\d+(\.\d*)?)\s*([KMGTPEZY]?B)?$', size_str)
|
||||
if not match:
|
||||
raise ValueError("Invalid size format for size_str. Expected format like '10MB', '1024GB', or '512'.")
|
||||
unit = ''.join(filter(str.isalpha, size_str)).strip()
|
||||
number = ''.join(c for c in size_str if c.isdigit() or c == '.').strip()
|
||||
return int(float(number) * SIZE_UNITS[unit or 'B'])  # treat a bare decimal such as "512.5" as bytes
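# Illustrative behaviour (examples added by the editor, not part of the diff):
#   readable_to_bytes("512")    -> 512
#   readable_to_bytes("1.5 KB") -> 1536
#   readable_to_bytes("10MB")   -> 10485760
#   readable_to_bytes("ten MB") raises ValueError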
|
||||
|
||||
|
||||
def sanitize_except(html: str) -> Markup:
|
||||
"""Sanitizes HTML, only allowing <br> tag.
|
||||
|
||||
Args:
|
||||
html: The HTML string to sanitize.
|
||||
|
||||
Returns:
|
||||
A Markup object containing the sanitized HTML.
|
||||
"""
|
||||
return Markup(sanitize(html, tags=["br"]))
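# Illustrative behaviour (example added by the editor, not part of the diff): disallowed tags are
# escaped rather than kept, so sanitize_except("<b>hi</b><br>") should yield roughly
# Markup('&lt;b&gt;hi&lt;/b&gt;<br>'), preserving only the literal <br>.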
|
||||
|
||||
|
||||
def determine_day(last, current):
|
||||
def determine_day(last: int, current: int) -> Optional[datetime.date]:
|
||||
"""Determines if the day has changed between two timestamps. Exposed to Jinja's environment.
|
||||
|
||||
Args:
|
||||
last: The timestamp of the previous message.
|
||||
current: The timestamp of the current message.
|
||||
|
||||
Returns:
|
||||
The date of the current message if it's a different day than the last message, otherwise None.
|
||||
"""
|
||||
last = datetime.fromtimestamp(last).date()
|
||||
current = datetime.fromtimestamp(current).date()
|
||||
if last == current:
|
||||
@@ -32,52 +157,62 @@ def determine_day(last, current):
|
||||
return current
|
||||
|
||||
|
||||
def check_update():
|
||||
def check_update(include_beta: bool = False) -> int:
|
||||
import urllib.request
|
||||
import json
|
||||
import importlib
|
||||
from sys import platform
|
||||
from .__init__ import __version__
|
||||
from packaging import version
|
||||
|
||||
package_url_json = "https://pypi.org/pypi/whatsapp-chat-exporter/json"
|
||||
PACKAGE_JSON = "https://pypi.org/pypi/whatsapp-chat-exporter/json"
|
||||
try:
|
||||
raw = urllib.request.urlopen(package_url_json)
|
||||
raw = urllib.request.urlopen(PACKAGE_JSON)
|
||||
except Exception:
|
||||
print("Failed to check for updates.")
|
||||
logging.error("Failed to check for updates.")
|
||||
return 1
|
||||
else:
|
||||
with raw:
|
||||
package_info = json.load(raw)
|
||||
latest_version = tuple(map(int, package_info["info"]["version"].split(".")))
|
||||
current_version = tuple(map(int, __version__.split(".")))
|
||||
if current_version < latest_version:
|
||||
print("===============Update===============")
|
||||
print("A newer version of WhatsApp Chat Exporter is available.")
|
||||
print("Current version: " + __version__)
|
||||
print("Latest version: " + package_info["info"]["version"])
|
||||
if platform == "win32":
|
||||
print("Update with: pip install --upgrade whatsapp-chat-exporter")
|
||||
else:
|
||||
print("Update with: pip3 install --upgrade whatsapp-chat-exporter")
|
||||
print("====================================")
|
||||
if include_beta:
|
||||
all_versions = [version.parse(v) for v in package_info["releases"].keys()]
|
||||
latest_version = max(all_versions, key=lambda v: (v.release, v.pre))
|
||||
else:
|
||||
print("You are using the latest version of WhatsApp Chat Exporter.")
|
||||
latest_version = version.parse(package_info["info"]["version"])
|
||||
current_version = version.parse(importlib.metadata.version("whatsapp_chat_exporter"))
|
||||
if current_version < latest_version:
|
||||
logging.info(
|
||||
"===============Update===============\n"
|
||||
"A newer version of WhatsApp Chat Exporter is available.\n"
|
||||
f"Current version: {current_version}\n"
|
||||
f"Latest version: {latest_version}"
|
||||
)
|
||||
pip_cmd = "pip" if platform == "win32" else "pip3"
|
||||
logging.info(f"Update with: {pip_cmd} install --upgrade whatsapp-chat-exporter {'--pre' if include_beta else ''}")
|
||||
logging.info("====================================")
|
||||
else:
|
||||
logging.info("You are using the latest version of WhatsApp Chat Exporter.")
|
||||
return 0
|
||||
|
||||
|
||||
def rendering(
|
||||
output_file_name,
|
||||
template,
|
||||
name,
|
||||
msgs,
|
||||
contact,
|
||||
w3css,
|
||||
next,
|
||||
chat,
|
||||
):
|
||||
output_file_name,
|
||||
template,
|
||||
name,
|
||||
msgs,
|
||||
contact,
|
||||
w3css,
|
||||
chat,
|
||||
headline,
|
||||
next=False,
|
||||
previous=False
|
||||
):
|
||||
if chat.their_avatar_thumb is None and chat.their_avatar is not None:
|
||||
their_avatar_thumb = chat.their_avatar
|
||||
else:
|
||||
their_avatar_thumb = chat.their_avatar_thumb
|
||||
if "??" not in headline:
|
||||
raise ValueError("Headline must contain '??' to replace with name")
|
||||
headline = headline.replace("??", name)
|
||||
with open(output_file_name, "w", encoding="utf-8") as f:
|
||||
f.write(
|
||||
template.render(
|
||||
@@ -88,7 +223,10 @@ def rendering(
|
||||
their_avatar_thumb=their_avatar_thumb,
|
||||
w3css=w3css,
|
||||
next=next,
|
||||
previous=previous,
|
||||
status=chat.status,
|
||||
media_base=chat.media_base,
|
||||
headline=headline
|
||||
)
|
||||
)
|
||||
|
||||
@@ -99,46 +237,269 @@ class Device(StrEnum):
|
||||
EXPORTED = "exported"
|
||||
|
||||
|
||||
def import_from_json(json_file, data):
|
||||
from Whatsapp_Chat_Exporter.data_model import ChatStore, Message
|
||||
def import_from_json(json_file: str, data: ChatCollection):
|
||||
"""Imports chat data from a JSON file into the data dictionary.
|
||||
|
||||
Args:
|
||||
json_file: The path to the JSON file.
|
||||
data: The dictionary to store the imported chat data.
|
||||
"""
|
||||
with open(json_file, "r") as f:
|
||||
temp_data = json.loads(f.read())
|
||||
total_row_number = len(tuple(temp_data.keys()))
|
||||
print(f"Importing chats from JSON...(0/{total_row_number})", end="\r")
|
||||
for index, (jid, chat_data) in enumerate(temp_data.items()):
|
||||
chat = ChatStore(chat_data.get("type"), chat_data.get("name"))
|
||||
chat.my_avatar = chat_data.get("my_avatar")
|
||||
chat.their_avatar = chat_data.get("their_avatar")
|
||||
chat.their_avatar_thumb = chat_data.get("their_avatar_thumb")
|
||||
chat.status = chat_data.get("status")
|
||||
for id, msg in chat_data.get("messages").items():
|
||||
message = Message(
|
||||
msg["from_me"],
|
||||
msg["timestamp"],
|
||||
msg["time"],
|
||||
msg["key_id"],
|
||||
with tqdm(total=total_row_number, desc="Importing chats from JSON", unit="chat", leave=False) as pbar:
|
||||
for jid, chat_data in temp_data.items():
|
||||
chat = ChatStore.from_json(chat_data)
|
||||
data.add_chat(jid, chat)
|
||||
pbar.update(1)
|
||||
total_time = pbar.format_dict['elapsed']
|
||||
logging.info(f"Imported {total_row_number} chats from JSON in {convert_time_unit(total_time)}")
|
||||
|
||||
|
||||
class IncrementalMerger:
|
||||
"""Handles incremental merging of WhatsApp chat exports."""
|
||||
|
||||
def __init__(self, pretty_print_json: int, avoid_encoding_json: bool):
|
||||
"""Initialize the merger with JSON formatting options.
|
||||
|
||||
Args:
|
||||
pretty_print_json: JSON indentation level.
|
||||
avoid_encoding_json: Whether to avoid ASCII encoding.
|
||||
"""
|
||||
self.pretty_print_json = pretty_print_json
|
||||
self.avoid_encoding_json = avoid_encoding_json
|
||||
|
||||
def _get_json_files(self, source_dir: str) -> List[str]:
|
||||
"""Get list of JSON files from source directory.
|
||||
|
||||
Args:
|
||||
source_dir: Path to the source directory.
|
||||
|
||||
Returns:
|
||||
List of JSON filenames.
|
||||
|
||||
Raises:
|
||||
SystemExit: If no JSON files are found.
|
||||
"""
|
||||
json_files = [f for f in os.listdir(source_dir) if f.endswith('.json')]
|
||||
if not json_files:
|
||||
logging.error("No JSON files found in the source directory.")
|
||||
raise SystemExit(1)
|
||||
|
||||
logging.debug("JSON files found:", json_files)
|
||||
return json_files
|
||||
|
||||
def _copy_new_file(self, source_path: str, target_path: str, target_dir: str, json_file: str) -> None:
|
||||
"""Copy a new JSON file to target directory.
|
||||
|
||||
Args:
|
||||
source_path: Path to source file.
|
||||
target_path: Path to target file.
|
||||
target_dir: Target directory path.
|
||||
json_file: Name of the JSON file.
|
||||
"""
|
||||
logging.info(f"Copying '{json_file}' to target directory...")
|
||||
os.makedirs(target_dir, exist_ok=True)
|
||||
shutil.copy2(source_path, target_path)
|
||||
|
||||
def _load_chat_data(self, file_path: str) -> Dict[str, Any]:
|
||||
"""Load JSON data from file.
|
||||
|
||||
Args:
|
||||
file_path: Path to JSON file.
|
||||
|
||||
Returns:
|
||||
Loaded JSON data.
|
||||
"""
|
||||
with open(file_path, 'r') as file:
|
||||
return json.load(file)
|
||||
|
||||
def _parse_chats_from_json(self, data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Parse JSON data into ChatStore objects.
|
||||
|
||||
Args:
|
||||
data: Raw JSON data.
|
||||
|
||||
Returns:
|
||||
Dictionary of JID to ChatStore objects.
|
||||
"""
|
||||
return {jid: ChatStore.from_json(chat) for jid, chat in data.items()}
|
||||
|
||||
def _merge_chat_stores(self, source_chats: Dict[str, Any], target_chats: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Merge source chats into target chats.
|
||||
|
||||
Args:
|
||||
source_chats: Source ChatStore objects.
|
||||
target_chats: Target ChatStore objects.
|
||||
|
||||
Returns:
|
||||
Merged ChatStore objects.
|
||||
"""
|
||||
for jid, chat in source_chats.items():
|
||||
if jid in target_chats:
|
||||
target_chats[jid].merge_with(chat)
|
||||
else:
|
||||
target_chats[jid] = chat
|
||||
return target_chats
|
||||
|
||||
def _serialize_chats(self, chats: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Serialize ChatStore objects to JSON format.
|
||||
|
||||
Args:
|
||||
chats: Dictionary of ChatStore objects.
|
||||
|
||||
Returns:
|
||||
Serialized JSON data.
|
||||
"""
|
||||
return {jid: chat.to_json() for jid, chat in chats.items()}
|
||||
|
||||
def _has_changes(self, merged_data: Dict[str, Any], original_data: Dict[str, Any]) -> bool:
|
||||
"""Check if merged data differs from original data.
|
||||
|
||||
Args:
|
||||
merged_data: Merged JSON data.
|
||||
original_data: Original JSON data.
|
||||
|
||||
Returns:
|
||||
True if changes detected, False otherwise.
|
||||
"""
|
||||
return json.dumps(merged_data, sort_keys=True) != json.dumps(original_data, sort_keys=True)
|
||||
|
||||
def _save_merged_data(self, target_path: str, merged_data: Dict[str, Any]) -> None:
|
||||
"""Save merged data to target file.
|
||||
|
||||
Args:
|
||||
target_path: Path to target file.
|
||||
merged_data: Merged JSON data.
|
||||
"""
|
||||
with open(target_path, 'w') as merged_file:
|
||||
json.dump(
|
||||
merged_data,
|
||||
merged_file,
|
||||
indent=self.pretty_print_json,
|
||||
ensure_ascii=not self.avoid_encoding_json,
|
||||
)
|
||||
message.media = msg.get("media")
|
||||
message.meta = msg.get("meta")
|
||||
message.data = msg.get("data")
|
||||
message.sender = msg.get("sender")
|
||||
message.safe = msg.get("safe")
|
||||
message.mime = msg.get("mime")
|
||||
message.reply = msg.get("reply")
|
||||
message.quoted_data = msg.get("quoted_data")
|
||||
message.caption = msg.get("caption")
|
||||
message.thumb = msg.get("thumb")
|
||||
message.sticker = msg.get("sticker")
|
||||
chat.add_message(id, message)
|
||||
data[jid] = chat
|
||||
print(f"Importing chats from JSON...({index + 1}/{total_row_number})", end="\r")
|
||||
|
||||
def _merge_json_file(self, source_path: str, target_path: str, json_file: str) -> None:
|
||||
"""Merge a single JSON file.
|
||||
|
||||
Args:
|
||||
source_path: Path to source file.
|
||||
target_path: Path to target file.
|
||||
json_file: Name of the JSON file.
|
||||
"""
|
||||
logging.info(f"Merging '{json_file}' with existing file in target directory...", extra={"clear": True})
|
||||
|
||||
source_data = self._load_chat_data(source_path)
|
||||
target_data = self._load_chat_data(target_path)
|
||||
|
||||
source_chats = self._parse_chats_from_json(source_data)
|
||||
target_chats = self._parse_chats_from_json(target_data)
|
||||
|
||||
merged_chats = self._merge_chat_stores(source_chats, target_chats)
|
||||
merged_data = self._serialize_chats(merged_chats)
|
||||
|
||||
if self._has_changes(merged_data, target_data):
|
||||
logging.info(f"Changes detected in '{json_file}', updating target file...")
|
||||
self._save_merged_data(target_path, merged_data)
|
||||
else:
|
||||
logging.info(f"No changes detected in '{json_file}', skipping update.")
|
||||
|
||||
def _should_copy_media_file(self, source_file: str, target_file: str) -> bool:
|
||||
"""Check if media file should be copied.
|
||||
|
||||
Args:
|
||||
source_file: Path to source media file.
|
||||
target_file: Path to target media file.
|
||||
|
||||
Returns:
|
||||
True if file should be copied, False otherwise.
|
||||
"""
|
||||
return not os.path.exists(target_file) or os.path.getmtime(source_file) > os.path.getmtime(target_file)
|
||||
|
||||
def _merge_media_directories(self, source_dir: str, target_dir: str, media_dir: str) -> None:
|
||||
"""Merge media directories from source to target.
|
||||
|
||||
Args:
|
||||
source_dir: Source directory path.
|
||||
target_dir: Target directory path.
|
||||
media_dir: Media directory name.
|
||||
"""
|
||||
source_media_path = os.path.join(source_dir, media_dir)
|
||||
target_media_path = os.path.join(target_dir, media_dir)
|
||||
|
||||
logging.info(f"Merging media directories. Source: {source_media_path}, target: {target_media_path}")
|
||||
|
||||
if not os.path.exists(source_media_path):
|
||||
return
|
||||
|
||||
for root, _, files in os.walk(source_media_path):
|
||||
relative_path = os.path.relpath(root, source_media_path)
|
||||
target_root = os.path.join(target_media_path, relative_path)
|
||||
os.makedirs(target_root, exist_ok=True)
|
||||
|
||||
for file in files:
|
||||
source_file = os.path.join(root, file)
|
||||
target_file = os.path.join(target_root, file)
|
||||
|
||||
if self._should_copy_media_file(source_file, target_file):
|
||||
logging.debug(f"Copying '{source_file}' to '{target_file}'...")
|
||||
shutil.copy2(source_file, target_file)
|
||||
|
||||
def merge(self, source_dir: str, target_dir: str, media_dir: str) -> None:
|
||||
"""Merge JSON files and media from source to target directory.
|
||||
|
||||
Args:
|
||||
source_dir: The path to the source directory containing JSON files.
|
||||
target_dir: The path to the target directory to merge into.
|
||||
media_dir: The path to the media directory.
|
||||
"""
|
||||
json_files = self._get_json_files(source_dir)
|
||||
|
||||
logging.info("Starting incremental merge process...")
|
||||
for json_file in json_files:
|
||||
source_path = os.path.join(source_dir, json_file)
|
||||
target_path = os.path.join(target_dir, json_file)
|
||||
|
||||
if not os.path.exists(target_path):
|
||||
self._copy_new_file(source_path, target_path, target_dir, json_file)
|
||||
else:
|
||||
self._merge_json_file(source_path, target_path, json_file)
|
||||
|
||||
self._merge_media_directories(source_dir, target_dir, media_dir)
|
||||
|
||||
|
||||
def get_file_name(contact: str, chat: ChatStore):
|
||||
def incremental_merge(source_dir: str, target_dir: str, media_dir: str, pretty_print_json: int, avoid_encoding_json: bool) -> None:
|
||||
"""Wrapper for merging JSON files from the source directory into the target directory.
|
||||
|
||||
Args:
|
||||
source_dir: The path to the source directory containing JSON files.
|
||||
target_dir: The path to the target directory to merge into.
|
||||
media_dir: The path to the media directory.
|
||||
pretty_print_json: JSON indentation level.
|
||||
avoid_encoding_json: Whether to avoid ASCII encoding.
|
||||
"""
|
||||
merger = IncrementalMerger(pretty_print_json, avoid_encoding_json)
|
||||
merger.merge(source_dir, target_dir, media_dir)
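# Illustrative call (paths and options are hypothetical, not from the diff):
#   incremental_merge("exports/2024-06", "exports/merged", "WhatsApp",
#                     pretty_print_json=2, avoid_encoding_json=True)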
|
||||
|
||||
|
||||
def get_file_name(contact: str, chat: ChatStore) -> Tuple[str, str]:
|
||||
"""Generates a sanitized filename and contact name for a chat.
|
||||
|
||||
Args:
|
||||
contact: The contact identifier (e.g., a phone number or group ID).
|
||||
chat: The ChatStore object for the chat.
|
||||
|
||||
Returns:
|
||||
A tuple containing the sanitized filename and the contact name.
|
||||
|
||||
Raises:
|
||||
ValueError: If the contact format is unexpected.
|
||||
"""
|
||||
if "@" not in contact and contact not in ("000000000000000", "000000000000001", "ExportedChat"):
|
||||
raise ValueError("Unexpected contact format: " + contact)
|
||||
phone_number = contact.split('@')[0]
|
||||
if "-" in contact:
|
||||
if "-" in contact and chat.name is not None:
|
||||
file_name = ""
|
||||
else:
|
||||
file_name = phone_number
|
||||
@@ -146,12 +507,109 @@ def get_file_name(contact: str, chat: ChatStore):
|
||||
if chat.name is not None:
|
||||
if file_name != "":
|
||||
file_name += "-"
|
||||
file_name += chat.name.replace("/", "-")
|
||||
file_name += chat.name.replace("/", "-").replace("\\", "-")
|
||||
name = chat.name
|
||||
else:
|
||||
name = phone_number
|
||||
|
||||
return "".join(x for x in file_name if x.isalnum() or x in "- "), name
|
||||
return safe_name(file_name), name
|
||||
|
||||
|
||||
def get_cond_for_empty(enable: bool, jid_field: str, broadcast_field: str) -> str:
|
||||
"""Generates a SQL condition for filtering empty chats.
|
||||
|
||||
Args:
|
||||
enable: True to include non-empty chats, False to include empty chats.
|
||||
jid_field: The name of the JID field in the SQL query.
|
||||
broadcast_field: The column name of the broadcast field in the SQL query.
|
||||
|
||||
Returns:
|
||||
A SQL condition string.
|
||||
"""
|
||||
return f"AND (chat.hidden=0 OR {jid_field}='status@broadcast' OR {broadcast_field}>0)" if enable else ""
|
||||
|
||||
|
||||
def _get_group_condition(jid: str, platform: str) -> str:
|
||||
"""Generate platform-specific group identification condition.
|
||||
|
||||
Args:
|
||||
jid: The JID column name.
|
||||
platform: The platform ("android" or "ios").
|
||||
|
||||
Returns:
|
||||
SQL condition string for group identification.
|
||||
|
||||
Raises:
|
||||
ValueError: If platform is not supported.
|
||||
"""
|
||||
if platform == "android":
|
||||
return f"{jid}.type == 1"
|
||||
elif platform == "ios":
|
||||
return f"{jid} IS NOT NULL"
|
||||
else:
|
||||
raise ValueError(
|
||||
"Only android and ios are supported for argument platform if jid is not None")
|
||||
|
||||
|
||||
def get_chat_condition(
|
||||
filter: Optional[List[str]],
|
||||
include: bool,
|
||||
columns: List[str],
|
||||
jid: Optional[str] = None,
|
||||
platform: Optional[str] = None
|
||||
) -> str:
|
||||
"""Generates a SQL condition for filtering chats based on inclusion or exclusion criteria.
|
||||
|
||||
SQL injection risks from chat filters were evaluated during development and deemed negligible
|
||||
due to the tool's offline, trusted-input model (user running this tool on WhatsApp
|
||||
backups/databases on their own device).
|
||||
|
||||
Args:
|
||||
filter: A list of phone numbers to include or exclude.
|
||||
include: True to include chats that match the filter, False to exclude them.
|
||||
columns: A list of column names to check against the filter.
|
||||
jid: The JID column name (used for group identification).
|
||||
platform: The platform ("android" or "ios") for platform-specific JID queries.
|
||||
|
||||
Returns:
|
||||
A SQL condition string.
|
||||
|
||||
Raises:
|
||||
ValueError: If the column count is invalid or an unsupported platform is provided.
|
||||
"""
|
||||
if not filter:
|
||||
return ""
|
||||
|
||||
if jid is not None and len(columns) < 2:
|
||||
raise ValueError(
|
||||
"There must be at least two elements in argument columns if jid is not None")
|
||||
|
||||
# Get group condition if needed
|
||||
is_group_condition = None
|
||||
if jid is not None:
|
||||
is_group_condition = _get_group_condition(jid, platform)
|
||||
|
||||
# Build conditions for each chat filter
|
||||
conditions = []
|
||||
for index, chat in enumerate(filter):
|
||||
# Add connector for subsequent conditions (with double space)
|
||||
connector = " OR" if include else " AND"
|
||||
prefix = connector if index > 0 else ""
|
||||
|
||||
# Primary column condition
|
||||
operator = "LIKE" if include else "NOT LIKE"
|
||||
conditions.append(f"{prefix} {columns[0]} {operator} '%{chat}%'")
|
||||
|
||||
# Secondary column condition for groups
|
||||
if len(columns) > 1 and is_group_condition:
|
||||
if include:
|
||||
group_condition = f" OR ({columns[1]} {operator} '%{chat}%' AND {is_group_condition})"
|
||||
else:
|
||||
group_condition = f" AND ({columns[1]} {operator} '%{chat}%' AND {is_group_condition})"
|
||||
conditions.append(group_condition)
|
||||
|
||||
combined_conditions = "".join(conditions)
|
||||
return f"AND ({combined_conditions})"
|
||||
|
||||
|
||||
# Android Specific
|
||||
@@ -161,6 +619,8 @@ CRYPT14_OFFSETS = (
|
||||
{"iv": 66, "db": 99},
|
||||
{"iv": 67, "db": 193},
|
||||
{"iv": 67, "db": 194},
|
||||
{"iv": 67, "db": 158},
|
||||
{"iv": 67, "db": 196},
|
||||
)
|
||||
|
||||
|
||||
@@ -170,13 +630,21 @@ class Crypt(IntEnum):
|
||||
CRYPT12 = 12
|
||||
|
||||
|
||||
def brute_force_offset(max_iv=200, max_db=200):
|
||||
for iv in range(0, max_iv):
|
||||
for db in range(0, max_db):
|
||||
yield iv, iv + 16, db
|
||||
class DbType(StrEnum):
|
||||
MESSAGE = "message"
|
||||
CONTACT = "contact"
|
||||
|
||||
|
||||
def determine_metadata(content, init_msg):
|
||||
def determine_metadata(content: sqlite3.Row, init_msg: Optional[str]) -> Optional[str]:
|
||||
"""Determines the metadata of a message.
|
||||
|
||||
Args:
|
||||
content (sqlite3.Row): A row from the messages table.
|
||||
init_msg (Optional[str]): The initial message, if any.
|
||||
|
||||
Returns:
|
||||
The metadata as a string or None if the type is unsupported.
|
||||
"""
|
||||
msg = init_msg if init_msg else ""
|
||||
if content["is_me_joined"] == 1: # Override
|
||||
return f"You were added into the group by {msg}"
|
||||
@@ -224,7 +692,7 @@ def determine_metadata(content, init_msg):
|
||||
msg = "Someone joined this group by using a invite link" # TODO: Find out who
|
||||
elif content["action_type"] == 27:
|
||||
msg += " changed the group description to:<br>"
|
||||
msg += content['data'].replace("\n", '<br>')
|
||||
msg += (content['data'] or "Unknown").replace("\n", '<br>')
|
||||
elif content["action_type"] == 28:
|
||||
try:
|
||||
old = content['old_jid'].split('@')[0]
|
||||
@@ -234,7 +702,7 @@ def determine_metadata(content, init_msg):
|
||||
else:
|
||||
msg = f"{old} changed their number to {new}"
|
||||
elif content["action_type"] == 46:
|
||||
return # Voice message in PM??? Seems no need to handle.
|
||||
return # Voice message in PM??? Seems no need to handle.
|
||||
elif content["action_type"] == 47:
|
||||
msg = "The contact is an official business account"
|
||||
elif content["action_type"] == 50:
|
||||
@@ -247,17 +715,28 @@ def determine_metadata(content, init_msg):
|
||||
else:
|
||||
msg = "The security code in this chat changed"
|
||||
elif content["action_type"] == 58:
|
||||
msg = "You blocked this contact"
|
||||
msg = "You blocked/unblocked this contact"
|
||||
elif content["action_type"] == 67:
|
||||
return # (PM) this contact use secure service from Facebook???
|
||||
elif content["action_type"] == 69:
|
||||
return # (PM) this contact use secure service from Facebook??? What's the difference with 67????
|
||||
# (PM) this contact use secure service from Facebook??? What's the difference with 67????
|
||||
return
|
||||
else:
|
||||
return # Unsupported
|
||||
return msg
|
||||
|
||||
|
||||
def get_status_location(output_folder, offline_static):
|
||||
def get_status_location(output_folder: str, offline_static: str) -> str:
|
||||
"""
|
||||
Gets the location of the W3.CSS file, either from web or local storage.
|
||||
|
||||
Args:
|
||||
output_folder (str): The folder where offline static files will be stored.
|
||||
offline_static (str): The subfolder name for static files. If falsy, returns web URL.
|
||||
|
||||
Returns:
|
||||
str: The path or URL to the W3.CSS file.
|
||||
"""
|
||||
w3css = "https://www.w3schools.com/w3css/4/w3.css"
|
||||
if not offline_static:
|
||||
return w3css
|
||||
@@ -268,14 +747,90 @@ def get_status_location(output_folder, offline_static):
|
||||
w3css_path = os.path.join(static_folder, "w3.css")
|
||||
if not os.path.isfile(w3css_path):
|
||||
with urllib.request.urlopen(w3css) as resp:
|
||||
with open(w3css_path, "wb") as f: f.write(resp.read())
|
||||
with open(w3css_path, "wb") as f:
|
||||
f.write(resp.read())
|
||||
w3css = os.path.join(offline_static, "w3.css")
|
||||
return w3css
|
||||
|
||||
|
||||
def setup_template(template, no_avatar):
|
||||
if template is None:
|
||||
def check_jid_map(db: sqlite3.Connection) -> bool:
|
||||
"""
|
||||
Checks if the jid_map table exists in the database.
|
||||
|
||||
Args:
|
||||
db (sqlite3.Connection): The SQLite database connection.
|
||||
|
||||
Returns:
|
||||
bool: True if the jid_map table exists, False otherwise.
|
||||
"""
|
||||
cursor = db.cursor()
|
||||
cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='jid_map'")
|
||||
return cursor.fetchone() is not None
|
||||
|
||||
|
||||
def get_jid_map_join(jid_map_exists: bool) -> str:
|
||||
"""
|
||||
Returns the SQL JOIN statements for jid_map table.
|
||||
"""
|
||||
if not jid_map_exists:
|
||||
return ""
|
||||
else:
|
||||
return """LEFT JOIN jid_map as jid_map_global
|
||||
ON chat.jid_row_id = jid_map_global.lid_row_id
|
||||
LEFT JOIN jid lid_global
|
||||
ON jid_map_global.jid_row_id = lid_global._id
|
||||
LEFT JOIN jid_map as jid_map_group
|
||||
ON message.sender_jid_row_id = jid_map_group.lid_row_id
|
||||
LEFT JOIN jid lid_group
|
||||
ON jid_map_group.jid_row_id = lid_group._id"""
|
||||
|
||||
def get_jid_map_selection(jid_map_exists: bool) -> tuple:
|
||||
"""
|
||||
Returns the SQL selection statements for jid_map table.
|
||||
"""
|
||||
if not jid_map_exists:
|
||||
return "jid_global.raw_string", "jid_group.raw_string"
|
||||
else:
|
||||
return (
|
||||
"COALESCE(lid_global.raw_string, jid_global.raw_string)",
|
||||
"COALESCE(lid_group.raw_string, jid_group.raw_string)"
|
||||
)
|
||||
|
||||
|
||||
def get_transcription_selection(db: sqlite3.Connection) -> str:
|
||||
"""
|
||||
Returns the SQL selection statement for transcription text based on the database schema.
|
||||
|
||||
Args:
|
||||
db (sqlite3.Connection): The SQLite database connection.
|
||||
Returns:
|
||||
str: The SQL selection statement for transcription.
|
||||
"""
|
||||
cursor = db.cursor()
|
||||
cursor.execute("PRAGMA table_info(message_media)")
|
||||
columns = [row[1] for row in cursor.fetchall()]
|
||||
|
||||
if "raw_transcription_text" in columns:
|
||||
return "message_media.raw_transcription_text AS transcription_text"
|
||||
else:
|
||||
return "NULL AS transcription_text"
|
||||
|
||||
|
||||
def setup_template(template: Optional[str], no_avatar: bool, experimental: bool = False) -> jinja2.Template:
|
||||
"""
|
||||
Sets up the Jinja2 template environment and loads the template.
|
||||
|
||||
Args:
|
||||
template (Optional[str]): Path to custom template file. If None, uses default template.
|
||||
no_avatar (bool): Whether to disable avatar display in the template.
|
||||
experimental (bool, optional): Whether to use experimental template features. Defaults to False.
|
||||
|
||||
Returns:
|
||||
jinja2.Template: The configured Jinja2 template object.
|
||||
"""
|
||||
if template is None or experimental:
|
||||
template_dir = os.path.dirname(__file__)
|
||||
template_file = "whatsapp.html"
|
||||
template_file = "whatsapp.html" if not experimental else template
|
||||
else:
|
||||
template_dir = os.path.dirname(template)
|
||||
template_file = os.path.basename(template)
|
||||
@@ -288,5 +843,140 @@ def setup_template(template, no_avatar):
|
||||
template_env.filters['sanitize_except'] = sanitize_except
|
||||
return template_env.get_template(template_file)
|
||||
|
||||
|
||||
# iOS Specific
|
||||
APPLE_TIME = datetime.timestamp(datetime(2001, 1, 1))
|
||||
APPLE_TIME = 978307200
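# 978307200 is the Unix timestamp of the Apple/Core Data reference date, 2001-01-01 00:00:00 UTC;
# iOS ZDATE/ZMESSAGEDATE values are offsets from it, hence conversions such as
# APPLE_TIME + int(content["ZDATE"]) elsewhere in this changeset.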
|
||||
|
||||
|
||||
def safe_name(text: Union[str, bytes]) -> str:
|
||||
"""
|
||||
Sanitizes the input text and generates a safe file name.
This function serves a similar purpose to slugify() from
Django previously used in this project, but is a clean-room
reimplementation tailored for performance and a narrower
use case for this project. Licensed under the same terms
as the project (MIT).
|
||||
|
||||
Args:
|
||||
text (str|bytes): The string to be sanitized.
|
||||
|
||||
Returns:
|
||||
str: The sanitized string with only alphanumerics, underscores, or hyphens.
|
||||
"""
|
||||
if isinstance(text, bytes):
|
||||
text = text.decode("utf-8", "ignore")
|
||||
elif not isinstance(text, str):
|
||||
raise TypeError("value must be a string or bytes")
|
||||
normalized_text = unicodedata.normalize("NFKC", text)
|
||||
safe_chars = [char for char in normalized_text if char.isalnum() or char in "-_ ."]
|
||||
return "-".join(''.join(safe_chars).split())
|
||||
|
||||
|
||||
def get_from_string(msg: Dict, chat_id: str) -> str:
|
||||
"""Return the number or name for the sender"""
|
||||
if msg["from_me"]:
|
||||
return "Me"
|
||||
if msg["sender"]:
|
||||
return str(msg["sender"])
|
||||
return str(chat_id)
|
||||
|
||||
|
||||
def get_chat_type(chat_id: str) -> str:
|
||||
"""Return the chat type based on the whatsapp id"""
|
||||
if chat_id == "000000000000000":
|
||||
return "calls"
|
||||
elif chat_id.endswith("@s.whatsapp.net"):
|
||||
return "personal_chat"
|
||||
elif chat_id.endswith("@g.us"):
|
||||
return "private_group"
|
||||
elif chat_id == "status@broadcast":
|
||||
return "status_broadcast"
|
||||
elif chat_id.endswith("@broadcast"):
|
||||
return "broadcast_channel"
|
||||
logging.warning(f"Unknown chat type for {chat_id}, defaulting to private_group")
|
||||
return "private_group"
|
||||
|
||||
|
||||
def get_from_id(msg: Dict, chat_id: str) -> str:
|
||||
"""Return the user id for the sender"""
|
||||
if msg["from_me"]:
|
||||
return "user00000"
|
||||
if msg["sender"]:
|
||||
return "user" + msg["sender"]
|
||||
return f"user{chat_id}"
|
||||
|
||||
|
||||
def get_reply_id(data: Dict, reply_key: int) -> Optional[int]:
|
||||
"""Get the id of the message corresponding to the reply"""
|
||||
if not reply_key:
|
||||
return None
|
||||
for msg_id, msg in data["messages"].items():
|
||||
if msg["key_id"] == reply_key:
|
||||
return msg_id
|
||||
return None
|
||||
|
||||
|
||||
def telegram_json_format(jik: str, data: Dict, timezone_offset) -> Dict:
|
||||
"""Convert the data to the Telegram export format"""
|
||||
timing = Timing(timezone_offset or CURRENT_TZ_OFFSET)
|
||||
try:
|
||||
chat_id = int(''.join([c for c in jik if c.isdigit()]))
|
||||
except ValueError:
|
||||
# not a real chat: e.g. status@broadcast
|
||||
chat_id = 0
|
||||
json_obj = {
|
||||
"name": data["name"] if data["name"] else jik,
|
||||
"type": get_chat_type(jik),
|
||||
"id": chat_id,
|
||||
"messages": [ {
|
||||
"id": int(msgId),
|
||||
"type": "message",
|
||||
"date": timing.format_timestamp(msg["timestamp"], "%Y-%m-%dT%H:%M:%S"),
|
||||
"date_unixtime": int(msg["timestamp"]),
|
||||
"from": get_from_string(msg, chat_id),
|
||||
"from_id": get_from_id(msg, chat_id),
|
||||
"reply_to_message_id": get_reply_id(data, msg["reply"]),
|
||||
"text": msg["data"],
|
||||
"text_entities": [
|
||||
{
|
||||
# TODO this will lose formatting and different types
|
||||
"type": "plain",
|
||||
"text": msg["data"],
|
||||
}
|
||||
],
|
||||
}
|
||||
for msgId, msg in data["messages"].items()]
|
||||
}
|
||||
# remove empty messages and replies
|
||||
for msg_id, msg in enumerate(json_obj["messages"]):
|
||||
if not msg["reply_to_message_id"]:
|
||||
del json_obj["messages"][msg_id]["reply_to_message_id"]
|
||||
json_obj["messages"] = [m for m in json_obj["messages"] if m["text"]]
|
||||
return json_obj
|
||||
|
||||
|
||||
class WhatsAppIdentifier(StrEnum):
    # AppDomainGroup-group.net.whatsapp.WhatsApp.shared-ChatStorage.sqlite
    MESSAGE = "7c7fba66680ef796b916b067077cc246adacf01d"
    # AppDomainGroup-group.net.whatsapp.WhatsApp.shared-ContactsV2.sqlite
    CONTACT = "b8548dc30aa1030df0ce18ef08b882cf7ab5212f"
    # AppDomainGroup-group.net.whatsapp.WhatsApp.shared-CallHistory.sqlite
    CALL = "1b432994e958845fffe8e2f190f26d1511534088"
    DOMAIN = "AppDomainGroup-group.net.whatsapp.WhatsApp.shared"


class WhatsAppBusinessIdentifier(StrEnum):
    # AppDomainGroup-group.net.whatsapp.WhatsAppSMB.shared-ChatStorage.sqlite
    MESSAGE = "724bd3b98b18518b455a87c1f3ac3a0d189c4466"
    # AppDomainGroup-group.net.whatsapp.WhatsAppSMB.shared-ContactsV2.sqlite
    CONTACT = "d7246a707f51ddf8b17ee2dddabd9e0a4da5c552"
    # AppDomainGroup-group.net.whatsapp.WhatsAppSMB.shared-CallHistory.sqlite
    CALL = "b463f7c4365eefc5a8723930d97928d4e907c603"
    DOMAIN = "AppDomainGroup-group.net.whatsapp.WhatsAppSMB.shared"


class JidType(IntEnum):
    PM = 0
    GROUP = 1
    SYSTEM_BROADCAST = 5
    STATUS = 11
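

# Editor's illustrative sketch (not part of the module): coercing a raw jid type
# integer from the Android database into JidType, treating unknown values as PM.
# The fallback choice is the editor's assumption, not the project's behaviour.
def _example_jid_type(raw_type: int) -> JidType:
    try:
        return JidType(raw_type)
    except ValueError:
        return JidType.PM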
211  Whatsapp_Chat_Exporter/vcards_contacts.py  Normal file
@@ -0,0 +1,211 @@
import logging
import re
import quopri
from typing import List, TypedDict
from Whatsapp_Chat_Exporter.data_model import ChatStore
from Whatsapp_Chat_Exporter.utility import Device


class ExportedContactNumbers(TypedDict):
    full_name: str
    numbers: List[str]


class ContactsFromVCards:
    def __init__(self) -> None:
        self.contact_mapping = []

    def is_empty(self):
        return self.contact_mapping == []

    def load_vcf_file(self, vcf_file_path: str, default_country_code: str):
        self.contact_mapping = read_vcards_file(vcf_file_path, default_country_code)

    def enrich_from_vcards(self, chats):
        for number, name in self.contact_mapping:
            # A very short number is probably a bad contact, so skip it
            if len(number) <= 5:
                continue
            chats_search = filter_chats_by_prefix(chats, number).values()
            if chats_search:
                for chat in chats_search:
                    if not hasattr(chat, 'name') or chat.name is None:
                        setattr(chat, 'name', name)
            else:
                chats.add_chat(number + "@s.whatsapp.net", ChatStore(Device.ANDROID, name))


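# Editor's illustrative usage sketch (not part of the module): the vcf path and the
# default country code below are made-up values; `chats` stands for the exporter's
# chat collection object.
def _example_enrich_chats(chats) -> None:
    vcard_contacts = ContactsFromVCards()
    vcard_contacts.load_vcf_file("contacts.vcf", default_country_code="44")
    if not vcard_contacts.is_empty():
        vcard_contacts.enrich_from_vcards(chats)

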
def decode_quoted_printable(value: str, charset: str) -> str:
    """Decode a vCard value that may be quoted-printable UTF-8."""
    try:
        bytes_val = quopri.decodestring(value)
        return bytes_val.decode(charset, errors="replace")
    except Exception:
        # Fallback: return the original value if decoding fails
        logging.warning(
            f"Failed to decode quoted-printable value: {value}, "
            f"charset: {charset}. Please report this issue."
        )
        return value


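# Editor's illustrative check (not part of the module): "Jos=C3=A9" is the
# quoted-printable form of the UTF-8 bytes for "José".
def _example_decode_quoted_printable() -> None:
    assert decode_quoted_printable("Jos=C3=A9", "utf-8") == "José"

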
def _parse_vcard_line(line: str) -> tuple[str, dict[str, str], str] | None:
    """
    Parses a single vCard property line into its components:
    property name, parameters (as a dict), and value.

    Example: 'FN;CHARSET=UTF-8:John Doe' -> ('FN', {'CHARSET': 'UTF-8'}, 'John Doe')
    """
    # Find the first colon, which separates the property/parameters from the value.
    colon_index = line.find(':')
    if colon_index == -1:
        return None  # Invalid vCard line format

    prop_and_params = line[:colon_index].strip()
    value = line[colon_index + 1:].strip()

    # Split the property name from its parameters
    parts = prop_and_params.split(';')
    property_name = parts[0].upper()

    parameters = {}
    for part in parts[1:]:
        if '=' in part:
            key, val = part.split('=', 1)
            parameters[key.upper()] = val.strip('"')  # Remove potential quotes from the value

    return property_name, parameters, value


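# Editor's illustrative check (not part of the module), mirroring the docstring example:
def _example_parse_vcard_line() -> None:
    assert _parse_vcard_line('FN;CHARSET=UTF-8:John Doe') == ('FN', {'CHARSET': 'UTF-8'}, 'John Doe')
    assert _parse_vcard_line('a line with no colon') is None

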
def get_vcard_value(entry: str, field_name: str) -> list[str]:
    """
    Scans the vCard entry for lines starting with the specific field_name
    and returns a list of its decoded values, handling parameters like
    ENCODING and CHARSET.
    """
    target_name = field_name.upper()
    cached_line = ""
    charset = "utf-8"
    values = []

    for line in entry.splitlines():
        line = line.strip()
        if cached_line:
            if line.endswith('='):
                cached_line += line[:-1]
                continue  # Wait for the next line to complete the value
            values.append(decode_quoted_printable(cached_line + line, charset))
            cached_line = ""
        else:
            # Skip empty lines or lines that don't start with the target field (after stripping)
            if not line or not line.upper().startswith(target_name):
                continue

            parsed = _parse_vcard_line(line)
            if parsed is None:
                continue

            prop_name, params, raw_value = parsed

            if prop_name != target_name:
                continue

            encoding = params.get('ENCODING')
            charset = params.get('CHARSET', 'utf-8')

            # Apply decoding if the ENCODING parameter is present
            if encoding == 'QUOTED-PRINTABLE':
                if raw_value.endswith('='):
                    # Handle soft line breaks in quoted-printable and cache the line
                    cached_line += raw_value[:-1]
                    continue  # Wait for the next line to complete the value
                values.append(decode_quoted_printable(raw_value, charset))
            elif encoding:
                raise NotImplementedError(f"Encoding '{encoding}' not supported yet.")
            else:
                values.append(raw_value)
    return values


def process_vcard_entry(entry: str) -> dict | bool:
    """
    Process a vCard entry using pure string manipulation.

    Args:
        entry: A string containing a single vCard block.

    Returns:
        A dictionary of the extracted data, or False if required fields are missing.
    """
    name = None

    # Extract the name in priority order: FN -> N -> ORG
    for field in ("FN", "N", "ORG"):
        if name_values := get_vcard_value(entry, field):
            name = name_values[0].replace(';', ' ')  # Simple cleanup for structured names
            break

    if not name:
        return False

    numbers = get_vcard_value(entry, "TEL")
    if not numbers:
        return False

    return {
        "full_name": name,
        # Remove duplicates
        "numbers": set(numbers),
    }


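# Editor's illustrative check (not part of the module); the vCard content is made up.
def _example_process_vcard_entry() -> None:
    entry = "FN:Jane Doe\nTEL;TYPE=CELL:+44 20 7946 0000\nEND:VCARD"
    assert process_vcard_entry(entry) == {
        "full_name": "Jane Doe",
        "numbers": {"+44 20 7946 0000"},
    }

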
def read_vcards_file(vcf_file_path, default_country_code: str):
    contacts = []
    with open(vcf_file_path, "r", encoding="utf-8", errors="ignore") as f:
        content = f.read()

    # Split into individual vCards
    vcards = content.split("BEGIN:VCARD")
    for vcard in vcards:
        if "END:VCARD" not in vcard:
            continue

        if contact := process_vcard_entry(vcard):
            contacts.append(contact)

    logging.info(f"Imported {len(contacts)} contacts/vcards")
    return map_number_to_name(contacts, default_country_code)


def filter_chats_by_prefix(chats, prefix: str):
    return {k: v for k, v in chats.items() if k.startswith(prefix)}


def map_number_to_name(contacts, default_country_code: str):
    mapping = []
    for contact in contacts:
        for index, num in enumerate(contact['numbers']):
            normalized = normalize_number(num, default_country_code)
            if len(contact['numbers']) > 1:
                name = f"{contact['full_name']} ({index+1})"
            else:
                name = contact['full_name']
            mapping.append((normalized, name))
    return mapping


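# Editor's illustrative check (not part of the module); the contact data and the
# default country code "44" are made up.
def _example_map_number_to_name() -> None:
    contacts = [{"full_name": "Jane Doe", "numbers": ["+44 20 7946 0000"]}]
    assert map_number_to_name(contacts, "44") == [("442079460000", "Jane Doe")]

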
def normalize_number(number: str, country_code: str):
    # Clean the number
    number = ''.join(c for c in number if c.isdigit() or c == "+")

    # A number that starts with + or 00 already has a country code
    for starting_char in ('+', "00"):
        if number.startswith(starting_char):
            return number[len(starting_char):]

    # A leading (trunk) zero should be removed
    if number.startswith('0'):
        number = number[1:]
    return country_code + number  # Fall back: prepend the default country code
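

# Editor's illustrative check (not part of the module); the numbers and the
# default country code "49" are made up.
def _example_normalize_number() -> None:
    assert normalize_number("+49 170 1234567", "49") == "491701234567"
    assert normalize_number("0170/1234567", "49") == "491701234567"
    assert normalize_number("0044 20 7946 0000", "49") == "442079460000"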
@@ -1,269 +1,657 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Whatsapp - {{ name }}</title>
|
||||
<meta charset="UTF-8">
|
||||
<link rel="stylesheet" href="{{w3css}}">
|
||||
<style>
|
||||
html, body {
|
||||
font-size: 12px;
|
||||
scroll-behavior: smooth;
|
||||
}
|
||||
header {
|
||||
position: fixed;
|
||||
z-index: 20;
|
||||
border-bottom: 2px solid #e3e6e7;
|
||||
font-size: 2em;
|
||||
font-weight: bolder;
|
||||
background-color: white;
|
||||
padding: 20px 0 20px 0;
|
||||
}
|
||||
footer {
|
||||
border-top: 2px solid #e3e6e7;
|
||||
font-size: 2em;
|
||||
padding: 20px 0 20px 0;
|
||||
}
|
||||
article {
|
||||
width:500px;
|
||||
margin:100px auto;
|
||||
z-index:10;
|
||||
font-size: 15px;
|
||||
word-wrap: break-word;
|
||||
}
|
||||
img, video {
|
||||
max-width:100%;
|
||||
}
|
||||
div.reply{
|
||||
font-size: 13px;
|
||||
text-decoration: none;
|
||||
}
|
||||
div:target::before {
|
||||
content: '';
|
||||
display: block;
|
||||
height: 115px;
|
||||
margin-top: -115px;
|
||||
visibility: hidden;
|
||||
}
|
||||
div:target {
|
||||
border-style: solid;
|
||||
border-width: 2px;
|
||||
animation: border-blink 0.5s steps(1) 5;
|
||||
border-color: rgba(0,0,0,0)
|
||||
}
|
||||
table {
|
||||
width: 100%;
|
||||
}
|
||||
@keyframes border-blink {
|
||||
0% {
|
||||
border-color: #2196F3;
|
||||
}
|
||||
50% {
|
||||
border-color: rgba(0,0,0,0);
|
||||
}
|
||||
}
|
||||
.avatar {
|
||||
border-radius:50%;
|
||||
overflow:hidden;
|
||||
max-width: 64px;
|
||||
max-height: 64px;
|
||||
}
|
||||
.name {
|
||||
color: #3892da;
|
||||
}
|
||||
.pad-left-10 {
|
||||
padding-left: 10px;
|
||||
}
|
||||
.pad-right-10 {
|
||||
padding-right: 10px;
|
||||
}
|
||||
.reply_link {
|
||||
color: #168acc;
|
||||
}
|
||||
.blue {
|
||||
color: #70777a;
|
||||
}
|
||||
.sticker {
|
||||
max-width: 100px !important;
|
||||
max-height: 100px !important;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<header class="w3-center w3-top">
|
||||
Chat history with {{ name }}
|
||||
{% if status is not none %}
|
||||
<br>
|
||||
<span class="w3-small">{{ status }}</span>
|
||||
{% endif %}
|
||||
</header>
|
||||
<article class="w3-container">
|
||||
<div class="table">
|
||||
{% set last = {'last': 946688461.001} %}
|
||||
{% for msg in msgs -%}
|
||||
<div class="w3-row w3-padding-small w3-margin-bottom" id="{{ msg.key_id }}">
|
||||
{% if determine_day(last.last, msg.timestamp) is not none %}
|
||||
<div class="w3-center w3-padding-16 blue">{{ determine_day(last.last, msg.timestamp) }}</div>
|
||||
{% if last.update({'last': msg.timestamp}) %}{% endif %}
|
||||
{% endif %}
|
||||
{% if msg.from_me == true %}
|
||||
<div class="w3-row">
|
||||
<div class="w3-left blue">{{ msg.time }}</div>
|
||||
<div class="name w3-right-align pad-left-10">You</div>
|
||||
</div>
|
||||
<div class="w3-row">
|
||||
{% if not no_avatar and my_avatar is not none %}
|
||||
<div class="w3-col m10 l10">
|
||||
{% else %}
|
||||
<div class="w3-col m12 l12">
|
||||
{% endif %}
|
||||
<div class="w3-right-align">
|
||||
{% if msg.reply is not none %}
|
||||
<div class="reply">
|
||||
<span class="blue">Replying to </span>
|
||||
<a href="#{{msg.reply}}" class="reply_link">
|
||||
{% if msg.quoted_data is not none %}
|
||||
"{{msg.quoted_data}}"
|
||||
{% else %}
|
||||
this message
|
||||
{% endif %}
|
||||
</a>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if msg.meta == true or msg.media == false and msg.data is none %}
|
||||
<div class="w3-panel w3-border-blue w3-pale-blue w3-rightbar w3-leftbar w3-threequarter w3-center">
|
||||
{% if msg.safe %}
|
||||
<p>{{ msg.data | safe or 'Not supported WhatsApp internal message' }}</p>
|
||||
{% else %}
|
||||
<p>{{ msg.data or 'Not supported WhatsApp internal message' }}</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% else %}
|
||||
{% if msg.media == false %}
|
||||
{{ msg.data | sanitize_except() }}
|
||||
{% else %}
|
||||
{% if "image/" in msg.mime %}
|
||||
<a href="{{ msg.data }}">
|
||||
<img src="{{ msg.thumb if msg.thumb is not none else msg.data }}" {{ 'class="sticker"' | safe if msg.sticker }} />
|
||||
</a>
|
||||
{% elif "audio/" in msg.mime %}
|
||||
<audio controls="controls" autobuffer="autobuffer">
|
||||
<source src="{{ msg.data }}" />
|
||||
</audio>
|
||||
{% elif "video/" in msg.mime %}
|
||||
<video controls="controls" autobuffer="autobuffer">
|
||||
<source src="{{ msg.data }}" />
|
||||
</video>
|
||||
{% elif "/" in msg.mime %}
|
||||
<div class="w3-panel w3-border-blue w3-pale-blue w3-rightbar w3-leftbar w3-threequarter w3-center">
|
||||
<p>The file cannot be displayed here, however it should be located at <a href="./{{ msg.data }}">here</a></p>
|
||||
</div>
|
||||
{% else %}
|
||||
{% filter escape %}{{ msg.data }}{% endfilter %}
|
||||
{% endif %}
|
||||
{% if msg.caption is not none %}
|
||||
<br>
|
||||
{{ msg.caption }}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
{% if not no_avatar and my_avatar is not none %}
|
||||
<div class="w3-col m2 l2 pad-left-10">
|
||||
<a href="{{ my_avatar }}">
|
||||
<img src="{{ my_avatar }}" onerror="this.style.display='none'" class="avatar">
|
||||
</a>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="w3-row">
|
||||
<div class="w3-left pad-right-10 name">
|
||||
{% if msg.sender is not none %}
|
||||
{{ msg.sender }}
|
||||
{% else %}
|
||||
{{ name }}
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="w3-right-align blue">{{ msg.time }}</div>
|
||||
</div>
|
||||
<div class="w3-row">
|
||||
{% if not no_avatar %}
|
||||
<div class="w3-col m2 l2">
|
||||
{% if their_avatar is not none %}
|
||||
<a href="{{ their_avatar }}"><img src="{{ their_avatar_thumb or '' }}" onerror="this.style.display='none'" class="avatar"></a>
|
||||
{% else %}
|
||||
<img src="{{ their_avatar_thumb or '' }}" onerror="this.style.display='none'" class="avatar">
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="w3-col m10 l10">
|
||||
{% else %}
|
||||
<div class="w3-col m12 l12">
|
||||
{% endif %}
|
||||
<div class="w3-left-align">
|
||||
{% if msg.reply is not none %}
|
||||
<div class="reply">
|
||||
<span class="blue">Replying to </span>
|
||||
<a href="#{{msg.reply}}" class="reply_link">
|
||||
{% if msg.quoted_data is not none %}
|
||||
"{{msg.quoted_data}}"
|
||||
{% else %}
|
||||
this message
|
||||
{% endif %}
|
||||
</a>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if msg.meta == true or msg.media == false and msg.data is none %}
|
||||
<div class="w3-panel w3-border-blue w3-pale-blue w3-rightbar w3-leftbar w3-threequarter w3-center">
|
||||
{% if msg.safe %}
|
||||
<p>{{ msg.data | safe or 'Not supported WhatsApp internal message' }}</p>
|
||||
{% else %}
|
||||
<p>{{ msg.data or 'Not supported WhatsApp internal message' }}</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% else %}
|
||||
{% if msg.media == false %}
|
||||
{{ msg.data | sanitize_except() }}
|
||||
{% else %}
|
||||
{% if "image/" in msg.mime %}
|
||||
<a href="{{ msg.data }}">
|
||||
<img src="{{ msg.thumb if msg.thumb is not none else msg.data }}" {{ 'class="sticker"' | safe if msg.sticker }} />
|
||||
</a>
|
||||
{% elif "audio/" in msg.mime %}
|
||||
<audio controls="controls" autobuffer="autobuffer">
|
||||
<source src="{{ msg.data }}" />
|
||||
</audio>
|
||||
{% elif "video/" in msg.mime %}
|
||||
<video controls="controls" autobuffer="autobuffer">
|
||||
<source src="{{ msg.data }}" />
|
||||
</video>
|
||||
{% elif "/" in msg.mime %}
|
||||
<div class="w3-panel w3-border-blue w3-pale-blue w3-rightbar w3-leftbar w3-threequarter w3-center">
|
||||
<p>The file cannot be displayed here, however it should be located at <a href="./{{ msg.data }}">here</a></p>
|
||||
</div>
|
||||
{% else %}
|
||||
{% filter escape %}{{ msg.data }}{% endfilter %}
|
||||
{% endif %}
|
||||
{% if msg.caption is not none %}
|
||||
<br>
|
||||
{{ msg.caption }}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</article>
|
||||
<footer class="w3-center">
|
||||
{% if next %}
|
||||
<a href="./{{ next }}">Next</a>
|
||||
{% else %}
|
||||
End of history
|
||||
{% endif %}
|
||||
</footer>
|
||||
</body>
|
||||
<head>
|
||||
<title>Whatsapp - {{ name }}</title>
|
||||
<meta charset="UTF-8">
|
||||
<script src="https://cdn.tailwindcss.com"></script>
|
||||
<script>
|
||||
tailwind.config = {
|
||||
theme: {
|
||||
extend: {
|
||||
colors: {
|
||||
whatsapp: {
|
||||
light: '#e7ffdb',
|
||||
DEFAULT: '#25D366',
|
||||
dark: '#075E54',
|
||||
chat: '#efeae2',
|
||||
'chat-light': '#f0f2f5',
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
</script>
|
||||
<style>
|
||||
body, html {
|
||||
height: 100%;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
scroll-behavior: smooth !important;
|
||||
}
|
||||
.chat-list {
|
||||
height: calc(100vh - 120px);
|
||||
overflow-y: auto;
|
||||
}
|
||||
.message-list {
|
||||
height: calc(100vh - 90px);
|
||||
overflow-y: auto;
|
||||
}
|
||||
@media (max-width: 640px) {
|
||||
.chat-list, .message-list {
|
||||
height: calc(100vh - 108px);
|
||||
}
|
||||
}
|
||||
header {
|
||||
position: fixed;
|
||||
z-index: 20;
|
||||
border-bottom: 2px solid #e3e6e7;
|
||||
font-size: 2em;
|
||||
font-weight: bolder;
|
||||
background-color: white;
|
||||
padding: 20px 0 20px 0;
|
||||
}
|
||||
footer {
|
||||
margin-top: 10px;
|
||||
border-top: 2px solid #e3e6e7;
|
||||
padding: 20px 0 20px 0;
|
||||
}
|
||||
article {
|
||||
width:430px;
|
||||
margin: auto;
|
||||
z-index:10;
|
||||
font-size: 15px;
|
||||
word-wrap: break-word;
|
||||
}
|
||||
img, video, audio{
|
||||
max-width:100%;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
div.reply{
|
||||
font-size: 13px;
|
||||
text-decoration: none;
|
||||
}
|
||||
div:target::before {
|
||||
content: '';
|
||||
display: block;
|
||||
height: 115px;
|
||||
margin-top: -115px;
|
||||
visibility: hidden;
|
||||
}
|
||||
div:target {
|
||||
animation: 3s highlight;
|
||||
}
|
||||
.avatar {
|
||||
border-radius:50%;
|
||||
overflow:hidden;
|
||||
max-width: 64px;
|
||||
max-height: 64px;
|
||||
}
|
||||
.name {
|
||||
color: #3892da;
|
||||
}
|
||||
.pad-left-10 {
|
||||
padding-left: 10px;
|
||||
}
|
||||
.pad-right-10 {
|
||||
padding-right: 10px;
|
||||
}
|
||||
.reply_link {
|
||||
color: #168acc;
|
||||
}
|
||||
.blue {
|
||||
color: #70777a;
|
||||
}
|
||||
.sticker {
|
||||
max-width: 100px !important;
|
||||
max-height: 100px !important;
|
||||
}
|
||||
@keyframes highlight {
|
||||
from {
|
||||
background-color: rgba(37, 211, 102, 0.1);
|
||||
}
|
||||
to {
|
||||
background-color: transparent;
|
||||
}
|
||||
}
|
||||
.search-input {
|
||||
transform: translateY(-100%);
|
||||
transition: transform 0.3s ease-in-out;
|
||||
}
|
||||
.search-input.active {
|
||||
transform: translateY(0);
|
||||
}
|
||||
.reply-box:active {
|
||||
background-color:rgb(200 202 205 / var(--tw-bg-opacity, 1));
|
||||
}
|
||||
.info-box-tooltip {
|
||||
--tw-translate-x: -50%;
|
||||
transform: translate(var(--tw-translate-x), var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));
|
||||
}
|
||||
|
||||
.status-indicator {
|
||||
display: inline-block;
|
||||
margin-left: 4px;
|
||||
font-size: 0.8em;
|
||||
color: #8c8c8c;
|
||||
}
|
||||
|
||||
.status-indicator.read {
|
||||
color: #34B7F1;
|
||||
}
|
||||
|
||||
.play-icon {
|
||||
width: 0;
|
||||
height: 0;
|
||||
border-left: 8px solid white;
|
||||
border-top: 5px solid transparent;
|
||||
border-bottom: 5px solid transparent;
|
||||
filter: drop-shadow(0 1px 2px rgba(0, 0, 0, 0.3));
|
||||
}
|
||||
|
||||
.speaker-icon {
|
||||
position: relative;
|
||||
width: 8px;
|
||||
height: 6px;
|
||||
background: #666;
|
||||
border-radius: 1px 0 0 1px;
|
||||
}
|
||||
|
||||
.speaker-icon::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
right: -4px;
|
||||
top: -1px;
|
||||
width: 0;
|
||||
height: 0;
|
||||
border-left: 4px solid #666;
|
||||
border-top: 4px solid transparent;
|
||||
border-bottom: 4px solid transparent;
|
||||
}
|
||||
|
||||
.speaker-icon::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
right: -8px;
|
||||
top: -3px;
|
||||
width: 8px;
|
||||
height: 12px;
|
||||
border: 2px solid #666;
|
||||
border-left: none;
|
||||
border-radius: 0 8px 8px 0;
|
||||
}
|
||||
|
||||
.search-icon {
|
||||
width: 20px;
|
||||
height: 20px;
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.search-icon::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
width: 12px;
|
||||
height: 12px;
|
||||
border: 2px solid #aebac1;
|
||||
border-radius: 50%;
|
||||
top: 2px;
|
||||
left: 2px;
|
||||
}
|
||||
|
||||
.search-icon::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
width: 2px;
|
||||
height: 6px;
|
||||
background: #aebac1;
|
||||
transform: rotate(45deg);
|
||||
top: 12px;
|
||||
left: 12px;
|
||||
}
|
||||
|
||||
.arrow-left {
|
||||
width: 0;
|
||||
height: 0;
|
||||
border-top: 6px solid transparent;
|
||||
border-bottom: 6px solid transparent;
|
||||
border-right: 8px solid #aebac1;
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.arrow-right {
|
||||
width: 0;
|
||||
height: 0;
|
||||
border-top: 6px solid transparent;
|
||||
border-bottom: 6px solid transparent;
|
||||
border-left: 8px solid #aebac1;
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.info-icon {
|
||||
width: 20px;
|
||||
height: 20px;
|
||||
border: 2px solid currentColor;
|
||||
border-radius: 50%;
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.info-icon::before {
|
||||
content: 'i';
|
||||
position: absolute;
|
||||
top: 50%;
|
||||
left: 50%;
|
||||
transform: translate(-50%, -50%);
|
||||
font-size: 12px;
|
||||
font-weight: bold;
|
||||
font-style: normal;
|
||||
}
|
||||
</style>
|
||||
<script>
|
||||
function search(event) {
|
||||
keywords = document.getElementById("mainHeaderSearchInput").value;
|
||||
hits = [];
|
||||
document.querySelectorAll(".message-text").forEach(elem => {
|
||||
if (elem.innerText.trim().includes(keywords)){
|
||||
hits.push(elem.parentElement.parentElement.id);
|
||||
}
|
||||
})
|
||||
console.log(hits);
|
||||
}
|
||||
</script>
|
||||
<base href="{{ media_base }}" target="_blank">
|
||||
</head>
|
||||
<body>
|
||||
<article class="h-screen bg-whatsapp-chat-light">
|
||||
<div class="w-full flex flex-col">
|
||||
<div class="p-3 bg-whatsapp-dark flex items-center justify-between border-l border-[#d1d7db]">
|
||||
<div class="flex items-center">
|
||||
{% if not no_avatar %}
|
||||
<div class="w3-col m2 l2">
|
||||
{% if their_avatar is not none %}
|
||||
<a href="{{ their_avatar }}"><img src="{{ their_avatar_thumb or '' }}" onerror="this.style.display='none'" class="w-10 h-10 rounded-full mr-3" loading="lazy"></a>
|
||||
{% else %}
|
||||
<img src="{{ their_avatar_thumb or '' }}" onerror="this.style.display='none'" class="w-10 h-10 rounded-full mr-3" loading="lazy">
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
||||
<div>
|
||||
<h2 class="text-white font-medium">{{ headline }}</h2>
|
||||
{% if status is not none %}<p class="text-[#8696a0] text-xs">{{ status }}</p>{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex space-x-4">
|
||||
<!-- <button id="searchButton">
|
||||
<span class="search-icon"></span>
|
||||
</button> -->
|
||||
<!-- <span class="arrow-left"></span> -->
|
||||
{% if previous %}
|
||||
<a href="./{{ previous }}" target="_self">
|
||||
<span class="arrow-left"></span>
|
||||
</a>
|
||||
{% endif %}
|
||||
{% if next %}
|
||||
<a href="./{{ next }}" target="_self">
|
||||
<span class="arrow-right"></span>
|
||||
</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
<!-- Search Input Overlay -->
|
||||
<div id="mainSearchInput" class="search-input absolute article top-0 bg-whatsapp-dark p-3 flex items-center space-x-3">
|
||||
<button id="closeMainSearch" class="text-[#aebac1]">
|
||||
<span class="arrow-left"></span>
|
||||
</button>
|
||||
<input type="text" placeholder="Search..." class="flex-1 bg-[#1f2c34] text-white rounded-lg px-3 py-1 focus:outline-none" id="mainHeaderSearchInput" onkeyup="search(event)">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex-1 p-5 message-list">
|
||||
<div class="flex flex-col space-y-2">
|
||||
<!--Date-->
|
||||
{% set last = {'last': 946688461.001} %}
|
||||
{% for msg in msgs -%}
|
||||
{% if determine_day(last.last, msg.timestamp) is not none %}
|
||||
<div class="flex justify-center">
|
||||
<div class="bg-[#e1f2fb] rounded-lg px-2 py-1 text-xs text-[#54656f]">
|
||||
{{ determine_day(last.last, msg.timestamp) }}
|
||||
</div>
|
||||
</div>
|
||||
{% if last.update({'last': msg.timestamp}) %}{% endif %}
|
||||
{% endif %}
|
||||
<!--Actual messages-->
|
||||
{% if msg.from_me == true %}
|
||||
<div class="flex justify-end items-center group" id="{{ msg.key_id }}">
|
||||
<div class="opacity-0 group-hover:opacity-100 transition-opacity duration-200 relative mr-2">
|
||||
<div class="relative">
|
||||
<div class="relative group/tooltip">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" class="h-5 w-5 text-[#8696a0] hover:text-[#54656f] cursor-pointer" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
||||
<use href="#info-icon"></use>
|
||||
</svg>
|
||||
<div class="absolute bottom-full info-box-tooltip mb-2 hidden group-hover/tooltip:block z-50">
|
||||
<div class="bg-black text-white text-xs rounded py-1 px-2 whitespace-nowrap">
|
||||
Delivered at {{msg.received_timestamp or 'unknown'}}
|
||||
{% if msg.read_timestamp is not none %}
|
||||
<br>Read at {{ msg.read_timestamp }}
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="absolute top-full right-3 -mt-1 border-4 border-transparent border-t-black"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="bg-whatsapp-light rounded-lg p-2 max-w-[80%] shadow-sm relative {% if msg.reactions %}mb-2{% endif %}">
|
||||
{% if msg.reply is not none %}
|
||||
<a href="#{{msg.reply}}" target="_self" class="no-base">
|
||||
<div
|
||||
class="mb-2 p-1 bg-whatsapp-chat-light rounded border-l-4 border-whatsapp text-sm reply-box">
|
||||
<div class="flex items-center gap-2">
|
||||
<div class="flex-1 overflow-hidden">
|
||||
<p class="text-whatsapp font-medium text-xs">Replying to</p>
|
||||
<p class="text-[#111b21] text-xs truncate">
|
||||
{% if msg.quoted_data is not none %}
|
||||
"{{msg.quoted_data}}"
|
||||
{% else %}
|
||||
this message
|
||||
{% endif %}
|
||||
</p>
|
||||
</div>
|
||||
{% set replied_msg = msgs | selectattr('key_id', 'equalto', msg.reply) | first %}
|
||||
{% if replied_msg and replied_msg.media == true %}
|
||||
<div class="flex-shrink-0">
|
||||
{% if "image/" in replied_msg.mime %}
|
||||
<img src="{{ replied_msg.thumb if replied_msg.thumb is not none else replied_msg.data }}"
|
||||
class="w-8 h-8 rounded object-cover" loading="lazy" />
|
||||
{% elif "video/" in replied_msg.mime %}
|
||||
<div class="relative w-8 h-8 rounded overflow-hidden bg-gray-200">
|
||||
<img src="{{ replied_msg.thumb if replied_msg.thumb is not none else replied_msg.data }}"
|
||||
class="w-full h-full object-cover" loading="lazy" />
|
||||
<div class="absolute inset-0 flex items-center justify-center">
|
||||
<div class="play-icon"></div>
|
||||
</div>
|
||||
</div>
|
||||
{% elif "audio/" in replied_msg.mime %}
|
||||
<div class="w-8 h-8 rounded bg-gray-200 flex items-center justify-center">
|
||||
<div class="speaker-icon"></div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</a>
|
||||
{% endif %}
|
||||
<p class="text-[#111b21] text-sm message-text">
|
||||
{% if msg.meta == true or msg.media == false and msg.data is none %}
|
||||
<div class="flex justify-center mb-2">
|
||||
<div class="bg-[#FFF3C5] rounded-lg px-3 py-2 text-sm text-[#856404] flex items-center">
|
||||
{% if msg.safe %}
|
||||
{{ msg.data | safe or 'Not supported WhatsApp internal message' }}
|
||||
{% else %}
|
||||
{{ msg.data or 'Not supported WhatsApp internal message' }}
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
{% if msg.caption is not none %}
|
||||
<p>{{ msg.caption | urlize(none, true, '_blank') }}</p>
|
||||
{% endif %}
|
||||
{% else %}
|
||||
{% if msg.media == false %}
|
||||
{{ msg.data | sanitize_except() | urlize(none, true, '_blank') }}
|
||||
{% else %}
|
||||
{% if "image/" in msg.mime %}
|
||||
<a href="{{ msg.data }}">
|
||||
<img src="{{ msg.thumb if msg.thumb is not none else msg.data }}" {{ 'class="sticker"' | safe if msg.sticker }} loading="lazy"/>
|
||||
</a>
|
||||
{% elif "audio/" in msg.mime %}
|
||||
<audio controls="controls" autobuffer="autobuffer">
|
||||
<source src="{{ msg.data }}" />
|
||||
</audio>
|
||||
{% elif "video/" in msg.mime %}
|
||||
<video class="lazy" autobuffer {% if msg.message_type|int == 13 or msg.message_type|int == 11 %}autoplay muted loop playsinline{%else%}controls{% endif %}>
|
||||
<source type="{{ msg.mime }}" data-src="{{ msg.data }}" />
|
||||
</video>
|
||||
{% elif "/" in msg.mime %}
|
||||
The file cannot be displayed here, however it should be located at <a href="./{{ msg.data }}">here</a>
|
||||
{% else %}
|
||||
{% filter escape %}{{ msg.data }}{% endfilter %}
|
||||
{% endif %}
|
||||
{% if msg.caption is not none %}
|
||||
<p class='mt-1 {% if "audio/" in msg.mime %}text-[#808080]{% endif %}'>
|
||||
{{ msg.caption | urlize(none, true, '_blank') }}
|
||||
</p>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
</p>
|
||||
<p class="text-[10px] text-[#667781] text-right mt-1">{{ msg.time }}
|
||||
<span class="status-indicator{% if msg.read_timestamp %} read{% endif %}">
|
||||
{% if msg.received_timestamp %}
|
||||
✓✓
|
||||
{% else %}
|
||||
✓
|
||||
{% endif %}
|
||||
</span>
|
||||
</p>
|
||||
{% if msg.reactions %}
|
||||
<div class="flex flex-wrap gap-1 mt-1 justify-end absolute -bottom-3 -right-2">
|
||||
{% for sender, emoji in msg.reactions.items() %}
|
||||
<div class="bg-white rounded-full px-1.5 py-0.5 text-xs shadow-sm border border-gray-200 cursor-help" title="{{ sender }}">
|
||||
{{ emoji }}
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="flex justify-start items-center group" id="{{ msg.key_id }}">
|
||||
<div class="bg-white rounded-lg p-2 max-w-[80%] shadow-sm relative {% if msg.reactions %}mb-2{% endif %}">
|
||||
{% if msg.reply is not none %}
|
||||
<a href="#{{msg.reply}}" target="_self" class="no-base">
|
||||
<div
|
||||
class="mb-2 p-1 bg-whatsapp-chat-light rounded border-l-4 border-whatsapp text-sm reply-box">
|
||||
<div class="flex items-center gap-2">
|
||||
<div class="flex-1 overflow-hidden">
|
||||
<p class="text-whatsapp font-medium text-xs">Replying to</p>
|
||||
<p class="text-[#808080] text-xs truncate">
|
||||
{% if msg.quoted_data is not none %}
|
||||
{{msg.quoted_data}}
|
||||
{% else %}
|
||||
this message
|
||||
{% endif %}
|
||||
</p>
|
||||
</div>
|
||||
{% set replied_msg = msgs | selectattr('key_id', 'equalto', msg.reply) | first %}
|
||||
{% if replied_msg and replied_msg.media == true %}
|
||||
<div class="flex-shrink-0">
|
||||
{% if "image/" in replied_msg.mime %}
|
||||
<img src="{{ replied_msg.thumb if replied_msg.thumb is not none else replied_msg.data }}"
|
||||
class="w-8 h-8 rounded object-cover" loading="lazy" />
|
||||
{% elif "video/" in replied_msg.mime %}
|
||||
<div class="relative w-8 h-8 rounded overflow-hidden bg-gray-200">
|
||||
<img src="{{ replied_msg.thumb if replied_msg.thumb is not none else replied_msg.data }}"
|
||||
class="w-full h-full object-cover" loading="lazy" />
|
||||
<div class="absolute inset-0 flex items-center justify-center">
|
||||
<div class="play-icon"></div>
|
||||
</div>
|
||||
</div>
|
||||
{% elif "audio/" in replied_msg.mime %}
|
||||
<div class="w-8 h-8 rounded bg-gray-200 flex items-center justify-center">
|
||||
<div class="speaker-icon"></div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</a>
|
||||
{% endif %}
|
||||
<p class="text-[#111b21] text-sm">
|
||||
{% if msg.meta == true or msg.media == false and msg.data is none %}
|
||||
<div class="flex justify-center mb-2">
|
||||
<div class="bg-[#FFF3C5] rounded-lg px-3 py-2 text-sm text-[#856404] flex items-center">
|
||||
{% if msg.safe %}
|
||||
{{ msg.data | safe or 'Not supported WhatsApp internal message' }}
|
||||
{% else %}
|
||||
{{ msg.data or 'Not supported WhatsApp internal message' }}
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
{% if msg.caption is not none %}
|
||||
<p>{{ msg.caption | urlize(none, true, '_blank') }}</p>
|
||||
{% endif %}
|
||||
{% else %}
|
||||
{% if msg.media == false %}
|
||||
{{ msg.data | sanitize_except() | urlize(none, true, '_blank') }}
|
||||
{% else %}
|
||||
{% if "image/" in msg.mime %}
|
||||
<a href="{{ msg.data }}">
|
||||
<img src="{{ msg.thumb if msg.thumb is not none else msg.data }}" {{ 'class="sticker"' | safe if msg.sticker }} loading="lazy"/>
|
||||
</a>
|
||||
{% elif "audio/" in msg.mime %}
|
||||
<audio controls="controls" autobuffer="autobuffer">
|
||||
<source src="{{ msg.data }}" />
|
||||
</audio>
|
||||
{% elif "video/" in msg.mime %}
|
||||
<video class="lazy" autobuffer {% if msg.message_type|int == 13 or msg.message_type|int == 11 %}autoplay muted loop playsinline{%else%}controls{% endif %}>
|
||||
<source type="{{ msg.mime }}" data-src="{{ msg.data }}" />
|
||||
</video>
|
||||
{% elif "/" in msg.mime %}
|
||||
The file cannot be displayed here, however it should be located at <a href="./{{ msg.data }}">here</a>
|
||||
{% else %}
|
||||
{% filter escape %}{{ msg.data }}{% endfilter %}
|
||||
{% endif %}
|
||||
{% if msg.caption is not none %}
|
||||
<p class='mt-1 {% if "audio/" in msg.mime %}text-[#808080]{% endif %}'>
|
||||
{{ msg.caption | urlize(none, true, '_blank') }}
|
||||
</p>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
</p>
|
||||
<div class="flex items-baseline text-[10px] text-[#667781] mt-1 gap-2">
|
||||
<span class="flex-shrink-0">
|
||||
{% if msg.sender is not none %}
|
||||
{{ msg.sender }}
|
||||
{% endif %}
|
||||
</span>
|
||||
<span class="flex-grow min-w-[4px]"></span>
|
||||
<span class="flex-shrink-0">{{ msg.time }}</span>
|
||||
</div>
|
||||
{% if msg.reactions %}
|
||||
<div class="flex flex-wrap gap-1 mt-1 justify-start absolute -bottom-3 -left-2">
|
||||
{% for sender, emoji in msg.reactions.items() %}
|
||||
<div class="bg-gray-100 rounded-full px-1.5 py-0.5 text-xs shadow-sm border border-gray-200 cursor-help" title="{{ sender }}">
|
||||
{{ emoji }}
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
<!-- <div class="opacity-0 group-hover:opacity-100 transition-opacity duration-200 relative ml-2">
|
||||
<div class="relative">
|
||||
<div class="relative group/tooltip">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" class="h-5 w-5 text-[#8696a0] hover:text-[#54656f] cursor-pointer" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
||||
<use href="#info-icon"></use>
|
||||
</svg>
|
||||
<div class="absolute bottom-full info-box-tooltip mb-2 hidden group-hover/tooltip:block z-50">
|
||||
<div class="bg-black text-white text-xs rounded py-1 px-2 whitespace-nowrap">
|
||||
Received at {{msg.received_timestamp or 'unknown'}}
|
||||
</div>
|
||||
<div class="absolute top-full right-3 ml-1 border-4 border-transparent border-t-black"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div> -->
|
||||
</div>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
</div>
|
||||
<footer>
|
||||
{% if not next %}
|
||||
<div class="flex justify-center mb-6">
|
||||
<div class="bg-[#e1f2fb] rounded-lg px-3 py-2 text-sm text-[#54656f]">
|
||||
End of History
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
<br>
|
||||
Portions of this page are reproduced from <a href="https://web.dev/articles/lazy-loading-video">work</a>
|
||||
created and <a href="https://developers.google.com/readme/policies">shared by Google</a> and used
|
||||
according to terms described in the <a href="https://www.apache.org/licenses/LICENSE-2.0">Apache 2.0
|
||||
License</a>.
|
||||
</footer>
|
||||
</div>
|
||||
</article>
|
||||
</body>
|
||||
<script>
|
||||
// Search functionality
|
||||
const searchButton = document.getElementById('searchButton');
|
||||
const mainSearchInput = document.getElementById('mainSearchInput');
|
||||
const closeMainSearch = document.getElementById('closeMainSearch');
|
||||
const mainHeaderSearchInput = document.getElementById('mainHeaderSearchInput');
|
||||
|
||||
// Function to show search input
|
||||
const showSearch = () => {
|
||||
mainSearchInput.classList.add('active');
|
||||
mainHeaderSearchInput.focus();
|
||||
};
|
||||
|
||||
// Function to hide search input
|
||||
const hideSearch = () => {
|
||||
mainSearchInput.classList.remove('active');
|
||||
mainHeaderSearchInput.value = '';
|
||||
};
|
||||
|
||||
// Event listeners
|
||||
searchButton.addEventListener('click', showSearch);
|
||||
closeMainSearch.addEventListener('click', hideSearch);
|
||||
|
||||
// Handle ESC key
|
||||
document.addEventListener('keydown', (event) => {
|
||||
if (event.key === 'Escape' && mainSearchInput.classList.contains('active')) {
|
||||
hideSearch();
|
||||
}
|
||||
});
|
||||
</script>
|
||||
<script>
|
||||
document.addEventListener("DOMContentLoaded", function() {
|
||||
var lazyVideos = [].slice.call(document.querySelectorAll("video.lazy"));
|
||||
|
||||
if ("IntersectionObserver" in window) {
|
||||
var lazyVideoObserver = new IntersectionObserver(function(entries, observer) {
|
||||
entries.forEach(function(video) {
|
||||
if (video.isIntersecting) {
|
||||
for (var source in video.target.children) {
|
||||
var videoSource = video.target.children[source];
|
||||
if (typeof videoSource.tagName === "string" && videoSource.tagName === "SOURCE") {
|
||||
videoSource.src = videoSource.dataset.src;
|
||||
}
|
||||
}
|
||||
|
||||
video.target.load();
|
||||
video.target.classList.remove("lazy");
|
||||
lazyVideoObserver.unobserve(video.target);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
lazyVideos.forEach(function(lazyVideo) {
|
||||
lazyVideoObserver.observe(lazyVideo);
|
||||
});
|
||||
}
|
||||
});
|
||||
</script>
|
||||
<script>
|
||||
// Prevent the <base> tag from affecting links with the class "no-base"
|
||||
document.querySelectorAll('.no-base').forEach(link => {
|
||||
link.addEventListener('click', function(event) {
|
||||
const href = this.getAttribute('href');
|
||||
if (href.startsWith('#')) {
|
||||
window.location.hash = href;
|
||||
event.preventDefault();
|
||||
}
|
||||
});
|
||||
});
|
||||
</script>
|
||||
</html>
|
||||
329  Whatsapp_Chat_Exporter/whatsapp_old.html  Normal file
@@ -0,0 +1,329 @@
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Whatsapp - {{ name }}</title>
|
||||
<meta charset="UTF-8">
|
||||
<link rel="stylesheet" href="{{w3css}}">
|
||||
<style>
|
||||
html, body {
|
||||
font-size: 12px;
|
||||
scroll-behavior: smooth;
|
||||
}
|
||||
header {
|
||||
position: fixed;
|
||||
z-index: 20;
|
||||
border-bottom: 2px solid #e3e6e7;
|
||||
font-size: 2em;
|
||||
font-weight: bolder;
|
||||
background-color: white;
|
||||
padding: 20px 0 20px 0;
|
||||
}
|
||||
footer {
|
||||
border-top: 2px solid #e3e6e7;
|
||||
padding: 20px 0 20px 0;
|
||||
}
|
||||
article {
|
||||
width:500px;
|
||||
margin:100px auto;
|
||||
z-index:10;
|
||||
font-size: 15px;
|
||||
word-wrap: break-word;
|
||||
}
|
||||
img, video {
|
||||
max-width:100%;
|
||||
}
|
||||
div.reply{
|
||||
font-size: 13px;
|
||||
text-decoration: none;
|
||||
}
|
||||
div:target::before {
|
||||
content: '';
|
||||
display: block;
|
||||
height: 115px;
|
||||
margin-top: -115px;
|
||||
visibility: hidden;
|
||||
}
|
||||
div:target {
|
||||
border-style: solid;
|
||||
border-width: 2px;
|
||||
animation: border-blink 0.5s steps(1) 5;
|
||||
border-color: rgba(0,0,0,0)
|
||||
}
|
||||
table {
|
||||
width: 100%;
|
||||
}
|
||||
@keyframes border-blink {
|
||||
0% {
|
||||
border-color: #2196F3;
|
||||
}
|
||||
50% {
|
||||
border-color: rgba(0,0,0,0);
|
||||
}
|
||||
}
|
||||
.avatar {
|
||||
border-radius:50%;
|
||||
overflow:hidden;
|
||||
max-width: 64px;
|
||||
max-height: 64px;
|
||||
}
|
||||
.name {
|
||||
color: #3892da;
|
||||
}
|
||||
.pad-left-10 {
|
||||
padding-left: 10px;
|
||||
}
|
||||
.pad-right-10 {
|
||||
padding-right: 10px;
|
||||
}
|
||||
.reply_link {
|
||||
color: #168acc;
|
||||
}
|
||||
.blue {
|
||||
color: #70777a;
|
||||
}
|
||||
.sticker {
|
||||
max-width: 100px !important;
|
||||
max-height: 100px !important;
|
||||
}
|
||||
</style>
|
||||
<base href="{{ media_base }}" target="_blank">
|
||||
</head>
|
||||
<body>
|
||||
<header class="w3-center w3-top">
|
||||
{{ headline }}
|
||||
{% if status is not none %}
|
||||
<br>
|
||||
<span class="w3-small">{{ status }}</span>
|
||||
{% endif %}
|
||||
</header>
|
||||
<article class="w3-container">
|
||||
<div class="table">
|
||||
{% set last = {'last': 946688461.001} %}
|
||||
{% for msg in msgs -%}
|
||||
<div class="w3-row w3-padding-small w3-margin-bottom" id="{{ msg.key_id }}">
|
||||
{% if determine_day(last.last, msg.timestamp) is not none %}
|
||||
<div class="w3-center w3-padding-16 blue">{{ determine_day(last.last, msg.timestamp) }}</div>
|
||||
{% if last.update({'last': msg.timestamp}) %}{% endif %}
|
||||
{% endif %}
|
||||
{% if msg.from_me == true %}
|
||||
<div class="w3-row">
|
||||
<div class="w3-left blue">{{ msg.time }}</div>
|
||||
<div class="name w3-right-align pad-left-10">You</div>
|
||||
</div>
|
||||
<div class="w3-row">
|
||||
{% if not no_avatar and my_avatar is not none %}
|
||||
<div class="w3-col m10 l10">
|
||||
{% else %}
|
||||
<div class="w3-col m12 l12">
|
||||
{% endif %}
|
||||
<div class="w3-right-align">
|
||||
{% if msg.reply is not none %}
|
||||
<div class="reply">
|
||||
<span class="blue">Replying to </span>
|
||||
<a href="#{{msg.reply}}" target="_self" class="reply_link no-base">
|
||||
{% if msg.quoted_data is not none %}
|
||||
"{{msg.quoted_data}}"
|
||||
{% else %}
|
||||
this message
|
||||
{% endif %}
|
||||
</a>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if msg.meta == true or msg.media == false and msg.data is none %}
|
||||
<div class="w3-panel w3-border-blue w3-pale-blue w3-rightbar w3-leftbar w3-threequarter w3-center">
|
||||
{% if msg.safe %}
|
||||
<p>{{ msg.data | safe or 'Not supported WhatsApp internal message' }}</p>
|
||||
{% else %}
|
||||
<p>{{ msg.data or 'Not supported WhatsApp internal message' }}</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% if msg.caption is not none %}
|
||||
<div class="w3-container">
|
||||
{{ msg.caption | urlize(none, true, '_blank') }}
|
||||
</div>
|
||||
{% endif %}
|
||||
{% else %}
|
||||
{% if msg.media == false %}
|
||||
{{ msg.data | sanitize_except() | urlize(none, true, '_blank') }}
|
||||
{% else %}
|
||||
{% if "image/" in msg.mime %}
|
||||
<a href="{{ msg.data }}">
|
||||
<img src="{{ msg.thumb if msg.thumb is not none else msg.data }}" {{ 'class="sticker"' | safe if msg.sticker }} loading="lazy"/>
|
||||
</a>
|
||||
{% elif "audio/" in msg.mime %}
|
||||
<audio controls="controls" autobuffer="autobuffer">
|
||||
<source src="{{ msg.data }}" />
|
||||
</audio>
|
||||
{% elif "video/" in msg.mime %}
|
||||
<video class="lazy" autobuffer {% if msg.message_type|int == 13 or msg.message_type|int == 11 %}autoplay muted loop playsinline{%else%}controls{% endif %}>
|
||||
<source type="{{ msg.mime }}" data-src="{{ msg.data }}" />
|
||||
</video>
|
||||
{% elif "/" in msg.mime %}
|
||||
<div class="w3-panel w3-border-blue w3-pale-blue w3-rightbar w3-leftbar w3-threequarter w3-center">
|
||||
<p>The file cannot be displayed here, however it should be located at <a href="./{{ msg.data }}">here</a></p>
|
||||
</div>
|
||||
{% else %}
|
||||
{% filter escape %}{{ msg.data }}{% endfilter %}
|
||||
{% endif %}
|
||||
{% if msg.caption is not none %}
|
||||
<div class="w3-container">
|
||||
{{ msg.caption | urlize(none, true, '_blank') }}
|
||||
</div>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
{% if not no_avatar and my_avatar is not none %}
|
||||
<div class="w3-col m2 l2 pad-left-10">
|
||||
<a href="{{ my_avatar }}">
|
||||
<img src="{{ my_avatar }}" onerror="this.style.display='none'" class="avatar" loading="lazy">
|
||||
</a>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="w3-row">
|
||||
<div class="w3-left pad-right-10 name">
|
||||
{% if msg.sender is not none %}
|
||||
{{ msg.sender }}
|
||||
{% else %}
|
||||
{{ name }}
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="w3-right-align blue">{{ msg.time }}</div>
|
||||
</div>
|
||||
<div class="w3-row">
|
||||
{% if not no_avatar %}
|
||||
<div class="w3-col m2 l2">
|
||||
{% if their_avatar is not none %}
|
||||
<a href="{{ their_avatar }}"><img src="{{ their_avatar_thumb or '' }}" onerror="this.style.display='none'" class="avatar" loading="lazy"></a>
|
||||
{% else %}
|
||||
<img src="{{ their_avatar_thumb or '' }}" onerror="this.style.display='none'" class="avatar" loading="lazy">
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="w3-col m10 l10">
|
||||
{% else %}
|
||||
<div class="w3-col m12 l12">
|
||||
{% endif %}
|
||||
<div class="w3-left-align">
|
||||
{% if msg.reply is not none %}
|
||||
<div class="reply">
|
||||
<span class="blue">Replying to </span>
|
||||
<a href="#{{msg.reply}}" target="_self" class="reply_link no-base">
|
||||
{% if msg.quoted_data is not none %}
|
||||
"{{msg.quoted_data}}"
|
||||
{% else %}
|
||||
this message
|
||||
{% endif %}
|
||||
</a>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if msg.meta == true or msg.media == false and msg.data is none %}
|
||||
<div class="w3-panel w3-border-blue w3-pale-blue w3-rightbar w3-leftbar w3-threequarter w3-center">
|
||||
{% if msg.safe %}
|
||||
<p>{{ msg.data | safe or 'Not supported WhatsApp internal message' }}</p>
|
||||
{% else %}
|
||||
<p>{{ msg.data or 'Not supported WhatsApp internal message' }}</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% if msg.caption is not none %}
|
||||
<div class="w3-container">
|
||||
{{ msg.caption | urlize(none, true, '_blank') }}
|
||||
</div>
|
||||
{% endif %}
|
||||
{% else %}
|
||||
{% if msg.media == false %}
|
||||
{{ msg.data | sanitize_except() | urlize(none, true, '_blank') }}
|
||||
{% else %}
|
||||
{% if "image/" in msg.mime %}
|
||||
<a href="{{ msg.data }}">
|
||||
<img src="{{ msg.thumb if msg.thumb is not none else msg.data }}" {{ 'class="sticker"' | safe if msg.sticker }} loading="lazy"/>
|
||||
</a>
|
||||
{% elif "audio/" in msg.mime %}
|
||||
<audio controls="controls" autobuffer="autobuffer">
|
||||
<source src="{{ msg.data }}" />
|
||||
</audio>
|
||||
{% elif "video/" in msg.mime %}
|
||||
<video class="lazy" autobuffer {% if msg.message_type|int == 13 or msg.message_type|int == 11 %}autoplay muted loop playsinline{%else%}controls{% endif %}>
|
||||
<source type="{{ msg.mime }}" data-src="{{ msg.data }}" />
|
||||
</video>
|
||||
{% elif "/" in msg.mime %}
|
||||
<div class="w3-panel w3-border-blue w3-pale-blue w3-rightbar w3-leftbar w3-threequarter w3-center">
|
||||
<p>The file cannot be displayed here, however it should be located at <a href="./{{ msg.data }}">here</a></p>
|
||||
</div>
|
||||
{% else %}
|
||||
{% filter escape %}{{ msg.data }}{% endfilter %}
|
||||
{% endif %}
|
||||
{% if msg.caption is not none %}
|
||||
<div class="w3-container">
|
||||
{{ msg.caption | urlize(none, true, '_blank') }}
|
||||
</div>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</article>
|
||||
<footer class="w3-center">
|
||||
<h2>
{% if previous %}
<a href="./{{ previous }}" target="_self">Previous</a>
{% endif %}
</h2>
<h2>
{% if next %}
<a href="./{{ next }}" target="_self">Next</a>
{% else %}
End of History
{% endif %}
</h2>
|
||||
<br>
|
||||
Portions of this page are reproduced from <a href="https://web.dev/articles/lazy-loading-video">work</a> created and <a href="https://developers.google.com/readme/policies">shared by Google</a> and used according to terms described in the <a href="https://www.apache.org/licenses/LICENSE-2.0">Apache 2.0 License</a>.
|
||||
</footer>
|
||||
<script>
|
||||
document.addEventListener("DOMContentLoaded", function() {
|
||||
var lazyVideos = [].slice.call(document.querySelectorAll("video.lazy"));
|
||||
|
||||
if ("IntersectionObserver" in window) {
|
||||
var lazyVideoObserver = new IntersectionObserver(function(entries, observer) {
|
||||
entries.forEach(function(video) {
|
||||
if (video.isIntersecting) {
|
||||
for (var source in video.target.children) {
|
||||
var videoSource = video.target.children[source];
|
||||
if (typeof videoSource.tagName === "string" && videoSource.tagName === "SOURCE") {
|
||||
videoSource.src = videoSource.dataset.src;
|
||||
}
|
||||
}
|
||||
|
||||
video.target.load();
|
||||
video.target.classList.remove("lazy");
|
||||
lazyVideoObserver.unobserve(video.target);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
lazyVideos.forEach(function(lazyVideo) {
|
||||
lazyVideoObserver.observe(lazyVideo);
|
||||
});
|
||||
}
|
||||
});
|
||||
</script>
|
||||
<script>
|
||||
// Prevent the <base> tag from affecting links with the class "no-base"
|
||||
document.querySelectorAll('.no-base').forEach(link => {
|
||||
link.addEventListener('click', function(event) {
|
||||
const href = this.getAttribute('href');
|
||||
if (href.startsWith('#')) {
|
||||
window.location.hash = href;
|
||||
event.preventDefault();
|
||||
}
|
||||
});
|
||||
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
BIN  imgs/group.png  Binary file not shown.
Before Width: | Height: | Size: 36 KiB |
BIN  imgs/pm.png  Binary file not shown.
Before Width: | Height: | Size: 38 KiB After Width: | Height: | Size: 116 KiB |
67  pyproject.toml  Normal file
@@ -0,0 +1,67 @@
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "whatsapp-chat-exporter"
version = "0.13.0"
description = "A Whatsapp database parser that provides history of your Whatsapp conversations in HTML and JSON. Android, iOS, iPadOS, Crypt12, Crypt14, Crypt15 supported."
readme = "README.md"
authors = [
    { name = "KnugiHK", email = "hello@knugi.com" }
]
license = { text = "MIT" }
keywords = [
    "android", "ios", "parsing", "history", "iphone", "message", "crypt15",
    "customizable", "whatsapp", "android-backup", "messages", "crypt14",
    "crypt12", "whatsapp-chat-exporter", "whatsapp-export", "iphone-backup",
    "whatsapp-database", "whatsapp-database-parser", "whatsapp-conversations"
]
classifiers = [
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: 3.13",
    "Programming Language :: Python :: 3.14",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Development Status :: 4 - Beta",
    "Environment :: Console",
    "Intended Audience :: End Users/Desktop",
    "Topic :: Communications :: Chat",
    "Topic :: Utilities",
    "Topic :: Database"
]
requires-python = ">=3.10"
dependencies = [
    "jinja2",
    "bleach",
    "tqdm"
]

[project.optional-dependencies]
android_backup = ["pycryptodome", "javaobj-py3"]
crypt12 = ["pycryptodome"]
crypt14 = ["pycryptodome"]
crypt15 = ["pycryptodome", "javaobj-py3"]
all = ["pycryptodome", "javaobj-py3"]
everything = ["pycryptodome", "javaobj-py3"]
backup = ["pycryptodome", "javaobj-py3"]

[project.scripts]
wtsexporter = "Whatsapp_Chat_Exporter.__main__:main"
waexporter = "Whatsapp_Chat_Exporter.__main__:main"
whatsapp-chat-exporter = "Whatsapp_Chat_Exporter.__main__:main"

[tool.setuptools.packages.find]
where = ["."]
include = ["Whatsapp_Chat_Exporter"]

[tool.setuptools.package-data]
Whatsapp_Chat_Exporter = ["*.html"]

[dependency-groups]
dev = [
    "pytest>=8.3.5",
]
118  scripts/brazilian_number_processing.py  Normal file
@@ -0,0 +1,118 @@
"""
|
||||
This script processes a VCARD file to standardize telephone entries and add a second TEL line with the modified number (removing the extra ninth digit) for contacts with 9-digit subscribers.
|
||||
It handles numbers that may already include a "+55" prefix and ensures that the output format is consistent.
|
||||
Contributed by @magpires https://github.com/KnugiHK/WhatsApp-Chat-Exporter/issues/127#issuecomment-2646660625
|
||||
"""
|
||||
import re
|
||||
import argparse
|
||||
|
||||
|
||||
def process_phone_number(raw_phone):
|
||||
"""
|
||||
Process the raw phone string from the VCARD and return two formatted numbers:
|
||||
- The original formatted number, and
|
||||
- A modified formatted number with the extra (ninth) digit removed, if applicable.
|
||||
|
||||
Desired output:
|
||||
For a number with a 9-digit subscriber:
|
||||
Original: "+55 {area} {first 5 of subscriber}-{last 4 of subscriber}"
|
||||
Modified: "+55 {area} {subscriber[1:5]}-{subscriber[5:]}"
|
||||
For example, for an input that should represent "027912345678", the outputs are:
|
||||
"+55 27 91234-5678" and "+55 27 1234-5678"
|
||||
|
||||
This function handles numbers that may already include a "+55" prefix.
|
||||
It expects that after cleaning, a valid number (without the country code) should have either 10 digits
|
||||
(2 for area + 8 for subscriber) or 11 digits (2 for area + 9 for subscriber).
|
||||
If extra digits are present, it takes the last 11 (or 10) digits.
|
||||
"""
|
||||
# Store the original input for processing
|
||||
number_to_process = raw_phone.strip()
|
||||
|
||||
# Remove all non-digit characters
|
||||
digits = re.sub(r'\D', '', number_to_process)
|
||||
|
||||
# If the number starts with '55', remove it for processing
|
||||
if digits.startswith("55") and len(digits) > 11:
|
||||
digits = digits[2:]
|
||||
|
||||
# Remove trunk zero if present
|
||||
if digits.startswith("0"):
|
||||
digits = digits[1:]
|
||||
|
||||
# After cleaning, we expect a valid number to have either 10 or 11 digits
|
||||
# If there are extra digits, use the last 11 (for a 9-digit subscriber) or last 10 (for an 8-digit subscriber)
|
||||
if len(digits) > 11:
|
||||
# Here, we assume the valid number is the last 11 digits
|
||||
digits = digits[-11:]
|
||||
elif len(digits) > 10 and len(digits) < 11:
|
||||
# In some cases with an 8-digit subscriber, take the last 10 digits
|
||||
digits = digits[-10:]
|
||||
|
||||
# Check if we have a valid number after processing
|
||||
if len(digits) not in (10, 11):
|
||||
return None, None
|
||||
|
||||
area = digits[:2]
|
||||
subscriber = digits[2:]
|
||||
|
||||
if len(subscriber) == 9:
|
||||
# Format the original number (5-4 split, e.g., "91234-5678")
|
||||
orig_subscriber = f"{subscriber[:5]}-{subscriber[5:]}"
|
||||
# Create a modified version: drop the first digit of the subscriber to form an 8-digit subscriber (4-4 split)
|
||||
mod_subscriber = f"{subscriber[1:5]}-{subscriber[5:]}"
|
||||
original_formatted = f"+55 {area} {orig_subscriber}"
|
||||
modified_formatted = f"+55 {area} {mod_subscriber}"
|
||||
elif len(subscriber) == 8:
|
||||
original_formatted = f"+55 {area} {subscriber[:4]}-{subscriber[4:]}"
|
||||
modified_formatted = None
|
||||
else:
|
||||
# This shouldn't happen given the earlier check, but just to be safe
|
||||
return None, None
|
||||
|
||||
return original_formatted, modified_formatted
|
||||
|
||||
|
||||
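# Editor's illustrative check (not part of the script), using the example given in
# the docstring above:
def _example_process_phone_number() -> None:
    assert process_phone_number("027912345678") == ("+55 27 91234-5678", "+55 27 1234-5678")

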
def process_vcard(input_vcard, output_vcard):
    """
    Process a VCARD file to standardize telephone entries and add a second TEL line
    with the modified number (removing the extra ninth digit) for contacts with 9-digit subscribers.
    """
    with open(input_vcard, 'r', encoding='utf-8') as file:
        lines = file.readlines()

    output_lines = []

    # Regex to capture any telephone line.
    # It matches lines starting with "TEL:" or "TEL;TYPE=..." or with prefixes like "item1.TEL:".
    phone_pattern = re.compile(r'^(?P<prefix>.*TEL(?:;TYPE=[^:]+)?):(?P<number>.*)$')

    for line in lines:
        stripped_line = line.rstrip("\n")
        match = phone_pattern.match(stripped_line)
        if match:
            raw_phone = match.group("number").strip()
            orig_formatted, mod_formatted = process_phone_number(raw_phone)
            if orig_formatted:
                # Always output using the standardized prefix.
                output_lines.append(f"TEL;TYPE=CELL:{orig_formatted}\n")
            else:
                output_lines.append(line)
            if mod_formatted:
                output_lines.append(f"TEL;TYPE=CELL:{mod_formatted}\n")
        else:
            output_lines.append(line)

    with open(output_vcard, 'w', encoding='utf-8') as file:
        file.writelines(output_lines)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="Process a VCARD file to standardize telephone entries and add a second TEL line with the modified number (removing the extra ninth digit) for contacts with 9-digit subscribers."
    )
    parser.add_argument('input_vcard', type=str, help='Input VCARD file')
    parser.add_argument('output_vcard', type=str, help='Output VCARD file')
    args = parser.parse_args()

    process_vcard(args.input_vcard, args.output_vcard)
    print(f"VCARD processed and saved to {args.output_vcard}")
50 scripts/bruteforce_crypt15.py Normal file
@@ -0,0 +1,50 @@
import hmac
import javaobj
import zlib
from Crypto.Cipher import AES
from hashlib import sha256
from sys import exit


def _generate_hmac_of_hmac(key_stream):
    key = hmac.new(
        hmac.new(
            b'\x00' * 32,
            key_stream,
            sha256
        ).digest(),
        b"backup encryption\x01",
        sha256
    )
    return key.digest(), key_stream


def _extract_encrypted_key(keyfile):
    key_stream = b""
    for byte in javaobj.loads(keyfile):
        key_stream += byte.to_bytes(1, "big", signed=True)

    return _generate_hmac_of_hmac(key_stream)


if __name__ == "__main__":
    key = open("encrypted_backup.key", "rb").read()
    database = open("wa.db.crypt15", "rb").read()
    main_key, hex_key = _extract_encrypted_key(key)
    for i in range(100):
        iv = database[i:i+16]
        for j in range(100):
            cipher = AES.new(main_key, AES.MODE_GCM, iv)
            db_ciphertext = database[j:]
            db_compressed = cipher.decrypt(db_ciphertext)
            try:
                db = zlib.decompress(db_compressed)
            except zlib.error:
                ...
            else:
                if db[0:6] == b"SQLite":
                    print(f"Found!\nIV: {i}\nOffset: {j}")
                    print(db_compressed[:10])
                    exit()

    print("Not found! Try to increase maximum search.")
69 setup.py
@@ -1,69 +0,0 @@
import setuptools
from re import search

with open("README.md", "r") as fh:
    long_description = fh.read()

with open("Whatsapp_Chat_Exporter/__init__.py", encoding="utf8") as f:
    version = search(r'__version__ = "(.*?)"', f.read()).group(1)

setuptools.setup(
    name="whatsapp-chat-exporter",
    version=version,
    author="KnugiHK",
    author_email="hello@knugi.com",
    description=("A Whatsapp database parser that will give you the "
                 "history of your Whatsapp conversations in HTML and JSON. "
                 "Android, iOS, iPadOS, Crypt12, Crypt14, Crypt15 supported."),
    long_description=long_description,
    long_description_content_type="text/markdown",
    license="MIT",
    keywords=[
        "android", "ios", "parsing", "history", "iphone", "message", "crypt15",
        "customizable", "whatsapp", "android-backup", "messages", "crypt14",
        "crypt12", "whatsapp-chat-exporter", "whatsapp-export", "iphone-backup",
        "whatsapp-database", "whatsapp-database-parser", "whatsapp-conversations"
    ],
    platforms=["any"],
    url="https://github.com/KnugiHK/Whatsapp-Chat-Exporter",
    packages=setuptools.find_packages(),
    package_data={
        '': ['whatsapp.html']
    },
    classifiers=[
        "Programming Language :: Python :: 3 :: Only",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Development Status :: 4 - Beta",
        "Environment :: Console",
        "Intended Audience :: End Users/Desktop",
        "Topic :: Communications :: Chat",
        "Topic :: Utilities",
        "Topic :: Database"
    ],
    python_requires='>=3.8',
    install_requires=[
        'jinja2',
        'bleach'
    ],
    extras_require={
        'android_backup': ["pycryptodome", "javaobj-py3"],
        'crypt12': ["pycryptodome"],
        'crypt14': ["pycryptodome"],
        'crypt15': ["pycryptodome", "javaobj-py3"],
        'all': ["pycryptodome", "javaobj-py3"],
        'everything': ["pycryptodome", "javaobj-py3"],
        'backup': ["pycryptodome", "javaobj-py3"]
    },
    entry_points={
        "console_scripts": [
            "wtsexporter = Whatsapp_Chat_Exporter.__main__:main",
            "waexporter = Whatsapp_Chat_Exporter.__main__:main",
            "whatsapp-chat-exporter = Whatsapp_Chat_Exporter.__main__:main"
        ]
    }
)
0 tests/__init__.py Normal file

27 tests/conftest.py Normal file
@@ -0,0 +1,27 @@
import pytest
import os

def pytest_collection_modifyitems(config, items):
    """
    Moves test_nuitka_binary.py to the end and fails if the file is missing.
    """
    target_file = "test_nuitka_binary.py"

    # Sanity Check: Ensure the file actually exists in the tests directory
    test_dir = os.path.join(config.rootdir, "tests")
    file_path = os.path.join(test_dir, target_file)

    if not os.path.exists(file_path):
        pytest.exit(f"\n[FATAL] Required test file '{target_file}' not found in {test_dir}. "
                    f"Order enforcement failed!", returncode=1)

    nuitka_tests = []
    remaining_tests = []

    for item in items:
        if target_file in item.nodeid:
            nuitka_tests.append(item)
        else:
            remaining_tests.append(item)

    items[:] = remaining_tests + nuitka_tests
44 tests/data/contacts.vcf Normal file
@@ -0,0 +1,44 @@
BEGIN:VCARD
VERSION:3.0
FN:Sample Contact
TEL;TYPE=CELL:+85288888888
END:VCARD

BEGIN:VCARD
VERSION:2.1
N:Lopez;Yard Lawn Guy;Jose;;
FN:Yard Lawn Guy, Jose Lopez
TEL;HOME:5673334444
END:VCARD

BEGIN:VCARD
VERSION:2.1
N;CHARSET=UTF-8;ENCODING=QUOTED-PRINTABLE:;=4A=6F=68=6E=20=42=75=74=6C=65=72=20=F0=9F=8C=9F=
=F0=9F=92=AB=F0=9F=8C=9F;;;
FN;CHARSET=UTF-8;ENCODING=QUOTED-PRINTABLE:=4A=6F=68=6E=20=42=75=74=6C=65=72=20=F0=9F=8C=9F=
=F0=9F=92=AB=F0=9F=8C=9F
TEL;PREF:5556667777
END:VCARD

BEGIN:VCARD
VERSION:2.1
TEL;WORK;PREF:1234567890
ORG:Airline Contact #'s
NOTE;ENCODING=QUOTED-PRINTABLE:=53=70=69=72=69=74=20=41=69=72=6C=69=
=6E=65=73=20=38=30=30=2D=37=37=32=2D=37=31=31=37=55=6E=69=74=65=64=
=20=41=69=72=6C=69=6E=65=73=20=38=30=30=2D=32=34=31=2D=36=35=32=32
END:VCARD

BEGIN:VCARD
VERSION:2.1
TEL;WORK;PREF:3451112222
X-SAMSUNGADR;ENCODING=QUOTED-PRINTABLE:;;=31=31=31=31=32=20=4E=6F=72=74=68=20=45=6C=64=72=
=69=64=67=65=20=50=61=72=6B=77=61=79;=44=61=6C=6C=61=73;=54=58;=32=32=32=32=32
ORG:James Peacock Elementary
END:VCARD

BEGIN:VCARD
VERSION:2.1
TEL;CELL:8889990001
ORG:AAA Car Service
END:VCARD
272 tests/test_brazilian_number_processing.py Normal file
@@ -0,0 +1,272 @@
import subprocess
import unittest
import tempfile
import os
from unittest.mock import patch

from scripts.brazilian_number_processing import process_phone_number, process_vcard


class TestVCardProcessor(unittest.TestCase):

    def test_process_phone_number(self):
        """Test the process_phone_number function with various inputs."""

        # Test cases for 9-digit subscriber numbers
        test_cases_9_digit = [
            # Standard 11-digit number (2 area + 9 subscriber)
            ("27912345678", "+55 27 91234-5678", "+55 27 1234-5678"),
            # With country code prefix
            ("5527912345678", "+55 27 91234-5678", "+55 27 1234-5678"),
            # With plus in country code
            ("+5527912345678", "+55 27 91234-5678", "+55 27 1234-5678"),
            # With spaces and formatting
            ("+55 27 9 1234-5678", "+55 27 91234-5678", "+55 27 1234-5678"),
            # With trunk zero
            ("027912345678", "+55 27 91234-5678", "+55 27 1234-5678"),
            # With country code and trunk zero
            ("+55027912345678", "+55 27 91234-5678", "+55 27 1234-5678"),
            # With extra digits at the beginning (should use last 11)
            ("99927912345678", "+55 27 91234-5678", "+55 27 1234-5678"),
            # With extra non-digit characters
            ("+55-27-9.1234_5678", "+55 27 91234-5678", "+55 27 1234-5678"),
        ]

        # Test cases for 8-digit subscriber numbers
        test_cases_8_digit = [
            # Standard 10-digit number (2 area + 8 subscriber)
            ("2712345678", "+55 27 1234-5678", None),
            # With country code prefix
            ("552712345678", "+55 27 1234-5678", None),
            # With plus in country code
            ("+552712345678", "+55 27 1234-5678", None),
            # With spaces and formatting
            ("+55 27 1234-5678", "+55 27 1234-5678", None),
            # With trunk zero
            ("02712345678", "+55 27 1234-5678", None),
            # With country code and trunk zero
            ("+55 0 27 1234-5678", "+55 27 1234-5678", None),
        ]

        # Edge cases
        edge_cases = [
            # Too few digits
            ("271234567", None, None),
            # Empty string
            ("", None, None),
            # Non-numeric characters only
            ("abc-def+ghi", None, None),
            # Single digit
            ("1", None, None),
            # Unusual formatting but valid number
            ("(+55) [27] 9.1234_5678", "+55 27 91234-5678", "+55 27 1234-5678"),
        ]

        # Run tests for all cases
        all_cases = test_cases_9_digit + test_cases_8_digit + edge_cases

        for raw_phone, expected_orig, expected_mod in all_cases:
            with self.subTest(raw_phone=raw_phone):
                orig, mod = process_phone_number(raw_phone)
                self.assertEqual(orig, expected_orig)
                self.assertEqual(mod, expected_mod)

    def test_process_vcard(self):
        """Test the process_vcard function with various VCARD formats."""

        # Test case 1: Standard TEL entries
        vcard1 = """BEGIN:VCARD
VERSION:3.0
N:Doe;John;;;
FN:John Doe
TEL:+5527912345678
TEL:+552712345678
END:VCARD
"""
        expected1 = """BEGIN:VCARD
VERSION:3.0
N:Doe;John;;;
FN:John Doe
TEL;TYPE=CELL:+55 27 91234-5678
TEL;TYPE=CELL:+55 27 1234-5678
TEL;TYPE=CELL:+55 27 1234-5678
END:VCARD
"""

        # Test case 2: TEL entries with TYPE attributes
        vcard2 = """BEGIN:VCARD
VERSION:3.0
N:Smith;Jane;;;
FN:Jane Smith
TEL;TYPE=CELL:+5527912345678
TEL;TYPE=HOME:+552712345678
END:VCARD
"""
        expected2 = """BEGIN:VCARD
VERSION:3.0
N:Smith;Jane;;;
FN:Jane Smith
TEL;TYPE=CELL:+55 27 91234-5678
TEL;TYPE=CELL:+55 27 1234-5678
TEL;TYPE=CELL:+55 27 1234-5678
END:VCARD
"""

        # Test case 3: Complex TEL entries with prefixes
        vcard3 = """BEGIN:VCARD
VERSION:3.0
N:Brown;Robert;;;
FN:Robert Brown
item1.TEL:+5527912345678
item2.TEL;TYPE=CELL:+552712345678
END:VCARD
"""
        expected3 = """BEGIN:VCARD
VERSION:3.0
N:Brown;Robert;;;
FN:Robert Brown
TEL;TYPE=CELL:+55 27 91234-5678
TEL;TYPE=CELL:+55 27 1234-5678
TEL;TYPE=CELL:+55 27 1234-5678
END:VCARD
"""

        # Test case 4: Mixed valid and invalid phone numbers
        vcard4 = """BEGIN:VCARD
VERSION:3.0
N:White;Alice;;;
FN:Alice White
TEL:123
TEL:+5527912345678
END:VCARD
"""
        expected4 = """BEGIN:VCARD
VERSION:3.0
N:White;Alice;;;
FN:Alice White
TEL:123
TEL;TYPE=CELL:+55 27 91234-5678
TEL;TYPE=CELL:+55 27 1234-5678
END:VCARD
"""

        # Test case 5: Multiple contacts with different formats
        vcard5 = """BEGIN:VCARD
VERSION:3.0
N:Johnson;Mike;;;
FN:Mike Johnson
TEL:27912345678
END:VCARD
BEGIN:VCARD
VERSION:3.0
N:Williams;Sarah;;;
FN:Sarah Williams
TEL;TYPE=CELL:2712345678
END:VCARD
"""
        expected5 = """BEGIN:VCARD
VERSION:3.0
N:Johnson;Mike;;;
FN:Mike Johnson
TEL;TYPE=CELL:+55 27 91234-5678
TEL;TYPE=CELL:+55 27 1234-5678
END:VCARD
BEGIN:VCARD
VERSION:3.0
N:Williams;Sarah;;;
FN:Sarah Williams
TEL;TYPE=CELL:+55 27 1234-5678
END:VCARD
"""

        # Test case 6: VCARD with no phone numbers
        vcard6 = """BEGIN:VCARD
VERSION:3.0
N:Davis;Tom;;;
FN:Tom Davis
EMAIL:tom@example.com
END:VCARD
"""
        expected6 = """BEGIN:VCARD
VERSION:3.0
N:Davis;Tom;;;
FN:Tom Davis
EMAIL:tom@example.com
END:VCARD
"""

        test_cases = [
            (vcard1, expected1),
            (vcard2, expected2),
            (vcard3, expected3),
            (vcard4, expected4),
            (vcard5, expected5),
            (vcard6, expected6)
        ]

        for i, (input_vcard, expected_output) in enumerate(test_cases):
            with self.subTest(case=i+1):
                # Create temporary files for input and output
                with tempfile.NamedTemporaryFile(mode='w+', delete=False, encoding='utf-8') as input_file:
                    input_file.write(input_vcard)
                    input_path = input_file.name

                output_path = input_path + '.out'

                try:
                    # Process the VCARD
                    process_vcard(input_path, output_path)

                    # Read and verify the output
                    with open(output_path, 'r', encoding='utf-8') as output_file:
                        actual_output = output_file.read()

                    self.assertEqual(actual_output, expected_output)

                finally:
                    # Clean up temporary files
                    if os.path.exists(input_path):
                        os.unlink(input_path)
                    if os.path.exists(output_path):
                        os.unlink(output_path)

    def test_script_argument_handling(self):
        """Test the script's command-line argument handling."""

        test_input = """BEGIN:VCARD
VERSION:3.0
N:Test;User;;;
FN:User Test
TEL:+5527912345678
END:VCARD
"""

        # Create a temporary input file
        with tempfile.NamedTemporaryFile(mode='w+', delete=False, encoding='utf-8') as input_file:
            input_file.write(test_input)
            input_path = input_file.name

        output_path = input_path + '.out'

        try:
            test_args = ['python' if os.name == 'nt' else 'python3',
                         'scripts/brazilian_number_processing.py', input_path, output_path]
            # We're just testing that the argument parsing works
            subprocess.call(
                test_args,
                stdout=subprocess.DEVNULL,
                stderr=subprocess.STDOUT
            )
            # Check if the output file was created
            self.assertTrue(os.path.exists(output_path))

        finally:
            # Clean up temporary files
            if os.path.exists(input_path):
                os.unlink(input_path)
            if os.path.exists(output_path):
                os.unlink(output_path)


if __name__ == '__main__':
    unittest.main()
50 tests/test_exporter.py Normal file
@@ -0,0 +1,50 @@
import subprocess
import pytest


@pytest.fixture
def command_runner():
    """
    A pytest fixture to simplify running commands. This is a helper
    function that you can use in multiple tests.
    """
    def _run_command(command_list, check=True):
        """
        Runs a command and returns the result.

        Args:
            command_list (list): A list of strings representing the command
                and its arguments (e.g., ["python", "my_script.py", "arg1"]).
            check (bool, optional): If True, raise an exception if the
                command returns a non-zero exit code. Defaults to True.

        Returns:
            subprocess.CompletedProcess: The result of the command.
        """
        return subprocess.run(
            command_list,
            capture_output=True,
            text=True,
            check=check,
        )
    return _run_command


def test_sanity_check(command_runner):
    """
    This is a basic sanity check to make sure all modules can be imported
    This runs the exporter without any arguments. It should fail with a
    message about missing arguments.
    """
    result = command_runner(["wtsexporter"], False)
    expected_stderr = "You must define the device type"
    assert expected_stderr in result.stderr, f"STDERR was: {result.stderr}"
    assert result.returncode == 2


def test_android(command_runner):
    ...


def test_ios(command_runner):
    ...
344 tests/test_incremental_merge.py Normal file
@@ -0,0 +1,344 @@
import os
import json
import pytest
from unittest.mock import patch, mock_open, call, MagicMock
from Whatsapp_Chat_Exporter.utility import incremental_merge
from Whatsapp_Chat_Exporter.data_model import ChatStore

# Test data setup
BASE_PATH = "AppDomainGroup-group.net.whatsapp.WhatsApp.shared"
chat_data_1 = {
    "12345678@s.whatsapp.net": {
        "name": "Friend",
        "type": "ios",
        "my_avatar": os.path.join(BASE_PATH, "Media", "Profile", "Photo.jpg"),
        "their_avatar": os.path.join(BASE_PATH, "Media", "Profile", "12345678-1709851420.thumb"),
        "their_avatar_thumb": None,
        "status": None,
        "messages": {
            "24690": {
                "from_me": True,
                "timestamp": 1463926635.571629,
                "time": "10:17",
                "media": False,
                "key_id": "34B5EF10FBCA37B7E",
                "meta": False,
                "data": "I'm here",
                "safe": False,
                "sticker": False
            },
            "24691": {  # This message only exists in target
                "from_me": False,
                "timestamp": 1463926641.571629,
                "time": "10:17",
                "media": False,
                "key_id": "34B5EF10FBCA37B8E",
                "meta": False,
                "data": "Great to see you",
                "safe": False,
                "sticker": False
            }
        }
    }
}

chat_data_2 = {
    "12345678@s.whatsapp.net": {
        "name": "Friend",
        "type": "ios",
        "my_avatar": os.path.join(BASE_PATH, "Media", "Profile", "Photo.jpg"),
        "their_avatar": os.path.join(BASE_PATH, "Media", "Profile", "12345678-1709851420.thumb"),
        "their_avatar_thumb": None,
        "status": None,
        "messages": {
            "24690": {
                "from_me": True,
                "timestamp": 1463926635.571629,
                "time": "10:17",
                "media": False,
                "key_id": "34B5EF10FBCA37B7E",
                "meta": False,
                "data": "I'm here",
                "safe": False,
                "sticker": False
            },
            "24692": {  # This message only exists in source
                "from_me": False,
                "timestamp": 1463926642.571629,
                "time": "10:17",
                "media": False,
                "key_id": "34B5EF10FBCA37B9E",
                "meta": False,
                "data": "Hi there!",
                "safe": False,
                "sticker": False
            },
        }
    }
}

# Expected merged data - should contain all messages with all fields initialized as they would be by Message class
chat_data_merged = {
    "12345678@s.whatsapp.net": {
        "name": "Friend",
        "type": "ios",
        "my_avatar": os.path.join(BASE_PATH, "Media", "Profile", "Photo.jpg"),
        "their_avatar": os.path.join(BASE_PATH, "Media", "Profile", "12345678-1709851420.thumb"),
        "their_avatar_thumb": None,
        "status": None,
        "media_base": "",
        "messages": {
            "24690": {
                "from_me": True,
                "timestamp": 1463926635.571629,
                "time": "10:17",
                "media": False,
                "key_id": "34B5EF10FBCA37B7E",
                "meta": False,
                "data": "I'm here",
                "sender": None,
                "safe": False,
                "mime": None,
                "reply": None,
                "quoted_data": None,
                'reactions': {},
                "caption": None,
                "thumb": None,
                "sticker": False,
                "message_type": None,
                "received_timestamp": None,
                "read_timestamp": None
            },
            "24691": {
                "from_me": False,
                "timestamp": 1463926641.571629,
                "time": "10:17",
                "media": False,
                "key_id": "34B5EF10FBCA37B8E",
                "meta": False,
                "data": "Great to see you",
                "sender": None,
                "safe": False,
                "mime": None,
                "reply": None,
                "quoted_data": None,
                'reactions': {},
                "caption": None,
                "thumb": None,
                "sticker": False,
                "message_type": None,
                "received_timestamp": None,
                "read_timestamp": None
            },
            "24692": {
                "from_me": False,
                "timestamp": 1463926642.571629,
                "time": "10:17",
                "media": False,
                "key_id": "34B5EF10FBCA37B9E",
                "meta": False,
                "data": "Hi there!",
                "sender": None,
                "safe": False,
                "mime": None,
                "reply": None,
                "quoted_data": None,
                'reactions': {},
                "caption": None,
                "thumb": None,
                "sticker": False,
                "message_type": None,
                "received_timestamp": None,
                "read_timestamp": None
            },
        }
    }
}


@pytest.fixture
def mock_filesystem():
    with (
        patch("os.path.exists") as mock_exists,
        patch("os.makedirs") as mock_makedirs,
        patch("os.path.getmtime") as mock_getmtime,
        patch("os.listdir") as mock_listdir,
        patch("os.walk") as mock_walk,
        patch("shutil.copy2") as mock_copy2,
    ):
        yield {
            "exists": mock_exists,
            "makedirs": mock_makedirs,
            "getmtime": mock_getmtime,
            "listdir": mock_listdir,
            "walk": mock_walk,
            "copy2": mock_copy2,
        }


def test_incremental_merge_new_file(mock_filesystem):
    """Test merging when target file doesn't exist"""
    source_dir = "/source"
    target_dir = "/target"
    media_dir = "media"

    # Setup mock filesystem
    mock_filesystem["exists"].side_effect = lambda x: x == "/source"
    mock_filesystem["listdir"].return_value = ["chat.json"]

    # Run the function
    incremental_merge(source_dir, target_dir, media_dir, 2, True)

    # Verify the operations
    mock_filesystem["makedirs"].assert_called_once_with(target_dir, exist_ok=True)
    mock_filesystem["copy2"].assert_called_once_with(
        os.path.join(source_dir, "chat.json"),
        os.path.join(target_dir, "chat.json")
    )


def test_incremental_merge_existing_file_with_changes(mock_filesystem):
    """Test merging when target file exists and has changes"""
    source_dir = "source"
    target_dir = "target"
    media_dir = "media"

    # Setup mock filesystem
    mock_filesystem["exists"].side_effect = lambda x: True
    mock_filesystem["listdir"].return_value = ["chat.json"]

    # Mock file operations with consistent path separators
    source_file = os.path.join(source_dir, "chat.json")
    target_file = os.path.join(target_dir, "chat.json")
    mock_file_content = {
        source_file: json.dumps(chat_data_2),
        target_file: json.dumps(chat_data_1),
    }

    written_chunks = []

    def mock_file_write(data):
        written_chunks.append(data)

    mock_write = MagicMock(side_effect=mock_file_write)

    with patch("builtins.open", mock_open()) as mock_file:
        def mock_file_read(filename, mode="r"):
            if mode == 'w':
                file_mock = mock_open().return_value
                file_mock.write.side_effect = mock_write
                return file_mock
            else:
                # Use normalized path for lookup
                norm_filename = os.path.normpath(filename)
                content = mock_file_content.get(norm_filename, '')
                file_mock = mock_open(read_data=content).return_value
                return file_mock

        mock_file.side_effect = mock_file_read

        # Run the function
        incremental_merge(source_dir, target_dir, media_dir, 2, True)

        # Verify file operations using os.path.join
        mock_file.assert_any_call(source_file, "r")
        mock_file.assert_any_call(target_file, "r")
        mock_file.assert_any_call(target_file, "w")

        # Rest of verification code...
        assert mock_write.called, "Write method was never called"
        written_data = json.loads(''.join(written_chunks))
        assert written_data is not None, "No data was written"
        assert written_data == chat_data_merged, "Merged data does not match expected result"

        messages = written_data["12345678@s.whatsapp.net"]["messages"]
        assert "24690" in messages, "Common message should be present"
        assert "24691" in messages, "Target-only message should be preserved"
        assert "24692" in messages, "Source-only message should be added"
        assert len(messages) == 3, "Should have exactly 3 messages"


def test_incremental_merge_existing_file_no_changes(mock_filesystem):
    """Test merging when target file exists but has no changes"""
    source_dir = "source"
    target_dir = "target"
    media_dir = "media"

    # Setup mock filesystem
    mock_filesystem["exists"].side_effect = lambda x: True
    mock_filesystem["listdir"].return_value = ["chat.json"]

    # Mock file operations with consistent path separators
    source_file = os.path.join(source_dir, "chat.json")
    target_file = os.path.join(target_dir, "chat.json")
    mock_file_content = {
        source_file: json.dumps(chat_data_1),
        target_file: json.dumps(chat_data_1),
    }

    with patch("builtins.open", mock_open()) as mock_file:
        def mock_file_read(filename, mode="r"):
            if mode == 'w':
                file_mock = mock_open().return_value
                return file_mock
            else:
                # Use normalized path for lookup
                norm_filename = os.path.normpath(filename)
                content = mock_file_content.get(norm_filename, '')
                file_mock = mock_open(read_data=content).return_value
                return file_mock

        mock_file.side_effect = mock_file_read

        # Run the function
        incremental_merge(source_dir, target_dir, media_dir, 2, True)

        # Verify no write operations occurred on target file
        write_calls = [
            call for call in mock_file.mock_calls if call[0] == "().write"]
        assert len(write_calls) == 0


def test_incremental_merge_media_copy(mock_filesystem):
    """Test media file copying during merge"""
    source_dir = "source"
    target_dir = "target"
    media_dir = "media"

    # Setup mock filesystem
    mock_filesystem["exists"].side_effect = lambda x: True
    mock_filesystem["listdir"].return_value = ["chat.json"]
    mock_filesystem["walk"].return_value = [
        (os.path.join(source_dir, "media"), ["subfolder"], ["file1.jpg"]),
        (os.path.join(source_dir, "media", "subfolder"), [], ["file2.jpg"]),
    ]
    mock_filesystem["getmtime"].side_effect = lambda x: 1000 if "source" in x else 500

    # Mock file operations with consistent path separators
    source_file = os.path.join(source_dir, "chat.json")
    target_file = os.path.join(target_dir, "chat.json")
    mock_file_content = {
        source_file: json.dumps(chat_data_1),
        target_file: json.dumps(chat_data_1),
    }

    with patch("builtins.open", mock_open()) as mock_file:
        def mock_file_read(filename, mode="r"):
            if mode == 'w':
                file_mock = mock_open().return_value
                return file_mock
            else:
                # Use normalized path for lookup
                norm_filename = os.path.normpath(filename)
                content = mock_file_content.get(norm_filename, '')
                file_mock = mock_open(read_data=content).return_value
                return file_mock

        mock_file.side_effect = mock_file_read

        # Run the function
        incremental_merge(source_dir, target_dir, media_dir, 2, True)

        # Verify media file operations
        assert mock_filesystem["makedirs"].call_count >= 2  # At least target dir and media dir
        assert mock_filesystem["copy2"].call_count == 2  # Two media files copied
76 tests/test_nuitka_binary.py Normal file
@@ -0,0 +1,76 @@
import os
import sys
import pytest
import subprocess


@pytest.fixture
def command_runner():
    """
    A pytest fixture to simplify running commands. This is a helper
    function that you can use in multiple tests.
    """
    def _run_command(command_list, check=True):
        """
        Runs a command and returns the result.

        Args:
            command_list (list): A list of strings representing the command
                and its arguments (e.g., ["python", "my_script.py", "arg1"]).
            check (bool, optional): If True, raise an exception if the
                command returns a non-zero exit code. Defaults to True.

        Returns:
            subprocess.CompletedProcess: The result of the command.
        """
        return subprocess.run(
            command_list,
            capture_output=True,
            text=True,
            check=check,
        )
    return _run_command


def test_nuitka_binary():
    """
    Tests the creation and execution of a Nuitka-compiled binary.
    """

    if sys.version_info >= (3, 14):
        print("Skipping Nuitka test: Python 3.14 is not yet fully supported by Nuitka.")
        return

    nuitka_command = [
        "python", "-m", "nuitka", "--onefile", "--assume-yes-for-downloads",
        "--include-data-file=./Whatsapp_Chat_Exporter/whatsapp.html=./Whatsapp_Chat_Exporter/whatsapp.html",
        "Whatsapp_Chat_Exporter",
        "--output-filename=wtsexporter.exe"  # use .exe on all platforms for compatibility
    ]

    compile_result = subprocess.run(
        nuitka_command,
        capture_output=True,
        text=True,
        check=True
    )
    print(f"Nuitka compilation output: {compile_result.stdout}")

    binary_path = "./wtsexporter.exe"
    assert os.path.exists(binary_path), f"Binary {binary_path} was not created."

    try:
        execute_result = subprocess.run(
            [binary_path, "--help"],
            capture_output=True,
            text=True,
            check=True,
        )
        print(f"Binary execution output: {execute_result.stdout}")
        assert "usage:" in execute_result.stdout.lower(), "Binary did not produce expected help output."
    except subprocess.CalledProcessError as e:
        print(f"Binary execution failed with error: {e.stderr}")
        raise
    finally:
        if os.path.exists(binary_path):
            os.remove(binary_path)
352 tests/test_utility.py Normal file
@@ -0,0 +1,352 @@
import pytest
import random
import string
from unittest.mock import patch, mock_open, MagicMock
from Whatsapp_Chat_Exporter.utility import *


def test_convert_time_unit():
    assert convert_time_unit(0) == "less than a second"
    assert convert_time_unit(1) == "a second"
    assert convert_time_unit(10) == "10 seconds"
    assert convert_time_unit(60) == "1 minute"
    assert convert_time_unit(61) == "1 minute 1 second"
    assert convert_time_unit(122) == "2 minutes 2 seconds"
    assert convert_time_unit(3600) == "1 hour"
    assert convert_time_unit(3661) == "1 hour 1 minute 1 second"
    assert convert_time_unit(3720) == "1 hour 2 minutes"
    assert convert_time_unit(3660) == "1 hour 1 minute"
    assert convert_time_unit(7263) == "2 hours 1 minute 3 seconds"
    assert convert_time_unit(86400) == "1 day"
    assert convert_time_unit(86461) == "1 day 1 minute 1 second"
    assert convert_time_unit(172805) == "2 days 5 seconds"


class TestBytesToReadable:
    assert bytes_to_readable(0) == "0 B"
    assert bytes_to_readable(500) == "500 B"
    assert bytes_to_readable(1024) == "1.0 KB"
    assert bytes_to_readable(2048) == "2.0 KB"
    assert bytes_to_readable(1536) == "1.5 KB"
    assert bytes_to_readable(1024**2) == "1.0 MB"
    assert bytes_to_readable(5 * 1024**2) == "5.0 MB"
    assert bytes_to_readable(1024**3) == "1.0 GB"
    assert bytes_to_readable(1024**4) == "1.0 TB"
    assert bytes_to_readable(1024**5) == "1.0 PB"
    assert bytes_to_readable(1024**6) == "1.0 EB"
    assert bytes_to_readable(1024**7) == "1.0 ZB"
    assert bytes_to_readable(1024**8) == "1.0 YB"


class TestReadableToBytes:
    def test_conversion(self):
        assert readable_to_bytes("0B") == 0
        assert readable_to_bytes("100B") == 100
        assert readable_to_bytes("50 B") == 50
        assert readable_to_bytes("1KB") == 1024
        assert readable_to_bytes("2.5 KB") == 2560
        assert readable_to_bytes("2.0 KB") == 2048
        assert readable_to_bytes("1MB") == 1024**2
        assert readable_to_bytes("0.5 MB") == 524288
        assert readable_to_bytes("1. MB") == 1048576
        assert readable_to_bytes("1GB") == 1024**3
        assert readable_to_bytes("1.GB") == 1024**3
        assert readable_to_bytes("1TB") == 1024**4
        assert readable_to_bytes("1PB") == 1024**5
        assert readable_to_bytes("1EB") == 1024**6
        assert readable_to_bytes("1ZB") == 1024**7
        assert readable_to_bytes("1YB") == 1024**8

    def test_case_insensitivity(self):
        assert readable_to_bytes("1kb") == 1024
        assert readable_to_bytes("2mB") == 2 * 1024**2

    def test_whitespace(self):
        assert readable_to_bytes(" 10 KB ") == 10 * 1024
        assert readable_to_bytes(" 1 MB") == 1024**2

    def test_invalid_unit(self):
        with pytest.raises(ValueError, match="Invalid size format for size_str"):
            readable_to_bytes("100X")
            readable_to_bytes("A100")
            readable_to_bytes("100$$$$$")

    def test_invalid_number(self):
        with pytest.raises(ValueError, match="Invalid size format for size_str"):
            readable_to_bytes("ABC KB")

    def test_missing_unit(self):
        assert readable_to_bytes("100") == 100


class TestSanitizeExcept:
    def test_no_tags(self):
        html = "This is plain text."
        assert sanitize_except(html) == Markup("This is plain text.")

    def test_allowed_br_tag(self):
        html = "Line 1<br>Line 2"
        assert sanitize_except(html) == Markup("Line 1<br>Line 2")
        html = "<br/>Line"
        assert sanitize_except(html) == Markup("<br>Line")
        html = "Line<br />"
        assert sanitize_except(html) == Markup("Line<br>")

    def test_mixed_tags(self):
        html = "<b>Bold</b><br><i>Italic</i><img src='evil.gif'><script>alert('XSS')</script>"
        assert sanitize_except(html) == Markup(
            "<b>Bold</b><br><i>Italic</i><img src='evil.gif'><script>alert('XSS')</script>")

    def test_attribute_stripping(self):
        html = "<br class='someclass'>"
        assert sanitize_except(html) == Markup("<br>")


class TestDetermineDay:
    def test_same_day(self):
        timestamp1 = 1678838400  # March 15, 2023 00:00:00 GMT
        timestamp2 = 1678881600  # March 15, 2023 12:00:00 GMT
        assert determine_day(timestamp1, timestamp2) is None

    def test_different_day(self):
        timestamp1 = 1678886400  # March 15, 2023 00:00:00 GMT
        timestamp2 = 1678972800  # March 16, 2023 00:00:00 GMT
        assert determine_day(timestamp1, timestamp2) == datetime(2023, 3, 16).date()

    def test_crossing_month(self):
        timestamp1 = 1680220800  # March 31, 2023 00:00:00 GMT
        timestamp2 = 1680307200  # April 1, 2023 00:00:00 GMT
        assert determine_day(timestamp1, timestamp2) == datetime(2023, 4, 1).date()

    def test_crossing_year(self):
        timestamp1 = 1703980800  # December 31, 2023 00:00:00 GMT
        timestamp2 = 1704067200  # January 1, 2024 00:00:00 GMT
        assert determine_day(timestamp1, timestamp2) == datetime(2024, 1, 1).date()


class TestGetFileName:
    def test_valid_contact_phone_number_no_chat_name(self):
        chat = ChatStore(Device.ANDROID, name=None)
        filename, name = get_file_name("1234567890@s.whatsapp.net", chat)
        assert filename == "1234567890"
        assert name == "1234567890"

    def test_valid_contact_phone_number_with_chat_name(self):
        chat = ChatStore(Device.IOS, name="My Chat Group")
        filename, name = get_file_name("1234567890@s.whatsapp.net", chat)
        assert filename == "1234567890-My-Chat-Group"
        assert name == "My Chat Group"

    def test_valid_contact_exported_chat(self):
        chat = ChatStore(Device.ANDROID, name="Testing")
        filename, name = get_file_name("ExportedChat", chat)
        assert filename == "ExportedChat-Testing"
        assert name == "Testing"

    def test_valid_contact_special_ids(self):
        chat = ChatStore(Device.ANDROID, name="Special Chat")
        filename_000, name_000 = get_file_name("000000000000000", chat)
        assert filename_000 == "000000000000000-Special-Chat"
        assert name_000 == "Special Chat"
        filename_001, name_001 = get_file_name("000000000000001", chat)
        assert filename_001 == "000000000000001-Special-Chat"
        assert name_001 == "Special Chat"

    def test_unexpected_contact_format(self):
        chat = ChatStore(Device.ANDROID, name="Some Chat")
        with pytest.raises(ValueError, match="Unexpected contact format: invalid-contact"):
            get_file_name("invalid-contact", chat)

    def test_contact_with_hyphen_and_chat_name(self):
        chat = ChatStore(Device.ANDROID, name="Another Chat")
        filename, name = get_file_name("123-456-7890@g.us", chat)
        assert filename == "Another-Chat"
        assert name == "Another Chat"

    def test_contact_with_hyphen_no_chat_name(self):
        chat = ChatStore(Device.ANDROID, name=None)
        filename, name = get_file_name("123-456-7890@g.us", chat)
        assert filename == "123-456-7890"
        assert name == "123-456-7890"


class TestGetCondForEmpty:
    def test_enable_true(self):
        condition = get_cond_for_empty(True, "c.jid", "c.broadcast")
        assert condition == "AND (chat.hidden=0 OR c.jid='status@broadcast' OR c.broadcast>0)"

    def test_enable_false(self):
        condition = get_cond_for_empty(False, "other_jid", "other_broadcast")
        assert condition == ""


class TestGetChatCondition:
    ...


class TestGetStatusLocation:
    @patch('os.path.isdir')
    @patch('os.path.isfile')
    @patch('os.mkdir')
    @patch('urllib.request.urlopen')
    @patch('builtins.open', new_callable=mock_open)
    def test_offline_static_set(self, mock_open_file, mock_urlopen, mock_mkdir, mock_isfile, mock_isdir):
        mock_isdir.return_value = False
        mock_isfile.return_value = False
        mock_response = MagicMock()
        mock_response.read.return_value = b'W3.CSS Content'
        mock_urlopen.return_value.__enter__.return_value = mock_response
        output_folder = "output_folder"
        offline_static = "offline_static"

        result = get_status_location(output_folder, offline_static)

        assert result == os.path.join(offline_static, "w3.css")
        mock_mkdir.assert_called_once_with(os.path.join(output_folder, offline_static))
        mock_urlopen.assert_called_once_with("https://www.w3schools.com/w3css/4/w3.css")
        mock_open_file.assert_called_once_with(os.path.join(output_folder, offline_static, "w3.css"), "wb")
        mock_open_file().write.assert_called_once_with(b'W3.CSS Content')

    def test_offline_static_not_set(self):
        result = get_status_location("output_folder", "")
        assert result == "https://www.w3schools.com/w3css/4/w3.css"


class TestSafeName:
    def generate_random_string(length=50):
        random.seed(10)
        return ''.join(random.choice(string.ascii_letters + string.digits + "äöüß") for _ in range(length))

    safe_name_test_cases = [
        ("This is a test string", "This-is-a-test-string"),
        ("This is a test string with special characters!@#$%^&*()",
         "This-is-a-test-string-with-special-characters"),
        ("This is a test string with numbers 1234567890", "This-is-a-test-string-with-numbers-1234567890"),
        ("This is a test string with mixed case ThisIsATestString",
         "This-is-a-test-string-with-mixed-case-ThisIsATestString"),
        ("This is a test string with extra spaces \u00A0 \u00A0 \u00A0 ThisIsATestString",
         "This-is-a-test-string-with-extra-spaces-ThisIsATestString"),
        ("This is a test string with unicode characters äöüß",
         "This-is-a-test-string-with-unicode-characters-äöüß"),
        ("這是一個包含中文的測試字符串", "這是一個包含中文的測試字符串"),  # Chinese characters, should stay as is
        (
            f"This is a test string with long length {generate_random_string(1000)}",
            f"This-is-a-test-string-with-long-length-{generate_random_string(1000)}",
        ),
        ("", ""),  # Empty string
        (" ", ""),  # String with only space
        ("---", "---"),  # String with only hyphens
        ("___", "___"),  # String with only underscores
        ("a" * 100, "a" * 100),  # Long string with single character
        ("a-b-c-d-e", "a-b-c-d-e"),  # String with hyphen
        ("a_b_c_d_e", "a_b_c_d_e"),  # String with underscore
        ("a b c d e", "a-b-c-d-e"),  # String with spaces
        ("test.com/path/to/resource?param1=value1&param2=value2",
         "test.compathtoresourceparam1value1param2value2"),  # Test with URL
        ("filename.txt", "filename.txt"),  # Test with filename
        ("Αυτή είναι μια δοκιμαστική συμβολοσειρά με ελληνικούς χαρακτήρες.",
         "Αυτή-είναι-μια-δοκιμαστική-συμβολοσειρά-με-ελληνικούς-χαρακτήρες."),  # Greek characters
        ("This is a test with комбинированные знаки  ̆ example",
         "This-is-a-test-with-комбинированные-знаки-example")  # Mixed with unicode
    ]

    @pytest.mark.parametrize("input_text, expected_output", safe_name_test_cases)
    def test_safe_name(self, input_text, expected_output):
        result = safe_name(input_text)
        assert result == expected_output


class TestGetChatCondition:
    def test_no_filter(self):
        """Test when filter is None"""
        result = get_chat_condition(None, True, ["column1", "column2"])
        assert result == ""

        result = get_chat_condition(None, False, ["column1"])
        assert result == ""

    def test_include_single_chat_single_column(self):
        """Test including a single chat with single column"""
        result = get_chat_condition(["1234567890"], True, ["phone"])
        assert result == "AND ( phone LIKE '%1234567890%')"

    def test_include_multiple_chats_single_column(self):
        """Test including multiple chats with single column"""
        result = get_chat_condition(["1234567890", "0987654321"], True, ["phone"])
        assert result == "AND ( phone LIKE '%1234567890%' OR phone LIKE '%0987654321%')"

    def test_exclude_single_chat_single_column(self):
        """Test excluding a single chat with single column"""
        result = get_chat_condition(["1234567890"], False, ["phone"])
        assert result == "AND ( phone NOT LIKE '%1234567890%')"

    def test_exclude_multiple_chats_single_column(self):
        """Test excluding multiple chats with single column"""
        result = get_chat_condition(["1234567890", "0987654321"], False, ["phone"])
        assert result == "AND ( phone NOT LIKE '%1234567890%' AND phone NOT LIKE '%0987654321%')"

    def test_include_with_jid_android(self):
        """Test including chats with JID for Android platform"""
        result = get_chat_condition(["1234567890"], True, ["phone", "name"], "jid", "android")
        assert result == "AND ( phone LIKE '%1234567890%' OR (name LIKE '%1234567890%' AND jid.type == 1))"

    def test_include_with_jid_ios(self):
        """Test including chats with JID for iOS platform"""
        result = get_chat_condition(["1234567890"], True, ["phone", "name"], "jid", "ios")
        assert result == "AND ( phone LIKE '%1234567890%' OR (name LIKE '%1234567890%' AND jid IS NOT NULL))"

    def test_exclude_with_jid_android(self):
        """Test excluding chats with JID for Android platform"""
        result = get_chat_condition(["1234567890"], False, ["phone", "name"], "jid", "android")
        assert result == "AND ( phone NOT LIKE '%1234567890%' AND (name NOT LIKE '%1234567890%' AND jid.type == 1))"

    def test_exclude_with_jid_ios(self):
        """Test excluding chats with JID for iOS platform"""
        result = get_chat_condition(["1234567890"], False, ["phone", "name"], "jid", "ios")
        assert result == "AND ( phone NOT LIKE '%1234567890%' AND (name NOT LIKE '%1234567890%' AND jid IS NOT NULL))"

    def test_multiple_chats_with_jid_android(self):
        """Test multiple chats with JID for Android platform"""
        result = get_chat_condition(["1234567890", "0987654321"], True, ["phone", "name"], "jid", "android")
        expected = "AND ( phone LIKE '%1234567890%' OR (name LIKE '%1234567890%' AND jid.type == 1) OR phone LIKE '%0987654321%' OR (name LIKE '%0987654321%' AND jid.type == 1))"
        assert result == expected

    def test_multiple_chats_exclude_with_jid_android(self):
        """Test excluding multiple chats with JID for Android platform"""
        result = get_chat_condition(["1234567890", "0987654321"], False, ["phone", "name"], "jid", "android")
        expected = "AND ( phone NOT LIKE '%1234567890%' AND (name NOT LIKE '%1234567890%' AND jid.type == 1) AND phone NOT LIKE '%0987654321%' AND (name NOT LIKE '%0987654321%' AND jid.type == 1))"
        assert result == expected

    def test_invalid_column_count_with_jid(self):
        """Test error when column count is less than 2 but jid is provided"""
        with pytest.raises(ValueError, match="There must be at least two elements in argument columns if jid is not None"):
            get_chat_condition(["1234567890"], True, ["phone"], "jid", "android")

    def test_unsupported_platform(self):
        """Test error when unsupported platform is provided"""
        with pytest.raises(ValueError, match="Only android and ios are supported for argument platform if jid is not None"):
            get_chat_condition(["1234567890"], True, ["phone", "name"], "jid", "windows")

    def test_empty_filter_list(self):
        """Test with empty filter list"""
        result = get_chat_condition([], True, ["phone"])
        assert result == ""

        result = get_chat_condition([], False, ["phone"])
        assert result == ""

    def test_filter_with_empty_strings(self):
        """Test with filter containing empty strings"""
        result = get_chat_condition(["", "1234567890"], True, ["phone"])
        assert result == "AND ( phone LIKE '%%' OR phone LIKE '%1234567890%')"

        result = get_chat_condition([""], True, ["phone"])
        assert result == "AND ( phone LIKE '%%')"

    def test_special_characters_in_filter(self):
        """Test with special characters in filter values"""
        result = get_chat_condition(["test@example.com"], True, ["email"])
        assert result == "AND ( email LIKE '%test@example.com%')"

        result = get_chat_condition(["user-name"], True, ["username"])
        assert result == "AND ( username LIKE '%user-name%')"
48 tests/test_vcards_contacts.py Normal file
@@ -0,0 +1,48 @@
# from contacts_names_from_vcards import readVCardsFile

import os
from Whatsapp_Chat_Exporter.vcards_contacts import normalize_number, read_vcards_file


def test_readVCardsFile():
    data_dir = os.path.join(os.path.dirname(__file__), "data")
    data = read_vcards_file(os.path.join(data_dir, "contacts.vcf"), "852")
    if data:
        print("Found Names")
        print("-----------------------")
        for count, contact_tuple in enumerate(data, start=1):
            # The name is the second element of the tuple (at index 1)
            name = contact_tuple[1]

            # Print the count and the name
            print(f"{count}. {name}")
    print(data)
    assert len(data) == 6
    # Test simple contact name
    assert data[0][1] == "Sample Contact"
    # Test complex name
    assert data[1][1] == "Yard Lawn Guy, Jose Lopez"
    # Test name with emoji
    assert data[2][1] == "John Butler 🌟💫🌟"
    # Test note with multi-line encoding
    assert data[3][1] == "Airline Contact #'s"
    # Test address with multi-line encoding
    assert data[4][1] == "James Peacock Elementary"
    # Test business entry using ORG but not F/FN
    assert data[5][1] == "AAA Car Service"


def test_create_number_to_name_dicts():
    pass


def test_fuzzy_match_numbers():
    pass


def test_normalize_number():
    assert normalize_number('0531234567', '1') == '1531234567'
    assert normalize_number('001531234567', '2') == '1531234567'
    assert normalize_number('+1531234567', '34') == '1531234567'
    assert normalize_number('053(123)4567', '34') == '34531234567'
    assert normalize_number('0531-234-567', '58') == '58531234567'