mirror of
https://github.com/KnugiHK/WhatsApp-Chat-Exporter.git
synced 2026-02-01 07:09:08 +00:00
Compare commits
221 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bac2efe15a | ||
|
|
9a6ee3ce5f | ||
|
|
823a89e677 | ||
|
|
945b422f71 | ||
|
|
19008a80bc | ||
|
|
4e877987fb | ||
|
|
322b12a5a4 | ||
|
|
1560c49644 | ||
|
|
28ba97d72f | ||
|
|
eab98ba0d6 | ||
|
|
f920ca82b4 | ||
|
|
4eed3ca321 | ||
|
|
746e4e1ac5 | ||
|
|
1694ae7dd9 | ||
|
|
f05e0d3451 | ||
|
|
0c5f2b7f13 | ||
|
|
db01d05263 | ||
|
|
2e7953f4ca | ||
|
|
95a52231be | ||
|
|
e0aab06192 | ||
|
|
43b00d8b48 | ||
|
|
bf230db595 | ||
|
|
242e8ee43a | ||
|
|
c32096b26b | ||
|
|
4aa1c26232 | ||
|
|
feca9ae8e0 | ||
|
|
92c325294c | ||
|
|
7dbd0dbe3c | ||
|
|
035e61c4d7 | ||
|
|
96d323e0ed | ||
|
|
35ad2559d7 | ||
|
|
8058ed8219 | ||
|
|
908d8f71ca | ||
|
|
f2b6a39011 | ||
|
|
4f531ec52a | ||
|
|
b69f645ac3 | ||
|
|
f8b959e1e1 | ||
|
|
9be210f34a | ||
|
|
ae7ba3da96 | ||
|
|
00e58ce2c9 | ||
|
|
4245ecc615 | ||
|
|
68dcc6abe0 | ||
|
|
c05e76569b | ||
|
|
a6fe0d93b1 | ||
|
|
2d096eff4d | ||
|
|
ea9675973c | ||
|
|
064b923cfa | ||
|
|
cd35ffc185 | ||
|
|
05bd26b8ed | ||
|
|
d200130335 | ||
|
|
1c7d6f7912 | ||
|
|
94960e4a23 | ||
|
|
79578d867f | ||
|
|
32c93159ac | ||
|
|
6910cc46a4 | ||
|
|
9e0457e720 | ||
|
|
e0967a3104 | ||
|
|
db50f24dd8 | ||
|
|
75fcf33fda | ||
|
|
0ba81e0863 | ||
|
|
647e406ac0 | ||
|
|
9cedcf1767 | ||
|
|
93a020f68d | ||
|
|
401abfb732 | ||
|
|
3538c81605 | ||
|
|
5a20953a81 | ||
|
|
8f29fa0505 | ||
|
|
0a14da9108 | ||
|
|
929534ff80 | ||
|
|
87c1555f03 | ||
|
|
fd325b6b59 | ||
|
|
17e927ffd6 | ||
|
|
5b488359c8 | ||
|
|
d2186447c6 | ||
|
|
82abf7d874 | ||
|
|
5e676f2663 | ||
|
|
5da2772112 | ||
|
|
04a21728a8 | ||
|
|
412efd66a0 | ||
|
|
0ac1612c6c | ||
|
|
8ffeabfca6 | ||
|
|
d5ad085210 | ||
|
|
baaafe1eca | ||
|
|
91f160fc2a | ||
|
|
21cae9fe93 | ||
|
|
a70895f959 | ||
|
|
79d12b9c8b | ||
|
|
ff27918705 | ||
|
|
a1c53c3db2 | ||
|
|
173eb5d02e | ||
|
|
b39aae365a | ||
|
|
10691b954a | ||
|
|
60c421a7d0 | ||
|
|
60ddcc08ed | ||
|
|
02b770a6f4 | ||
|
|
5e1bca53d1 | ||
|
|
968447fef9 | ||
|
|
506442392c | ||
|
|
1c2d3acf1b | ||
|
|
aef568b80b | ||
|
|
42e583ac7c | ||
|
|
ea60f878be | ||
|
|
9d2e06f973 | ||
|
|
dffce977de | ||
|
|
71ca293557 | ||
|
|
75720c6d0a | ||
|
|
5a80fe189d | ||
|
|
bb10203b44 | ||
|
|
ddd0ac3143 | ||
|
|
43658a92c4 | ||
|
|
194ed29a6e | ||
|
|
fa629503f7 | ||
|
|
f6442f9d73 | ||
|
|
02363af637 | ||
|
|
8c9c69a536 | ||
|
|
029700359e | ||
|
|
beaf272a63 | ||
|
|
1d5bad92a7 | ||
|
|
09162bf522 | ||
|
|
da4cea6230 | ||
|
|
2b8af6a2fc | ||
|
|
f04205cb49 | ||
|
|
177b936b25 | ||
|
|
101e554413 | ||
|
|
49851f5874 | ||
|
|
8cf1071c90 | ||
|
|
25fa1cc530 | ||
|
|
deebd6c87e | ||
|
|
f623eddc23 | ||
|
|
5cd8d953ac | ||
|
|
265afc1312 | ||
|
|
9d3e65bd92 | ||
|
|
5aa12482e0 | ||
|
|
716d4af3f3 | ||
|
|
4742ffd858 | ||
|
|
5ed260b0b7 | ||
|
|
99213503c4 | ||
|
|
f89f53cf2d | ||
|
|
0ecfe6c59a | ||
|
|
706466f63b | ||
|
|
24653b8753 | ||
|
|
e408c31415 | ||
|
|
6a0fca3e9d | ||
|
|
bbb558713f | ||
|
|
ea6e72bf0b | ||
|
|
d7ded16239 | ||
|
|
8c2868a60e | ||
|
|
a53e5a2b3d | ||
|
|
3f88f7fe08 | ||
|
|
7b66fe2ee2 | ||
|
|
c70143fb4b | ||
|
|
9c9c4d9ad2 | ||
|
|
96e483a6b0 | ||
|
|
587b743522 | ||
|
|
33149075d3 | ||
|
|
cc410b8503 | ||
|
|
e8acf6da32 | ||
|
|
667c005a67 | ||
|
|
bb48cd381b | ||
|
|
ae6e8ba7e2 | ||
|
|
1eea5fc5c1 | ||
|
|
dd795f3282 | ||
|
|
75c3999567 | ||
|
|
fa41572753 | ||
|
|
0681661660 | ||
|
|
907fe4aa91 | ||
|
|
4bd3c1d74a | ||
|
|
80cb868beb | ||
|
|
904f44dc12 | ||
|
|
520f31651c | ||
|
|
c346199d05 | ||
|
|
3e37bbb021 | ||
|
|
0bb4f52a26 | ||
|
|
a3294ead11 | ||
|
|
e2b773eac5 | ||
|
|
170a108109 | ||
|
|
1348ec89f0 | ||
|
|
db42ad123d | ||
|
|
dad7666adb | ||
|
|
f7d1332a14 | ||
|
|
a58dd78be8 | ||
|
|
3220ed2d3f | ||
|
|
4e1d994aa5 | ||
|
|
4ca56b1c5c | ||
|
|
60790d89e3 | ||
|
|
ed2ec7cb9e | ||
|
|
75c2db6d5c | ||
|
|
352be849a7 | ||
|
|
3e3aeae7ad | ||
|
|
9d76cf60af | ||
|
|
eded9a140f | ||
|
|
5a9944d14b | ||
|
|
b8652fcb96 | ||
|
|
ad267a7226 | ||
|
|
534aea924d | ||
|
|
d0fc620ba6 | ||
|
|
1f9cbc3ad2 | ||
|
|
fab9bc7649 | ||
|
|
8d34300ea5 | ||
|
|
fbffc16452 | ||
|
|
2f15360526 | ||
|
|
5291ed0d6f | ||
|
|
cab54658ee | ||
|
|
96e5823faa | ||
|
|
d7ba73047a | ||
|
|
81f072f899 | ||
|
|
2d8960d5e3 | ||
|
|
bacbcda474 | ||
|
|
9cfbb560eb | ||
|
|
c37e505408 | ||
|
|
f460f76441 | ||
|
|
0dda7b7bd9 | ||
|
|
7cf7329124 | ||
|
|
1207b1e0cc | ||
|
|
b3ce22ddbc | ||
|
|
15d6674644 | ||
|
|
07b525b0c6 | ||
|
|
bd503a0c7f | ||
|
|
dc639d5dac | ||
|
|
ae6a65f98d | ||
|
|
578c961932 |
0
docs.html → .github/docs.html
vendored
0
docs.html → .github/docs.html
vendored
489
.github/generate-website.js
vendored
Normal file
489
.github/generate-website.js
vendored
Normal file
@@ -0,0 +1,489 @@
|
|||||||
|
const fs = require('fs-extra');
|
||||||
|
const marked = require('marked');
|
||||||
|
const path = require('path');
|
||||||
|
const markedAlert = require('marked-alert');
|
||||||
|
|
||||||
|
fs.ensureDirSync('docs');
|
||||||
|
fs.ensureDirSync('docs/imgs');
|
||||||
|
|
||||||
|
if (fs.existsSync('imgs')) {
|
||||||
|
fs.copySync('imgs', 'docs/imgs');
|
||||||
|
}
|
||||||
|
if (fs.existsSync('.github/docs.html')) {
|
||||||
|
fs.copySync('.github/docs.html', 'docs/docs.html');
|
||||||
|
}
|
||||||
|
|
||||||
|
const readmeContent = fs.readFileSync('README.md', 'utf8');
|
||||||
|
|
||||||
|
const toc = `<div class="table-of-contents">
|
||||||
|
<h3>Table of Contents</h3>
|
||||||
|
<ul>
|
||||||
|
<li><a href="#intro">Introduction</a></li>
|
||||||
|
<li><a href="#usage">Usage</a></li>
|
||||||
|
<li><a href="#todo">To Do</a></li>
|
||||||
|
<li><a href="#legal">Legal Stuff & Disclaimer</a></li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
`
|
||||||
|
|
||||||
|
const generateHTML = (content) =>
|
||||||
|
`<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
|
<meta name="title" content="WhatsApp Chat Exporter">
|
||||||
|
<meta name="description" content="Export your WhatsApp conversations from Android and iOS/iPadOS devices to HTML, JSON, or text formats. Supports encrypted backups (Crypt12, Crypt14, Crypt15) and customizable templates.">
|
||||||
|
<meta name="keywords" content="WhatsApp, WhatsApp Chat Exporter, WhatsApp export tool, WhatsApp backup decryption, Crypt12, Crypt14, Crypt15, WhatsApp database parser, WhatsApp chat history, HTML export, JSON export, text export, customizable templates, media handling, vCard import, Python tool, open source, MIT license">
|
||||||
|
<meta name="robots" content="index, follow">
|
||||||
|
<meta name="author" content="KnugiHK">
|
||||||
|
<meta name="license" content="MIT">
|
||||||
|
<meta name="generator" content="Python">
|
||||||
|
<title>WhatsApp Chat Exporter</title>
|
||||||
|
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
|
||||||
|
<style>
|
||||||
|
:root {
|
||||||
|
--primary-color: #128C7E;
|
||||||
|
--secondary-color: #25D366;
|
||||||
|
--dark-color: #075E54;
|
||||||
|
--light-color: #DCF8C6;
|
||||||
|
--text-color: #333;
|
||||||
|
--light-text: #777;
|
||||||
|
--code-bg: #f6f8fa;
|
||||||
|
--border-color: #e1e4e8;
|
||||||
|
}
|
||||||
|
|
||||||
|
* {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
box-sizing: border-box;
|
||||||
|
}
|
||||||
|
|
||||||
|
body {
|
||||||
|
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif;
|
||||||
|
line-height: 1.6;
|
||||||
|
color: var(--text-color);
|
||||||
|
background-color: #f9f9f9;
|
||||||
|
}
|
||||||
|
|
||||||
|
.container {
|
||||||
|
max-width: 1200px;
|
||||||
|
margin: 0 auto;
|
||||||
|
padding: 0 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
header {
|
||||||
|
background-color: var(--primary-color);
|
||||||
|
color: white;
|
||||||
|
padding: 60px 0 40px;
|
||||||
|
text-align: center;
|
||||||
|
box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
header h1 {
|
||||||
|
font-size: 2.8rem;
|
||||||
|
margin-bottom: 16px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.badges {
|
||||||
|
margin: 20px 0;
|
||||||
|
display: flex;
|
||||||
|
justify-content: center;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
gap: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.badge {
|
||||||
|
display: inline-block;
|
||||||
|
margin: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.tagline {
|
||||||
|
font-size: 1.2rem;
|
||||||
|
max-width: 800px;
|
||||||
|
margin: 0 auto;
|
||||||
|
padding: 0 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.main-content {
|
||||||
|
background: white;
|
||||||
|
padding: 40px 0;
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.inner-content {
|
||||||
|
padding: 0 30px;
|
||||||
|
max-width: 900px;
|
||||||
|
margin: 0 auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
h2 {
|
||||||
|
color: var(--dark-color);
|
||||||
|
margin: 30px 0 15px;
|
||||||
|
padding-bottom: 8px;
|
||||||
|
border-bottom: 2px solid var(--light-color);
|
||||||
|
font-size: 1.8rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
h3 {
|
||||||
|
color: var(--dark-color);
|
||||||
|
margin: 25px 0 15px;
|
||||||
|
font-size: 1.4rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
h4 {
|
||||||
|
color: var(--dark-color);
|
||||||
|
margin: 20px 0 10px;
|
||||||
|
font-size: 1.2rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
p, ul, ol {
|
||||||
|
margin-bottom: 16px;
|
||||||
|
}
|
||||||
|
|
||||||
|
ul, ol {
|
||||||
|
padding-left: 25px;
|
||||||
|
}
|
||||||
|
|
||||||
|
a {
|
||||||
|
color: var(--primary-color);
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
a:hover {
|
||||||
|
text-decoration: underline;
|
||||||
|
}
|
||||||
|
|
||||||
|
.alert {
|
||||||
|
background-color: #f8f9fa;
|
||||||
|
border-left: 4px solid #f0ad4e;
|
||||||
|
padding: 15px;
|
||||||
|
margin-bottom: 20px;
|
||||||
|
border-radius: 3px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.alert--tip {
|
||||||
|
border-color: var(--secondary-color);
|
||||||
|
background-color: rgba(37, 211, 102, 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.alert--note {
|
||||||
|
border-color: #0088cc;
|
||||||
|
background-color: rgba(0, 136, 204, 0.1);
|
||||||
|
}
|
||||||
|
.markdown-alert {
|
||||||
|
background-color: #f8f9fa;
|
||||||
|
border-left: 4px solid #f0ad4e;
|
||||||
|
padding: 15px;
|
||||||
|
margin-bottom: 20px;
|
||||||
|
border-radius: 3px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.markdown-alert-note {
|
||||||
|
border-color: #0088cc;
|
||||||
|
background-color: rgba(0, 136, 204, 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.markdown-alert-tip {
|
||||||
|
border-color: var(--secondary-color);
|
||||||
|
background-color: rgba(37, 211, 102, 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.markdown-alert-important {
|
||||||
|
border-color: #d9534f;
|
||||||
|
background-color: rgba(217, 83, 79, 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.markdown-alert-warning {
|
||||||
|
border-color: #f0ad4e;
|
||||||
|
background-color: rgba(240, 173, 78, 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.markdown-alert-caution {
|
||||||
|
border-color: #ff9800;
|
||||||
|
background-color: rgba(255, 152, 0, 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.markdown-alert p {
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.markdown-alert-title {
|
||||||
|
font-weight: 600;
|
||||||
|
margin-bottom: 8px;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 8px;
|
||||||
|
}
|
||||||
|
pre {
|
||||||
|
background-color: var(--code-bg);
|
||||||
|
border-radius: 6px;
|
||||||
|
padding: 16px;
|
||||||
|
overflow-x: auto;
|
||||||
|
margin: 16px 0;
|
||||||
|
border: 1px solid var(--border-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
code {
|
||||||
|
font-family: SFMono-Regular, Consolas, Liberation Mono, Menlo, monospace;
|
||||||
|
font-size: 85%;
|
||||||
|
background-color: var(--code-bg);
|
||||||
|
padding: 0.2em 0.4em;
|
||||||
|
border-radius: 3px;
|
||||||
|
}
|
||||||
|
|
||||||
|
pre code {
|
||||||
|
padding: 0;
|
||||||
|
background-color: transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
.screenshot {
|
||||||
|
max-width: 100%;
|
||||||
|
border-radius: 8px;
|
||||||
|
box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1);
|
||||||
|
margin: 20px 0;
|
||||||
|
border: 1px solid var(--border-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.feature-grid {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: repeat(auto-fill, minmax(280px, 1fr));
|
||||||
|
gap: 20px;
|
||||||
|
margin: 30px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.feature-card {
|
||||||
|
background: white;
|
||||||
|
border-radius: 8px;
|
||||||
|
box-shadow: 0 2px 5px rgba(0, 0, 0, 0.1);
|
||||||
|
padding: 20px;
|
||||||
|
border: 1px solid var(--border-color);
|
||||||
|
transition: transform 0.3s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.feature-card:hover {
|
||||||
|
transform: translateY(-5px);
|
||||||
|
box-shadow: 0 5px 15px rgba(0, 0, 0, 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.feature-icon {
|
||||||
|
font-size: 2rem;
|
||||||
|
color: var(--primary-color);
|
||||||
|
margin-bottom: 15px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.feature-title {
|
||||||
|
font-weight: 600;
|
||||||
|
margin-bottom: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
footer {
|
||||||
|
background-color: var(--dark-color);
|
||||||
|
color: white;
|
||||||
|
text-align: center;
|
||||||
|
padding: 30px 0;
|
||||||
|
margin-top: 50px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn {
|
||||||
|
display: inline-block;
|
||||||
|
background-color: var(--primary-color);
|
||||||
|
color: white;
|
||||||
|
padding: 10px 20px;
|
||||||
|
border-radius: 4px;
|
||||||
|
text-decoration: none;
|
||||||
|
font-weight: 500;
|
||||||
|
transition: background-color 0.3s ease;
|
||||||
|
margin: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn:hover {
|
||||||
|
background-color: var(--dark-color);
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn-secondary {
|
||||||
|
background-color: white;
|
||||||
|
color: var(--primary-color);
|
||||||
|
border: 1px solid var(--primary-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn-secondary:hover {
|
||||||
|
background-color: var(--light-color);
|
||||||
|
color: var(--dark-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.action-buttons {
|
||||||
|
margin: 30px 0;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.table-of-contents {
|
||||||
|
background-color: #f8f9fa;
|
||||||
|
border: 1px solid var(--border-color);
|
||||||
|
border-radius: 6px;
|
||||||
|
padding: 15px 25px;
|
||||||
|
margin: 30px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.table-of-contents h3 {
|
||||||
|
margin-top: 0;
|
||||||
|
margin-bottom: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.table-of-contents ul {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.help-text {
|
||||||
|
color: var(--light-text);
|
||||||
|
font-size: 0.9rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.device-section {
|
||||||
|
padding: 15px;
|
||||||
|
border: 1px solid var(--border-color);
|
||||||
|
border-radius: 6px;
|
||||||
|
margin-bottom: 20px;
|
||||||
|
background-color: #fff;
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (max-width: 768px) {
|
||||||
|
header {
|
||||||
|
padding: 40px 0 30px;
|
||||||
|
}
|
||||||
|
|
||||||
|
header h1 {
|
||||||
|
font-size: 2.2rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.tagline {
|
||||||
|
font-size: 1.1rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.feature-grid {
|
||||||
|
grid-template-columns: 1fr;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<header>
|
||||||
|
<div class="container">
|
||||||
|
<h1>WhatsApp Chat Exporter</h1>
|
||||||
|
<div class="badges">
|
||||||
|
<a href="https://pypi.org/project/whatsapp-chat-exporter/" class="badge"><img src="https://img.shields.io/pypi/v/whatsapp-chat-exporter?label=Latest%20in%20PyPI" alt="Latest in PyPI"></a>
|
||||||
|
<a href="https://github.com/KnugiHK/WhatsApp-Chat-Exporter/blob/main/LICENSE" class="badge"><img src="https://img.shields.io/pypi/l/whatsapp-chat-exporter?color=427B93" alt="License MIT"></a>
|
||||||
|
<a href="https://pypi.org/project/Whatsapp-Chat-Exporter/" class="badge"><img src="https://img.shields.io/pypi/pyversions/Whatsapp-Chat-Exporter" alt="Python"></a>
|
||||||
|
<a href="https://matrix.to/#/#wtsexporter:matrix.org" class="badge"><img src="https://img.shields.io/matrix/wtsexporter:matrix.org.svg?label=Matrix%20Chat%20Room" alt="Matrix Chat Room"></a>
|
||||||
|
</div>
|
||||||
|
<p class="tagline">A customizable Android and iPhone Whatsapp database parser that will give you the history of your Whatsapp conversations in HTML and JSON</p>
|
||||||
|
<div class="action-buttons">
|
||||||
|
<a href="https://github.com/KnugiHK/WhatsApp-Chat-Exporter" class="btn"><i class="fab fa-github"></i> GitHub</a>
|
||||||
|
<a href="https://pypi.org/project/whatsapp-chat-exporter/" class="btn btn-secondary"><i class="fab fa-python"></i> PyPI</a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</header>
|
||||||
|
|
||||||
|
<div class="main-content">
|
||||||
|
<div class="inner-content">
|
||||||
|
<section id="features">
|
||||||
|
<h2>Key Features</h2>
|
||||||
|
|
||||||
|
<div class="feature-grid">
|
||||||
|
<div class="feature-card">
|
||||||
|
<div class="feature-icon"><i class="fas fa-mobile-alt"></i></div>
|
||||||
|
<h3 class="feature-title">Cross-Platform</h3>
|
||||||
|
<p>Support for both Android and iOS/iPadOS WhatsApp databases</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="feature-card">
|
||||||
|
<div class="feature-icon"><i class="fas fa-lock"></i></div>
|
||||||
|
<h3 class="feature-title">Backup Decryption</h3>
|
||||||
|
<p>Support for Crypt12, Crypt14, and Crypt15 (End-to-End) encrypted backups</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="feature-card">
|
||||||
|
<div class="feature-icon"><i class="fas fa-file-export"></i></div>
|
||||||
|
<h3 class="feature-title">Multiple Formats</h3>
|
||||||
|
<p>Export your chats in HTML, JSON, and text formats</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="feature-card">
|
||||||
|
<div class="feature-icon"><i class="fas fa-paint-brush"></i></div>
|
||||||
|
<h3 class="feature-title">Customizable</h3>
|
||||||
|
<p>Use custom HTML templates and styling for your chat exports</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="feature-card">
|
||||||
|
<div class="feature-icon"><i class="fas fa-images"></i></div>
|
||||||
|
<h3 class="feature-title">Media Support</h3>
|
||||||
|
<p>Properly handles and organizes your media files in the exports</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="feature-card">
|
||||||
|
<div class="feature-icon"><i class="fas fa-filter"></i></div>
|
||||||
|
<h3 class="feature-title">Filtering Options</h3>
|
||||||
|
<p>Filter chats by date, phone number, and more</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
|
||||||
|
<div class="readme-content">
|
||||||
|
${content}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class="action-buttons">
|
||||||
|
<a href="https://github.com/KnugiHK/WhatsApp-Chat-Exporter" class="btn"><i class="fab fa-github"></i> View on GitHub</a>
|
||||||
|
<a href="https://pypi.org/project/whatsapp-chat-exporter/" class="btn btn-secondary"><i class="fab fa-python"></i> PyPI Package</a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<footer>
|
||||||
|
<div class="container">
|
||||||
|
<p>© 2021-${new Date().getFullYear()} WhatsApp Chat Exporter</p>
|
||||||
|
<p>Licensed under MIT License</p>
|
||||||
|
<p>
|
||||||
|
<a href="https://github.com/KnugiHK/WhatsApp-Chat-Exporter" style="color: white; margin: 0 10px;"><i class="fab fa-github fa-lg"></i></a>
|
||||||
|
<a href="https://matrix.to/#/#wtsexporter:matrix.org" style="color: white; margin: 0 10px;"><i class="fas fa-comments fa-lg"></i></a>
|
||||||
|
</p>
|
||||||
|
<p><small>Last updated: ${new Date().toLocaleDateString()}</small></p>
|
||||||
|
</div>
|
||||||
|
</footer>
|
||||||
|
|
||||||
|
<script>
|
||||||
|
// Simple script to handle smooth scrolling for anchor links
|
||||||
|
document.querySelectorAll('a[href^="#"]').forEach(anchor => {
|
||||||
|
anchor.addEventListener('click', function(e) {
|
||||||
|
e.preventDefault();
|
||||||
|
|
||||||
|
const targetId = this.getAttribute('href');
|
||||||
|
const targetElement = document.querySelector(targetId);
|
||||||
|
|
||||||
|
if (targetElement) {
|
||||||
|
window.scrollTo({
|
||||||
|
top: targetElement.offsetTop - 20,
|
||||||
|
behavior: 'smooth'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
`;
|
||||||
|
|
||||||
|
const processedContent = readmeContent.replace(/\[!\[.*?\]\(.*?\)\]\(.*?\)/g, '')
|
||||||
|
|
||||||
|
const htmlContent = marked.use(markedAlert()).parse(processedContent, {
|
||||||
|
gfm: true,
|
||||||
|
breaks: true,
|
||||||
|
renderer: new marked.Renderer()
|
||||||
|
});
|
||||||
|
|
||||||
|
const finalHTML = generateHTML(htmlContent);
|
||||||
|
fs.writeFileSync('docs/index.html', finalHTML);
|
||||||
|
|
||||||
|
console.log('Website generated successfully!');
|
||||||
11
.github/pull_request_template.md
vendored
11
.github/pull_request_template.md
vendored
@@ -1,8 +1,11 @@
|
|||||||
|
# Important Note
|
||||||
|
|
||||||
|
**All PRs (except for changes unrelated to source files) should target and start from the `dev` branch.**
|
||||||
|
|
||||||
## Related Issue
|
## Related Issue
|
||||||
- Please reference the related issue here (e.g., `Fixes #123` or `Closes #456`), if there are any.
|
|
||||||
|
- Please put a reference to the related issue here (e.g., `Fixes #123` or `Closes #456`), if there are any.
|
||||||
|
|
||||||
## Description of Changes
|
## Description of Changes
|
||||||
- Briefly describe the changes made in this PR. Explain the purpose, the implementation details, and any important information that reviewers should be aware of.
|
|
||||||
|
|
||||||
## Important (Please remove this section before submitting the PR)
|
- Briefly describe the changes made in this PR. Explain the purpose, the implementation details, and any important information that reviewers should be aware of.
|
||||||
- Before submitting this PR, please make sure to look at **[this issue](https://github.com/KnugiHK/WhatsApp-Chat-Exporter/issues/137)**. It contains crucial context and discussion that may affect the changes in this PR.
|
|
||||||
|
|||||||
50
.github/workflows/ci.yml
vendored
Normal file
50
.github/workflows/ci.yml
vendored
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
name: Run Pytest on Dev Branch Push
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- dev
|
||||||
|
pull_request:
|
||||||
|
jobs:
|
||||||
|
ci:
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
os: [ubuntu-latest]
|
||||||
|
python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]
|
||||||
|
include:
|
||||||
|
- os: windows-latest
|
||||||
|
python-version: "3.13"
|
||||||
|
python_utf8: "1"
|
||||||
|
- os: macos-latest
|
||||||
|
python-version: "3.13"
|
||||||
|
- os: windows-11-arm
|
||||||
|
python-version: "3.13"
|
||||||
|
python_utf8: "1"
|
||||||
|
- os: macos-15-intel
|
||||||
|
python-version: "3.13"
|
||||||
|
- os: windows-latest
|
||||||
|
python-version: "3.14"
|
||||||
|
python_utf8: "1"
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Set up Python ${{ matrix.python-version }} on ${{ matrix.os }}
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: ${{ matrix.python-version }}
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install .[all] pytest nuitka
|
||||||
|
|
||||||
|
- name: Run pytest
|
||||||
|
env:
|
||||||
|
PYTHONUTF8: ${{ matrix.python_utf8 || '0' }}
|
||||||
|
run: pytest
|
||||||
100
.github/workflows/codeql.yml
vendored
Normal file
100
.github/workflows/codeql.yml
vendored
Normal file
@@ -0,0 +1,100 @@
|
|||||||
|
# For most projects, this workflow file will not need changing; you simply need
|
||||||
|
# to commit it to your repository.
|
||||||
|
#
|
||||||
|
# You may wish to alter this file to override the set of languages analyzed,
|
||||||
|
# or to provide custom queries or build logic.
|
||||||
|
#
|
||||||
|
# ******** NOTE ********
|
||||||
|
# We have attempted to detect the languages in your repository. Please check
|
||||||
|
# the `language` matrix defined below to confirm you have the correct set of
|
||||||
|
# supported CodeQL languages.
|
||||||
|
#
|
||||||
|
name: "CodeQL Advanced"
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ "main", "dev" ]
|
||||||
|
pull_request:
|
||||||
|
branches: [ "main", "dev" ]
|
||||||
|
schedule:
|
||||||
|
- cron: '25 21 * * 5'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
analyze:
|
||||||
|
name: Analyze (${{ matrix.language }})
|
||||||
|
# Runner size impacts CodeQL analysis time. To learn more, please see:
|
||||||
|
# - https://gh.io/recommended-hardware-resources-for-running-codeql
|
||||||
|
# - https://gh.io/supported-runners-and-hardware-resources
|
||||||
|
# - https://gh.io/using-larger-runners (GitHub.com only)
|
||||||
|
# Consider using larger runners or machines with greater resources for possible analysis time improvements.
|
||||||
|
runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
|
||||||
|
permissions:
|
||||||
|
# required for all workflows
|
||||||
|
security-events: write
|
||||||
|
|
||||||
|
# required to fetch internal or private CodeQL packs
|
||||||
|
packages: read
|
||||||
|
|
||||||
|
# only required for workflows in private repositories
|
||||||
|
actions: read
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- language: actions
|
||||||
|
build-mode: none
|
||||||
|
- language: python
|
||||||
|
build-mode: none
|
||||||
|
# CodeQL supports the following values keywords for 'language': 'actions', 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
|
||||||
|
# Use `c-cpp` to analyze code written in C, C++ or both
|
||||||
|
# Use 'java-kotlin' to analyze code written in Java, Kotlin or both
|
||||||
|
# Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
|
||||||
|
# To learn more about changing the languages that are analyzed or customizing the build mode for your analysis,
|
||||||
|
# see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning.
|
||||||
|
# If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
|
||||||
|
# your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
# Add any setup steps before running the `github/codeql-action/init` action.
|
||||||
|
# This includes steps like installing compilers or runtimes (`actions/setup-node`
|
||||||
|
# or others). This is typically only required for manual builds.
|
||||||
|
# - name: Setup runtime (example)
|
||||||
|
# uses: actions/setup-example@v1
|
||||||
|
|
||||||
|
# Initializes the CodeQL tools for scanning.
|
||||||
|
- name: Initialize CodeQL
|
||||||
|
uses: github/codeql-action/init@v4
|
||||||
|
with:
|
||||||
|
languages: ${{ matrix.language }}
|
||||||
|
build-mode: ${{ matrix.build-mode }}
|
||||||
|
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||||
|
# By default, queries listed here will override any specified in a config file.
|
||||||
|
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||||
|
|
||||||
|
# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
|
||||||
|
# queries: security-extended,security-and-quality
|
||||||
|
|
||||||
|
# If the analyze step fails for one of the languages you are analyzing with
|
||||||
|
# "We were unable to automatically build your code", modify the matrix above
|
||||||
|
# to set the build mode to "manual" for that language. Then modify this step
|
||||||
|
# to build your code.
|
||||||
|
# ℹ️ Command-line programs to run using the OS shell.
|
||||||
|
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||||
|
- if: matrix.build-mode == 'manual'
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo 'If you are using a "manual" build mode for one or more of the' \
|
||||||
|
'languages you are analyzing, replace this with the commands to build' \
|
||||||
|
'your code, for example:'
|
||||||
|
echo ' make bootstrap'
|
||||||
|
echo ' make release'
|
||||||
|
exit 1
|
||||||
|
|
||||||
|
- name: Perform CodeQL Analysis
|
||||||
|
uses: github/codeql-action/analyze@v4
|
||||||
|
with:
|
||||||
|
category: "/language:${{matrix.language}}"
|
||||||
130
.github/workflows/compile-binary.yml
vendored
130
.github/workflows/compile-binary.yml
vendored
@@ -7,78 +7,146 @@ on:
|
|||||||
|
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
|
id-token: write
|
||||||
|
attestations: write
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
linux:
|
linux:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: '3.13'
|
python-version: '3.13'
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
run: |
|
run: |
|
||||||
python -m pip install --upgrade pip
|
python -m pip install --upgrade pip
|
||||||
pip install pycryptodome vobject javaobj-py3 ordered-set zstandard nuitka==2.6.7
|
pip install pycryptodome javaobj-py3 ordered-set zstandard nuitka==2.8.9
|
||||||
pip install .
|
pip install .
|
||||||
- name: Build binary with Nuitka
|
- name: Build binary with Nuitka
|
||||||
run: |
|
run: |
|
||||||
python -m nuitka --no-deployment-flag=self-execution --onefile \
|
python -m nuitka --onefile \
|
||||||
--include-data-file=./Whatsapp_Chat_Exporter/whatsapp.html=./Whatsapp_Chat_Exporter/whatsapp.html \
|
--include-data-file=./Whatsapp_Chat_Exporter/whatsapp.html=./Whatsapp_Chat_Exporter/whatsapp.html \
|
||||||
--assume-yes-for-downloads --follow-imports Whatsapp_Chat_Exporter/__main__.py --output-filename=wtsexporter_linux_x64
|
--assume-yes-for-downloads Whatsapp_Chat_Exporter --output-filename=wtsexporter_linux_x64
|
||||||
sha256sum wtsexporter_linux_x64
|
sha256sum wtsexporter_linux_x64
|
||||||
- uses: actions/upload-artifact@v4
|
- name: Generate artifact attestation
|
||||||
|
uses: actions/attest-build-provenance@v3
|
||||||
with:
|
with:
|
||||||
name: binary-linux
|
subject-path: ./wtsexporter_linux_x64
|
||||||
path: |
|
- uses: actions/upload-artifact@v6
|
||||||
./wtsexporter_linux_x64
|
with:
|
||||||
|
name: binary-linux-x64
|
||||||
|
path: ./wtsexporter_linux_x64
|
||||||
|
|
||||||
windows:
|
windows-x64:
|
||||||
runs-on: windows-latest
|
runs-on: windows-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: '3.13'
|
python-version: '3.13'
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
run: |
|
run: |
|
||||||
python -m pip install --upgrade pip
|
python -m pip install --upgrade pip
|
||||||
pip install pycryptodome vobject javaobj-py3 ordered-set zstandard nuitka==2.6.7
|
pip install pycryptodome javaobj-py3 ordered-set zstandard nuitka==2.8.9
|
||||||
pip install .
|
pip install .
|
||||||
- name: Build binary with Nuitka
|
- name: Build binary with Nuitka
|
||||||
run: |
|
run: |
|
||||||
python -m nuitka --no-deployment-flag=self-execution --onefile --include-data-file=./Whatsapp_Chat_Exporter/whatsapp.html=./Whatsapp_Chat_Exporter/whatsapp.html --assume-yes-for-downloads --follow-imports Whatsapp_Chat_Exporter\__main__.py --output-filename=wtsexporter
|
python -m nuitka --onefile --include-data-file=./Whatsapp_Chat_Exporter/whatsapp.html=./Whatsapp_Chat_Exporter/whatsapp.html --assume-yes-for-downloads Whatsapp_Chat_Exporter --output-filename=wtsexporter
|
||||||
copy wtsexporter.exe wtsexporter_x64.exe
|
Rename-Item -Path "wtsexporter.exe" -NewName "wtsexporter_win_x64.exe"
|
||||||
Get-FileHash wtsexporter_x64.exe
|
Get-FileHash wtsexporter_win_x64.exe
|
||||||
- uses: actions/upload-artifact@v4
|
- name: Generate artifact attestation
|
||||||
|
uses: actions/attest-build-provenance@v3
|
||||||
with:
|
with:
|
||||||
name: binary-windows
|
subject-path: .\wtsexporter_win_x64.exe
|
||||||
path: |
|
- uses: actions/upload-artifact@v6
|
||||||
.\wtsexporter_x64.exe
|
with:
|
||||||
|
name: binary-windows-x64
|
||||||
|
path: .\wtsexporter_win_x64.exe
|
||||||
|
|
||||||
macos:
|
windows-arm:
|
||||||
|
runs-on: windows-11-arm
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v6
|
||||||
|
with:
|
||||||
|
python-version: '3.13'
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install pycryptodome javaobj-py3 ordered-set zstandard nuitka==2.8.9
|
||||||
|
pip install .
|
||||||
|
- name: Build binary with Nuitka
|
||||||
|
run: |
|
||||||
|
python -m nuitka --onefile --include-data-file=./Whatsapp_Chat_Exporter/whatsapp.html=./Whatsapp_Chat_Exporter/whatsapp.html --assume-yes-for-downloads Whatsapp_Chat_Exporter --output-filename=wtsexporter
|
||||||
|
Rename-Item -Path "wtsexporter.exe" -NewName "wtsexporter_win_arm64.exe"
|
||||||
|
Get-FileHash wtsexporter_win_arm64.exe
|
||||||
|
- name: Generate artifact attestation
|
||||||
|
uses: actions/attest-build-provenance@v3
|
||||||
|
with:
|
||||||
|
subject-path: .\wtsexporter_win_arm64.exe
|
||||||
|
- uses: actions/upload-artifact@v6
|
||||||
|
with:
|
||||||
|
name: binary-windows-arm64
|
||||||
|
path: .\wtsexporter_win_arm64.exe
|
||||||
|
|
||||||
|
macos-arm:
|
||||||
runs-on: macos-latest
|
runs-on: macos-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: '3.13'
|
python-version: '3.13'
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
run: |
|
run: |
|
||||||
python -m pip install --upgrade pip
|
python -m pip install --upgrade pip
|
||||||
pip install pycryptodome vobject javaobj-py3 ordered-set zstandard nuitka==2.6.7
|
pip install pycryptodome javaobj-py3 ordered-set zstandard nuitka==2.8.9
|
||||||
pip install .
|
pip install .
|
||||||
- name: Build binary with Nuitka
|
- name: Build binary with Nuitka
|
||||||
run: |
|
run: |
|
||||||
python -m nuitka --no-deployment-flag=self-execution --onefile \
|
python -m nuitka --onefile \
|
||||||
--include-data-file=./Whatsapp_Chat_Exporter/whatsapp.html=./Whatsapp_Chat_Exporter/whatsapp.html \
|
--include-data-file=./Whatsapp_Chat_Exporter/whatsapp.html=./Whatsapp_Chat_Exporter/whatsapp.html \
|
||||||
--assume-yes-for-downloads --follow-imports Whatsapp_Chat_Exporter/__main__.py --output-filename=wtsexporter_macos_x64
|
--assume-yes-for-downloads Whatsapp_Chat_Exporter --output-filename=wtsexporter
|
||||||
shasum -a 256 wtsexporter_macos_x64
|
mv wtsexporter wtsexporter_macos_arm64
|
||||||
- uses: actions/upload-artifact@v4
|
shasum -a 256 wtsexporter_macos_arm64
|
||||||
|
- name: Generate artifact attestation
|
||||||
|
uses: actions/attest-build-provenance@v3
|
||||||
with:
|
with:
|
||||||
name: binary-macos
|
subject-path: ./wtsexporter_macos_arm64
|
||||||
path: |
|
- uses: actions/upload-artifact@v6
|
||||||
./wtsexporter_macos_x64
|
with:
|
||||||
|
name: binary-macos-arm64
|
||||||
|
path: ./wtsexporter_macos_arm64
|
||||||
|
|
||||||
|
macos-intel:
|
||||||
|
runs-on: macos-15-intel
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v6
|
||||||
|
with:
|
||||||
|
python-version: '3.13'
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install pycryptodome javaobj-py3 ordered-set zstandard nuitka==2.8.9
|
||||||
|
pip install .
|
||||||
|
- name: Build binary with Nuitka
|
||||||
|
run: |
|
||||||
|
python -m nuitka --onefile \
|
||||||
|
--include-data-file=./Whatsapp_Chat_Exporter/whatsapp.html=./Whatsapp_Chat_Exporter/whatsapp.html \
|
||||||
|
--assume-yes-for-downloads Whatsapp_Chat_Exporter --output-filename=wtsexporter
|
||||||
|
mv wtsexporter wtsexporter_macos_x64
|
||||||
|
shasum -a 256 wtsexporter_macos_x64
|
||||||
|
- name: Generate artifact attestation
|
||||||
|
uses: actions/attest-build-provenance@v3
|
||||||
|
with:
|
||||||
|
subject-path: ./wtsexporter_macos_x64
|
||||||
|
- uses: actions/upload-artifact@v6
|
||||||
|
with:
|
||||||
|
name: binary-macos-x64
|
||||||
|
path: ./wtsexporter_macos_x64
|
||||||
43
.github/workflows/generate-website.yml
vendored
Normal file
43
.github/workflows/generate-website.yml
vendored
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
name: Generate Website from README
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
paths:
|
||||||
|
- 'README.md'
|
||||||
|
- '.github/workflows/generate-website.yml'
|
||||||
|
- '.github/generate-website.js'
|
||||||
|
- '.github/docs.html'
|
||||||
|
workflow_dispatch:
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
pages: write
|
||||||
|
jobs:
|
||||||
|
build-and-deploy:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Set up Node.js
|
||||||
|
uses: actions/setup-node@v6
|
||||||
|
with:
|
||||||
|
node-version: '24'
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: npm install marked fs-extra marked-alert
|
||||||
|
|
||||||
|
- name: Generate website from README
|
||||||
|
run: |
|
||||||
|
node .github/generate-website.js
|
||||||
|
echo 'wts.knugi.dev' > ./docs/CNAME
|
||||||
|
|
||||||
|
- name: Deploy to gh-pages
|
||||||
|
if: github.ref == 'refs/heads/main' # Ensure deployment only happens from main
|
||||||
|
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e
|
||||||
|
with:
|
||||||
|
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
publish_dir: ./docs
|
||||||
|
publish_branch: gh-pages
|
||||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -138,7 +138,9 @@ __main__
|
|||||||
|
|
||||||
# Dev time intermidiates & temp files
|
# Dev time intermidiates & temp files
|
||||||
result/
|
result/
|
||||||
|
output/
|
||||||
WhatsApp/
|
WhatsApp/
|
||||||
|
AppDomainGroup-group.net.whatsapp.WhatsApp.shared/
|
||||||
/*.db
|
/*.db
|
||||||
/*.db-*
|
/*.db-*
|
||||||
/myout
|
/myout
|
||||||
|
|||||||
2
LICENSE
2
LICENSE
@@ -1,6 +1,6 @@
|
|||||||
MIT License
|
MIT License
|
||||||
|
|
||||||
Copyright (c) 2021-2023 Knugi
|
Copyright (c) 2021-2026 Knugi
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
|||||||
@@ -1,36 +0,0 @@
|
|||||||
The Whatsapp Chat Exporter is licensed under the MIT license. For more information,
|
|
||||||
refer to the file LICENSE.
|
|
||||||
|
|
||||||
Whatsapp Chat Exporter incorporates code from Django, governed by the three-clause
|
|
||||||
BSD license—a permissive open-source license. The copyright and license details are
|
|
||||||
provided below to adhere to Django's terms.
|
|
||||||
|
|
||||||
------
|
|
||||||
|
|
||||||
Copyright (c) Django Software Foundation and individual contributors.
|
|
||||||
All rights reserved.
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without modification,
|
|
||||||
are permitted provided that the following conditions are met:
|
|
||||||
|
|
||||||
1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
this list of conditions and the following disclaimer.
|
|
||||||
|
|
||||||
2. Redistributions in binary form must reproduce the above copyright
|
|
||||||
notice, this list of conditions and the following disclaimer in the
|
|
||||||
documentation and/or other materials provided with the distribution.
|
|
||||||
|
|
||||||
3. Neither the name of Django nor the names of its contributors may be used
|
|
||||||
to endorse or promote products derived from this software without
|
|
||||||
specific prior written permission.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
|
||||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
||||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
||||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
|
|
||||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
|
||||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
|
||||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
|
||||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
|
||||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
129
README.md
129
README.md
@@ -3,7 +3,7 @@
|
|||||||
[](https://github.com/KnugiHK/WhatsApp-Chat-Exporter/blob/main/LICENSE)
|
[](https://github.com/KnugiHK/WhatsApp-Chat-Exporter/blob/main/LICENSE)
|
||||||
[](https://pypi.org/project/Whatsapp-Chat-Exporter/)
|
[](https://pypi.org/project/Whatsapp-Chat-Exporter/)
|
||||||
[](https://matrix.to/#/#wtsexporter:matrix.org)
|
[](https://matrix.to/#/#wtsexporter:matrix.org)
|
||||||

|
[](https://wts.knugi.dev)
|
||||||
|
|
||||||
A customizable Android and iPhone Whatsapp database parser that will give you the history of your Whatsapp conversations in HTML and JSON. Inspired by [Telegram Chat Export Tool](https://telegram.org/blog/export-and-more).
|
A customizable Android and iPhone Whatsapp database parser that will give you the history of your Whatsapp conversations in HTML and JSON. Inspired by [Telegram Chat Export Tool](https://telegram.org/blog/export-and-more).
|
||||||
> [!TIP]
|
> [!TIP]
|
||||||
@@ -17,6 +17,8 @@ To contribute, see the [Contributing Guidelines](https://github.com/KnugiHK/What
|
|||||||
|
|
||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
> Usage in README may be removed in the future. Check the usage in [Wiki](https://github.com/KnugiHK/Whatsapp-Chat-Exporter/wiki)
|
> Usage in README may be removed in the future. Check the usage in [Wiki](https://github.com/KnugiHK/Whatsapp-Chat-Exporter/wiki)
|
||||||
|
>
|
||||||
|
> Click [here](https://github.com/KnugiHK/WhatsApp-Chat-Exporter/wiki/Android-Usage#crypt15-end-to-end-encrypted-backup) for the most trivia way for exporting from Android
|
||||||
|
|
||||||
First, install the exporter by:
|
First, install the exporter by:
|
||||||
```shell
|
```shell
|
||||||
@@ -50,7 +52,7 @@ wtsexporter -a
|
|||||||
The default WhatsApp contact database typically contained contact names extracted from your phone, which the exporter used to map your chats. However, in some reported cases, the database may have never been populated. In such case, you can export your contacts to a vCard file from your phone or a cloud provider like Google Contacts. Then, install the necessary dependency and run the following command from the shell:
|
The default WhatsApp contact database typically contained contact names extracted from your phone, which the exporter used to map your chats. However, in some reported cases, the database may have never been populated. In such case, you can export your contacts to a vCard file from your phone or a cloud provider like Google Contacts. Then, install the necessary dependency and run the following command from the shell:
|
||||||
```sh
|
```sh
|
||||||
pip install whatsapp-chat-exporter["vcards"]
|
pip install whatsapp-chat-exporter["vcards"]
|
||||||
wtsexporter -a --enrich-from-vcard contacts.vcf --default-country-code 852
|
wtsexporter -a --enrich-from-vcards contacts.vcf --default-country-code 852
|
||||||
```
|
```
|
||||||
|
|
||||||
### Encrypted Android WhatsApp Backup
|
### Encrypted Android WhatsApp Backup
|
||||||
@@ -99,7 +101,7 @@ wtsexporter -a -k encrypted_backup.key -b msgstore.db.crypt15
|
|||||||
```
|
```
|
||||||
If you have the 32 bytes hex key, simply put the hex key in the -k option and invoke the command from shell like this:
|
If you have the 32 bytes hex key, simply put the hex key in the -k option and invoke the command from shell like this:
|
||||||
```sh
|
```sh
|
||||||
wtsexporter -a -k 432435053b5204b08e5c3823423399aa30ff061435ab89bc4e6713969cdaa5a8 -b msgstore.db.crypt15
|
wtsexporter -a -k 133735053b5204b08e5c3823423399aa30ff061435ab89bc4e6713969cda1337 -b msgstore.db.crypt15
|
||||||
```
|
```
|
||||||
|
|
||||||
## Working with iOS/iPadOS (iPhone or iPad)
|
## Working with iOS/iPadOS (iPhone or iPad)
|
||||||
@@ -134,33 +136,42 @@ wtsexporter -i -b ~/Library/Application\ Support/MobileSync/Backup/[device id]
|
|||||||
```
|
```
|
||||||
|
|
||||||
## Results
|
## Results
|
||||||
After extracting, you will get these:
|
After extracting, you will get this:
|
||||||
#### Private Message
|
|
||||||

|

|
||||||
|
|
||||||
#### Group Message
|
|
||||||

|
## Working with Business
|
||||||
|
If you are working with WhatsApp Business, add the `--business` flag to the command
|
||||||
|
```sh
|
||||||
|
wtsexporter -a --business ...other flags
|
||||||
|
wtsexporter -i --business ...other flags
|
||||||
|
```
|
||||||
|
|
||||||
## More options
|
## More options
|
||||||
Invoke the wtsexporter with --help option will show you all options available.
|
Invoke the wtsexporter with --help option will show you all options available.
|
||||||
```sh
|
```sh
|
||||||
> wtsexporter --help
|
> wtsexporter --help
|
||||||
usage: wtsexporter [-h] [-a] [-i] [-e EXPORTED] [-w WA] [-m MEDIA] [-b BACKUP] [-d DB] [-k [KEY]]
|
usage: wtsexporter [-h] [--debug] [-a] [-i] [-e EXPORTED] [-w WA] [-m MEDIA] [-b BACKUP] [-d DB]
|
||||||
[--call-db [CALL_DB_IOS]] [--wab WAB] [-o OUTPUT] [-j [JSON]] [--txt [TEXT_FORMAT]] [--no-html]
|
[-k [KEY]] [--call-db [CALL_DB_IOS]] [--wab WAB] [-o OUTPUT] [-j [JSON]]
|
||||||
[--size [SIZE]] [--avoid-encoding-json] [--pretty-print-json [PRETTY_PRINT_JSON]] [--per-chat]
|
[--txt [TEXT_FORMAT]] [--no-html] [--size [SIZE]] [--no-reply] [--avoid-encoding-json]
|
||||||
[--import] [-t TEMPLATE] [--offline OFFLINE] [--no-avatar] [--experimental-new-theme]
|
[--pretty-print-json [PRETTY_PRINT_JSON]] [--tg] [--per-chat] [--import] [-t TEMPLATE]
|
||||||
[--headline HEADLINE] [-c] [--create-separated-media] [--time-offset {-12 to 14}] [--date DATE]
|
[--offline OFFLINE] [--no-avatar] [--old-theme] [--headline HEADLINE] [-c]
|
||||||
|
[--create-separated-media] [--time-offset {-12 to 14}] [--date DATE]
|
||||||
[--date-format FORMAT] [--include [phone number ...]] [--exclude [phone number ...]]
|
[--date-format FORMAT] [--include [phone number ...]] [--exclude [phone number ...]]
|
||||||
[--dont-filter-empty] [--enrich-from-vcards ENRICH_FROM_VCARDS]
|
[--dont-filter-empty] [--enrich-from-vcards ENRICH_FROM_VCARDS]
|
||||||
[--default-country-code DEFAULT_COUNTRY_CODE] [-s] [--check-update] [--assume-first-as-me]
|
[--default-country-code DEFAULT_COUNTRY_CODE] [--incremental-merge]
|
||||||
[--business] [--decrypt-chunk-size DECRYPT_CHUNK_SIZE]
|
[--source-dir SOURCE_DIR] [--target-dir TARGET_DIR] [-s] [--check-update]
|
||||||
[--max-bruteforce-worker MAX_BRUTEFORCE_WORKER]
|
[--check-update-pre] [--assume-first-as-me] [--business]
|
||||||
|
[--decrypt-chunk-size DECRYPT_CHUNK_SIZE]
|
||||||
|
[--max-bruteforce-worker MAX_BRUTEFORCE_WORKER] [--no-banner] [--fix-dot-files]
|
||||||
|
|
||||||
A customizable Android and iOS/iPadOS WhatsApp database parser that will give you the history of your WhatsApp
|
A customizable Android and iOS/iPadOS WhatsApp database parser that will give you the history of your
|
||||||
conversations in HTML and JSON. Android Backup Crypt12, Crypt14 and Crypt15 supported.
|
WhatsApp conversations in HTML and JSON. Android Backup Crypt12, Crypt14 and Crypt15 supported.
|
||||||
|
|
||||||
options:
|
options:
|
||||||
-h, --help show this help message and exit
|
-h, --help show this help message and exit
|
||||||
|
--debug Enable debug mode
|
||||||
|
|
||||||
Device Type:
|
Device Type:
|
||||||
-a, --android Define the target as Android
|
-a, --android Define the target as Android
|
||||||
@@ -172,9 +183,10 @@ Input Files:
|
|||||||
-w, --wa WA Path to contact database (default: wa.db/ContactsV2.sqlite)
|
-w, --wa WA Path to contact database (default: wa.db/ContactsV2.sqlite)
|
||||||
-m, --media MEDIA Path to WhatsApp media folder (default: WhatsApp)
|
-m, --media MEDIA Path to WhatsApp media folder (default: WhatsApp)
|
||||||
-b, --backup BACKUP Path to Android (must be used together with -k)/iOS WhatsApp backup
|
-b, --backup BACKUP Path to Android (must be used together with -k)/iOS WhatsApp backup
|
||||||
-d, --db DB Path to database file (default: msgstore.db/7c7fba66680ef796b916b067077cc246adacf01d)
|
-d, --db DB Path to database file (default:
|
||||||
-k, --key [KEY] Path to key file. If this option is set for crypt15 backup but nothing is specified, you will
|
msgstore.db/7c7fba66680ef796b916b067077cc246adacf01d)
|
||||||
be prompted to enter the key.
|
-k, --key [KEY] Path to key file. If this option is set for crypt15 backup but nothing is
|
||||||
|
specified, you will be prompted to enter the key.
|
||||||
--call-db [CALL_DB_IOS]
|
--call-db [CALL_DB_IOS]
|
||||||
Path to call database (default: 1b432994e958845fffe8e2f190f26d1511534088) iOS only
|
Path to call database (default: 1b432994e958845fffe8e2f190f26d1511534088) iOS only
|
||||||
--wab, --wa-backup WAB
|
--wab, --wa-backup WAB
|
||||||
@@ -183,17 +195,20 @@ Input Files:
|
|||||||
Output Options:
|
Output Options:
|
||||||
-o, --output OUTPUT Output to specific directory (default: result)
|
-o, --output OUTPUT Output to specific directory (default: result)
|
||||||
-j, --json [JSON] Save the result to a single JSON file (default if present: result.json)
|
-j, --json [JSON] Save the result to a single JSON file (default if present: result.json)
|
||||||
--txt [TEXT_FORMAT] Export chats in text format similar to what WhatsApp officially provided (default if present:
|
--txt [TEXT_FORMAT] Export chats in text format similar to what WhatsApp officially provided (default
|
||||||
result/)
|
if present: result/)
|
||||||
--no-html Do not output html files
|
--no-html Do not output html files
|
||||||
--size, --output-size, --split [SIZE]
|
--size, --output-size, --split [SIZE]
|
||||||
Maximum (rough) size of a single output file in bytes, 0 for auto
|
Maximum (rough) size of a single output file in bytes, 0 for auto
|
||||||
|
--no-reply Do not process replies (iOS only) (default: handle replies)
|
||||||
|
|
||||||
JSON Options:
|
JSON Options:
|
||||||
--avoid-encoding-json
|
--avoid-encoding-json
|
||||||
Don't encode non-ascii characters in the output JSON files
|
Don't encode non-ascii characters in the output JSON files
|
||||||
--pretty-print-json [PRETTY_PRINT_JSON]
|
--pretty-print-json [PRETTY_PRINT_JSON]
|
||||||
Pretty print the output JSON.
|
Pretty print the output JSON.
|
||||||
|
--tg, --telegram Output the JSON in a format compatible with Telegram export (implies json-per-
|
||||||
|
chat)
|
||||||
--per-chat Output the JSON file per chat
|
--per-chat Output the JSON file per chat
|
||||||
--import Import JSON file and convert to HTML output
|
--import Import JSON file and convert to HTML output
|
||||||
|
|
||||||
@@ -202,9 +217,9 @@ HTML Options:
|
|||||||
Path to custom HTML template
|
Path to custom HTML template
|
||||||
--offline OFFLINE Relative path to offline static files
|
--offline OFFLINE Relative path to offline static files
|
||||||
--no-avatar Do not render avatar in HTML output
|
--no-avatar Do not render avatar in HTML output
|
||||||
--experimental-new-theme
|
--old-theme Use the old Telegram-alike theme
|
||||||
Use the newly designed WhatsApp-alike theme
|
--headline HEADLINE The custom headline for the HTML output. Use '??' as a placeholder for the chat
|
||||||
--headline HEADLINE The custom headline for the HTML output. Use '??' as a placeholder for the chat name
|
name
|
||||||
|
|
||||||
Media Handling:
|
Media Handling:
|
||||||
-c, --move-media Move the media directory to output directory if the flag is set, otherwise copy it
|
-c, --move-media Move the media directory to output directory if the flag is set, otherwise copy it
|
||||||
@@ -220,35 +235,77 @@ Filtering Options:
|
|||||||
Include chats that match the supplied phone number
|
Include chats that match the supplied phone number
|
||||||
--exclude [phone number ...]
|
--exclude [phone number ...]
|
||||||
Exclude chats that match the supplied phone number
|
Exclude chats that match the supplied phone number
|
||||||
--dont-filter-empty By default, the exporter will not render chats with no valid message. Setting this flag will
|
--dont-filter-empty By default, the exporter will not render chats with no valid message. Setting this
|
||||||
cause the exporter to render those. This is useful if chat(s) are missing from the output
|
flag will cause the exporter to render those. This is useful if chat(s) are
|
||||||
|
missing from the output
|
||||||
|
|
||||||
Contact Enrichment:
|
Contact Enrichment:
|
||||||
--enrich-from-vcards ENRICH_FROM_VCARDS
|
--enrich-from-vcards ENRICH_FROM_VCARDS
|
||||||
Path to an exported vcf file from Google contacts export. Add names missing from WhatsApp's
|
Path to an exported vcf file from Google contacts export. Add names missing from
|
||||||
default database
|
WhatsApp's default database
|
||||||
--default-country-code DEFAULT_COUNTRY_CODE
|
--default-country-code DEFAULT_COUNTRY_CODE
|
||||||
Use with --enrich-from-vcards. When numbers in the vcf file does not have a country code, this
|
Use with --enrich-from-vcards. When numbers in the vcf file does not have a
|
||||||
will be used. 1 is for US, 66 for Thailand etc. Most likely use the number of your own country
|
country code, this will be used. 1 is for US, 66 for Thailand etc. Most likely use
|
||||||
|
the number of your own country
|
||||||
|
|
||||||
|
Incremental Merging:
|
||||||
|
--incremental-merge Performs an incremental merge of two exports. Requires setting both --source-dir
|
||||||
|
and --target-dir. The chats (JSON files only) and media from the source directory
|
||||||
|
will be merged into the target directory. No chat messages or media will be
|
||||||
|
deleted from the target directory; only new chat messages and media will be added
|
||||||
|
to it. This enables chat messages and media to be deleted from the device to free
|
||||||
|
up space, while ensuring they are preserved in the exported backups.
|
||||||
|
--source-dir SOURCE_DIR
|
||||||
|
Sets the source directory. Used for performing incremental merges.
|
||||||
|
--target-dir TARGET_DIR
|
||||||
|
Sets the target directory. Used for performing incremental merges.
|
||||||
|
|
||||||
Miscellaneous:
|
Miscellaneous:
|
||||||
-s, --showkey Show the HEX key used to decrypt the database
|
-s, --showkey Show the HEX key used to decrypt the database
|
||||||
--check-update Check for updates (require Internet access)
|
--check-update Check for updates (require Internet access)
|
||||||
|
--check-update-pre Check for updates including pre-releases (require Internet access)
|
||||||
--assume-first-as-me Assume the first message in a chat as sent by me (must be used together with -e)
|
--assume-first-as-me Assume the first message in a chat as sent by me (must be used together with -e)
|
||||||
--business Use Whatsapp Business default files (iOS only)
|
--business Use Whatsapp Business default files (iOS only)
|
||||||
--decrypt-chunk-size DECRYPT_CHUNK_SIZE
|
--decrypt-chunk-size DECRYPT_CHUNK_SIZE
|
||||||
Specify the chunk size for decrypting iOS backup, which may affect the decryption speed.
|
Specify the chunk size for decrypting iOS backup, which may affect the decryption
|
||||||
|
speed.
|
||||||
--max-bruteforce-worker MAX_BRUTEFORCE_WORKER
|
--max-bruteforce-worker MAX_BRUTEFORCE_WORKER
|
||||||
Specify the maximum number of worker for bruteforce decryption.
|
Specify the maximum number of worker for bruteforce decryption.
|
||||||
|
--no-banner Do not show the banner
|
||||||
|
--fix-dot-files Fix files with a dot at the end of their name (allowing the outputs be stored in
|
||||||
|
FAT filesystems)
|
||||||
|
|
||||||
WhatsApp Chat Exporter: 0.12.0 Licensed with MIT. See https://wts.knugi.dev/docs?dest=osl for all open source
|
WhatsApp Chat Exporter: 0.13.0 Licensed with MIT. See https://wts.knugi.dev/docs?dest=osl for all open
|
||||||
licenses.
|
source licenses.
|
||||||
```
|
```
|
||||||
|
|
||||||
# To do
|
# Verifying Build Integrity
|
||||||
See [issues](https://github.com/KnugiHK/Whatsapp-Chat-Exporter/issues).
|
|
||||||
|
To ensure that the binaries provided in the releases were built directly from this source code via GitHub Actions and have not been tampered with, GitHub Artifact Attestations is used. You can verify the authenticity of any pre-built binaries using the GitHub CLI.
|
||||||
|
|
||||||
|
> [!NOTE]
|
||||||
|
> Requires version 0.13.0 or newer. Legacy binaries are unsupported.
|
||||||
|
|
||||||
|
### Using Bash (Linux/WSL/macOS)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
for file in wtsexporter*; do ; gh attestation verify "$file" -R KnugiHK/WhatsApp-Chat-Exporter; done
|
||||||
|
```
|
||||||
|
|
||||||
|
### Using PowerShell (Windows)
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
gci "wtsexporter*" | % { gh attestation verify $_.FullName -R KnugiHK/WhatsApp-Chat-Exporter }
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
# Python Support Policy
|
||||||
|
|
||||||
|
This project officially supports all non-EOL (End-of-Life) versions of Python. Once a Python version reaches EOL, it is dropped in the next release. See [Python's EOL Schedule](https://devguide.python.org/versions/).
|
||||||
|
|
||||||
|
|
||||||
# Legal Stuff & Disclaimer
|
# Legal Stuff & Disclaimer
|
||||||
|
|
||||||
This is a MIT licensed project.
|
This is a MIT licensed project.
|
||||||
|
|
||||||
The Telegram Desktop's export is the reference for whatsapp.html in this repo.
|
The Telegram Desktop's export is the reference for whatsapp.html in this repo.
|
||||||
|
|||||||
@@ -7,39 +7,61 @@ import shutil
|
|||||||
import json
|
import json
|
||||||
import string
|
import string
|
||||||
import glob
|
import glob
|
||||||
|
import logging
|
||||||
import importlib.metadata
|
import importlib.metadata
|
||||||
from Whatsapp_Chat_Exporter import android_crypt, exported_handler, android_handler
|
from Whatsapp_Chat_Exporter import android_crypt, exported_handler, android_handler
|
||||||
from Whatsapp_Chat_Exporter import ios_handler, ios_media_handler
|
from Whatsapp_Chat_Exporter import ios_handler, ios_media_handler
|
||||||
from Whatsapp_Chat_Exporter.data_model import ChatCollection, ChatStore
|
from Whatsapp_Chat_Exporter.data_model import ChatCollection, ChatStore, Timing
|
||||||
from Whatsapp_Chat_Exporter.utility import APPLE_TIME, Crypt, check_update, DbType
|
from Whatsapp_Chat_Exporter.utility import APPLE_TIME, CURRENT_TZ_OFFSET, Crypt
|
||||||
from Whatsapp_Chat_Exporter.utility import readable_to_bytes, sanitize_filename
|
from Whatsapp_Chat_Exporter.utility import readable_to_bytes, safe_name, bytes_to_readable
|
||||||
from Whatsapp_Chat_Exporter.utility import import_from_json, bytes_to_readable
|
from Whatsapp_Chat_Exporter.utility import import_from_json, incremental_merge, check_update
|
||||||
|
from Whatsapp_Chat_Exporter.utility import telegram_json_format, convert_time_unit, DbType
|
||||||
|
from Whatsapp_Chat_Exporter.utility import get_transcription_selection, check_jid_map
|
||||||
from argparse import ArgumentParser, SUPPRESS
|
from argparse import ArgumentParser, SUPPRESS
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from getpass import getpass
|
from getpass import getpass
|
||||||
|
from tqdm import tqdm
|
||||||
from sys import exit
|
from sys import exit
|
||||||
from typing import Tuple, Optional, List, Dict, Any, Union
|
from typing import Optional, List, Dict
|
||||||
|
from Whatsapp_Chat_Exporter.vcards_contacts import ContactsFromVCards
|
||||||
|
|
||||||
# Try to import vobject for contacts processing
|
|
||||||
try:
|
__version__ = importlib.metadata.version("whatsapp_chat_exporter")
|
||||||
import vobject
|
WTSEXPORTER_BANNER = f"""========================================================================================================
|
||||||
except ModuleNotFoundError:
|
██╗ ██╗██╗ ██╗ █████╗ ████████╗███████╗ █████╗ ██████╗ ██████╗
|
||||||
vcards_deps_installed = False
|
██║ ██║██║ ██║██╔══██╗╚══██╔══╝██╔════╝██╔══██╗██╔══██╗██╔══██╗
|
||||||
else:
|
██║ █╗ ██║███████║███████║ ██║ ███████╗███████║██████╔╝██████╔╝
|
||||||
from Whatsapp_Chat_Exporter.vcards_contacts import ContactsFromVCards
|
██║███╗██║██╔══██║██╔══██║ ██║ ╚════██║██╔══██║██╔═══╝ ██╔═══╝
|
||||||
vcards_deps_installed = True
|
╚███╔███╔╝██║ ██║██║ ██║ ██║ ███████║██║ ██║██║ ██║
|
||||||
|
╚══╝╚══╝ ╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚══════╝╚═╝ ╚═╝╚═╝ ╚═╝
|
||||||
|
|
||||||
|
██████╗██╗ ██╗ █████╗ ████████╗ ███████╗██╗ ██╗██████╗ ██████╗ ██████╗ ████████╗███████╗██████╗
|
||||||
|
██╔════╝██║ ██║██╔══██╗╚══██╔══╝ ██╔════╝╚██╗██╔╝██╔══██╗██╔═══██╗██╔══██╗╚══██╔══╝██╔════╝██╔══██╗
|
||||||
|
██║ ███████║███████║ ██║ █████╗ ╚███╔╝ ██████╔╝██║ ██║██████╔╝ ██║ █████╗ ██████╔╝
|
||||||
|
██║ ██╔══██║██╔══██║ ██║ ██╔══╝ ██╔██╗ ██╔═══╝ ██║ ██║██╔══██╗ ██║ ██╔══╝ ██╔══██╗
|
||||||
|
╚██████╗██║ ██║██║ ██║ ██║ ███████╗██╔╝ ██╗██║ ╚██████╔╝██║ ██║ ██║ ███████╗██║ ██║
|
||||||
|
╚═════╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚══════╝╚═╝ ╚═╝╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚══════╝╚═╝ ╚═╝
|
||||||
|
|
||||||
|
WhatsApp Chat Exporter: A customizable Android and iOS/iPadOS WhatsApp database parser
|
||||||
|
{f"Version: {__version__}".center(104)}
|
||||||
|
========================================================================================================"""
|
||||||
|
|
||||||
|
|
||||||
def setup_argument_parser() -> ArgumentParser:
|
def setup_argument_parser() -> ArgumentParser:
|
||||||
"""Set up and return the argument parser with all options."""
|
"""Set up and return the argument parser with all options."""
|
||||||
parser = ArgumentParser(
|
parser = ArgumentParser(
|
||||||
description='A customizable Android and iOS/iPadOS WhatsApp database parser that '
|
description='A customizable Android and iOS/iPadOS WhatsApp database parser that '
|
||||||
'will give you the history of your WhatsApp conversations in HTML '
|
'will give you the history of your WhatsApp conversations in HTML '
|
||||||
'and JSON. Android Backup Crypt12, Crypt14 and Crypt15 supported.',
|
'and JSON. Android Backup Crypt12, Crypt14 and Crypt15 supported.',
|
||||||
epilog=f'WhatsApp Chat Exporter: {importlib.metadata.version("whatsapp_chat_exporter")} Licensed with MIT. See '
|
epilog=f'WhatsApp Chat Exporter: {__version__} Licensed with MIT. See '
|
||||||
'https://wts.knugi.dev/docs?dest=osl for all open source licenses.'
|
'https://wts.knugi.dev/docs?dest=osl for all open source licenses.'
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# General options
|
||||||
|
parser.add_argument(
|
||||||
|
"--debug", dest="debug", default=False, action='store_true',
|
||||||
|
help="Enable debug mode"
|
||||||
|
)
|
||||||
# Device type arguments
|
# Device type arguments
|
||||||
device_group = parser.add_argument_group('Device Type')
|
device_group = parser.add_argument_group('Device Type')
|
||||||
device_group.add_argument(
|
device_group.add_argument(
|
||||||
@@ -106,9 +128,13 @@ def setup_argument_parser() -> ArgumentParser:
|
|||||||
help="Do not output html files"
|
help="Do not output html files"
|
||||||
)
|
)
|
||||||
output_group.add_argument(
|
output_group.add_argument(
|
||||||
"--size", "--output-size", "--split", dest="size", nargs='?', const=0, default=None,
|
"--size", "--output-size", "--split", dest="size", nargs='?', const="0", default=None,
|
||||||
help="Maximum (rough) size of a single output file in bytes, 0 for auto"
|
help="Maximum (rough) size of a single output file in bytes, 0 for auto"
|
||||||
)
|
)
|
||||||
|
output_group.add_argument(
|
||||||
|
"--no-reply", dest="no_reply_ios", default=False, action='store_true',
|
||||||
|
help="Do not process replies (iOS only) (default: handle replies)"
|
||||||
|
)
|
||||||
|
|
||||||
# JSON formatting options
|
# JSON formatting options
|
||||||
json_group = parser.add_argument_group('JSON Options')
|
json_group = parser.add_argument_group('JSON Options')
|
||||||
@@ -120,6 +146,10 @@ def setup_argument_parser() -> ArgumentParser:
|
|||||||
'--pretty-print-json', dest='pretty_print_json', default=None, nargs='?', const=2, type=int,
|
'--pretty-print-json', dest='pretty_print_json', default=None, nargs='?', const=2, type=int,
|
||||||
help="Pretty print the output JSON."
|
help="Pretty print the output JSON."
|
||||||
)
|
)
|
||||||
|
json_group.add_argument(
|
||||||
|
"--tg", "--telegram", dest="telegram", default=False, action='store_true',
|
||||||
|
help="Output the JSON in a format compatible with Telegram export (implies json-per-chat)"
|
||||||
|
)
|
||||||
json_group.add_argument(
|
json_group.add_argument(
|
||||||
"--per-chat", dest="json_per_chat", default=False, action='store_true',
|
"--per-chat", dest="json_per_chat", default=False, action='store_true',
|
||||||
help="Output the JSON file per chat"
|
help="Output the JSON file per chat"
|
||||||
@@ -148,8 +178,8 @@ def setup_argument_parser() -> ArgumentParser:
|
|||||||
help="Do not render avatar in HTML output"
|
help="Do not render avatar in HTML output"
|
||||||
)
|
)
|
||||||
html_group.add_argument(
|
html_group.add_argument(
|
||||||
"--experimental-new-theme", dest="whatsapp_theme", default=False, action='store_true',
|
"--old-theme", dest="telegram_theme", default=False, action='store_true',
|
||||||
help="Use the newly designed WhatsApp-alike theme"
|
help="Use the old Telegram-alike theme"
|
||||||
)
|
)
|
||||||
html_group.add_argument(
|
html_group.add_argument(
|
||||||
"--headline", dest="headline", default="Chat history with ??",
|
"--headline", dest="headline", default="Chat history with ??",
|
||||||
@@ -207,6 +237,33 @@ def setup_argument_parser() -> ArgumentParser:
|
|||||||
help="Use with --enrich-from-vcards. When numbers in the vcf file does not have a country code, this will be used. 1 is for US, 66 for Thailand etc. Most likely use the number of your own country"
|
help="Use with --enrich-from-vcards. When numbers in the vcf file does not have a country code, this will be used. 1 is for US, 66 for Thailand etc. Most likely use the number of your own country"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Incremental merging
|
||||||
|
inc_merging_group = parser.add_argument_group('Incremental Merging')
|
||||||
|
inc_merging_group.add_argument(
|
||||||
|
"--incremental-merge",
|
||||||
|
dest="incremental_merge",
|
||||||
|
default=False,
|
||||||
|
action='store_true',
|
||||||
|
help=("Performs an incremental merge of two exports. "
|
||||||
|
"Requires setting both --source-dir and --target-dir. "
|
||||||
|
"The chats (JSON files only) and media from the source directory will be merged into the target directory. "
|
||||||
|
"No chat messages or media will be deleted from the target directory; only new chat messages and media will be added to it. "
|
||||||
|
"This enables chat messages and media to be deleted from the device to free up space, while ensuring they are preserved in the exported backups."
|
||||||
|
)
|
||||||
|
)
|
||||||
|
inc_merging_group.add_argument(
|
||||||
|
"--source-dir",
|
||||||
|
dest="source_dir",
|
||||||
|
default=None,
|
||||||
|
help="Sets the source directory. Used for performing incremental merges."
|
||||||
|
)
|
||||||
|
inc_merging_group.add_argument(
|
||||||
|
"--target-dir",
|
||||||
|
dest="target_dir",
|
||||||
|
default=None,
|
||||||
|
help="Sets the target directory. Used for performing incremental merges."
|
||||||
|
)
|
||||||
|
|
||||||
# Miscellaneous
|
# Miscellaneous
|
||||||
misc_group = parser.add_argument_group('Miscellaneous')
|
misc_group = parser.add_argument_group('Miscellaneous')
|
||||||
misc_group.add_argument(
|
misc_group.add_argument(
|
||||||
@@ -217,6 +274,10 @@ def setup_argument_parser() -> ArgumentParser:
|
|||||||
"--check-update", dest="check_update", default=False, action='store_true',
|
"--check-update", dest="check_update", default=False, action='store_true',
|
||||||
help="Check for updates (require Internet access)"
|
help="Check for updates (require Internet access)"
|
||||||
)
|
)
|
||||||
|
misc_group.add_argument(
|
||||||
|
"--check-update-pre", dest="check_update_pre", default=False, action='store_true',
|
||||||
|
help="Check for updates including pre-releases (require Internet access)"
|
||||||
|
)
|
||||||
misc_group.add_argument(
|
misc_group.add_argument(
|
||||||
"--assume-first-as-me", dest="assume_first_as_me", default=False, action='store_true',
|
"--assume-first-as-me", dest="assume_first_as_me", default=False, action='store_true',
|
||||||
help="Assume the first message in a chat as sent by me (must be used together with -e)"
|
help="Assume the first message in a chat as sent by me (must be used together with -e)"
|
||||||
@@ -230,9 +291,17 @@ def setup_argument_parser() -> ArgumentParser:
|
|||||||
help="Specify the chunk size for decrypting iOS backup, which may affect the decryption speed."
|
help="Specify the chunk size for decrypting iOS backup, which may affect the decryption speed."
|
||||||
)
|
)
|
||||||
misc_group.add_argument(
|
misc_group.add_argument(
|
||||||
"--max-bruteforce-worker", dest="max_bruteforce_worker", default=10, type=int,
|
"--max-bruteforce-worker", dest="max_bruteforce_worker", default=4, type=int,
|
||||||
help="Specify the maximum number of worker for bruteforce decryption."
|
help="Specify the maximum number of worker for bruteforce decryption."
|
||||||
)
|
)
|
||||||
|
misc_group.add_argument(
|
||||||
|
"--no-banner", dest="no_banner", default=False, action='store_true',
|
||||||
|
help="Do not show the banner"
|
||||||
|
)
|
||||||
|
misc_group.add_argument(
|
||||||
|
"--fix-dot-files", dest="fix_dot_files", default=False, action='store_true',
|
||||||
|
help="Fix files with a dot at the end of their name (allowing the outputs be stored in FAT filesystems)"
|
||||||
|
)
|
||||||
|
|
||||||
return parser
|
return parser
|
||||||
|
|
||||||
@@ -245,11 +314,16 @@ def validate_args(parser: ArgumentParser, args) -> None:
|
|||||||
if not args.android and not args.ios and not args.exported and not args.import_json:
|
if not args.android and not args.ios and not args.exported and not args.import_json:
|
||||||
parser.error("You must define the device type.")
|
parser.error("You must define the device type.")
|
||||||
if args.no_html and not args.json and not args.text_format:
|
if args.no_html and not args.json and not args.text_format:
|
||||||
parser.error("You must either specify a JSON output file, text file output directory or enable HTML output.")
|
parser.error(
|
||||||
|
"You must either specify a JSON output file, text file output directory or enable HTML output.")
|
||||||
if args.import_json and (args.android or args.ios or args.exported or args.no_html):
|
if args.import_json and (args.android or args.ios or args.exported or args.no_html):
|
||||||
parser.error("You can only use --import with -j and without --no-html, -a, -i, -e.")
|
parser.error(
|
||||||
|
"You can only use --import with -j and without --no-html, -a, -i, -e.")
|
||||||
elif args.import_json and not os.path.isfile(args.json):
|
elif args.import_json and not os.path.isfile(args.json):
|
||||||
parser.error("JSON file not found.")
|
parser.error("JSON file not found.")
|
||||||
|
if args.incremental_merge and (args.source_dir is None or args.target_dir is None):
|
||||||
|
parser.error(
|
||||||
|
"You must specify both --source-dir and --target-dir for incremental merge.")
|
||||||
if args.android and args.business:
|
if args.android and args.business:
|
||||||
parser.error("WhatsApp Business is only available on iOS for now.")
|
parser.error("WhatsApp Business is only available on iOS for now.")
|
||||||
if "??" not in args.headline:
|
if "??" not in args.headline:
|
||||||
@@ -260,18 +334,22 @@ def validate_args(parser: ArgumentParser, args) -> None:
|
|||||||
(args.json.endswith(".json") and os.path.isfile(args.json)) or
|
(args.json.endswith(".json") and os.path.isfile(args.json)) or
|
||||||
(not args.json.endswith(".json") and os.path.isfile(args.json))
|
(not args.json.endswith(".json") and os.path.isfile(args.json))
|
||||||
):
|
):
|
||||||
parser.error("When --per-chat is enabled, the destination of --json must be a directory.")
|
parser.error(
|
||||||
|
"When --per-chat is enabled, the destination of --json must be a directory.")
|
||||||
|
|
||||||
# vCards validation
|
# vCards validation
|
||||||
if args.enrich_from_vcards is not None and args.default_country_code is None:
|
if args.enrich_from_vcards is not None and args.default_country_code is None:
|
||||||
parser.error("When --enrich-from-vcards is provided, you must also set --default-country-code")
|
parser.error(
|
||||||
|
"When --enrich-from-vcards is provided, you must also set --default-country-code")
|
||||||
|
|
||||||
# Size validation
|
# Size validation and conversion
|
||||||
if args.size is not None and not isinstance(args.size, int) and not args.size.isnumeric():
|
if args.size is not None:
|
||||||
try:
|
try:
|
||||||
args.size = readable_to_bytes(args.size)
|
args.size = readable_to_bytes(args.size)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
parser.error("The value for --split must be ended in pure bytes or with a proper unit (e.g., 1048576 or 1MB)")
|
parser.error(
|
||||||
|
"The value for --split must be pure bytes or use a proper unit (e.g., 1048576 or 1MB)"
|
||||||
|
)
|
||||||
|
|
||||||
# Date filter validation and processing
|
# Date filter validation and processing
|
||||||
if args.filter_date is not None:
|
if args.filter_date is not None:
|
||||||
@@ -282,12 +360,13 @@ def validate_args(parser: ArgumentParser, args) -> None:
|
|||||||
args.key = getpass("Enter your encryption key: ")
|
args.key = getpass("Enter your encryption key: ")
|
||||||
|
|
||||||
# Theme validation
|
# Theme validation
|
||||||
if args.whatsapp_theme:
|
if args.telegram_theme:
|
||||||
args.template = "whatsapp_new.html"
|
args.template = "whatsapp_old.html"
|
||||||
|
|
||||||
# Chat filter validation
|
# Chat filter validation
|
||||||
if args.filter_chat_include is not None and args.filter_chat_exclude is not None:
|
if args.filter_chat_include is not None and args.filter_chat_exclude is not None:
|
||||||
parser.error("Chat inclusion and exclusion filters cannot be used together.")
|
parser.error(
|
||||||
|
"Chat inclusion and exclusion filters cannot be used together.")
|
||||||
|
|
||||||
validate_chat_filters(parser, args.filter_chat_include)
|
validate_chat_filters(parser, args.filter_chat_include)
|
||||||
validate_chat_filters(parser, args.filter_chat_exclude)
|
validate_chat_filters(parser, args.filter_chat_exclude)
|
||||||
@@ -298,20 +377,23 @@ def validate_chat_filters(parser: ArgumentParser, chat_filter: Optional[List[str
|
|||||||
if chat_filter is not None:
|
if chat_filter is not None:
|
||||||
for chat in chat_filter:
|
for chat in chat_filter:
|
||||||
if not chat.isnumeric():
|
if not chat.isnumeric():
|
||||||
parser.error("Enter a phone number in the chat filter. See https://wts.knugi.dev/docs?dest=chat")
|
parser.error(
|
||||||
|
"Enter a phone number in the chat filter. See https://wts.knugi.dev/docs?dest=chat")
|
||||||
|
|
||||||
|
|
||||||
def process_date_filter(parser: ArgumentParser, args) -> None:
|
def process_date_filter(parser: ArgumentParser, args) -> None:
|
||||||
"""Process and validate date filter arguments."""
|
"""Process and validate date filter arguments."""
|
||||||
if " - " in args.filter_date:
|
if " - " in args.filter_date:
|
||||||
start, end = args.filter_date.split(" - ")
|
start, end = args.filter_date.split(" - ")
|
||||||
start = int(datetime.strptime(start, args.filter_date_format).timestamp())
|
start = int(datetime.strptime(
|
||||||
|
start, args.filter_date_format).timestamp())
|
||||||
end = int(datetime.strptime(end, args.filter_date_format).timestamp())
|
end = int(datetime.strptime(end, args.filter_date_format).timestamp())
|
||||||
|
|
||||||
if start < 1009843200 or end < 1009843200:
|
if start < 1009843200 or end < 1009843200:
|
||||||
parser.error("WhatsApp was first released in 2009...")
|
parser.error("WhatsApp was first released in 2009...")
|
||||||
if start > end:
|
if start > end:
|
||||||
parser.error("The start date cannot be a moment after the end date.")
|
parser.error(
|
||||||
|
"The start date cannot be a moment after the end date.")
|
||||||
|
|
||||||
if args.android:
|
if args.android:
|
||||||
args.filter_date = f"BETWEEN {start}000 AND {end}000"
|
args.filter_date = f"BETWEEN {start}000 AND {end}000"
|
||||||
@@ -324,9 +406,11 @@ def process_date_filter(parser: ArgumentParser, args) -> None:
|
|||||||
def process_single_date_filter(parser: ArgumentParser, args) -> None:
|
def process_single_date_filter(parser: ArgumentParser, args) -> None:
|
||||||
"""Process single date comparison filters."""
|
"""Process single date comparison filters."""
|
||||||
if len(args.filter_date) < 3:
|
if len(args.filter_date) < 3:
|
||||||
parser.error("Unsupported date format. See https://wts.knugi.dev/docs?dest=date")
|
parser.error(
|
||||||
|
"Unsupported date format. See https://wts.knugi.dev/docs?dest=date")
|
||||||
|
|
||||||
_timestamp = int(datetime.strptime(args.filter_date[2:], args.filter_date_format).timestamp())
|
_timestamp = int(datetime.strptime(
|
||||||
|
args.filter_date[2:], args.filter_date_format).timestamp())
|
||||||
|
|
||||||
if _timestamp < 1009843200:
|
if _timestamp < 1009843200:
|
||||||
parser.error("WhatsApp was first released in 2009...")
|
parser.error("WhatsApp was first released in 2009...")
|
||||||
@@ -342,21 +426,16 @@ def process_single_date_filter(parser: ArgumentParser, args) -> None:
|
|||||||
elif args.ios:
|
elif args.ios:
|
||||||
args.filter_date = f"<= {_timestamp - APPLE_TIME}"
|
args.filter_date = f"<= {_timestamp - APPLE_TIME}"
|
||||||
else:
|
else:
|
||||||
parser.error("Unsupported date format. See https://wts.knugi.dev/docs?dest=date")
|
parser.error(
|
||||||
|
"Unsupported date format. See https://wts.knugi.dev/docs?dest=date")
|
||||||
|
|
||||||
|
|
||||||
def setup_contact_store(args) -> Optional['ContactsFromVCards']:
|
def setup_contact_store(args) -> Optional['ContactsFromVCards']:
|
||||||
"""Set up and return a contact store if needed."""
|
"""Set up and return a contact store if needed."""
|
||||||
if args.enrich_from_vcards is not None:
|
if args.enrich_from_vcards is not None:
|
||||||
if not vcards_deps_installed:
|
|
||||||
print(
|
|
||||||
"You don't have the dependency to enrich contacts with vCard.\n"
|
|
||||||
"Read more on how to deal with enriching contacts:\n"
|
|
||||||
"https://github.com/KnugiHK/Whatsapp-Chat-Exporter/blob/main/README.md#usage"
|
|
||||||
)
|
|
||||||
exit(1)
|
|
||||||
contact_store = ContactsFromVCards()
|
contact_store = ContactsFromVCards()
|
||||||
contact_store.load_vcf_file(args.enrich_from_vcards, args.default_country_code)
|
contact_store.load_vcf_file(
|
||||||
|
args.enrich_from_vcards, args.default_country_code)
|
||||||
return contact_store
|
return contact_store
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@@ -364,10 +443,10 @@ def setup_contact_store(args) -> Optional['ContactsFromVCards']:
|
|||||||
def decrypt_android_backup(args) -> int:
|
def decrypt_android_backup(args) -> int:
|
||||||
"""Decrypt Android backup files and return error code."""
|
"""Decrypt Android backup files and return error code."""
|
||||||
if args.key is None or args.backup is None:
|
if args.key is None or args.backup is None:
|
||||||
print("You must specify the backup file with -b and a key with -k")
|
logging.error(f"You must specify the backup file with -b and a key with -k")
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
print("Decryption key specified, decrypting WhatsApp backup...")
|
logging.info(f"Decryption key specified, decrypting WhatsApp backup...")
|
||||||
|
|
||||||
# Determine crypt type
|
# Determine crypt type
|
||||||
if "crypt12" in args.backup:
|
if "crypt12" in args.backup:
|
||||||
@@ -377,7 +456,8 @@ def decrypt_android_backup(args) -> int:
|
|||||||
elif "crypt15" in args.backup:
|
elif "crypt15" in args.backup:
|
||||||
crypt = Crypt.CRYPT15
|
crypt = Crypt.CRYPT15
|
||||||
else:
|
else:
|
||||||
print("Unknown backup format. The backup file must be crypt12, crypt14 or crypt15.")
|
logging.error(
|
||||||
|
f"Unknown backup format. The backup file must be crypt12, crypt14 or crypt15.")
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
# Get key
|
# Get key
|
||||||
@@ -429,25 +509,26 @@ def decrypt_android_backup(args) -> int:
|
|||||||
def handle_decrypt_error(error: int) -> None:
|
def handle_decrypt_error(error: int) -> None:
|
||||||
"""Handle decryption errors with appropriate messages."""
|
"""Handle decryption errors with appropriate messages."""
|
||||||
if error == 1:
|
if error == 1:
|
||||||
print("Dependencies of decrypt_backup and/or extract_encrypted_key"
|
logging.error("Dependencies of decrypt_backup and/or extract_encrypted_key"
|
||||||
" are not present. For details, see README.md.")
|
" are not present. For details, see README.md.")
|
||||||
exit(3)
|
exit(3)
|
||||||
elif error == 2:
|
elif error == 2:
|
||||||
print("Failed when decompressing the decrypted backup. "
|
logging.error("Failed when decompressing the decrypted backup. "
|
||||||
"Possibly incorrect offsets used in decryption.")
|
"Possibly incorrect offsets used in decryption.")
|
||||||
exit(4)
|
exit(4)
|
||||||
else:
|
else:
|
||||||
print("Unknown error occurred.", error)
|
logging.error("Unknown error occurred.")
|
||||||
exit(5)
|
exit(5)
|
||||||
|
|
||||||
|
|
||||||
def process_contacts(args, data: ChatCollection, contact_store=None) -> None:
|
def process_contacts(args, data: ChatCollection) -> None:
|
||||||
"""Process contacts from the database."""
|
"""Process contacts from the database."""
|
||||||
contact_db = args.wa if args.wa else "wa.db" if args.android else "ContactsV2.sqlite"
|
contact_db = args.wa if args.wa else "wa.db" if args.android else "ContactsV2.sqlite"
|
||||||
|
|
||||||
if os.path.isfile(contact_db):
|
if os.path.isfile(contact_db):
|
||||||
with sqlite3.connect(contact_db) as db:
|
with sqlite3.connect(contact_db) as db:
|
||||||
db.row_factory = sqlite3.Row
|
db.row_factory = sqlite3.Row
|
||||||
|
db.text_factory = lambda b: b.decode(encoding="utf-8", errors="replace")
|
||||||
if args.android:
|
if args.android:
|
||||||
android_handler.contacts(db, data, args.enrich_from_vcards)
|
android_handler.contacts(db, data, args.enrich_from_vcards)
|
||||||
else:
|
else:
|
||||||
@@ -459,32 +540,36 @@ def process_messages(args, data: ChatCollection) -> None:
|
|||||||
msg_db = args.db if args.db else "msgstore.db" if args.android else args.identifiers.MESSAGE
|
msg_db = args.db if args.db else "msgstore.db" if args.android else args.identifiers.MESSAGE
|
||||||
|
|
||||||
if not os.path.isfile(msg_db):
|
if not os.path.isfile(msg_db):
|
||||||
print(
|
logging.error(
|
||||||
"The message database does not exist. You may specify the path "
|
"The message database does not exist. You may specify the path "
|
||||||
"to database file with option -d or check your provided path."
|
"to database file with option -d or check your provided path."
|
||||||
)
|
)
|
||||||
exit(6)
|
exit(6)
|
||||||
|
|
||||||
filter_chat = (args.filter_chat_include, args.filter_chat_exclude)
|
filter_chat = (args.filter_chat_include, args.filter_chat_exclude)
|
||||||
|
timing = Timing(args.timezone_offset if args.timezone_offset else CURRENT_TZ_OFFSET)
|
||||||
|
|
||||||
with sqlite3.connect(msg_db) as db:
|
with sqlite3.connect(msg_db) as db:
|
||||||
db.row_factory = sqlite3.Row
|
db.row_factory = sqlite3.Row
|
||||||
|
db.text_factory = lambda b: b.decode(encoding="utf-8", errors="replace")
|
||||||
|
|
||||||
# Process messages
|
# Process messages
|
||||||
if args.android:
|
if args.android:
|
||||||
message_handler = android_handler
|
message_handler = android_handler
|
||||||
|
data.set_system("jid_map_exists", check_jid_map(db))
|
||||||
|
data.set_system("transcription_selection", get_transcription_selection(db))
|
||||||
else:
|
else:
|
||||||
message_handler = ios_handler
|
message_handler = ios_handler
|
||||||
|
|
||||||
message_handler.messages(
|
message_handler.messages(
|
||||||
db, data, args.media, args.timezone_offset,
|
db, data, args.media, timing, args.filter_date,
|
||||||
args.filter_date, filter_chat, args.filter_empty
|
filter_chat, args.filter_empty, args.no_reply_ios
|
||||||
)
|
)
|
||||||
|
|
||||||
# Process media
|
# Process media
|
||||||
message_handler.media(
|
message_handler.media(
|
||||||
db, data, args.media, args.filter_date,
|
db, data, args.media, args.filter_date,
|
||||||
filter_chat, args.filter_empty, args.separate_media
|
filter_chat, args.filter_empty, args.separate_media, args.fix_dot_files
|
||||||
)
|
)
|
||||||
|
|
||||||
# Process vcards
|
# Process vcards
|
||||||
@@ -494,17 +579,18 @@ def process_messages(args, data: ChatCollection) -> None:
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Process calls
|
# Process calls
|
||||||
process_calls(args, db, data, filter_chat)
|
process_calls(args, db, data, filter_chat, timing)
|
||||||
|
|
||||||
|
|
||||||
def process_calls(args, db, data: ChatCollection, filter_chat) -> None:
|
def process_calls(args, db, data: ChatCollection, filter_chat, timing) -> None:
|
||||||
"""Process call history if available."""
|
"""Process call history if available."""
|
||||||
if args.android:
|
if args.android:
|
||||||
android_handler.calls(db, data, args.timezone_offset, filter_chat)
|
android_handler.calls(db, data, timing, filter_chat)
|
||||||
elif args.ios and args.call_db_ios is not None:
|
elif args.ios and args.call_db_ios is not None:
|
||||||
with sqlite3.connect(args.call_db_ios) as cdb:
|
with sqlite3.connect(args.call_db_ios) as cdb:
|
||||||
cdb.row_factory = sqlite3.Row
|
cdb.row_factory = sqlite3.Row
|
||||||
ios_handler.calls(cdb, data, args.timezone_offset, filter_chat)
|
cdb.text_factory = lambda b: b.decode(encoding="utf-8", errors="replace")
|
||||||
|
ios_handler.calls(cdb, data, timing, filter_chat)
|
||||||
|
|
||||||
|
|
||||||
def handle_media_directory(args) -> None:
|
def handle_media_directory(args) -> None:
|
||||||
@@ -513,28 +599,27 @@ def handle_media_directory(args) -> None:
|
|||||||
media_path = os.path.join(args.output, args.media)
|
media_path = os.path.join(args.output, args.media)
|
||||||
|
|
||||||
if os.path.isdir(media_path):
|
if os.path.isdir(media_path):
|
||||||
print("\nWhatsApp directory already exists in output directory. Skipping...", end="\n")
|
logging.info(
|
||||||
|
f"WhatsApp directory already exists in output directory. Skipping...")
|
||||||
else:
|
else:
|
||||||
if args.move_media:
|
if args.move_media:
|
||||||
try:
|
try:
|
||||||
print("\nMoving media directory...", end="\n")
|
logging.info(f"Moving media directory...", extra={"clear": True})
|
||||||
shutil.move(args.media, f"{args.output}/")
|
shutil.move(args.media, f"{args.output}/")
|
||||||
|
logging.info(f"Media directory has been moved to the output directory")
|
||||||
except PermissionError:
|
except PermissionError:
|
||||||
print("\nCannot remove original WhatsApp directory. "
|
logging.warning("Cannot remove original WhatsApp directory. "
|
||||||
"Perhaps the directory is opened?", end="\n")
|
"Perhaps the directory is opened?")
|
||||||
else:
|
else:
|
||||||
print("\nCopying media directory...", end="\n")
|
logging.info(f"Copying media directory...", extra={"clear": True})
|
||||||
shutil.copytree(args.media, media_path)
|
shutil.copytree(args.media, media_path)
|
||||||
|
logging.info(f"Media directory has been copied to the output directory")
|
||||||
|
|
||||||
|
|
||||||
def create_output_files(args, data: ChatCollection, contact_store=None) -> None:
|
def create_output_files(args, data: ChatCollection) -> None:
|
||||||
"""Create output files in the specified formats."""
|
"""Create output files in the specified formats."""
|
||||||
# Create HTML files if requested
|
# Create HTML files if requested
|
||||||
if not args.no_html:
|
if not args.no_html:
|
||||||
# Enrich from vcards if available
|
|
||||||
if contact_store and not contact_store.is_empty():
|
|
||||||
contact_store.enrich_from_vcards(data)
|
|
||||||
|
|
||||||
android_handler.create_html(
|
android_handler.create_html(
|
||||||
data,
|
data,
|
||||||
args.output,
|
args.output,
|
||||||
@@ -543,32 +628,29 @@ def create_output_files(args, data: ChatCollection, contact_store=None) -> None:
|
|||||||
args.offline,
|
args.offline,
|
||||||
args.size,
|
args.size,
|
||||||
args.no_avatar,
|
args.no_avatar,
|
||||||
args.whatsapp_theme,
|
args.telegram_theme,
|
||||||
args.headline
|
args.headline
|
||||||
)
|
)
|
||||||
|
|
||||||
# Create text files if requested
|
# Create text files if requested
|
||||||
if args.text_format:
|
if args.text_format:
|
||||||
print("Writing text file...")
|
logging.info(f"Writing text file...")
|
||||||
android_handler.create_txt(data, args.text_format)
|
android_handler.create_txt(data, args.text_format)
|
||||||
|
|
||||||
# Create JSON files if requested
|
# Create JSON files if requested
|
||||||
if args.json and not args.import_json:
|
if args.json and not args.import_json:
|
||||||
export_json(args, data, contact_store)
|
export_json(args, data)
|
||||||
|
|
||||||
|
|
||||||
def export_json(args, data: ChatCollection, contact_store=None) -> None:
|
def export_json(args, data: ChatCollection) -> None:
|
||||||
"""Export data to JSON format."""
|
"""Export data to JSON format."""
|
||||||
# Enrich from vcards if available
|
# TODO: remove all non-target chats from data if filtering is applied?
|
||||||
if contact_store and not contact_store.is_empty():
|
|
||||||
contact_store.enrich_from_vcards(data)
|
|
||||||
|
|
||||||
# Convert ChatStore objects to JSON
|
# Convert ChatStore objects to JSON
|
||||||
if isinstance(data.get(next(iter(data), None)), ChatStore):
|
if isinstance(data.get(next(iter(data), None)), ChatStore):
|
||||||
data = {jik: chat.to_json() for jik, chat in data.items()}
|
data = {jik: chat.to_json() for jik, chat in data.items()}
|
||||||
|
|
||||||
# Export as a single file or per chat
|
# Export as a single file or per chat
|
||||||
if not args.json_per_chat:
|
if not args.json_per_chat and not args.telegram:
|
||||||
export_single_json(args, data)
|
export_single_json(args, data)
|
||||||
else:
|
else:
|
||||||
export_multiple_json(args, data)
|
export_multiple_json(args, data)
|
||||||
@@ -582,8 +664,9 @@ def export_single_json(args, data: Dict) -> None:
|
|||||||
ensure_ascii=not args.avoid_encoding_json,
|
ensure_ascii=not args.avoid_encoding_json,
|
||||||
indent=args.pretty_print_json
|
indent=args.pretty_print_json
|
||||||
)
|
)
|
||||||
print(f"\nWriting JSON file...({bytes_to_readable(len(json_data))})")
|
logging.info(f"Writing JSON file...", extra={"clear": True})
|
||||||
f.write(json_data)
|
f.write(json_data)
|
||||||
|
logging.info(f"JSON file saved...({bytes_to_readable(len(json_data))})")
|
||||||
|
|
||||||
|
|
||||||
def export_multiple_json(args, data: Dict) -> None:
|
def export_multiple_json(args, data: Dict) -> None:
|
||||||
@@ -597,21 +680,27 @@ def export_multiple_json(args, data: Dict) -> None:
|
|||||||
|
|
||||||
# Export each chat
|
# Export each chat
|
||||||
total = len(data.keys())
|
total = len(data.keys())
|
||||||
for index, jik in enumerate(data.keys()):
|
with tqdm(total=total, desc="Generating JSON files", unit="file", leave=False) as pbar:
|
||||||
if data[jik]["name"] is not None:
|
for jik in data.keys():
|
||||||
contact = data[jik]["name"].replace('/', '')
|
if data[jik]["name"] is not None:
|
||||||
else:
|
contact = data[jik]["name"].replace('/', '')
|
||||||
contact = jik.replace('+', '')
|
else:
|
||||||
|
contact = jik.replace('+', '')
|
||||||
|
|
||||||
with open(f"{json_path}/{sanitize_filename(contact)}.json", "w") as f:
|
if args.telegram:
|
||||||
file_content = json.dumps(
|
messages = telegram_json_format(jik, data[jik], args.timezone_offset)
|
||||||
{jik: data[jik]},
|
else:
|
||||||
ensure_ascii=not args.avoid_encoding_json,
|
messages = {jik: data[jik]}
|
||||||
indent=args.pretty_print_json
|
with open(f"{json_path}/{safe_name(contact)}.json", "w") as f:
|
||||||
)
|
file_content = json.dumps(
|
||||||
f.write(file_content)
|
messages,
|
||||||
print(f"Writing JSON file...({index + 1}/{total})", end="\r")
|
ensure_ascii=not args.avoid_encoding_json,
|
||||||
print()
|
indent=args.pretty_print_json
|
||||||
|
)
|
||||||
|
f.write(file_content)
|
||||||
|
pbar.update(1)
|
||||||
|
total_time = pbar.format_dict['elapsed']
|
||||||
|
logging.info(f"Generated {total} JSON files in {convert_time_unit(total_time)}")
|
||||||
|
|
||||||
|
|
||||||
def process_exported_chat(args, data: ChatCollection) -> None:
|
def process_exported_chat(args, data: ChatCollection) -> None:
|
||||||
@@ -627,7 +716,7 @@ def process_exported_chat(args, data: ChatCollection) -> None:
|
|||||||
args.offline,
|
args.offline,
|
||||||
args.size,
|
args.size,
|
||||||
args.no_avatar,
|
args.no_avatar,
|
||||||
args.whatsapp_theme,
|
args.telegram_theme,
|
||||||
args.headline
|
args.headline
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -636,15 +725,65 @@ def process_exported_chat(args, data: ChatCollection) -> None:
|
|||||||
shutil.copy(file, args.output)
|
shutil.copy(file, args.output)
|
||||||
|
|
||||||
|
|
||||||
|
class ClearLineFilter(logging.Filter):
|
||||||
|
def filter(self, record):
|
||||||
|
is_clear = getattr(record, 'clear', False)
|
||||||
|
if is_clear:
|
||||||
|
record.line_end = "\r"
|
||||||
|
record.prefix = "\x1b[K"
|
||||||
|
else:
|
||||||
|
record.line_end = "\n"
|
||||||
|
record.prefix = ""
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def setup_logging(level):
|
||||||
|
log_handler_stdout = logging.StreamHandler()
|
||||||
|
log_handler_stdout.terminator = ""
|
||||||
|
log_handler_stdout.addFilter(ClearLineFilter())
|
||||||
|
log_handler_stdout.set_name("console")
|
||||||
|
|
||||||
|
handlers = [log_handler_stdout]
|
||||||
|
|
||||||
|
if level == logging.DEBUG:
|
||||||
|
timestamp = datetime.now().strftime("%Y%m%d-%H%M%S")
|
||||||
|
log_handler_file = logging.FileHandler(f"wtsexpoter-debug-{timestamp}.log", mode="w")
|
||||||
|
log_handler_file.terminator = ""
|
||||||
|
log_handler_file.addFilter(ClearLineFilter())
|
||||||
|
handlers.append(log_handler_file)
|
||||||
|
|
||||||
|
logging.basicConfig(
|
||||||
|
level=level,
|
||||||
|
format="[%(levelname)s] %(message)s%(line_end)s",
|
||||||
|
handlers=handlers
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
"""Main function to run the WhatsApp Chat Exporter."""
|
"""Main function to run the WhatsApp Chat Exporter."""
|
||||||
# Set up and parse arguments
|
# Set up and parse arguments
|
||||||
parser = setup_argument_parser()
|
parser = setup_argument_parser()
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
# Print banner if not suppressed
|
||||||
|
if not args.no_banner:
|
||||||
|
# Note: This may raise UnicodeEncodeError on Windows if the terminal
|
||||||
|
# doesn't support UTF-8 (e.g., Legacy CMD). Use a modern terminal
|
||||||
|
# or set PYTHONUTF8=1 in your environment.
|
||||||
|
print(WTSEXPORTER_BANNER)
|
||||||
|
|
||||||
|
if args.debug:
|
||||||
|
setup_logging(logging.DEBUG)
|
||||||
|
logging.debug("Debug mode enabled.")
|
||||||
|
for handler in logging.getLogger().handlers:
|
||||||
|
if handler.name == "console":
|
||||||
|
handler.setLevel(logging.INFO)
|
||||||
|
else:
|
||||||
|
setup_logging(logging.INFO)
|
||||||
|
|
||||||
# Check for updates
|
# Check for updates
|
||||||
if args.check_update:
|
if args.check_update or args.check_update_pre:
|
||||||
exit(check_update())
|
exit(check_update(args.check_update_pre))
|
||||||
|
|
||||||
# Validate arguments
|
# Validate arguments
|
||||||
validate_args(parser, args)
|
validate_args(parser, args)
|
||||||
@@ -669,7 +808,7 @@ def main():
|
|||||||
args.offline,
|
args.offline,
|
||||||
args.size,
|
args.size,
|
||||||
args.no_avatar,
|
args.no_avatar,
|
||||||
args.whatsapp_theme,
|
args.telegram_theme,
|
||||||
args.headline
|
args.headline
|
||||||
)
|
)
|
||||||
elif args.exported:
|
elif args.exported:
|
||||||
@@ -708,9 +847,11 @@ def main():
|
|||||||
# Extract media from backup if needed
|
# Extract media from backup if needed
|
||||||
if args.backup is not None:
|
if args.backup is not None:
|
||||||
if not os.path.isdir(args.media):
|
if not os.path.isdir(args.media):
|
||||||
ios_media_handler.extract_media(args.backup, identifiers, args.decrypt_chunk_size)
|
ios_media_handler.extract_media(
|
||||||
|
args.backup, identifiers, args.decrypt_chunk_size)
|
||||||
else:
|
else:
|
||||||
print("WhatsApp directory already exists, skipping WhatsApp file extraction.")
|
logging.info(
|
||||||
|
f"WhatsApp directory already exists, skipping WhatsApp file extraction.")
|
||||||
|
|
||||||
# Set default DB paths if not provided
|
# Set default DB paths if not provided
|
||||||
if args.db is None:
|
if args.db is None:
|
||||||
@@ -718,16 +859,34 @@ def main():
|
|||||||
if args.wa is None:
|
if args.wa is None:
|
||||||
args.wa = "ContactsV2.sqlite"
|
args.wa = "ContactsV2.sqlite"
|
||||||
|
|
||||||
# Process contacts
|
if args.incremental_merge:
|
||||||
process_contacts(args, data, contact_store)
|
incremental_merge(
|
||||||
|
args.source_dir,
|
||||||
|
args.target_dir,
|
||||||
|
args.media,
|
||||||
|
args.pretty_print_json,
|
||||||
|
args.avoid_encoding_json
|
||||||
|
)
|
||||||
|
logging.info(f"Incremental merge completed successfully.")
|
||||||
|
else:
|
||||||
|
# Process contacts
|
||||||
|
process_contacts(args, data)
|
||||||
|
|
||||||
# Process messages, media, and calls
|
# Enrich contacts from vCards if needed
|
||||||
process_messages(args, data)
|
if args.android and contact_store and not contact_store.is_empty():
|
||||||
|
contact_store.enrich_from_vcards(data)
|
||||||
|
|
||||||
# Create output files
|
# Process messages, media, and calls
|
||||||
create_output_files(args, data, contact_store)
|
process_messages(args, data)
|
||||||
|
|
||||||
# Handle media directory
|
# Create output files
|
||||||
handle_media_directory(args)
|
create_output_files(args, data)
|
||||||
|
|
||||||
print("Everything is done!")
|
# Handle media directory
|
||||||
|
handle_media_directory(args)
|
||||||
|
|
||||||
|
logging.info("Everything is done!")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
|
|||||||
@@ -1,10 +1,12 @@
|
|||||||
import hmac
|
import hmac
|
||||||
import io
|
import io
|
||||||
|
import logging
|
||||||
import zlib
|
import zlib
|
||||||
import concurrent.futures
|
import concurrent.futures
|
||||||
|
from tqdm import tqdm
|
||||||
from typing import Tuple, Union
|
from typing import Tuple, Union
|
||||||
from hashlib import sha256
|
from hashlib import sha256
|
||||||
from sys import exit
|
from functools import partial
|
||||||
from Whatsapp_Chat_Exporter.utility import CRYPT14_OFFSETS, Crypt, DbType
|
from Whatsapp_Chat_Exporter.utility import CRYPT14_OFFSETS, Crypt, DbType
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -23,6 +25,8 @@ else:
|
|||||||
support_crypt15 = True
|
support_crypt15 = True
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class DecryptionError(Exception):
|
class DecryptionError(Exception):
|
||||||
"""Base class for decryption-related exceptions."""
|
"""Base class for decryption-related exceptions."""
|
||||||
pass
|
pass
|
||||||
@@ -106,15 +110,39 @@ def _decrypt_database(db_ciphertext: bytes, main_key: bytes, iv: bytes) -> bytes
|
|||||||
zlib.error: If decompression fails.
|
zlib.error: If decompression fails.
|
||||||
ValueError: if the plaintext is not a SQLite database.
|
ValueError: if the plaintext is not a SQLite database.
|
||||||
"""
|
"""
|
||||||
|
FOOTER_SIZE = 32
|
||||||
|
if len(db_ciphertext) <= FOOTER_SIZE:
|
||||||
|
raise ValueError("Input data too short to contain a valid GCM tag.")
|
||||||
|
|
||||||
|
actual_ciphertext = db_ciphertext[:-FOOTER_SIZE]
|
||||||
|
tag = db_ciphertext[-FOOTER_SIZE: -FOOTER_SIZE + 16]
|
||||||
|
|
||||||
cipher = AES.new(main_key, AES.MODE_GCM, iv)
|
cipher = AES.new(main_key, AES.MODE_GCM, iv)
|
||||||
db_compressed = cipher.decrypt(db_ciphertext)
|
try:
|
||||||
db = zlib.decompress(db_compressed)
|
db_compressed = cipher.decrypt_and_verify(actual_ciphertext, tag)
|
||||||
if db[0:6].upper() != b"SQLITE":
|
except ValueError:
|
||||||
|
# This could be key, IV, or tag is wrong, but likely the key is wrong.
|
||||||
|
raise ValueError("Decryption/Authentication failed. Ensure you are using the correct key.")
|
||||||
|
|
||||||
|
if len(db_compressed) < 2 or db_compressed[0] != 0x78:
|
||||||
|
logging.debug(f"Data passes GCM but is not Zlib. Header: {db_compressed[:2].hex()}")
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
"The plaintext is not a SQLite database. Ensure you are using the correct key."
|
"Key is correct, but decrypted data is not a valid compressed stream. "
|
||||||
|
"Is this even a valid WhatsApp database backup?"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
db = zlib.decompress(db_compressed)
|
||||||
|
except zlib.error as e:
|
||||||
|
raise zlib.error(f"Decompression failed (The backup file likely corrupted at source): {e}")
|
||||||
|
|
||||||
|
if not db.startswith(b"SQLite"):
|
||||||
|
raise ValueError(
|
||||||
|
"Data is valid and decompressed, but it is not a SQLite database. "
|
||||||
|
"Is this even a valid WhatsApp database backup?")
|
||||||
return db
|
return db
|
||||||
|
|
||||||
|
|
||||||
def _decrypt_crypt14(database: bytes, main_key: bytes, max_worker: int = 10) -> bytes:
|
def _decrypt_crypt14(database: bytes, main_key: bytes, max_worker: int = 10) -> bytes:
|
||||||
"""Decrypt a crypt14 database using multithreading for brute-force offset detection.
|
"""Decrypt a crypt14 database using multithreading for brute-force offset detection.
|
||||||
|
|
||||||
@@ -135,55 +163,68 @@ def _decrypt_crypt14(database: bytes, main_key: bytes, max_worker: int = 10) ->
|
|||||||
|
|
||||||
# Attempt known offsets first
|
# Attempt known offsets first
|
||||||
for offsets in CRYPT14_OFFSETS:
|
for offsets in CRYPT14_OFFSETS:
|
||||||
iv = database[offsets["iv"]:offsets["iv"] + 16]
|
iv = offsets["iv"]
|
||||||
db_ciphertext = database[offsets["db"]:]
|
db = offsets["db"]
|
||||||
try:
|
try:
|
||||||
return _decrypt_database(db_ciphertext, main_key, iv)
|
decrypted_db = _attempt_decrypt_task((iv, iv + 16, db), database, main_key)
|
||||||
except (zlib.error, ValueError):
|
except (zlib.error, ValueError):
|
||||||
pass # Try next offset
|
continue
|
||||||
|
else:
|
||||||
print("Common offsets failed. Initiating brute-force with multithreading...")
|
logging.debug(
|
||||||
|
f"Decryption successful with known offsets: IV {iv}, DB {db}"
|
||||||
# Convert brute force generator into a list for parallel processing
|
|
||||||
offset_combinations = list(brute_force_offset())
|
|
||||||
|
|
||||||
def attempt_decrypt(offset_tuple):
|
|
||||||
"""Attempt decryption with the given offsets."""
|
|
||||||
start_iv, end_iv, start_db = offset_tuple
|
|
||||||
iv = database[start_iv:end_iv]
|
|
||||||
db_ciphertext = database[start_db:]
|
|
||||||
|
|
||||||
try:
|
|
||||||
db = _decrypt_database(db_ciphertext, main_key, iv)
|
|
||||||
print(
|
|
||||||
f"The offsets of your IV and database are {start_iv} and "
|
|
||||||
f"{start_db}, respectively. To include your offsets in the "
|
|
||||||
"program, please report it by creating an issue on GitHub: "
|
|
||||||
"https://github.com/KnugiHK/Whatsapp-Chat-Exporter/discussions/47"
|
|
||||||
"\nShutting down other threads..."
|
|
||||||
)
|
)
|
||||||
return db
|
return decrypted_db # Successful decryption
|
||||||
except (zlib.error, ValueError):
|
|
||||||
return None # Decryption failed, move to next
|
|
||||||
|
|
||||||
with concurrent.futures.ThreadPoolExecutor(max_worker) as executor:
|
logging.info(f"Common offsets failed. Will attempt to brute-force")
|
||||||
future_to_offset = {executor.submit(attempt_decrypt, offset): offset for offset in offset_combinations}
|
offset_max = 200
|
||||||
|
workers = max_worker
|
||||||
try:
|
check_offset = partial(_attempt_decrypt_task, database=database, main_key=main_key)
|
||||||
for future in concurrent.futures.as_completed(future_to_offset):
|
all_offsets = list(brute_force_offset(offset_max, offset_max))
|
||||||
result = future.result()
|
executor = concurrent.futures.ProcessPoolExecutor(max_workers=workers)
|
||||||
if result is not None:
|
try:
|
||||||
# Shutdown remaining threads
|
with tqdm(total=len(all_offsets), desc="Brute-forcing offsets", unit="trial", leave=False) as pbar:
|
||||||
|
results = executor.map(check_offset, all_offsets, chunksize=8)
|
||||||
|
found = False
|
||||||
|
for offset_info, result in zip(all_offsets, results):
|
||||||
|
pbar.update(1)
|
||||||
|
if result:
|
||||||
|
start_iv, _, start_db = offset_info
|
||||||
|
# Clean shutdown on success
|
||||||
executor.shutdown(wait=False, cancel_futures=True)
|
executor.shutdown(wait=False, cancel_futures=True)
|
||||||
return result
|
found = True
|
||||||
|
break
|
||||||
|
if found:
|
||||||
|
logging.info(
|
||||||
|
f"The offsets of your IV and database are {start_iv} and {start_db}, respectively."
|
||||||
|
)
|
||||||
|
logging.info(
|
||||||
|
f"To include your offsets in the expoter, please report it in the discussion thread on GitHub:"
|
||||||
|
)
|
||||||
|
logging.info(f"https://github.com/KnugiHK/Whatsapp-Chat-Exporter/discussions/47")
|
||||||
|
return result
|
||||||
|
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
print("\nBrute force interrupted by user (Ctrl+C). Exiting gracefully...")
|
executor.shutdown(wait=False, cancel_futures=True)
|
||||||
executor.shutdown(wait=False, cancel_futures=True)
|
logging.info("")
|
||||||
exit(1)
|
raise KeyboardInterrupt(
|
||||||
|
f"Brute force interrupted by user (Ctrl+C). Shutting down gracefully..."
|
||||||
|
)
|
||||||
|
|
||||||
|
finally:
|
||||||
|
executor.shutdown(wait=False)
|
||||||
|
|
||||||
raise OffsetNotFoundError("Could not find the correct offsets for decryption.")
|
raise OffsetNotFoundError("Could not find the correct offsets for decryption.")
|
||||||
|
|
||||||
|
def _attempt_decrypt_task(offset_tuple, database, main_key):
|
||||||
|
"""Attempt decryption with the given offsets."""
|
||||||
|
start_iv, end_iv, start_db = offset_tuple
|
||||||
|
iv = database[start_iv:end_iv]
|
||||||
|
db_ciphertext = database[start_db:]
|
||||||
|
|
||||||
|
try:
|
||||||
|
return _decrypt_database(db_ciphertext, main_key, iv)
|
||||||
|
except (zlib.error, ValueError):
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
def _decrypt_crypt12(database: bytes, main_key: bytes) -> bytes:
|
def _decrypt_crypt12(database: bytes, main_key: bytes) -> bytes:
|
||||||
@@ -287,7 +328,7 @@ def decrypt_backup(
|
|||||||
if crypt is not Crypt.CRYPT15 and len(key) != 158:
|
if crypt is not Crypt.CRYPT15 and len(key) != 158:
|
||||||
raise InvalidKeyError("The key file must be 158 bytes")
|
raise InvalidKeyError("The key file must be 158 bytes")
|
||||||
|
|
||||||
#signature check, this is check is used in crypt 12 and 14
|
# signature check, this is check is used in crypt 12 and 14
|
||||||
if crypt != Crypt.CRYPT15:
|
if crypt != Crypt.CRYPT15:
|
||||||
t1 = key[30:62]
|
t1 = key[30:62]
|
||||||
|
|
||||||
@@ -297,7 +338,6 @@ def decrypt_backup(
|
|||||||
if t1 != database[3:35] and crypt == Crypt.CRYPT12:
|
if t1 != database[3:35] and crypt == Crypt.CRYPT12:
|
||||||
raise ValueError("The signature of key file and backup file mismatch")
|
raise ValueError("The signature of key file and backup file mismatch")
|
||||||
|
|
||||||
|
|
||||||
if crypt == Crypt.CRYPT15:
|
if crypt == Crypt.CRYPT15:
|
||||||
if keyfile_stream:
|
if keyfile_stream:
|
||||||
main_key, hex_key = _extract_enc_key(key)
|
main_key, hex_key = _extract_enc_key(key)
|
||||||
@@ -305,7 +345,7 @@ def decrypt_backup(
|
|||||||
main_key, hex_key = _derive_main_enc_key(key)
|
main_key, hex_key = _derive_main_enc_key(key)
|
||||||
if show_crypt15:
|
if show_crypt15:
|
||||||
hex_key_str = ' '.join([hex_key.hex()[c:c+4] for c in range(0, len(hex_key.hex()), 4)])
|
hex_key_str = ' '.join([hex_key.hex()[c:c+4] for c in range(0, len(hex_key.hex()), 4)])
|
||||||
print(f"The HEX key of the crypt15 backup is: {hex_key_str}")
|
logging.info(f"The HEX key of the crypt15 backup is: {hex_key_str}")
|
||||||
else:
|
else:
|
||||||
main_key = key[126:]
|
main_key = key[126:]
|
||||||
|
|
||||||
@@ -321,7 +361,6 @@ def decrypt_backup(
|
|||||||
except (InvalidFileFormatError, OffsetNotFoundError, ValueError) as e:
|
except (InvalidFileFormatError, OffsetNotFoundError, ValueError) as e:
|
||||||
raise DecryptionError(f"Decryption failed: {e}") from e
|
raise DecryptionError(f"Decryption failed: {e}") from e
|
||||||
|
|
||||||
|
|
||||||
if not dry_run:
|
if not dry_run:
|
||||||
with open(output, "wb") as f:
|
with open(output, "wb") as f:
|
||||||
f.write(db)
|
f.write(db)
|
||||||
|
|||||||
@@ -1,18 +1,22 @@
|
|||||||
#!/usr/bin/python3
|
#!/usr/bin/python3
|
||||||
|
|
||||||
|
import logging
|
||||||
import sqlite3
|
import sqlite3
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
|
from tqdm import tqdm
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from mimetypes import MimeTypes
|
from mimetypes import MimeTypes
|
||||||
from markupsafe import escape as htmle
|
from markupsafe import escape as htmle
|
||||||
from base64 import b64decode, b64encode
|
from base64 import b64decode, b64encode
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from Whatsapp_Chat_Exporter.data_model import ChatStore, Message
|
from Whatsapp_Chat_Exporter.data_model import ChatStore, Message
|
||||||
from Whatsapp_Chat_Exporter.utility import CURRENT_TZ_OFFSET, MAX_SIZE, ROW_SIZE, JidType, Device
|
from Whatsapp_Chat_Exporter.utility import MAX_SIZE, ROW_SIZE, JidType, Device, get_jid_map_join
|
||||||
from Whatsapp_Chat_Exporter.utility import rendering, get_file_name, setup_template, get_cond_for_empty
|
from Whatsapp_Chat_Exporter.utility import rendering, get_file_name, setup_template, get_cond_for_empty
|
||||||
from Whatsapp_Chat_Exporter.utility import get_status_location, convert_time_unit, determine_metadata
|
from Whatsapp_Chat_Exporter.utility import get_status_location, convert_time_unit, get_jid_map_selection
|
||||||
from Whatsapp_Chat_Exporter.utility import get_chat_condition, slugify, bytes_to_readable
|
from Whatsapp_Chat_Exporter.utility import get_chat_condition, safe_name, bytes_to_readable, determine_metadata
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def contacts(db, data, enrich_from_vcards):
|
def contacts(db, data, enrich_from_vcards):
|
||||||
@@ -33,25 +37,30 @@ def contacts(db, data, enrich_from_vcards):
|
|||||||
|
|
||||||
if total_row_number == 0:
|
if total_row_number == 0:
|
||||||
if enrich_from_vcards is not None:
|
if enrich_from_vcards is not None:
|
||||||
print("No contacts profiles found in the default database, contacts will be imported from the specified vCard file.")
|
logging.info(
|
||||||
|
"No contacts profiles found in the default database, contacts will be imported from the specified vCard file.")
|
||||||
else:
|
else:
|
||||||
print("No contacts profiles found in the default database, consider using --enrich-from-vcards for adopting names from exported contacts from Google")
|
logging.warning(
|
||||||
|
"No contacts profiles found in the default database, consider using --enrich-from-vcards for adopting names from exported contacts from Google")
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
print(f"Processing contacts...({total_row_number})")
|
logging.info(f"Processed {total_row_number} contacts")
|
||||||
|
|
||||||
c.execute("SELECT jid, COALESCE(display_name, wa_name) as display_name, status FROM wa_contacts;")
|
c.execute("SELECT jid, COALESCE(display_name, wa_name) as display_name, status FROM wa_contacts;")
|
||||||
row = c.fetchone()
|
|
||||||
while row is not None:
|
with tqdm(total=total_row_number, desc="Processing contacts", unit="contact", leave=False) as pbar:
|
||||||
current_chat = data.add_chat(row["jid"], ChatStore(Device.ANDROID, row["display_name"]))
|
while (row := _fetch_row_safely(c)) is not None:
|
||||||
if row["status"] is not None:
|
current_chat = data.add_chat(row["jid"], ChatStore(Device.ANDROID, row["display_name"]))
|
||||||
current_chat.status = row["status"]
|
if row["status"] is not None:
|
||||||
row = c.fetchone()
|
current_chat.status = row["status"]
|
||||||
|
pbar.update(1)
|
||||||
|
total_time = pbar.format_dict['elapsed']
|
||||||
|
logging.info(f"Processed {total_row_number} contacts in {convert_time_unit(total_time)}")
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
def messages(db, data, media_folder, timezone_offset, filter_date, filter_chat, filter_empty):
|
def messages(db, data, media_folder, timezone_offset, filter_date, filter_chat, filter_empty, no_reply):
|
||||||
"""
|
"""
|
||||||
Process WhatsApp messages from the database.
|
Process WhatsApp messages from the database.
|
||||||
|
|
||||||
@@ -65,45 +74,45 @@ def messages(db, data, media_folder, timezone_offset, filter_date, filter_chat,
|
|||||||
filter_empty: Filter for empty chats
|
filter_empty: Filter for empty chats
|
||||||
"""
|
"""
|
||||||
c = db.cursor()
|
c = db.cursor()
|
||||||
total_row_number = _get_message_count(c, filter_empty, filter_date, filter_chat)
|
total_row_number = _get_message_count(c, filter_empty, filter_date, filter_chat, data.get_system("jid_map_exists"))
|
||||||
print(f"Processing messages...(0/{total_row_number})", end="\r")
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
content_cursor = _get_messages_cursor_legacy(c, filter_empty, filter_date, filter_chat)
|
content_cursor = _get_messages_cursor_legacy(c, filter_empty, filter_date, filter_chat)
|
||||||
table_message = False
|
table_message = False
|
||||||
except sqlite3.OperationalError:
|
except sqlite3.OperationalError as e:
|
||||||
|
logging.debug(f'Got sql error "{e}" in _get_message_cursor_legacy trying fallback.\n')
|
||||||
try:
|
try:
|
||||||
content_cursor = _get_messages_cursor_new(c, filter_empty, filter_date, filter_chat)
|
content_cursor = _get_messages_cursor_new(
|
||||||
|
c,
|
||||||
|
filter_empty,
|
||||||
|
filter_date,
|
||||||
|
filter_chat,
|
||||||
|
data.get_system("transcription_selection"),
|
||||||
|
data.get_system("jid_map_exists")
|
||||||
|
)
|
||||||
table_message = True
|
table_message = True
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise e
|
raise e
|
||||||
|
|
||||||
i = 0
|
with tqdm(total=total_row_number, desc="Processing messages", unit="msg", leave=False) as pbar:
|
||||||
# Fetch the first row safely
|
while (content := _fetch_row_safely(content_cursor)) is not None:
|
||||||
content = _fetch_row_safely(content_cursor)
|
_process_single_message(data, content, table_message, timezone_offset)
|
||||||
|
pbar.update(1)
|
||||||
while content is not None:
|
total_time = pbar.format_dict['elapsed']
|
||||||
_process_single_message(data, content, table_message, timezone_offset)
|
_get_reactions(db, data)
|
||||||
|
logging.info(f"Processed {total_row_number} messages in {convert_time_unit(total_time)}")
|
||||||
i += 1
|
|
||||||
if i % 1000 == 0:
|
|
||||||
print(f"Processing messages...({i}/{total_row_number})", end="\r")
|
|
||||||
|
|
||||||
# Fetch the next row safely
|
|
||||||
content = _fetch_row_safely(content_cursor)
|
|
||||||
|
|
||||||
print(f"Processing messages...({total_row_number}/{total_row_number})", end="\r")
|
|
||||||
|
|
||||||
|
|
||||||
# Helper functions for message processing
|
# Helper functions for message processing
|
||||||
|
|
||||||
def _get_message_count(cursor, filter_empty, filter_date, filter_chat):
|
def _get_message_count(cursor, filter_empty, filter_date, filter_chat, jid_map_exists):
|
||||||
"""Get the total number of messages to process."""
|
"""Get the total number of messages to process."""
|
||||||
try:
|
try:
|
||||||
empty_filter = get_cond_for_empty(filter_empty, "messages.key_remote_jid", "messages.needs_push")
|
empty_filter = get_cond_for_empty(filter_empty, "messages.key_remote_jid", "messages.needs_push")
|
||||||
date_filter = f'AND timestamp {filter_date}' if filter_date is not None else ''
|
date_filter = f'AND timestamp {filter_date}' if filter_date is not None else ''
|
||||||
include_filter = get_chat_condition(filter_chat[0], True, ["messages.key_remote_jid", "messages.remote_resource"], "jid", "android")
|
include_filter = get_chat_condition(
|
||||||
exclude_filter = get_chat_condition(filter_chat[1], False, ["messages.key_remote_jid", "messages.remote_resource"], "jid", "android")
|
filter_chat[0], True, ["messages.key_remote_jid", "messages.remote_resource"], "jid", "android")
|
||||||
|
exclude_filter = get_chat_condition(
|
||||||
|
filter_chat[1], False, ["messages.key_remote_jid", "messages.remote_resource"], "jid", "android")
|
||||||
|
|
||||||
cursor.execute(f"""SELECT count()
|
cursor.execute(f"""SELECT count()
|
||||||
FROM messages
|
FROM messages
|
||||||
@@ -116,20 +125,30 @@ def _get_message_count(cursor, filter_empty, filter_date, filter_chat):
|
|||||||
{date_filter}
|
{date_filter}
|
||||||
{include_filter}
|
{include_filter}
|
||||||
{exclude_filter}""")
|
{exclude_filter}""")
|
||||||
except sqlite3.OperationalError:
|
except sqlite3.OperationalError as e:
|
||||||
empty_filter = get_cond_for_empty(filter_empty, "jid.raw_string", "broadcast")
|
logging.debug(f'Got sql error "{e}" in _get_message_count trying fallback.\n')
|
||||||
date_filter = f'AND timestamp {filter_date}' if filter_date is not None else ''
|
|
||||||
include_filter = get_chat_condition(filter_chat[0], True, ["jid.raw_string", "jid_group.raw_string"], "jid", "android")
|
|
||||||
exclude_filter = get_chat_condition(filter_chat[1], False, ["jid.raw_string", "jid_group.raw_string"], "jid", "android")
|
|
||||||
|
|
||||||
cursor.execute(f"""SELECT count()
|
empty_filter = get_cond_for_empty(filter_empty, "key_remote_jid", "broadcast")
|
||||||
|
date_filter = f'AND timestamp {filter_date}' if filter_date is not None else ''
|
||||||
|
remote_jid_selection, group_jid_selection = get_jid_map_selection(jid_map_exists)
|
||||||
|
include_filter = get_chat_condition(
|
||||||
|
filter_chat[0], True, ["key_remote_jid", "group_sender_jid"], "jid", "android")
|
||||||
|
exclude_filter = get_chat_condition(
|
||||||
|
filter_chat[1], False, ["key_remote_jid", "group_sender_jid"], "jid", "android")
|
||||||
|
|
||||||
|
cursor.execute(f"""SELECT count(),
|
||||||
|
{remote_jid_selection} as key_remote_jid,
|
||||||
|
{group_jid_selection} as group_sender_jid
|
||||||
FROM message
|
FROM message
|
||||||
LEFT JOIN chat
|
LEFT JOIN chat
|
||||||
ON chat._id = message.chat_row_id
|
ON chat._id = message.chat_row_id
|
||||||
INNER JOIN jid
|
INNER JOIN jid
|
||||||
ON jid._id = chat.jid_row_id
|
ON jid._id = chat.jid_row_id
|
||||||
|
INNER JOIN jid jid_global
|
||||||
|
ON jid_global._id = chat.jid_row_id
|
||||||
LEFT JOIN jid jid_group
|
LEFT JOIN jid jid_group
|
||||||
ON jid_group._id = message.sender_jid_row_id
|
ON jid_group._id = message.sender_jid_row_id
|
||||||
|
{get_jid_map_join(jid_map_exists)}
|
||||||
WHERE 1=1
|
WHERE 1=1
|
||||||
{empty_filter}
|
{empty_filter}
|
||||||
{date_filter}
|
{date_filter}
|
||||||
@@ -142,8 +161,10 @@ def _get_messages_cursor_legacy(cursor, filter_empty, filter_date, filter_chat):
|
|||||||
"""Get cursor for legacy database schema."""
|
"""Get cursor for legacy database schema."""
|
||||||
empty_filter = get_cond_for_empty(filter_empty, "messages.key_remote_jid", "messages.needs_push")
|
empty_filter = get_cond_for_empty(filter_empty, "messages.key_remote_jid", "messages.needs_push")
|
||||||
date_filter = f'AND messages.timestamp {filter_date}' if filter_date is not None else ''
|
date_filter = f'AND messages.timestamp {filter_date}' if filter_date is not None else ''
|
||||||
include_filter = get_chat_condition(filter_chat[0], True, ["messages.key_remote_jid", "messages.remote_resource"], "jid_global", "android")
|
include_filter = get_chat_condition(
|
||||||
exclude_filter = get_chat_condition(filter_chat[1], False, ["messages.key_remote_jid", "messages.remote_resource"], "jid_global", "android")
|
filter_chat[0], True, ["messages.key_remote_jid", "messages.remote_resource"], "jid_global", "android")
|
||||||
|
exclude_filter = get_chat_condition(
|
||||||
|
filter_chat[1], False, ["messages.key_remote_jid", "messages.remote_resource"], "jid_global", "android")
|
||||||
|
|
||||||
cursor.execute(f"""SELECT messages.key_remote_jid,
|
cursor.execute(f"""SELECT messages.key_remote_jid,
|
||||||
messages._id,
|
messages._id,
|
||||||
@@ -201,14 +222,24 @@ def _get_messages_cursor_legacy(cursor, filter_empty, filter_date, filter_chat):
|
|||||||
return cursor
|
return cursor
|
||||||
|
|
||||||
|
|
||||||
def _get_messages_cursor_new(cursor, filter_empty, filter_date, filter_chat):
|
def _get_messages_cursor_new(
|
||||||
|
cursor,
|
||||||
|
filter_empty,
|
||||||
|
filter_date,
|
||||||
|
filter_chat,
|
||||||
|
transcription_selection,
|
||||||
|
jid_map_exists
|
||||||
|
):
|
||||||
"""Get cursor for new database schema."""
|
"""Get cursor for new database schema."""
|
||||||
empty_filter = get_cond_for_empty(filter_empty, "key_remote_jid", "broadcast")
|
empty_filter = get_cond_for_empty(filter_empty, "key_remote_jid", "broadcast")
|
||||||
date_filter = f'AND message.timestamp {filter_date}' if filter_date is not None else ''
|
date_filter = f'AND message.timestamp {filter_date}' if filter_date is not None else ''
|
||||||
include_filter = get_chat_condition(filter_chat[0], True, ["key_remote_jid", "jid_group.raw_string"], "jid_global", "android")
|
remote_jid_selection, group_jid_selection = get_jid_map_selection(jid_map_exists)
|
||||||
exclude_filter = get_chat_condition(filter_chat[1], False, ["key_remote_jid", "jid_group.raw_string"], "jid_global", "android")
|
include_filter = get_chat_condition(
|
||||||
|
filter_chat[0], True, ["key_remote_jid", "group_sender_jid"], "jid_global", "android")
|
||||||
|
exclude_filter = get_chat_condition(
|
||||||
|
filter_chat[1], False, ["key_remote_jid", "group_sender_jid"], "jid_global", "android")
|
||||||
|
|
||||||
cursor.execute(f"""SELECT jid_global.raw_string as key_remote_jid,
|
cursor.execute(f"""SELECT {remote_jid_selection} as key_remote_jid,
|
||||||
message._id,
|
message._id,
|
||||||
message.from_me as key_from_me,
|
message.from_me as key_from_me,
|
||||||
message.timestamp,
|
message.timestamp,
|
||||||
@@ -223,7 +254,7 @@ def _get_messages_cursor_new(cursor, filter_empty, filter_date, filter_chat):
|
|||||||
message.key_id,
|
message.key_id,
|
||||||
message_quoted.text_data as quoted_data,
|
message_quoted.text_data as quoted_data,
|
||||||
message.message_type as media_wa_type,
|
message.message_type as media_wa_type,
|
||||||
jid_group.raw_string as group_sender_jid,
|
{group_jid_selection} as group_sender_jid,
|
||||||
chat.subject as chat_subject,
|
chat.subject as chat_subject,
|
||||||
missed_call_logs.video_call,
|
missed_call_logs.video_call,
|
||||||
message.sender_jid_row_id,
|
message.sender_jid_row_id,
|
||||||
@@ -233,7 +264,8 @@ def _get_messages_cursor_new(cursor, filter_empty, filter_date, filter_chat):
|
|||||||
jid_new.raw_string as new_jid,
|
jid_new.raw_string as new_jid,
|
||||||
jid_global.type as jid_type,
|
jid_global.type as jid_type,
|
||||||
COALESCE(receipt_user.receipt_timestamp, message.received_timestamp) as received_timestamp,
|
COALESCE(receipt_user.receipt_timestamp, message.received_timestamp) as received_timestamp,
|
||||||
COALESCE(receipt_user.read_timestamp, receipt_user.played_timestamp) as read_timestamp
|
COALESCE(receipt_user.read_timestamp, receipt_user.played_timestamp) as read_timestamp,
|
||||||
|
{transcription_selection}
|
||||||
FROM message
|
FROM message
|
||||||
LEFT JOIN message_quoted
|
LEFT JOIN message_quoted
|
||||||
ON message_quoted.message_row_id = message._id
|
ON message_quoted.message_row_id = message._id
|
||||||
@@ -265,6 +297,7 @@ def _get_messages_cursor_new(cursor, filter_empty, filter_date, filter_chat):
|
|||||||
ON jid_new._id = message_system_number_change.new_jid_row_id
|
ON jid_new._id = message_system_number_change.new_jid_row_id
|
||||||
LEFT JOIN receipt_user
|
LEFT JOIN receipt_user
|
||||||
ON receipt_user.message_row_id = message._id
|
ON receipt_user.message_row_id = message._id
|
||||||
|
{get_jid_map_join(jid_map_exists)}
|
||||||
WHERE key_remote_jid <> '-1'
|
WHERE key_remote_jid <> '-1'
|
||||||
{empty_filter}
|
{empty_filter}
|
||||||
{date_filter}
|
{date_filter}
|
||||||
@@ -280,7 +313,11 @@ def _fetch_row_safely(cursor):
|
|||||||
try:
|
try:
|
||||||
content = cursor.fetchone()
|
content = cursor.fetchone()
|
||||||
return content
|
return content
|
||||||
except sqlite3.OperationalError:
|
except sqlite3.OperationalError as e:
|
||||||
|
# Not sure how often this might happen, but this check should reduce the overhead
|
||||||
|
# if DEBUG flag is not set.
|
||||||
|
if logging.isEnabledFor(logging.DEBUG):
|
||||||
|
logging.debug(f'Got sql error "{e}" in _fetch_row_safely ignoring row.\n')
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
|
||||||
@@ -290,11 +327,10 @@ def _process_single_message(data, content, table_message, timezone_offset):
|
|||||||
return
|
return
|
||||||
|
|
||||||
# Get or create the chat
|
# Get or create the chat
|
||||||
if not data.get_chat(content["key_remote_jid"]):
|
current_chat = data.get_chat(content["key_remote_jid"])
|
||||||
current_chat = data.add_chat(content["key_remote_jid"], ChatStore(Device.ANDROID, content["chat_subject"]))
|
if current_chat is None:
|
||||||
else:
|
current_chat = data.add_chat(content["key_remote_jid"], ChatStore(
|
||||||
current_chat = data.get_chat(content["key_remote_jid"])
|
Device.ANDROID, content["chat_subject"]))
|
||||||
|
|
||||||
# Determine sender_jid_row_id
|
# Determine sender_jid_row_id
|
||||||
if "sender_jid_row_id" in content:
|
if "sender_jid_row_id" in content:
|
||||||
sender_jid_row_id = content["sender_jid_row_id"]
|
sender_jid_row_id = content["sender_jid_row_id"]
|
||||||
@@ -307,7 +343,7 @@ def _process_single_message(data, content, table_message, timezone_offset):
|
|||||||
timestamp=content["timestamp"],
|
timestamp=content["timestamp"],
|
||||||
time=content["timestamp"],
|
time=content["timestamp"],
|
||||||
key_id=content["key_id"],
|
key_id=content["key_id"],
|
||||||
timezone_offset=timezone_offset if timezone_offset else CURRENT_TZ_OFFSET,
|
timezone_offset=timezone_offset,
|
||||||
message_type=content["media_wa_type"],
|
message_type=content["media_wa_type"],
|
||||||
received_timestamp=content["received_timestamp"],
|
received_timestamp=content["received_timestamp"],
|
||||||
read_timestamp=content["read_timestamp"]
|
read_timestamp=content["read_timestamp"]
|
||||||
@@ -339,9 +375,12 @@ def _process_single_message(data, content, table_message, timezone_offset):
|
|||||||
if not table_message and content["media_caption"] is not None:
|
if not table_message and content["media_caption"] is not None:
|
||||||
# Old schema
|
# Old schema
|
||||||
message.caption = content["media_caption"]
|
message.caption = content["media_caption"]
|
||||||
elif table_message and content["media_wa_type"] == 1 and content["data"] is not None:
|
elif table_message:
|
||||||
# New schema
|
# New schema
|
||||||
message.caption = content["data"]
|
if content["media_wa_type"] == 1 and content["data"] is not None:
|
||||||
|
message.caption = content["data"]
|
||||||
|
elif content["media_wa_type"] == 2 and content["transcription_text"] is not None:
|
||||||
|
message.caption = f'"{content["transcription_text"]}"'
|
||||||
else:
|
else:
|
||||||
message.caption = None
|
message.caption = None
|
||||||
|
|
||||||
@@ -467,7 +506,79 @@ def _format_message_text(text):
|
|||||||
return text
|
return text
|
||||||
|
|
||||||
|
|
||||||
def media(db, data, media_folder, filter_date, filter_chat, filter_empty, separate_media=True):
|
def _get_reactions(db, data):
|
||||||
|
"""
|
||||||
|
Process message reactions. Only new schema is supported.
|
||||||
|
Chat filter is not applied here at the moment. Maybe in the future.
|
||||||
|
"""
|
||||||
|
c = db.cursor()
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Check if tables exist, old schema might not have reactions or in somewhere else
|
||||||
|
c.execute("SELECT count(*) FROM sqlite_master WHERE type='table' AND name='message_add_on'")
|
||||||
|
if c.fetchone()[0] == 0:
|
||||||
|
return
|
||||||
|
|
||||||
|
logging.info("Processing reactions...", extra={"clear": True})
|
||||||
|
|
||||||
|
c.execute("""
|
||||||
|
SELECT
|
||||||
|
message_add_on.parent_message_row_id,
|
||||||
|
message_add_on_reaction.reaction,
|
||||||
|
message_add_on.from_me,
|
||||||
|
jid.raw_string as sender_jid_raw,
|
||||||
|
chat_jid.raw_string as chat_jid_raw,
|
||||||
|
message_add_on_reaction.sender_timestamp
|
||||||
|
FROM message_add_on
|
||||||
|
INNER JOIN message_add_on_reaction
|
||||||
|
ON message_add_on._id = message_add_on_reaction.message_add_on_row_id
|
||||||
|
LEFT JOIN jid
|
||||||
|
ON message_add_on.sender_jid_row_id = jid._id
|
||||||
|
LEFT JOIN chat
|
||||||
|
ON message_add_on.chat_row_id = chat._id
|
||||||
|
LEFT JOIN jid chat_jid
|
||||||
|
ON chat.jid_row_id = chat_jid._id
|
||||||
|
""")
|
||||||
|
except sqlite3.OperationalError:
|
||||||
|
logging.warning(f"Could not fetch reactions (schema might be too old or incompatible)")
|
||||||
|
return
|
||||||
|
|
||||||
|
rows = c.fetchall()
|
||||||
|
total_row_number = len(rows)
|
||||||
|
|
||||||
|
with tqdm(total=total_row_number, desc="Processing reactions", unit="reaction", leave=False) as pbar:
|
||||||
|
for row in rows:
|
||||||
|
parent_id = row["parent_message_row_id"]
|
||||||
|
reaction = row["reaction"]
|
||||||
|
chat_id = row["chat_jid_raw"]
|
||||||
|
_react_timestamp = row["sender_timestamp"]
|
||||||
|
|
||||||
|
if chat_id and chat_id in data:
|
||||||
|
chat = data[chat_id]
|
||||||
|
if parent_id in chat._messages:
|
||||||
|
message = chat._messages[parent_id]
|
||||||
|
|
||||||
|
# Determine sender name
|
||||||
|
sender_name = None
|
||||||
|
if row["from_me"]:
|
||||||
|
sender_name = "You"
|
||||||
|
elif row["sender_jid_raw"]:
|
||||||
|
sender_jid = row["sender_jid_raw"]
|
||||||
|
if sender_jid in data:
|
||||||
|
sender_name = data[sender_jid].name
|
||||||
|
if not sender_name:
|
||||||
|
sender_name = sender_jid.split('@')[0] if "@" in sender_jid else sender_jid
|
||||||
|
|
||||||
|
if not sender_name:
|
||||||
|
sender_name = "Unknown"
|
||||||
|
|
||||||
|
message.reactions[sender_name] = reaction
|
||||||
|
pbar.update(1)
|
||||||
|
total_time = pbar.format_dict['elapsed']
|
||||||
|
logging.info(f"Processed {total_row_number} reactions in {convert_time_unit(total_time)}")
|
||||||
|
|
||||||
|
|
||||||
|
def media(db, data, media_folder, filter_date, filter_chat, filter_empty, separate_media=True, fix_dot_files=False):
|
||||||
"""
|
"""
|
||||||
Process WhatsApp media files from the database.
|
Process WhatsApp media files from the database.
|
||||||
|
|
||||||
@@ -482,11 +593,10 @@ def media(db, data, media_folder, filter_date, filter_chat, filter_empty, separa
|
|||||||
"""
|
"""
|
||||||
c = db.cursor()
|
c = db.cursor()
|
||||||
total_row_number = _get_media_count(c, filter_empty, filter_date, filter_chat)
|
total_row_number = _get_media_count(c, filter_empty, filter_date, filter_chat)
|
||||||
print(f"\nProcessing media...(0/{total_row_number})", end="\r")
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
content_cursor = _get_media_cursor_legacy(c, filter_empty, filter_date, filter_chat)
|
content_cursor = _get_media_cursor_legacy(c, filter_empty, filter_date, filter_chat)
|
||||||
except sqlite3.OperationalError:
|
except sqlite3.OperationalError as e:
|
||||||
|
logging.debug(f'Got sql error "{e}" in _get_media_cursor_legacy trying fallback.\n')
|
||||||
content_cursor = _get_media_cursor_new(c, filter_empty, filter_date, filter_chat)
|
content_cursor = _get_media_cursor_new(c, filter_empty, filter_date, filter_chat)
|
||||||
|
|
||||||
content = content_cursor.fetchone()
|
content = content_cursor.fetchone()
|
||||||
@@ -495,18 +605,12 @@ def media(db, data, media_folder, filter_date, filter_chat, filter_empty, separa
|
|||||||
# Ensure thumbnails directory exists
|
# Ensure thumbnails directory exists
|
||||||
Path(f"{media_folder}/thumbnails").mkdir(parents=True, exist_ok=True)
|
Path(f"{media_folder}/thumbnails").mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
i = 0
|
with tqdm(total=total_row_number, desc="Processing media", unit="media", leave=False) as pbar:
|
||||||
while content is not None:
|
while (content := _fetch_row_safely(content_cursor)) is not None:
|
||||||
_process_single_media(data, content, media_folder, mime, separate_media)
|
_process_single_media(data, content, media_folder, mime, separate_media, fix_dot_files)
|
||||||
|
pbar.update(1)
|
||||||
i += 1
|
total_time = pbar.format_dict['elapsed']
|
||||||
if i % 100 == 0:
|
logging.info(f"Processed {total_row_number} media in {convert_time_unit(total_time)}")
|
||||||
print(f"Processing media...({i}/{total_row_number})", end="\r")
|
|
||||||
|
|
||||||
content = content_cursor.fetchone()
|
|
||||||
|
|
||||||
print(f"Processing media...({total_row_number}/{total_row_number})", end="\r")
|
|
||||||
|
|
||||||
|
|
||||||
# Helper functions for media processing
|
# Helper functions for media processing
|
||||||
|
|
||||||
@@ -515,8 +619,10 @@ def _get_media_count(cursor, filter_empty, filter_date, filter_chat):
|
|||||||
try:
|
try:
|
||||||
empty_filter = get_cond_for_empty(filter_empty, "key_remote_jid", "messages.needs_push")
|
empty_filter = get_cond_for_empty(filter_empty, "key_remote_jid", "messages.needs_push")
|
||||||
date_filter = f'AND messages.timestamp {filter_date}' if filter_date is not None else ''
|
date_filter = f'AND messages.timestamp {filter_date}' if filter_date is not None else ''
|
||||||
include_filter = get_chat_condition(filter_chat[0], True, ["messages.key_remote_jid", "remote_resource"], "jid", "android")
|
include_filter = get_chat_condition(
|
||||||
exclude_filter = get_chat_condition(filter_chat[1], False, ["messages.key_remote_jid", "remote_resource"], "jid", "android")
|
filter_chat[0], True, ["messages.key_remote_jid", "remote_resource"], "jid", "android")
|
||||||
|
exclude_filter = get_chat_condition(
|
||||||
|
filter_chat[1], False, ["messages.key_remote_jid", "remote_resource"], "jid", "android")
|
||||||
|
|
||||||
cursor.execute(f"""SELECT count()
|
cursor.execute(f"""SELECT count()
|
||||||
FROM message_media
|
FROM message_media
|
||||||
@@ -531,13 +637,18 @@ def _get_media_count(cursor, filter_empty, filter_date, filter_chat):
|
|||||||
{date_filter}
|
{date_filter}
|
||||||
{include_filter}
|
{include_filter}
|
||||||
{exclude_filter}""")
|
{exclude_filter}""")
|
||||||
except sqlite3.OperationalError:
|
except sqlite3.OperationalError as e:
|
||||||
|
logging.debug(f'Got sql error "{e}" in _get_media_count trying fallback.\n')
|
||||||
empty_filter = get_cond_for_empty(filter_empty, "jid.raw_string", "broadcast")
|
empty_filter = get_cond_for_empty(filter_empty, "jid.raw_string", "broadcast")
|
||||||
date_filter = f'AND message.timestamp {filter_date}' if filter_date is not None else ''
|
date_filter = f'AND message.timestamp {filter_date}' if filter_date is not None else ''
|
||||||
include_filter = get_chat_condition(filter_chat[0], True, ["jid.raw_string", "jid_group.raw_string"], "jid", "android")
|
include_filter = get_chat_condition(
|
||||||
exclude_filter = get_chat_condition(filter_chat[1], False, ["jid.raw_string", "jid_group.raw_string"], "jid", "android")
|
filter_chat[0], True, ["key_remote_jid", "group_sender_jid"], "jid", "android")
|
||||||
|
exclude_filter = get_chat_condition(
|
||||||
|
filter_chat[1], False, ["key_remote_jid", "group_sender_jid"], "jid", "android")
|
||||||
|
|
||||||
cursor.execute(f"""SELECT count()
|
cursor.execute(f"""SELECT count(),
|
||||||
|
COALESCE(lid_global.raw_string, jid.raw_string) as key_remote_jid,
|
||||||
|
COALESCE(lid_group.raw_string, jid_group.raw_string) as group_sender_jid
|
||||||
FROM message_media
|
FROM message_media
|
||||||
INNER JOIN message
|
INNER JOIN message
|
||||||
ON message_media.message_row_id = message._id
|
ON message_media.message_row_id = message._id
|
||||||
@@ -547,6 +658,14 @@ def _get_media_count(cursor, filter_empty, filter_date, filter_chat):
|
|||||||
ON jid._id = chat.jid_row_id
|
ON jid._id = chat.jid_row_id
|
||||||
LEFT JOIN jid jid_group
|
LEFT JOIN jid jid_group
|
||||||
ON jid_group._id = message.sender_jid_row_id
|
ON jid_group._id = message.sender_jid_row_id
|
||||||
|
LEFT JOIN jid_map as jid_map_global
|
||||||
|
ON chat.jid_row_id = jid_map_global.lid_row_id
|
||||||
|
LEFT JOIN jid lid_global
|
||||||
|
ON jid_map_global.jid_row_id = lid_global._id
|
||||||
|
LEFT JOIN jid_map as jid_map_group
|
||||||
|
ON message.sender_jid_row_id = jid_map_group.lid_row_id
|
||||||
|
LEFT JOIN jid lid_group
|
||||||
|
ON jid_map_group.jid_row_id = lid_group._id
|
||||||
WHERE 1=1
|
WHERE 1=1
|
||||||
{empty_filter}
|
{empty_filter}
|
||||||
{date_filter}
|
{date_filter}
|
||||||
@@ -557,10 +676,12 @@ def _get_media_count(cursor, filter_empty, filter_date, filter_chat):
|
|||||||
|
|
||||||
def _get_media_cursor_legacy(cursor, filter_empty, filter_date, filter_chat):
|
def _get_media_cursor_legacy(cursor, filter_empty, filter_date, filter_chat):
|
||||||
"""Get cursor for legacy media database schema."""
|
"""Get cursor for legacy media database schema."""
|
||||||
empty_filter = get_cond_for_empty(filter_empty, "key_remote_jid", "broadcast")
|
empty_filter = get_cond_for_empty(filter_empty, "messages.key_remote_jid", "messages.needs_push")
|
||||||
date_filter = f'AND messages.timestamp {filter_date}' if filter_date is not None else ''
|
date_filter = f'AND messages.timestamp {filter_date}' if filter_date is not None else ''
|
||||||
include_filter = get_chat_condition(filter_chat[0], True, ["messages.key_remote_jid", "remote_resource"], "jid", "android")
|
include_filter = get_chat_condition(
|
||||||
exclude_filter = get_chat_condition(filter_chat[1], False, ["messages.key_remote_jid", "remote_resource"], "jid", "android")
|
filter_chat[0], True, ["messages.key_remote_jid", "remote_resource"], "jid", "android")
|
||||||
|
exclude_filter = get_chat_condition(
|
||||||
|
filter_chat[1], False, ["messages.key_remote_jid", "remote_resource"], "jid", "android")
|
||||||
|
|
||||||
cursor.execute(f"""SELECT messages.key_remote_jid,
|
cursor.execute(f"""SELECT messages.key_remote_jid,
|
||||||
message_row_id,
|
message_row_id,
|
||||||
@@ -592,17 +713,20 @@ def _get_media_cursor_new(cursor, filter_empty, filter_date, filter_chat):
|
|||||||
"""Get cursor for new media database schema."""
|
"""Get cursor for new media database schema."""
|
||||||
empty_filter = get_cond_for_empty(filter_empty, "key_remote_jid", "broadcast")
|
empty_filter = get_cond_for_empty(filter_empty, "key_remote_jid", "broadcast")
|
||||||
date_filter = f'AND message.timestamp {filter_date}' if filter_date is not None else ''
|
date_filter = f'AND message.timestamp {filter_date}' if filter_date is not None else ''
|
||||||
include_filter = get_chat_condition(filter_chat[0], True, ["key_remote_jid", "jid_group.raw_string"], "jid", "android")
|
include_filter = get_chat_condition(
|
||||||
exclude_filter = get_chat_condition(filter_chat[1], False, ["key_remote_jid", "jid_group.raw_string"], "jid", "android")
|
filter_chat[0], True, ["key_remote_jid", "group_sender_jid"], "jid", "android")
|
||||||
|
exclude_filter = get_chat_condition(
|
||||||
|
filter_chat[1], False, ["key_remote_jid", "group_sender_jid"], "jid", "android")
|
||||||
|
|
||||||
cursor.execute(f"""SELECT jid.raw_string as key_remote_jid,
|
cursor.execute(f"""SELECT COALESCE(lid_global.raw_string, jid.raw_string) as key_remote_jid,
|
||||||
message_row_id,
|
message_row_id,
|
||||||
file_path,
|
file_path,
|
||||||
message_url,
|
message_url,
|
||||||
mime_type,
|
mime_type,
|
||||||
media_key,
|
media_key,
|
||||||
file_hash,
|
file_hash,
|
||||||
thumbnail
|
thumbnail,
|
||||||
|
COALESCE(lid_group.raw_string, jid_group.raw_string) as group_sender_jid
|
||||||
FROM message_media
|
FROM message_media
|
||||||
INNER JOIN message
|
INNER JOIN message
|
||||||
ON message_media.message_row_id = message._id
|
ON message_media.message_row_id = message._id
|
||||||
@@ -614,6 +738,14 @@ def _get_media_cursor_new(cursor, filter_empty, filter_date, filter_chat):
|
|||||||
ON message_media.file_hash = media_hash_thumbnail.media_hash
|
ON message_media.file_hash = media_hash_thumbnail.media_hash
|
||||||
LEFT JOIN jid jid_group
|
LEFT JOIN jid jid_group
|
||||||
ON jid_group._id = message.sender_jid_row_id
|
ON jid_group._id = message.sender_jid_row_id
|
||||||
|
LEFT JOIN jid_map as jid_map_global
|
||||||
|
ON chat.jid_row_id = jid_map_global.lid_row_id
|
||||||
|
LEFT JOIN jid lid_global
|
||||||
|
ON jid_map_global.jid_row_id = lid_global._id
|
||||||
|
LEFT JOIN jid_map as jid_map_group
|
||||||
|
ON message.sender_jid_row_id = jid_map_group.lid_row_id
|
||||||
|
LEFT JOIN jid lid_group
|
||||||
|
ON jid_map_group.jid_row_id = lid_group._id
|
||||||
WHERE jid.type <> 7
|
WHERE jid.type <> 7
|
||||||
{empty_filter}
|
{empty_filter}
|
||||||
{date_filter}
|
{date_filter}
|
||||||
@@ -623,7 +755,7 @@ def _get_media_cursor_new(cursor, filter_empty, filter_date, filter_chat):
|
|||||||
return cursor
|
return cursor
|
||||||
|
|
||||||
|
|
||||||
def _process_single_media(data, content, media_folder, mime, separate_media):
|
def _process_single_media(data, content, media_folder, mime, separate_media, fix_dot_files=False):
|
||||||
"""Process a single media file."""
|
"""Process a single media file."""
|
||||||
file_path = f"{media_folder}/{content['file_path']}"
|
file_path = f"{media_folder}/{content['file_path']}"
|
||||||
current_chat = data.get_chat(content["key_remote_jid"])
|
current_chat = data.get_chat(content["key_remote_jid"])
|
||||||
@@ -631,8 +763,6 @@ def _process_single_media(data, content, media_folder, mime, separate_media):
|
|||||||
message.media = True
|
message.media = True
|
||||||
|
|
||||||
if os.path.isfile(file_path):
|
if os.path.isfile(file_path):
|
||||||
message.data = file_path
|
|
||||||
|
|
||||||
# Set mime type
|
# Set mime type
|
||||||
if content["mime_type"] is None:
|
if content["mime_type"] is None:
|
||||||
guess = mime.guess_type(file_path)[0]
|
guess = mime.guess_type(file_path)[0]
|
||||||
@@ -643,16 +773,28 @@ def _process_single_media(data, content, media_folder, mime, separate_media):
|
|||||||
else:
|
else:
|
||||||
message.mime = content["mime_type"]
|
message.mime = content["mime_type"]
|
||||||
|
|
||||||
|
if fix_dot_files and file_path.endswith("."):
|
||||||
|
extension = mime.guess_extension(message.mime)
|
||||||
|
if message.mime == "application/octet-stream" or not extension:
|
||||||
|
new_file_path = file_path[:-1]
|
||||||
|
else:
|
||||||
|
extension = mime.guess_extension(message.mime)
|
||||||
|
new_file_path = file_path[:-1] + extension
|
||||||
|
os.rename(file_path, new_file_path)
|
||||||
|
file_path = new_file_path
|
||||||
|
|
||||||
# Copy media to separate folder if needed
|
# Copy media to separate folder if needed
|
||||||
if separate_media:
|
if separate_media:
|
||||||
chat_display_name = slugify(current_chat.name or message.sender
|
chat_display_name = safe_name(current_chat.name or message.sender
|
||||||
or content["key_remote_jid"].split('@')[0], True)
|
or content["key_remote_jid"].split('@')[0])
|
||||||
current_filename = file_path.split("/")[-1]
|
current_filename = file_path.split("/")[-1]
|
||||||
new_folder = os.path.join(media_folder, "separated", chat_display_name)
|
new_folder = os.path.join(media_folder, "separated", chat_display_name)
|
||||||
Path(new_folder).mkdir(parents=True, exist_ok=True)
|
Path(new_folder).mkdir(parents=True, exist_ok=True)
|
||||||
new_path = os.path.join(new_folder, current_filename)
|
new_path = os.path.join(new_folder, current_filename)
|
||||||
shutil.copy2(file_path, new_path)
|
shutil.copy2(file_path, new_path)
|
||||||
message.data = new_path
|
message.data = new_path
|
||||||
|
else:
|
||||||
|
message.data = file_path
|
||||||
else:
|
else:
|
||||||
message.data = "The media is missing"
|
message.data = "The media is missing"
|
||||||
message.mime = "media"
|
message.mime = "media"
|
||||||
@@ -672,46 +814,61 @@ def vcard(db, data, media_folder, filter_date, filter_chat, filter_empty):
|
|||||||
c = db.cursor()
|
c = db.cursor()
|
||||||
try:
|
try:
|
||||||
rows = _execute_vcard_query_modern(c, filter_date, filter_chat, filter_empty)
|
rows = _execute_vcard_query_modern(c, filter_date, filter_chat, filter_empty)
|
||||||
except sqlite3.OperationalError:
|
except sqlite3.OperationalError as e:
|
||||||
|
logging.debug(f'Got sql error "{e}" in _execute_vcard_query_modern trying fallback.\n')
|
||||||
rows = _execute_vcard_query_legacy(c, filter_date, filter_chat, filter_empty)
|
rows = _execute_vcard_query_legacy(c, filter_date, filter_chat, filter_empty)
|
||||||
|
|
||||||
total_row_number = len(rows)
|
total_row_number = len(rows)
|
||||||
print(f"\nProcessing vCards...(0/{total_row_number})", end="\r")
|
|
||||||
|
|
||||||
# Create vCards directory if it doesn't exist
|
# Create vCards directory if it doesn't exist
|
||||||
path = os.path.join(media_folder, "vCards")
|
path = os.path.join(media_folder, "vCards")
|
||||||
Path(path).mkdir(parents=True, exist_ok=True)
|
Path(path).mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
for index, row in enumerate(rows):
|
with tqdm(total=total_row_number, desc="Processing vCards", unit="vcard", leave=False) as pbar:
|
||||||
_process_vcard_row(row, path, data)
|
for row in rows:
|
||||||
print(f"Processing vCards...({index + 1}/{total_row_number})", end="\r")
|
_process_vcard_row(row, path, data)
|
||||||
|
pbar.update(1)
|
||||||
|
total_time = pbar.format_dict['elapsed']
|
||||||
|
logging.info(f"Processed {total_row_number} vCards in {convert_time_unit(total_time)}")
|
||||||
|
|
||||||
def _execute_vcard_query_modern(c, filter_date, filter_chat, filter_empty):
|
def _execute_vcard_query_modern(c, filter_date, filter_chat, filter_empty):
|
||||||
"""Execute vCard query for modern WhatsApp database schema."""
|
"""Execute vCard query for modern WhatsApp database schema."""
|
||||||
|
|
||||||
# Build the filter conditions
|
# Build the filter conditions
|
||||||
chat_filter_include = get_chat_condition(filter_chat[0], True, ["messages.key_remote_jid", "remote_resource"], "jid", "android")
|
|
||||||
chat_filter_exclude = get_chat_condition(filter_chat[1], False, ["messages.key_remote_jid", "remote_resource"], "jid", "android")
|
|
||||||
date_filter = f'AND messages.timestamp {filter_date}' if filter_date is not None else ''
|
date_filter = f'AND messages.timestamp {filter_date}' if filter_date is not None else ''
|
||||||
empty_filter = get_cond_for_empty(filter_empty, "key_remote_jid", "messages.needs_push")
|
empty_filter = get_cond_for_empty(filter_empty, "key_remote_jid", "messages.needs_push")
|
||||||
|
include_filter = get_chat_condition(
|
||||||
|
filter_chat[0], True, ["key_remote_jid", "group_sender_jid"], "jid", "android")
|
||||||
|
exclude_filter = get_chat_condition(
|
||||||
|
filter_chat[1], False, ["key_remote_jid", "group_sender_jid"], "jid", "android")
|
||||||
|
|
||||||
query = f"""SELECT message_row_id,
|
query = f"""SELECT message_row_id,
|
||||||
messages.key_remote_jid,
|
COALESCE(lid_global.raw_string, jid.raw_string) as key_remote_jid,
|
||||||
vcard,
|
vcard,
|
||||||
messages.media_name
|
messages.media_name,
|
||||||
FROM messages_vcards
|
COALESCE(lid_group.raw_string, jid_group.raw_string) as group_sender_jid
|
||||||
INNER JOIN messages
|
FROM messages_vcards
|
||||||
ON messages_vcards.message_row_id = messages._id
|
INNER JOIN messages
|
||||||
INNER JOIN jid
|
ON messages_vcards.message_row_id = messages._id
|
||||||
ON messages.key_remote_jid = jid.raw_string
|
INNER JOIN jid
|
||||||
LEFT JOIN chat
|
ON messages.key_remote_jid = jid.raw_string
|
||||||
ON chat.jid_row_id = jid._id
|
LEFT JOIN chat
|
||||||
|
ON chat.jid_row_id = jid._id
|
||||||
|
LEFT JOIN jid jid_group
|
||||||
|
ON jid_group._id = message.sender_jid_row_id
|
||||||
|
LEFT JOIN jid_map as jid_map_global
|
||||||
|
ON chat.jid_row_id = jid_map_global.lid_row_id
|
||||||
|
LEFT JOIN jid lid_global
|
||||||
|
ON jid_map_global.jid_row_id = lid_global._id
|
||||||
|
LEFT JOIN jid_map as jid_map_group
|
||||||
|
ON message.sender_jid_row_id = jid_map_group.lid_row_id
|
||||||
|
LEFT JOIN jid lid_group
|
||||||
|
ON jid_map_group.jid_row_id = lid_group._id
|
||||||
WHERE 1=1
|
WHERE 1=1
|
||||||
{empty_filter}
|
{empty_filter}
|
||||||
{date_filter}
|
{date_filter}
|
||||||
{chat_filter_include}
|
{include_filter}
|
||||||
{chat_filter_exclude}
|
{exclude_filter}
|
||||||
ORDER BY messages.key_remote_jid ASC;"""
|
ORDER BY messages.key_remote_jid ASC;"""
|
||||||
c.execute(query)
|
c.execute(query)
|
||||||
return c.fetchall()
|
return c.fetchall()
|
||||||
@@ -721,8 +878,10 @@ def _execute_vcard_query_legacy(c, filter_date, filter_chat, filter_empty):
|
|||||||
"""Execute vCard query for legacy WhatsApp database schema."""
|
"""Execute vCard query for legacy WhatsApp database schema."""
|
||||||
|
|
||||||
# Build the filter conditions
|
# Build the filter conditions
|
||||||
chat_filter_include = get_chat_condition(filter_chat[0], True, ["key_remote_jid", "jid_group.raw_string"], "jid", "android")
|
chat_filter_include = get_chat_condition(
|
||||||
chat_filter_exclude = get_chat_condition(filter_chat[1], False, ["key_remote_jid", "jid_group.raw_string"], "jid", "android")
|
filter_chat[0], True, ["key_remote_jid", "jid_group.raw_string"], "jid", "android")
|
||||||
|
chat_filter_exclude = get_chat_condition(
|
||||||
|
filter_chat[1], False, ["key_remote_jid", "jid_group.raw_string"], "jid", "android")
|
||||||
date_filter = f'AND message.timestamp {filter_date}' if filter_date is not None else ''
|
date_filter = f'AND message.timestamp {filter_date}' if filter_date is not None else ''
|
||||||
empty_filter = get_cond_for_empty(filter_empty, "key_remote_jid", "broadcast")
|
empty_filter = get_cond_for_empty(filter_empty, "key_remote_jid", "broadcast")
|
||||||
|
|
||||||
@@ -777,7 +936,7 @@ def calls(db, data, timezone_offset, filter_chat):
|
|||||||
if total_row_number == 0:
|
if total_row_number == 0:
|
||||||
return
|
return
|
||||||
|
|
||||||
print(f"\nProcessing calls...({total_row_number})", end="\r")
|
logging.info(f"Processing calls...({total_row_number})", extra={"clear": True})
|
||||||
|
|
||||||
# Fetch call data
|
# Fetch call data
|
||||||
calls_data = _fetch_calls_data(c, filter_chat)
|
calls_data = _fetch_calls_data(c, filter_chat)
|
||||||
@@ -786,31 +945,37 @@ def calls(db, data, timezone_offset, filter_chat):
|
|||||||
chat = ChatStore(Device.ANDROID, "WhatsApp Calls")
|
chat = ChatStore(Device.ANDROID, "WhatsApp Calls")
|
||||||
|
|
||||||
# Process each call
|
# Process each call
|
||||||
content = calls_data.fetchone()
|
with tqdm(total=total_row_number, desc="Processing calls", unit="call", leave=False) as pbar:
|
||||||
while content is not None:
|
while (content := _fetch_row_safely(calls_data)) is not None:
|
||||||
_process_call_record(content, chat, data, timezone_offset)
|
_process_call_record(content, chat, data, timezone_offset)
|
||||||
content = calls_data.fetchone()
|
pbar.update(1)
|
||||||
|
total_time = pbar.format_dict['elapsed']
|
||||||
|
|
||||||
# Add the calls chat to the data
|
# Add the calls chat to the data
|
||||||
data.add_chat("000000000000000", chat)
|
data.add_chat("000000000000000", chat)
|
||||||
|
logging.info(f"Processed {total_row_number} calls in {convert_time_unit(total_time)}")
|
||||||
|
|
||||||
def _get_calls_count(c, filter_chat):
|
def _get_calls_count(c, filter_chat):
|
||||||
"""Get the count of call records that match the filter."""
|
"""Get the count of call records that match the filter."""
|
||||||
|
|
||||||
# Build the filter conditions
|
# Build the filter conditions
|
||||||
chat_filter_include = get_chat_condition(filter_chat[0], True, ["jid.raw_string"])
|
include_filter = get_chat_condition(filter_chat[0], True, ["key_remote_jid"])
|
||||||
chat_filter_exclude = get_chat_condition(filter_chat[1], False, ["jid.raw_string"])
|
exclude_filter = get_chat_condition(filter_chat[1], False, ["key_remote_jid"])
|
||||||
|
|
||||||
query = f"""SELECT count()
|
query = f"""SELECT count(),
|
||||||
|
COALESCE(lid_global.raw_string, jid.raw_string) as key_remote_jid
|
||||||
FROM call_log
|
FROM call_log
|
||||||
INNER JOIN jid
|
INNER JOIN jid
|
||||||
ON call_log.jid_row_id = jid._id
|
ON call_log.jid_row_id = jid._id
|
||||||
LEFT JOIN chat
|
LEFT JOIN chat
|
||||||
ON call_log.jid_row_id = chat.jid_row_id
|
ON call_log.jid_row_id = chat.jid_row_id
|
||||||
|
LEFT JOIN jid_map as jid_map_global
|
||||||
|
ON chat.jid_row_id = jid_map_global.lid_row_id
|
||||||
|
LEFT JOIN jid lid_global
|
||||||
|
ON jid_map_global.jid_row_id = lid_global._id
|
||||||
WHERE 1=1
|
WHERE 1=1
|
||||||
{chat_filter_include}
|
{include_filter}
|
||||||
{chat_filter_exclude}"""
|
{exclude_filter}"""
|
||||||
c.execute(query)
|
c.execute(query)
|
||||||
return c.fetchone()[0]
|
return c.fetchone()[0]
|
||||||
|
|
||||||
@@ -819,11 +984,11 @@ def _fetch_calls_data(c, filter_chat):
|
|||||||
"""Fetch call data from the database."""
|
"""Fetch call data from the database."""
|
||||||
|
|
||||||
# Build the filter conditions
|
# Build the filter conditions
|
||||||
chat_filter_include = get_chat_condition(filter_chat[0], True, ["jid.raw_string"])
|
include_filter = get_chat_condition(filter_chat[0], True, ["key_remote_jid"])
|
||||||
chat_filter_exclude = get_chat_condition(filter_chat[1], False, ["jid.raw_string"])
|
exclude_filter = get_chat_condition(filter_chat[1], False, ["key_remote_jid"])
|
||||||
|
|
||||||
query = f"""SELECT call_log._id,
|
query = f"""SELECT call_log._id,
|
||||||
jid.raw_string,
|
COALESCE(lid_global.raw_string, jid.raw_string) as key_remote_jid,
|
||||||
from_me,
|
from_me,
|
||||||
call_id,
|
call_id,
|
||||||
timestamp,
|
timestamp,
|
||||||
@@ -837,9 +1002,13 @@ def _fetch_calls_data(c, filter_chat):
|
|||||||
ON call_log.jid_row_id = jid._id
|
ON call_log.jid_row_id = jid._id
|
||||||
LEFT JOIN chat
|
LEFT JOIN chat
|
||||||
ON call_log.jid_row_id = chat.jid_row_id
|
ON call_log.jid_row_id = chat.jid_row_id
|
||||||
|
LEFT JOIN jid_map as jid_map_global
|
||||||
|
ON chat.jid_row_id = jid_map_global.lid_row_id
|
||||||
|
LEFT JOIN jid lid_global
|
||||||
|
ON jid_map_global.jid_row_id = lid_global._id
|
||||||
WHERE 1=1
|
WHERE 1=1
|
||||||
{chat_filter_include}
|
{include_filter}
|
||||||
{chat_filter_exclude}"""
|
{exclude_filter}"""
|
||||||
c.execute(query)
|
c.execute(query)
|
||||||
return c
|
return c
|
||||||
|
|
||||||
@@ -851,13 +1020,13 @@ def _process_call_record(content, chat, data, timezone_offset):
|
|||||||
timestamp=content["timestamp"],
|
timestamp=content["timestamp"],
|
||||||
time=content["timestamp"],
|
time=content["timestamp"],
|
||||||
key_id=content["call_id"],
|
key_id=content["call_id"],
|
||||||
timezone_offset=timezone_offset if timezone_offset else CURRENT_TZ_OFFSET,
|
timezone_offset=timezone_offset,
|
||||||
received_timestamp=None, # TODO: Add timestamp
|
received_timestamp=None, # TODO: Add timestamp
|
||||||
read_timestamp=None # TODO: Add timestamp
|
read_timestamp=None # TODO: Add timestamp
|
||||||
)
|
)
|
||||||
|
|
||||||
# Get caller/callee name
|
# Get caller/callee name
|
||||||
_jid = content["raw_string"]
|
_jid = content["key_remote_jid"]
|
||||||
name = data.get_chat(_jid).name if _jid in data else content["chat_subject"] or None
|
name = data.get_chat(_jid).name if _jid in data else content["chat_subject"] or None
|
||||||
if _jid is not None and "@" in _jid:
|
if _jid is not None and "@" in _jid:
|
||||||
fallback = _jid.split('@')[0]
|
fallback = _jid.split('@')[0]
|
||||||
@@ -902,22 +1071,22 @@ def _construct_call_description(content, call):
|
|||||||
return description
|
return description
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: Marked for enhancement on multi-threaded processing
|
||||||
def create_html(
|
def create_html(
|
||||||
data,
|
data,
|
||||||
output_folder,
|
output_folder,
|
||||||
template=None,
|
template=None,
|
||||||
embedded=False,
|
embedded=False,
|
||||||
offline_static=False,
|
offline_static=False,
|
||||||
maximum_size=None,
|
maximum_size=None,
|
||||||
no_avatar=False,
|
no_avatar=False,
|
||||||
experimental=False,
|
experimental=False,
|
||||||
headline=None
|
headline=None
|
||||||
):
|
):
|
||||||
"""Generate HTML chat files from data."""
|
"""Generate HTML chat files from data."""
|
||||||
template = setup_template(template, no_avatar, experimental)
|
template = setup_template(template, no_avatar, experimental)
|
||||||
|
|
||||||
total_row_number = len(data)
|
total_row_number = len(data)
|
||||||
print(f"\nGenerating chats...(0/{total_row_number})", end="\r")
|
|
||||||
|
|
||||||
# Create output directory if it doesn't exist
|
# Create output directory if it doesn't exist
|
||||||
if not os.path.isdir(output_folder):
|
if not os.path.isdir(output_folder):
|
||||||
@@ -925,43 +1094,42 @@ def create_html(
|
|||||||
|
|
||||||
w3css = get_status_location(output_folder, offline_static)
|
w3css = get_status_location(output_folder, offline_static)
|
||||||
|
|
||||||
for current, contact in enumerate(data):
|
with tqdm(total=total_row_number, desc="Generating HTML", unit="file", leave=False) as pbar:
|
||||||
current_chat = data.get_chat(contact)
|
for contact in data:
|
||||||
if len(current_chat) == 0:
|
current_chat = data.get_chat(contact)
|
||||||
# Skip empty chats
|
if len(current_chat) == 0:
|
||||||
continue
|
# Skip empty chats
|
||||||
|
continue
|
||||||
|
|
||||||
safe_file_name, name = get_file_name(contact, current_chat)
|
safe_file_name, name = get_file_name(contact, current_chat)
|
||||||
|
|
||||||
if maximum_size is not None:
|
if maximum_size is not None:
|
||||||
_generate_paginated_chat(
|
_generate_paginated_chat(
|
||||||
current_chat,
|
current_chat,
|
||||||
safe_file_name,
|
safe_file_name,
|
||||||
name,
|
name,
|
||||||
contact,
|
contact,
|
||||||
output_folder,
|
output_folder,
|
||||||
template,
|
template,
|
||||||
w3css,
|
w3css,
|
||||||
maximum_size,
|
maximum_size,
|
||||||
headline
|
headline
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
_generate_single_chat(
|
_generate_single_chat(
|
||||||
current_chat,
|
current_chat,
|
||||||
safe_file_name,
|
safe_file_name,
|
||||||
name,
|
name,
|
||||||
contact,
|
contact,
|
||||||
output_folder,
|
output_folder,
|
||||||
template,
|
template,
|
||||||
w3css,
|
w3css,
|
||||||
headline
|
headline
|
||||||
)
|
)
|
||||||
|
|
||||||
if current % 10 == 0:
|
|
||||||
print(f"Generating chats...({current}/{total_row_number})", end="\r")
|
|
||||||
|
|
||||||
print(f"Generating chats...({total_row_number}/{total_row_number})", end="\r")
|
|
||||||
|
|
||||||
|
pbar.update(1)
|
||||||
|
total_time = pbar.format_dict['elapsed']
|
||||||
|
logging.info(f"Generated {total_row_number} chats in {convert_time_unit(total_time)}")
|
||||||
|
|
||||||
def _generate_single_chat(current_chat, safe_file_name, name, contact, output_folder, template, w3css, headline):
|
def _generate_single_chat(current_chat, safe_file_name, name, contact, output_folder, template, w3css, headline):
|
||||||
"""Generate a single HTML file for a chat."""
|
"""Generate a single HTML file for a chat."""
|
||||||
|
|||||||
@@ -24,38 +24,6 @@ import struct
|
|||||||
import codecs
|
import codecs
|
||||||
from datetime import datetime, timedelta
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
class BPListWriter(object):
|
|
||||||
def __init__(self, objects):
|
|
||||||
self.bplist = ""
|
|
||||||
self.objects = objects
|
|
||||||
|
|
||||||
def binary(self):
|
|
||||||
'''binary -> string
|
|
||||||
|
|
||||||
Generates bplist
|
|
||||||
'''
|
|
||||||
self.data = 'bplist00'
|
|
||||||
|
|
||||||
# TODO: flatten objects and count max length size
|
|
||||||
|
|
||||||
# TODO: write objects and save offsets
|
|
||||||
|
|
||||||
# TODO: write offsets
|
|
||||||
|
|
||||||
# TODO: write metadata
|
|
||||||
|
|
||||||
return self.data
|
|
||||||
|
|
||||||
def write(self, filename):
|
|
||||||
'''
|
|
||||||
|
|
||||||
Writes bplist to file
|
|
||||||
'''
|
|
||||||
if self.bplist != "":
|
|
||||||
pass
|
|
||||||
# TODO: save self.bplist to file
|
|
||||||
else:
|
|
||||||
raise Exception('BPlist not yet generated')
|
|
||||||
|
|
||||||
class BPListReader(object):
|
class BPListReader(object):
|
||||||
def __init__(self, s):
|
def __init__(self, s):
|
||||||
@@ -68,7 +36,7 @@ class BPListReader(object):
|
|||||||
|
|
||||||
Unpacks the integer of given size (1, 2 or 4 bytes) from string
|
Unpacks the integer of given size (1, 2 or 4 bytes) from string
|
||||||
'''
|
'''
|
||||||
if sz == 1:
|
if sz == 1:
|
||||||
ot = '!B'
|
ot = '!B'
|
||||||
elif sz == 2:
|
elif sz == 2:
|
||||||
ot = '!H'
|
ot = '!H'
|
||||||
@@ -115,7 +83,7 @@ class BPListReader(object):
|
|||||||
|
|
||||||
Unpacks the float of given size (4 or 8 bytes) from string
|
Unpacks the float of given size (4 or 8 bytes) from string
|
||||||
'''
|
'''
|
||||||
if sz == 4:
|
if sz == 4:
|
||||||
ot = '!f'
|
ot = '!f'
|
||||||
elif sz == 8:
|
elif sz == 8:
|
||||||
ot = '!d'
|
ot = '!d'
|
||||||
@@ -135,7 +103,7 @@ class BPListReader(object):
|
|||||||
|
|
||||||
def __unpackDate(self, offset):
|
def __unpackDate(self, offset):
|
||||||
td = int(struct.unpack(">d", self.data[offset+1:offset+9])[0])
|
td = int(struct.unpack(">d", self.data[offset+1:offset+9])[0])
|
||||||
return datetime(year=2001,month=1,day=1) + timedelta(seconds=td)
|
return datetime(year=2001, month=1, day=1) + timedelta(seconds=td)
|
||||||
|
|
||||||
def __unpackItem(self, offset):
|
def __unpackItem(self, offset):
|
||||||
'''__unpackItem(offset)
|
'''__unpackItem(offset)
|
||||||
@@ -144,54 +112,63 @@ class BPListReader(object):
|
|||||||
'''
|
'''
|
||||||
obj_header = self.data[offset]
|
obj_header = self.data[offset]
|
||||||
obj_type, obj_info = (obj_header & 0xF0), (obj_header & 0x0F)
|
obj_type, obj_info = (obj_header & 0xF0), (obj_header & 0x0F)
|
||||||
if obj_type == 0x00:
|
if obj_type == 0x00:
|
||||||
if obj_info == 0x00: # null 0000 0000
|
if obj_info == 0x00: # null 0000 0000
|
||||||
return None
|
return None
|
||||||
elif obj_info == 0x08: # bool 0000 1000 // false
|
elif obj_info == 0x08: # bool 0000 1000 // false
|
||||||
return False
|
return False
|
||||||
elif obj_info == 0x09: # bool 0000 1001 // true
|
elif obj_info == 0x09: # bool 0000 1001 // true
|
||||||
return True
|
return True
|
||||||
elif obj_info == 0x0F: # fill 0000 1111 // fill byte
|
elif obj_info == 0x0F: # fill 0000 1111 // fill byte
|
||||||
raise Exception("0x0F Not Implemented") # this is really pad byte, FIXME
|
raise Exception("0x0F Not Implemented") # this is really pad byte, FIXME
|
||||||
else:
|
else:
|
||||||
raise Exception('unpack item type '+str(obj_header)+' at '+str(offset)+ 'failed')
|
raise Exception('unpack item type '+str(obj_header)+' at '+str(offset) + 'failed')
|
||||||
elif obj_type == 0x10: # int 0001 nnnn ... // # of bytes is 2^nnnn, big-endian bytes
|
elif obj_type == 0x10: # int 0001 nnnn ... // # of bytes is 2^nnnn, big-endian bytes
|
||||||
return self.__unpackInt(offset)
|
return self.__unpackInt(offset)
|
||||||
elif obj_type == 0x20: # real 0010 nnnn ... // # of bytes is 2^nnnn, big-endian bytes
|
elif obj_type == 0x20: # real 0010 nnnn ... // # of bytes is 2^nnnn, big-endian bytes
|
||||||
return self.__unpackFloat(offset)
|
return self.__unpackFloat(offset)
|
||||||
elif obj_type == 0x30: # date 0011 0011 ... // 8 byte float follows, big-endian bytes
|
elif obj_type == 0x30: # date 0011 0011 ... // 8 byte float follows, big-endian bytes
|
||||||
return self.__unpackDate(offset)
|
return self.__unpackDate(offset)
|
||||||
elif obj_type == 0x40: # data 0100 nnnn [int] ... // nnnn is number of bytes unless 1111 then int count follows, followed by bytes
|
# data 0100 nnnn [int] ... // nnnn is number of bytes unless 1111 then int count follows, followed by bytes
|
||||||
|
elif obj_type == 0x40:
|
||||||
obj_count, objref = self.__resolveIntSize(obj_info, offset)
|
obj_count, objref = self.__resolveIntSize(obj_info, offset)
|
||||||
return self.data[objref:objref+obj_count] # XXX: we return data as str
|
return self.data[objref:objref+obj_count] # XXX: we return data as str
|
||||||
elif obj_type == 0x50: # string 0101 nnnn [int] ... // ASCII string, nnnn is # of chars, else 1111 then int count, then bytes
|
# string 0101 nnnn [int] ... // ASCII string, nnnn is # of chars, else 1111 then int count, then bytes
|
||||||
|
elif obj_type == 0x50:
|
||||||
obj_count, objref = self.__resolveIntSize(obj_info, offset)
|
obj_count, objref = self.__resolveIntSize(obj_info, offset)
|
||||||
return self.data[objref:objref+obj_count]
|
return self.data[objref:objref+obj_count]
|
||||||
elif obj_type == 0x60: # string 0110 nnnn [int] ... // Unicode string, nnnn is # of chars, else 1111 then int count, then big-endian 2-byte uint16_t
|
# string 0110 nnnn [int] ... // Unicode string, nnnn is # of chars, else 1111 then int count, then big-endian 2-byte uint16_t
|
||||||
|
elif obj_type == 0x60:
|
||||||
obj_count, objref = self.__resolveIntSize(obj_info, offset)
|
obj_count, objref = self.__resolveIntSize(obj_info, offset)
|
||||||
return self.data[objref:objref+obj_count*2].decode('utf-16be')
|
return self.data[objref:objref+obj_count*2].decode('utf-16be')
|
||||||
elif obj_type == 0x80: # uid 1000 nnnn ... // nnnn+1 is # of bytes
|
elif obj_type == 0x80: # uid 1000 nnnn ... // nnnn+1 is # of bytes
|
||||||
# FIXME: Accept as a string for now
|
# FIXME: Accept as a string for now
|
||||||
obj_count, objref = self.__resolveIntSize(obj_info, offset)
|
obj_count, objref = self.__resolveIntSize(obj_info, offset)
|
||||||
return self.data[objref:objref+obj_count]
|
return self.data[objref:objref+obj_count]
|
||||||
elif obj_type == 0xA0: # array 1010 nnnn [int] objref* // nnnn is count, unless '1111', then int count follows
|
# array 1010 nnnn [int] objref* // nnnn is count, unless '1111', then int count follows
|
||||||
|
elif obj_type == 0xA0:
|
||||||
obj_count, objref = self.__resolveIntSize(obj_info, offset)
|
obj_count, objref = self.__resolveIntSize(obj_info, offset)
|
||||||
arr = []
|
arr = []
|
||||||
for i in range(obj_count):
|
for i in range(obj_count):
|
||||||
arr.append(self.__unpackIntStruct(self.object_ref_size, self.data[objref+i*self.object_ref_size:objref+i*self.object_ref_size+self.object_ref_size]))
|
arr.append(self.__unpackIntStruct(
|
||||||
|
self.object_ref_size, self.data[objref+i*self.object_ref_size:objref+i*self.object_ref_size+self.object_ref_size]))
|
||||||
return arr
|
return arr
|
||||||
elif obj_type == 0xC0: # set 1100 nnnn [int] objref* // nnnn is count, unless '1111', then int count follows
|
# set 1100 nnnn [int] objref* // nnnn is count, unless '1111', then int count follows
|
||||||
|
elif obj_type == 0xC0:
|
||||||
# XXX: not serializable via apple implementation
|
# XXX: not serializable via apple implementation
|
||||||
raise Exception("0xC0 Not Implemented") # FIXME: implement
|
raise Exception("0xC0 Not Implemented") # FIXME: implement
|
||||||
elif obj_type == 0xD0: # dict 1101 nnnn [int] keyref* objref* // nnnn is count, unless '1111', then int count follows
|
# dict 1101 nnnn [int] keyref* objref* // nnnn is count, unless '1111', then int count follows
|
||||||
|
elif obj_type == 0xD0:
|
||||||
obj_count, objref = self.__resolveIntSize(obj_info, offset)
|
obj_count, objref = self.__resolveIntSize(obj_info, offset)
|
||||||
keys = []
|
keys = []
|
||||||
for i in range(obj_count):
|
for i in range(obj_count):
|
||||||
keys.append(self.__unpackIntStruct(self.object_ref_size, self.data[objref+i*self.object_ref_size:objref+i*self.object_ref_size+self.object_ref_size]))
|
keys.append(self.__unpackIntStruct(
|
||||||
|
self.object_ref_size, self.data[objref+i*self.object_ref_size:objref+i*self.object_ref_size+self.object_ref_size]))
|
||||||
values = []
|
values = []
|
||||||
objref += obj_count*self.object_ref_size
|
objref += obj_count*self.object_ref_size
|
||||||
for i in range(obj_count):
|
for i in range(obj_count):
|
||||||
values.append(self.__unpackIntStruct(self.object_ref_size, self.data[objref+i*self.object_ref_size:objref+i*self.object_ref_size+self.object_ref_size]))
|
values.append(self.__unpackIntStruct(
|
||||||
|
self.object_ref_size, self.data[objref+i*self.object_ref_size:objref+i*self.object_ref_size+self.object_ref_size]))
|
||||||
dic = {}
|
dic = {}
|
||||||
for i in range(obj_count):
|
for i in range(obj_count):
|
||||||
dic[keys[i]] = values[i]
|
dic[keys[i]] = values[i]
|
||||||
@@ -212,7 +189,7 @@ class BPListReader(object):
|
|||||||
return newArr
|
return newArr
|
||||||
if type(obj) == dict:
|
if type(obj) == dict:
|
||||||
newDic = {}
|
newDic = {}
|
||||||
for k,v in obj.items():
|
for k, v in obj.items():
|
||||||
key_resolved = self.__resolveObject(k)
|
key_resolved = self.__resolveObject(k)
|
||||||
if isinstance(key_resolved, str):
|
if isinstance(key_resolved, str):
|
||||||
rk = key_resolved
|
rk = key_resolved
|
||||||
@@ -232,8 +209,9 @@ class BPListReader(object):
|
|||||||
raise Exception('Bad magic')
|
raise Exception('Bad magic')
|
||||||
|
|
||||||
# read trailer
|
# read trailer
|
||||||
self.offset_size, self.object_ref_size, self.number_of_objects, self.top_object, self.table_offset = struct.unpack('!6xBB4xI4xI4xI', self.data[-32:])
|
self.offset_size, self.object_ref_size, self.number_of_objects, self.top_object, self.table_offset = struct.unpack(
|
||||||
#print "** plist offset_size:",self.offset_size,"objref_size:",self.object_ref_size,"num_objs:",self.number_of_objects,"top:",self.top_object,"table_ofs:",self.table_offset
|
'!6xBB4xI4xI4xI', self.data[-32:])
|
||||||
|
# print "** plist offset_size:",self.offset_size,"objref_size:",self.object_ref_size,"num_objs:",self.number_of_objects,"top:",self.top_object,"table_ofs:",self.table_offset
|
||||||
|
|
||||||
# read offset table
|
# read offset table
|
||||||
self.offset_table = self.data[self.table_offset:-32]
|
self.offset_table = self.data[self.table_offset:-32]
|
||||||
@@ -243,19 +221,19 @@ class BPListReader(object):
|
|||||||
offset_entry = ot[:self.offset_size]
|
offset_entry = ot[:self.offset_size]
|
||||||
ot = ot[self.offset_size:]
|
ot = ot[self.offset_size:]
|
||||||
self.offsets.append(self.__unpackIntStruct(self.offset_size, offset_entry))
|
self.offsets.append(self.__unpackIntStruct(self.offset_size, offset_entry))
|
||||||
#print "** plist offsets:",self.offsets
|
# print "** plist offsets:",self.offsets
|
||||||
|
|
||||||
# read object table
|
# read object table
|
||||||
self.objects = []
|
self.objects = []
|
||||||
k = 0
|
k = 0
|
||||||
for i in self.offsets:
|
for i in self.offsets:
|
||||||
obj = self.__unpackItem(i)
|
obj = self.__unpackItem(i)
|
||||||
#print "** plist unpacked",k,type(obj),obj,"at",i
|
# print "** plist unpacked",k,type(obj),obj,"at",i
|
||||||
k += 1
|
k += 1
|
||||||
self.objects.append(obj)
|
self.objects.append(obj)
|
||||||
|
|
||||||
# rebuild object tree
|
# rebuild object tree
|
||||||
#for i in range(len(self.objects)):
|
# for i in range(len(self.objects)):
|
||||||
# self.__resolveObject(i)
|
# self.__resolveObject(i)
|
||||||
|
|
||||||
# return root object
|
# return root object
|
||||||
@@ -265,28 +243,3 @@ class BPListReader(object):
|
|||||||
def plistWithString(cls, s):
|
def plistWithString(cls, s):
|
||||||
parser = cls(s)
|
parser = cls(s)
|
||||||
return parser.parse()
|
return parser.parse()
|
||||||
|
|
||||||
# helpers for testing
|
|
||||||
def plist(obj):
|
|
||||||
from Foundation import NSPropertyListSerialization, NSPropertyListBinaryFormat_v1_0
|
|
||||||
b = NSPropertyListSerialization.dataWithPropertyList_format_options_error_(obj, NSPropertyListBinaryFormat_v1_0, 0, None)
|
|
||||||
return str(b.bytes())
|
|
||||||
|
|
||||||
def unplist(s):
|
|
||||||
from Foundation import NSData, NSPropertyListSerialization
|
|
||||||
d = NSData.dataWithBytes_length_(s, len(s))
|
|
||||||
return NSPropertyListSerialization.propertyListWithData_options_format_error_(d, 0, None, None)
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import json
|
|
||||||
file_path = sys.argv[1]
|
|
||||||
|
|
||||||
with open(file_path, "rb") as fp:
|
|
||||||
data = fp.read()
|
|
||||||
|
|
||||||
out = BPListReader(data).parse()
|
|
||||||
|
|
||||||
with open(file_path + ".json", "w") as fp:
|
|
||||||
json.dump(out, indent=4)
|
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ class Timing:
|
|||||||
"""
|
"""
|
||||||
Handles timestamp formatting with timezone support.
|
Handles timestamp formatting with timezone support.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, timezone_offset: Optional[int]) -> None:
|
def __init__(self, timezone_offset: Optional[int]) -> None:
|
||||||
"""
|
"""
|
||||||
Initialize Timing object.
|
Initialize Timing object.
|
||||||
@@ -27,7 +28,7 @@ class Timing:
|
|||||||
Returns:
|
Returns:
|
||||||
Optional[str]: Formatted timestamp string, or None if timestamp is None
|
Optional[str]: Formatted timestamp string, or None if timestamp is None
|
||||||
"""
|
"""
|
||||||
if timestamp:
|
if timestamp is not None:
|
||||||
timestamp = timestamp / 1000 if timestamp > 9999999999 else timestamp
|
timestamp = timestamp / 1000 if timestamp > 9999999999 else timestamp
|
||||||
return datetime.fromtimestamp(timestamp, TimeZone(self.timezone_offset)).strftime(format)
|
return datetime.fromtimestamp(timestamp, TimeZone(self.timezone_offset)).strftime(format)
|
||||||
return None
|
return None
|
||||||
@@ -37,6 +38,7 @@ class TimeZone(tzinfo):
|
|||||||
"""
|
"""
|
||||||
Custom timezone class with fixed offset.
|
Custom timezone class with fixed offset.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, offset: int) -> None:
|
def __init__(self, offset: int) -> None:
|
||||||
"""
|
"""
|
||||||
Initialize TimeZone object.
|
Initialize TimeZone object.
|
||||||
@@ -64,6 +66,7 @@ class ChatCollection(MutableMapping):
|
|||||||
def __init__(self) -> None:
|
def __init__(self) -> None:
|
||||||
"""Initialize an empty chat collection."""
|
"""Initialize an empty chat collection."""
|
||||||
self._chats: Dict[str, ChatStore] = {}
|
self._chats: Dict[str, ChatStore] = {}
|
||||||
|
self._system: Dict[str, Any] = {}
|
||||||
|
|
||||||
def __getitem__(self, key: str) -> 'ChatStore':
|
def __getitem__(self, key: str) -> 'ChatStore':
|
||||||
"""Get a chat by its ID. Required for dict-like access."""
|
"""Get a chat by its ID. Required for dict-like access."""
|
||||||
@@ -146,11 +149,34 @@ class ChatCollection(MutableMapping):
|
|||||||
"""
|
"""
|
||||||
return {chat_id: chat.to_json() for chat_id, chat in self._chats.items()}
|
return {chat_id: chat.to_json() for chat_id, chat in self._chats.items()}
|
||||||
|
|
||||||
|
def get_system(self, key: str) -> Any:
|
||||||
|
"""
|
||||||
|
Get a system value by its key.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key (str): The key of the system value to retrieve
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Any: The system value if found, None otherwise
|
||||||
|
"""
|
||||||
|
return self._system.get(key)
|
||||||
|
|
||||||
|
def set_system(self, key: str, value: Any) -> None:
|
||||||
|
"""
|
||||||
|
Set a system value by its key.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key (str): The key of the system value to set
|
||||||
|
value (Any): The value to set
|
||||||
|
"""
|
||||||
|
self._system[key] = value
|
||||||
|
|
||||||
|
|
||||||
class ChatStore:
|
class ChatStore:
|
||||||
"""
|
"""
|
||||||
Stores chat information and messages.
|
Stores chat information and messages.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, type: str, name: Optional[str] = None, media: Optional[str] = None) -> None:
|
def __init__(self, type: str, name: Optional[str] = None, media: Optional[str] = None) -> None:
|
||||||
"""
|
"""
|
||||||
Initialize ChatStore object.
|
Initialize ChatStore object.
|
||||||
@@ -204,15 +230,25 @@ class ChatStore:
|
|||||||
|
|
||||||
def to_json(self) -> Dict[str, Any]:
|
def to_json(self) -> Dict[str, Any]:
|
||||||
"""Convert chat store to JSON-serializable dict."""
|
"""Convert chat store to JSON-serializable dict."""
|
||||||
return {
|
json_dict = {
|
||||||
'name': self.name,
|
key: value
|
||||||
'type': self.type,
|
for key, value in self.__dict__.items()
|
||||||
'my_avatar': self.my_avatar,
|
if key != '_messages'
|
||||||
'their_avatar': self.their_avatar,
|
|
||||||
'their_avatar_thumb': self.their_avatar_thumb,
|
|
||||||
'status': self.status,
|
|
||||||
'messages': {id: msg.to_json() for id, msg in self._messages.items()}
|
|
||||||
}
|
}
|
||||||
|
json_dict['messages'] = {id: msg.to_json() for id, msg in self._messages.items()}
|
||||||
|
return json_dict
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_json(cls, data: Dict) -> 'ChatStore':
|
||||||
|
"""Create a chat store from JSON data."""
|
||||||
|
chat = cls(data.get("type"), data.get("name"))
|
||||||
|
for key, value in data.items():
|
||||||
|
if hasattr(chat, key) and key not in ("messages", "type", "name"):
|
||||||
|
setattr(chat, key, value)
|
||||||
|
for id, msg_data in data.get("messages", {}).items():
|
||||||
|
message = Message.from_json(msg_data)
|
||||||
|
chat.add_message(id, message)
|
||||||
|
return chat
|
||||||
|
|
||||||
def get_last_message(self) -> 'Message':
|
def get_last_message(self) -> 'Message':
|
||||||
"""Get the most recent message in the chat."""
|
"""Get the most recent message in the chat."""
|
||||||
@@ -230,21 +266,43 @@ class ChatStore:
|
|||||||
"""Get all message keys in the chat."""
|
"""Get all message keys in the chat."""
|
||||||
return self._messages.keys()
|
return self._messages.keys()
|
||||||
|
|
||||||
|
def merge_with(self, other: 'ChatStore'):
|
||||||
|
"""Merge another ChatStore into this one.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
other (ChatStore): The ChatStore to merge with
|
||||||
|
|
||||||
|
"""
|
||||||
|
if not isinstance(other, ChatStore):
|
||||||
|
raise TypeError("Can only merge with another ChatStore object")
|
||||||
|
|
||||||
|
# Update fields if they are not None in the other ChatStore
|
||||||
|
self.name = other.name or self.name
|
||||||
|
self.type = other.type or self.type
|
||||||
|
self.my_avatar = other.my_avatar or self.my_avatar
|
||||||
|
self.their_avatar = other.their_avatar or self.their_avatar
|
||||||
|
self.their_avatar_thumb = other.their_avatar_thumb or self.their_avatar_thumb
|
||||||
|
self.status = other.status or self.status
|
||||||
|
|
||||||
|
# Merge messages
|
||||||
|
self._messages.update(other._messages)
|
||||||
|
|
||||||
|
|
||||||
class Message:
|
class Message:
|
||||||
"""
|
"""
|
||||||
Represents a single message in a chat.
|
Represents a single message in a chat.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
*,
|
*,
|
||||||
from_me: Union[bool, int],
|
from_me: Union[bool, int],
|
||||||
timestamp: int,
|
timestamp: int,
|
||||||
time: Union[int, float, str],
|
time: Union[int, float, str],
|
||||||
key_id: int,
|
key_id: Union[int, str],
|
||||||
received_timestamp: int,
|
received_timestamp: int = None,
|
||||||
read_timestamp: int,
|
read_timestamp: int = None,
|
||||||
timezone_offset: int = 0,
|
timezone_offset: Optional[Timing] = Timing(0),
|
||||||
message_type: Optional[int] = None
|
message_type: Optional[int] = None
|
||||||
) -> None:
|
) -> None:
|
||||||
"""
|
"""
|
||||||
@@ -255,8 +313,8 @@ class Message:
|
|||||||
timestamp (int): Message timestamp
|
timestamp (int): Message timestamp
|
||||||
time (Union[int, float, str]): Message time
|
time (Union[int, float, str]): Message time
|
||||||
key_id (int): Message unique identifier
|
key_id (int): Message unique identifier
|
||||||
received_timestamp (int): When message was received
|
received_timestamp (int, optional): When message was received. Defaults to None
|
||||||
read_timestamp (int): When message was read
|
read_timestamp (int, optional): When message was read. Defaults to None
|
||||||
timezone_offset (int, optional): Hours offset from UTC. Defaults to 0
|
timezone_offset (int, optional): Hours offset from UTC. Defaults to 0
|
||||||
message_type (Optional[int], optional): Type of message. Defaults to None
|
message_type (Optional[int], optional): Type of message. Defaults to None
|
||||||
|
|
||||||
@@ -265,10 +323,9 @@ class Message:
|
|||||||
"""
|
"""
|
||||||
self.from_me = bool(from_me)
|
self.from_me = bool(from_me)
|
||||||
self.timestamp = timestamp / 1000 if timestamp > 9999999999 else timestamp
|
self.timestamp = timestamp / 1000 if timestamp > 9999999999 else timestamp
|
||||||
timing = Timing(timezone_offset)
|
|
||||||
|
|
||||||
if isinstance(time, (int, float)):
|
if isinstance(time, (int, float)):
|
||||||
self.time = timing.format_timestamp(self.timestamp, "%H:%M")
|
self.time = timezone_offset.format_timestamp(self.timestamp, "%H:%M")
|
||||||
elif isinstance(time, str):
|
elif isinstance(time, str):
|
||||||
self.time = time
|
self.time = time
|
||||||
else:
|
else:
|
||||||
@@ -281,9 +338,21 @@ class Message:
|
|||||||
self.sender = None
|
self.sender = None
|
||||||
self.safe = False
|
self.safe = False
|
||||||
self.mime = None
|
self.mime = None
|
||||||
self.message_type = message_type,
|
self.message_type = message_type
|
||||||
self.received_timestamp = timing.format_timestamp(received_timestamp, "%Y/%m/%d %H:%M")
|
if isinstance(received_timestamp, (int, float)):
|
||||||
self.read_timestamp = timing.format_timestamp(read_timestamp, "%Y/%m/%d %H:%M")
|
self.received_timestamp = timezone_offset.format_timestamp(
|
||||||
|
received_timestamp, "%Y/%m/%d %H:%M")
|
||||||
|
elif isinstance(received_timestamp, str):
|
||||||
|
self.received_timestamp = received_timestamp
|
||||||
|
else:
|
||||||
|
self.received_timestamp = None
|
||||||
|
if isinstance(read_timestamp, (int, float)):
|
||||||
|
self.read_timestamp = timezone_offset.format_timestamp(
|
||||||
|
read_timestamp, "%Y/%m/%d %H:%M")
|
||||||
|
elif isinstance(read_timestamp, str):
|
||||||
|
self.read_timestamp = read_timestamp
|
||||||
|
else:
|
||||||
|
self.read_timestamp = None
|
||||||
|
|
||||||
# Extra attributes
|
# Extra attributes
|
||||||
self.reply = None
|
self.reply = None
|
||||||
@@ -291,23 +360,29 @@ class Message:
|
|||||||
self.caption = None
|
self.caption = None
|
||||||
self.thumb = None # Android specific
|
self.thumb = None # Android specific
|
||||||
self.sticker = False
|
self.sticker = False
|
||||||
|
self.reactions = {}
|
||||||
|
|
||||||
def to_json(self) -> Dict[str, Any]:
|
def to_json(self) -> Dict[str, Any]:
|
||||||
"""Convert message to JSON-serializable dict."""
|
"""Convert message to JSON-serializable dict."""
|
||||||
return {
|
return {
|
||||||
'from_me': self.from_me,
|
key: value
|
||||||
'timestamp': self.timestamp,
|
for key, value in self.__dict__.items()
|
||||||
'time': self.time,
|
|
||||||
'media': self.media,
|
|
||||||
'key_id': self.key_id,
|
|
||||||
'meta': self.meta,
|
|
||||||
'data': self.data,
|
|
||||||
'sender': self.sender,
|
|
||||||
'safe': self.safe,
|
|
||||||
'mime': self.mime,
|
|
||||||
'reply': self.reply,
|
|
||||||
'quoted_data': self.quoted_data,
|
|
||||||
'caption': self.caption,
|
|
||||||
'thumb': self.thumb,
|
|
||||||
'sticker': self.sticker
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_json(cls, data: Dict) -> 'Message':
|
||||||
|
message = cls(
|
||||||
|
from_me=data["from_me"],
|
||||||
|
timestamp=data["timestamp"],
|
||||||
|
time=data["time"],
|
||||||
|
key_id=data["key_id"],
|
||||||
|
message_type=data.get("message_type"),
|
||||||
|
received_timestamp=data.get("received_timestamp"),
|
||||||
|
read_timestamp=data.get("read_timestamp")
|
||||||
|
)
|
||||||
|
added = ("from_me", "timestamp", "time", "key_id", "message_type",
|
||||||
|
"received_timestamp", "read_timestamp")
|
||||||
|
for key, value in data.items():
|
||||||
|
if hasattr(message, key) and key not in added:
|
||||||
|
setattr(message, key, value)
|
||||||
|
return message
|
||||||
|
|||||||
@@ -1,10 +1,14 @@
|
|||||||
#!/usr/bin/python3
|
#!/usr/bin/python3
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
import logging
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from mimetypes import MimeTypes
|
from mimetypes import MimeTypes
|
||||||
|
from tqdm import tqdm
|
||||||
from Whatsapp_Chat_Exporter.data_model import ChatStore, Message
|
from Whatsapp_Chat_Exporter.data_model import ChatStore, Message
|
||||||
from Whatsapp_Chat_Exporter.utility import Device
|
from Whatsapp_Chat_Exporter.utility import Device, convert_time_unit
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def messages(path, data, assume_first_as_me=False):
|
def messages(path, data, assume_first_as_me=False):
|
||||||
@@ -30,17 +34,16 @@ def messages(path, data, assume_first_as_me=False):
|
|||||||
|
|
||||||
# Second pass: process the messages
|
# Second pass: process the messages
|
||||||
with open(path, "r", encoding="utf8") as file:
|
with open(path, "r", encoding="utf8") as file:
|
||||||
for index, line in enumerate(file):
|
with tqdm(total=total_row_number, desc="Processing messages & media", unit="msg&media", leave=False) as pbar:
|
||||||
you, user_identification_done = process_line(
|
for index, line in enumerate(file):
|
||||||
line, index, chat, path, you,
|
you, user_identification_done = process_line(
|
||||||
assume_first_as_me, user_identification_done
|
line, index, chat, path, you,
|
||||||
)
|
assume_first_as_me, user_identification_done
|
||||||
|
)
|
||||||
|
pbar.update(1)
|
||||||
|
total_time = pbar.format_dict['elapsed']
|
||||||
|
logging.info(f"Processed {total_row_number} messages & media in {convert_time_unit(total_time)}")
|
||||||
|
|
||||||
# Show progress
|
|
||||||
if index % 1000 == 0:
|
|
||||||
print(f"Processing messages & media...({index}/{total_row_number})", end="\r")
|
|
||||||
|
|
||||||
print(f"Processing messages & media...({total_row_number}/{total_row_number})")
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,14 +1,18 @@
|
|||||||
#!/usr/bin/python3
|
#!/usr/bin/python3
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
import logging
|
||||||
import shutil
|
import shutil
|
||||||
from glob import glob
|
from glob import glob
|
||||||
|
from tqdm import tqdm
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from mimetypes import MimeTypes
|
from mimetypes import MimeTypes
|
||||||
from markupsafe import escape as htmle
|
from markupsafe import escape as htmle
|
||||||
from Whatsapp_Chat_Exporter.data_model import ChatStore, Message
|
from Whatsapp_Chat_Exporter.data_model import ChatStore, Message
|
||||||
from Whatsapp_Chat_Exporter.utility import APPLE_TIME, CURRENT_TZ_OFFSET, get_chat_condition
|
from Whatsapp_Chat_Exporter.utility import APPLE_TIME, get_chat_condition, Device
|
||||||
from Whatsapp_Chat_Exporter.utility import bytes_to_readable, convert_time_unit, slugify, Device
|
from Whatsapp_Chat_Exporter.utility import bytes_to_readable, convert_time_unit, safe_name
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def contacts(db, data):
|
def contacts(db, data):
|
||||||
@@ -16,19 +20,21 @@ def contacts(db, data):
|
|||||||
c = db.cursor()
|
c = db.cursor()
|
||||||
c.execute("""SELECT count() FROM ZWAADDRESSBOOKCONTACT WHERE ZABOUTTEXT IS NOT NULL""")
|
c.execute("""SELECT count() FROM ZWAADDRESSBOOKCONTACT WHERE ZABOUTTEXT IS NOT NULL""")
|
||||||
total_row_number = c.fetchone()[0]
|
total_row_number = c.fetchone()[0]
|
||||||
print(f"Pre-processing contacts...({total_row_number})")
|
logging.info(f"Pre-processing contacts...({total_row_number})", extra={"clear": True})
|
||||||
|
|
||||||
c.execute("""SELECT ZWHATSAPPID, ZABOUTTEXT FROM ZWAADDRESSBOOKCONTACT WHERE ZABOUTTEXT IS NOT NULL""")
|
c.execute("""SELECT ZWHATSAPPID, ZABOUTTEXT FROM ZWAADDRESSBOOKCONTACT WHERE ZABOUTTEXT IS NOT NULL""")
|
||||||
content = c.fetchone()
|
with tqdm(total=total_row_number, desc="Processing contacts", unit="contact", leave=False) as pbar:
|
||||||
while content is not None:
|
while (content := c.fetchone()) is not None:
|
||||||
zwhatsapp_id = content["ZWHATSAPPID"]
|
zwhatsapp_id = content["ZWHATSAPPID"]
|
||||||
if not zwhatsapp_id.endswith("@s.whatsapp.net"):
|
if not zwhatsapp_id.endswith("@s.whatsapp.net"):
|
||||||
zwhatsapp_id += "@s.whatsapp.net"
|
zwhatsapp_id += "@s.whatsapp.net"
|
||||||
|
|
||||||
current_chat = ChatStore(Device.IOS)
|
current_chat = ChatStore(Device.IOS)
|
||||||
current_chat.status = content["ZABOUTTEXT"]
|
current_chat.status = content["ZABOUTTEXT"]
|
||||||
data.add_chat(zwhatsapp_id, current_chat)
|
data.add_chat(zwhatsapp_id, current_chat)
|
||||||
content = c.fetchone()
|
pbar.update(1)
|
||||||
|
total_time = pbar.format_dict['elapsed']
|
||||||
|
logging.info(f"Pre-processed {total_row_number} contacts in {convert_time_unit(total_time)}")
|
||||||
|
|
||||||
|
|
||||||
def process_contact_avatars(current_chat, media_folder, contact_id):
|
def process_contact_avatars(current_chat, media_folder, contact_id):
|
||||||
@@ -55,14 +61,16 @@ def get_contact_name(content):
|
|||||||
return content["ZPUSHNAME"]
|
return content["ZPUSHNAME"]
|
||||||
|
|
||||||
|
|
||||||
def messages(db, data, media_folder, timezone_offset, filter_date, filter_chat, filter_empty):
|
def messages(db, data, media_folder, timezone_offset, filter_date, filter_chat, filter_empty, no_reply):
|
||||||
"""Process WhatsApp messages and contacts from the database."""
|
"""Process WhatsApp messages and contacts from the database."""
|
||||||
c = db.cursor()
|
c = db.cursor()
|
||||||
cursor2 = db.cursor()
|
cursor2 = db.cursor()
|
||||||
|
|
||||||
# Build the chat filter conditions
|
# Build the chat filter conditions
|
||||||
chat_filter_include = get_chat_condition(filter_chat[0], True, ["ZWACHATSESSION.ZCONTACTJID", "ZMEMBERJID"], "ZGROUPINFO", "ios")
|
chat_filter_include = get_chat_condition(
|
||||||
chat_filter_exclude = get_chat_condition(filter_chat[1], False, ["ZWACHATSESSION.ZCONTACTJID", "ZMEMBERJID"], "ZGROUPINFO", "ios")
|
filter_chat[0], True, ["ZWACHATSESSION.ZCONTACTJID", "ZMEMBERJID"], "ZGROUPINFO", "ios")
|
||||||
|
chat_filter_exclude = get_chat_condition(
|
||||||
|
filter_chat[1], False, ["ZWACHATSESSION.ZCONTACTJID", "ZMEMBERJID"], "ZGROUPINFO", "ios")
|
||||||
date_filter = f'AND ZMESSAGEDATE {filter_date}' if filter_date is not None else ''
|
date_filter = f'AND ZMESSAGEDATE {filter_date}' if filter_date is not None else ''
|
||||||
|
|
||||||
# Process contacts first
|
# Process contacts first
|
||||||
@@ -85,7 +93,6 @@ def messages(db, data, media_folder, timezone_offset, filter_date, filter_chat,
|
|||||||
"""
|
"""
|
||||||
c.execute(contact_query)
|
c.execute(contact_query)
|
||||||
total_row_number = c.fetchone()[0]
|
total_row_number = c.fetchone()[0]
|
||||||
print(f"Processing contacts...({total_row_number})")
|
|
||||||
|
|
||||||
# Get distinct contacts
|
# Get distinct contacts
|
||||||
contacts_query = f"""
|
contacts_query = f"""
|
||||||
@@ -107,22 +114,24 @@ def messages(db, data, media_folder, timezone_offset, filter_date, filter_chat,
|
|||||||
c.execute(contacts_query)
|
c.execute(contacts_query)
|
||||||
|
|
||||||
# Process each contact
|
# Process each contact
|
||||||
content = c.fetchone()
|
with tqdm(total=total_row_number, desc="Processing contacts", unit="contact", leave=False) as pbar:
|
||||||
while content is not None:
|
while (content := c.fetchone()) is not None:
|
||||||
contact_name = get_contact_name(content)
|
contact_name = get_contact_name(content)
|
||||||
contact_id = content["ZCONTACTJID"]
|
contact_id = content["ZCONTACTJID"]
|
||||||
|
|
||||||
# Add or update chat
|
# Add or update chat
|
||||||
if contact_id not in data:
|
if contact_id not in data:
|
||||||
current_chat = data.add_chat(contact_id, ChatStore(Device.IOS, contact_name, media_folder))
|
current_chat = data.add_chat(contact_id, ChatStore(Device.IOS, contact_name, media_folder))
|
||||||
else:
|
else:
|
||||||
current_chat = data.get_chat(contact_id)
|
current_chat = data.get_chat(contact_id)
|
||||||
current_chat.name = contact_name
|
current_chat.name = contact_name
|
||||||
current_chat.my_avatar = os.path.join(media_folder, "Media/Profile/Photo.jpg")
|
current_chat.my_avatar = os.path.join(media_folder, "Media/Profile/Photo.jpg")
|
||||||
|
|
||||||
# Process avatar images
|
# Process avatar images
|
||||||
process_contact_avatars(current_chat, media_folder, contact_id)
|
process_contact_avatars(current_chat, media_folder, contact_id)
|
||||||
content = c.fetchone()
|
pbar.update(1)
|
||||||
|
total_time = pbar.format_dict['elapsed']
|
||||||
|
logging.info(f"Processed {total_row_number} contacts in {convert_time_unit(total_time)}")
|
||||||
|
|
||||||
# Get message count
|
# Get message count
|
||||||
message_count_query = f"""
|
message_count_query = f"""
|
||||||
@@ -139,7 +148,7 @@ def messages(db, data, media_folder, timezone_offset, filter_date, filter_chat,
|
|||||||
"""
|
"""
|
||||||
c.execute(message_count_query)
|
c.execute(message_count_query)
|
||||||
total_row_number = c.fetchone()[0]
|
total_row_number = c.fetchone()[0]
|
||||||
print(f"Processing messages...(0/{total_row_number})", end="\r")
|
logging.info(f"Processing messages...(0/{total_row_number})", extra={"clear": True})
|
||||||
|
|
||||||
# Fetch messages
|
# Fetch messages
|
||||||
messages_query = f"""
|
messages_query = f"""
|
||||||
@@ -169,51 +178,57 @@ def messages(db, data, media_folder, timezone_offset, filter_date, filter_chat,
|
|||||||
"""
|
"""
|
||||||
c.execute(messages_query)
|
c.execute(messages_query)
|
||||||
|
|
||||||
|
reply_query = """SELECT ZSTANZAID,
|
||||||
|
ZTEXT,
|
||||||
|
ZTITLE
|
||||||
|
FROM ZWAMESSAGE
|
||||||
|
LEFT JOIN ZWAMEDIAITEM
|
||||||
|
ON ZWAMESSAGE.Z_PK = ZWAMEDIAITEM.ZMESSAGE
|
||||||
|
WHERE ZTEXT IS NOT NULL
|
||||||
|
OR ZTITLE IS NOT NULL;"""
|
||||||
|
cursor2.execute(reply_query)
|
||||||
|
message_map = {row[0][:17]: row[1] or row[2] for row in cursor2.fetchall() if row[0]}
|
||||||
|
|
||||||
# Process each message
|
# Process each message
|
||||||
i = 0
|
with tqdm(total=total_row_number, desc="Processing messages", unit="msg", leave=False) as pbar:
|
||||||
content = c.fetchone()
|
while (content := c.fetchone()) is not None:
|
||||||
while content is not None:
|
contact_id = content["ZCONTACTJID"]
|
||||||
contact_id = content["ZCONTACTJID"]
|
message_pk = content["Z_PK"]
|
||||||
message_pk = content["Z_PK"]
|
is_group_message = content["ZGROUPINFO"] is not None
|
||||||
is_group_message = content["ZGROUPINFO"] is not None
|
|
||||||
|
|
||||||
# Ensure chat exists
|
# Ensure chat exists
|
||||||
if contact_id not in data:
|
if contact_id not in data:
|
||||||
current_chat = data.add_chat(contact_id, ChatStore(Device.IOS))
|
current_chat = data.add_chat(contact_id, ChatStore(Device.IOS))
|
||||||
process_contact_avatars(current_chat, media_folder, contact_id)
|
process_contact_avatars(current_chat, media_folder, contact_id)
|
||||||
else:
|
else:
|
||||||
current_chat = data.get_chat(contact_id)
|
current_chat = data.get_chat(contact_id)
|
||||||
|
|
||||||
# Create message object
|
# Create message object
|
||||||
ts = APPLE_TIME + content["ZMESSAGEDATE"]
|
ts = APPLE_TIME + content["ZMESSAGEDATE"]
|
||||||
message = Message(
|
message = Message(
|
||||||
from_me=content["ZISFROMME"],
|
from_me=content["ZISFROMME"],
|
||||||
timestamp=ts,
|
timestamp=ts,
|
||||||
time=ts,
|
time=ts,
|
||||||
key_id=content["ZSTANZAID"][:17],
|
key_id=content["ZSTANZAID"][:17],
|
||||||
timezone_offset=timezone_offset if timezone_offset else CURRENT_TZ_OFFSET,
|
timezone_offset=timezone_offset,
|
||||||
message_type=content["ZMESSAGETYPE"],
|
message_type=content["ZMESSAGETYPE"],
|
||||||
received_timestamp=APPLE_TIME + content["ZSENTDATE"] if content["ZSENTDATE"] else None,
|
received_timestamp=APPLE_TIME + content["ZSENTDATE"] if content["ZSENTDATE"] else None,
|
||||||
read_timestamp=None # TODO: Add timestamp
|
read_timestamp=None # TODO: Add timestamp
|
||||||
)
|
)
|
||||||
|
|
||||||
# Process message data
|
# Process message data
|
||||||
invalid = process_message_data(message, content, is_group_message, data, cursor2)
|
invalid = process_message_data(message, content, is_group_message, data, message_map, no_reply)
|
||||||
|
|
||||||
# Add valid messages to chat
|
# Add valid messages to chat
|
||||||
if not invalid:
|
if not invalid:
|
||||||
current_chat.add_message(message_pk, message)
|
current_chat.add_message(message_pk, message)
|
||||||
|
|
||||||
# Update progress
|
pbar.update(1)
|
||||||
i += 1
|
total_time = pbar.format_dict['elapsed']
|
||||||
if i % 1000 == 0:
|
logging.info(f"Processed {total_row_number} messages in {convert_time_unit(total_time)}")
|
||||||
print(f"Processing messages...({i}/{total_row_number})", end="\r")
|
|
||||||
content = c.fetchone()
|
|
||||||
|
|
||||||
print(f"Processing messages...({total_row_number}/{total_row_number})", end="\r")
|
|
||||||
|
|
||||||
|
|
||||||
def process_message_data(message, content, is_group_message, data, cursor2):
|
def process_message_data(message, content, is_group_message, data, message_map, no_reply):
|
||||||
"""Process and set message data from content row."""
|
"""Process and set message data from content row."""
|
||||||
# Handle group sender info
|
# Handle group sender info
|
||||||
if is_group_message and content["ZISFROMME"] == 0:
|
if is_group_message and content["ZISFROMME"] == 0:
|
||||||
@@ -236,17 +251,10 @@ def process_message_data(message, content, is_group_message, data, cursor2):
|
|||||||
return process_metadata_message(message, content, is_group_message)
|
return process_metadata_message(message, content, is_group_message)
|
||||||
|
|
||||||
# Handle quoted replies
|
# Handle quoted replies
|
||||||
if content["ZMETADATA"] is not None and content["ZMETADATA"].startswith(b"\x2a\x14") and False:
|
if content["ZMETADATA"] is not None and content["ZMETADATA"].startswith(b"\x2a\x14") and not no_reply:
|
||||||
quoted = content["ZMETADATA"][2:19]
|
quoted = content["ZMETADATA"][2:19]
|
||||||
message.reply = quoted.decode()
|
message.reply = quoted.decode()
|
||||||
cursor2.execute(f"""SELECT ZTEXT
|
message.quoted_data = message_map.get(message.reply)
|
||||||
FROM ZWAMESSAGE
|
|
||||||
WHERE ZSTANZAID LIKE '{message.reply}%'""")
|
|
||||||
quoted_content = cursor2.fetchone()
|
|
||||||
if quoted_content and "ZTEXT" in quoted_content:
|
|
||||||
message.quoted_data = quoted_content["ZTEXT"]
|
|
||||||
else:
|
|
||||||
message.quoted_data = None
|
|
||||||
|
|
||||||
# Handle stickers
|
# Handle stickers
|
||||||
if content["ZMESSAGETYPE"] == 15:
|
if content["ZMESSAGETYPE"] == 15:
|
||||||
@@ -303,13 +311,15 @@ def process_message_text(message, content):
|
|||||||
message.data = msg
|
message.data = msg
|
||||||
|
|
||||||
|
|
||||||
def media(db, data, media_folder, filter_date, filter_chat, filter_empty, separate_media=False):
|
def media(db, data, media_folder, filter_date, filter_chat, filter_empty, separate_media=False, fix_dot_files=False):
|
||||||
"""Process media files from WhatsApp messages."""
|
"""Process media files from WhatsApp messages."""
|
||||||
c = db.cursor()
|
c = db.cursor()
|
||||||
|
|
||||||
# Build filter conditions
|
# Build filter conditions
|
||||||
chat_filter_include = get_chat_condition(filter_chat[0], True, ["ZWACHATSESSION.ZCONTACTJID","ZMEMBERJID"], "ZGROUPINFO", "ios")
|
chat_filter_include = get_chat_condition(
|
||||||
chat_filter_exclude = get_chat_condition(filter_chat[1], False, ["ZWACHATSESSION.ZCONTACTJID", "ZMEMBERJID"], "ZGROUPINFO", "ios")
|
filter_chat[0], True, ["ZWACHATSESSION.ZCONTACTJID", "ZMEMBERJID"], "ZGROUPINFO", "ios")
|
||||||
|
chat_filter_exclude = get_chat_condition(
|
||||||
|
filter_chat[1], False, ["ZWACHATSESSION.ZCONTACTJID", "ZMEMBERJID"], "ZGROUPINFO", "ios")
|
||||||
date_filter = f'AND ZMESSAGEDATE {filter_date}' if filter_date is not None else ''
|
date_filter = f'AND ZMESSAGEDATE {filter_date}' if filter_date is not None else ''
|
||||||
|
|
||||||
# Get media count
|
# Get media count
|
||||||
@@ -329,7 +339,7 @@ def media(db, data, media_folder, filter_date, filter_chat, filter_empty, separa
|
|||||||
"""
|
"""
|
||||||
c.execute(media_count_query)
|
c.execute(media_count_query)
|
||||||
total_row_number = c.fetchone()[0]
|
total_row_number = c.fetchone()[0]
|
||||||
print(f"\nProcessing media...(0/{total_row_number})", end="\r")
|
logging.info(f"Processing media...(0/{total_row_number})", extra={"clear": True})
|
||||||
|
|
||||||
# Fetch media items
|
# Fetch media items
|
||||||
media_query = f"""
|
media_query = f"""
|
||||||
@@ -357,21 +367,15 @@ def media(db, data, media_folder, filter_date, filter_chat, filter_empty, separa
|
|||||||
|
|
||||||
# Process each media item
|
# Process each media item
|
||||||
mime = MimeTypes()
|
mime = MimeTypes()
|
||||||
i = 0
|
with tqdm(total=total_row_number, desc="Processing media", unit="media", leave=False) as pbar:
|
||||||
content = c.fetchone()
|
while (content := c.fetchone()) is not None:
|
||||||
while content is not None:
|
process_media_item(content, data, media_folder, mime, separate_media, fix_dot_files)
|
||||||
process_media_item(content, data, media_folder, mime, separate_media)
|
pbar.update(1)
|
||||||
|
total_time = pbar.format_dict['elapsed']
|
||||||
# Update progress
|
logging.info(f"Processed {total_row_number} media in {convert_time_unit(total_time)}")
|
||||||
i += 1
|
|
||||||
if i % 100 == 0:
|
|
||||||
print(f"Processing media...({i}/{total_row_number})", end="\r")
|
|
||||||
content = c.fetchone()
|
|
||||||
|
|
||||||
print(f"Processing media...({total_row_number}/{total_row_number})", end="\r")
|
|
||||||
|
|
||||||
|
|
||||||
def process_media_item(content, data, media_folder, mime, separate_media):
|
def process_media_item(content, data, media_folder, mime, separate_media, fix_dot_files=False):
|
||||||
"""Process a single media item."""
|
"""Process a single media item."""
|
||||||
file_path = f"{media_folder}/Message/{content['ZMEDIALOCALPATH']}"
|
file_path = f"{media_folder}/Message/{content['ZMEDIALOCALPATH']}"
|
||||||
current_chat = data.get_chat(content["ZCONTACTJID"])
|
current_chat = data.get_chat(content["ZCONTACTJID"])
|
||||||
@@ -382,8 +386,6 @@ def process_media_item(content, data, media_folder, mime, separate_media):
|
|||||||
current_chat.media_base = media_folder + "/"
|
current_chat.media_base = media_folder + "/"
|
||||||
|
|
||||||
if os.path.isfile(file_path):
|
if os.path.isfile(file_path):
|
||||||
message.data = '/'.join(file_path.split("/")[1:])
|
|
||||||
|
|
||||||
# Set MIME type
|
# Set MIME type
|
||||||
if content["ZVCARDSTRING"] is None:
|
if content["ZVCARDSTRING"] is None:
|
||||||
guess = mime.guess_type(file_path)[0]
|
guess = mime.guess_type(file_path)[0]
|
||||||
@@ -391,15 +393,28 @@ def process_media_item(content, data, media_folder, mime, separate_media):
|
|||||||
else:
|
else:
|
||||||
message.mime = content["ZVCARDSTRING"]
|
message.mime = content["ZVCARDSTRING"]
|
||||||
|
|
||||||
|
if fix_dot_files and file_path.endswith("."):
|
||||||
|
extension = mime.guess_extension(message.mime)
|
||||||
|
if message.mime == "application/octet-stream" or not extension:
|
||||||
|
new_file_path = file_path[:-1]
|
||||||
|
else:
|
||||||
|
extension = mime.guess_extension(message.mime)
|
||||||
|
new_file_path = file_path[:-1] + extension
|
||||||
|
os.rename(file_path, new_file_path)
|
||||||
|
file_path = new_file_path
|
||||||
|
|
||||||
# Handle separate media option
|
# Handle separate media option
|
||||||
if separate_media:
|
if separate_media:
|
||||||
chat_display_name = slugify(current_chat.name or message.sender or content["ZCONTACTJID"].split('@')[0], True)
|
chat_display_name = safe_name(
|
||||||
|
current_chat.name or message.sender or content["ZCONTACTJID"].split('@')[0])
|
||||||
current_filename = file_path.split("/")[-1]
|
current_filename = file_path.split("/")[-1]
|
||||||
new_folder = os.path.join(media_folder, "separated", chat_display_name)
|
new_folder = os.path.join(media_folder, "separated", chat_display_name)
|
||||||
Path(new_folder).mkdir(parents=True, exist_ok=True)
|
Path(new_folder).mkdir(parents=True, exist_ok=True)
|
||||||
new_path = os.path.join(new_folder, current_filename)
|
new_path = os.path.join(new_folder, current_filename)
|
||||||
shutil.copy2(file_path, new_path)
|
shutil.copy2(file_path, new_path)
|
||||||
message.data = '/'.join(new_path.split("\\")[1:])
|
message.data = '/'.join(new_path.split("/")[1:])
|
||||||
|
else:
|
||||||
|
message.data = '/'.join(file_path.split("/")[1:])
|
||||||
else:
|
else:
|
||||||
# Handle missing media
|
# Handle missing media
|
||||||
message.data = "The media is missing"
|
message.data = "The media is missing"
|
||||||
@@ -416,8 +431,10 @@ def vcard(db, data, media_folder, filter_date, filter_chat, filter_empty):
|
|||||||
c = db.cursor()
|
c = db.cursor()
|
||||||
|
|
||||||
# Build filter conditions
|
# Build filter conditions
|
||||||
chat_filter_include = get_chat_condition(filter_chat[0], True, ["ZCONTACTJID", "ZMEMBERJID"], "ZGROUPINFO", "ios")
|
chat_filter_include = get_chat_condition(
|
||||||
chat_filter_exclude = get_chat_condition(filter_chat[1], False, ["ZCONTACTJID", "ZMEMBERJID"], "ZGROUPINFO", "ios")
|
filter_chat[0], True, ["ZCONTACTJID", "ZMEMBERJID"], "ZGROUPINFO", "ios")
|
||||||
|
chat_filter_exclude = get_chat_condition(
|
||||||
|
filter_chat[1], False, ["ZCONTACTJID", "ZMEMBERJID"], "ZGROUPINFO", "ios")
|
||||||
date_filter = f'AND ZWAMESSAGE.ZMESSAGEDATE {filter_date}' if filter_date is not None else ''
|
date_filter = f'AND ZWAMESSAGE.ZMESSAGEDATE {filter_date}' if filter_date is not None else ''
|
||||||
|
|
||||||
# Fetch vCard mentions
|
# Fetch vCard mentions
|
||||||
@@ -444,16 +461,19 @@ def vcard(db, data, media_folder, filter_date, filter_chat, filter_empty):
|
|||||||
c.execute(vcard_query)
|
c.execute(vcard_query)
|
||||||
contents = c.fetchall()
|
contents = c.fetchall()
|
||||||
total_row_number = len(contents)
|
total_row_number = len(contents)
|
||||||
print(f"\nProcessing vCards...(0/{total_row_number})", end="\r")
|
logging.info(f"Processing vCards...(0/{total_row_number})", extra={"clear": True})
|
||||||
|
|
||||||
# Create vCards directory
|
# Create vCards directory
|
||||||
path = f'{media_folder}/Message/vCards'
|
path = f'{media_folder}/Message/vCards'
|
||||||
Path(path).mkdir(parents=True, exist_ok=True)
|
Path(path).mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
# Process each vCard
|
# Process each vCard
|
||||||
for index, content in enumerate(contents):
|
with tqdm(total=total_row_number, desc="Processing vCards", unit="vcard", leave=False) as pbar:
|
||||||
process_vcard_item(content, path, data)
|
for content in contents:
|
||||||
print(f"Processing vCards...({index + 1}/{total_row_number})", end="\r")
|
process_vcard_item(content, path, data)
|
||||||
|
pbar.update(1)
|
||||||
|
total_time = pbar.format_dict['elapsed']
|
||||||
|
logging.info(f"Processed {total_row_number} vCards in {convert_time_unit(total_time)}")
|
||||||
|
|
||||||
|
|
||||||
def process_vcard_item(content, path, data):
|
def process_vcard_item(content, path, data):
|
||||||
@@ -479,7 +499,8 @@ def process_vcard_item(content, path, data):
|
|||||||
|
|
||||||
# Create vCard summary and update message
|
# Create vCard summary and update message
|
||||||
vcard_summary = "This media include the following vCard file(s):<br>"
|
vcard_summary = "This media include the following vCard file(s):<br>"
|
||||||
vcard_summary += " | ".join([f'<a href="{htmle(fp)}">{htmle(name)}</a>' for name, fp in zip(vcard_names, file_paths)])
|
vcard_summary += " | ".join([f'<a href="{htmle(fp)}">{htmle(name)}</a>' for name,
|
||||||
|
fp in zip(vcard_names, file_paths)])
|
||||||
|
|
||||||
message = data.get_chat(content["ZCONTACTJID"]).get_message(content["ZMESSAGE"])
|
message = data.get_chat(content["ZCONTACTJID"]).get_message(content["ZMESSAGE"])
|
||||||
message.data = vcard_summary
|
message.data = vcard_summary
|
||||||
@@ -494,8 +515,10 @@ def calls(db, data, timezone_offset, filter_chat):
|
|||||||
c = db.cursor()
|
c = db.cursor()
|
||||||
|
|
||||||
# Build filter conditions
|
# Build filter conditions
|
||||||
chat_filter_include = get_chat_condition(filter_chat[0], True, ["ZGROUPCALLCREATORUSERJIDSTRING"], None, "ios")
|
chat_filter_include = get_chat_condition(
|
||||||
chat_filter_exclude = get_chat_condition(filter_chat[1], False, ["ZGROUPCALLCREATORUSERJIDSTRING"], None, "ios")
|
filter_chat[0], True, ["ZGROUPCALLCREATORUSERJIDSTRING"], None, "ios")
|
||||||
|
chat_filter_exclude = get_chat_condition(
|
||||||
|
filter_chat[1], False, ["ZGROUPCALLCREATORUSERJIDSTRING"], None, "ios")
|
||||||
|
|
||||||
# Get call count
|
# Get call count
|
||||||
call_count_query = f"""
|
call_count_query = f"""
|
||||||
@@ -510,8 +533,6 @@ def calls(db, data, timezone_offset, filter_chat):
|
|||||||
if total_row_number == 0:
|
if total_row_number == 0:
|
||||||
return
|
return
|
||||||
|
|
||||||
print(f"\nProcessing calls...({total_row_number})", end="\r")
|
|
||||||
|
|
||||||
# Fetch call records
|
# Fetch call records
|
||||||
calls_query = f"""
|
calls_query = f"""
|
||||||
SELECT ZCALLIDSTRING,
|
SELECT ZCALLIDSTRING,
|
||||||
@@ -536,14 +557,15 @@ def calls(db, data, timezone_offset, filter_chat):
|
|||||||
# Create calls chat
|
# Create calls chat
|
||||||
chat = ChatStore(Device.ANDROID, "WhatsApp Calls")
|
chat = ChatStore(Device.ANDROID, "WhatsApp Calls")
|
||||||
|
|
||||||
# Process each call
|
with tqdm(total=total_row_number, desc="Processing calls", unit="call", leave=False) as pbar:
|
||||||
content = c.fetchone()
|
while (content := c.fetchone()) is not None:
|
||||||
while content is not None:
|
process_call_record(content, chat, data, timezone_offset)
|
||||||
process_call_record(content, chat, data, timezone_offset)
|
pbar.update(1)
|
||||||
content = c.fetchone()
|
total_time = pbar.format_dict['elapsed']
|
||||||
|
|
||||||
# Add calls chat to data
|
# Add calls chat to data
|
||||||
data.add_chat("000000000000000", chat)
|
data.add_chat("000000000000000", chat)
|
||||||
|
logging.info(f"Processed {total_row_number} calls in {convert_time_unit(total_time)}")
|
||||||
|
|
||||||
|
|
||||||
def process_call_record(content, chat, data, timezone_offset):
|
def process_call_record(content, chat, data, timezone_offset):
|
||||||
@@ -554,7 +576,7 @@ def process_call_record(content, chat, data, timezone_offset):
|
|||||||
timestamp=ts,
|
timestamp=ts,
|
||||||
time=ts,
|
time=ts,
|
||||||
key_id=content["ZCALLIDSTRING"],
|
key_id=content["ZCALLIDSTRING"],
|
||||||
timezone_offset=timezone_offset if timezone_offset else CURRENT_TZ_OFFSET
|
timezone_offset=timezone_offset
|
||||||
)
|
)
|
||||||
|
|
||||||
# Set sender info
|
# Set sender info
|
||||||
|
|||||||
@@ -1,11 +1,14 @@
|
|||||||
#!/usr/bin/python3
|
#!/usr/bin/python3
|
||||||
|
|
||||||
|
import logging
|
||||||
import shutil
|
import shutil
|
||||||
import sqlite3
|
import sqlite3
|
||||||
import os
|
import os
|
||||||
import getpass
|
import getpass
|
||||||
from sys import exit
|
from sys import exit, platform as osname
|
||||||
from Whatsapp_Chat_Exporter.utility import WhatsAppIdentifier
|
import sys
|
||||||
|
from tqdm import tqdm
|
||||||
|
from Whatsapp_Chat_Exporter.utility import WhatsAppIdentifier, convert_time_unit
|
||||||
from Whatsapp_Chat_Exporter.bplist import BPListReader
|
from Whatsapp_Chat_Exporter.bplist import BPListReader
|
||||||
try:
|
try:
|
||||||
from iphone_backup_decrypt import EncryptedBackup, RelativePath
|
from iphone_backup_decrypt import EncryptedBackup, RelativePath
|
||||||
@@ -15,6 +18,8 @@ else:
|
|||||||
support_encrypted = True
|
support_encrypted = True
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class BackupExtractor:
|
class BackupExtractor:
|
||||||
"""
|
"""
|
||||||
A class to handle the extraction of WhatsApp data from iOS backups,
|
A class to handle the extraction of WhatsApp data from iOS backups,
|
||||||
@@ -42,28 +47,41 @@ class BackupExtractor:
|
|||||||
Returns:
|
Returns:
|
||||||
bool: True if encrypted, False otherwise.
|
bool: True if encrypted, False otherwise.
|
||||||
"""
|
"""
|
||||||
with sqlite3.connect(os.path.join(self.base_dir, "Manifest.db")) as db:
|
try:
|
||||||
c = db.cursor()
|
with sqlite3.connect(os.path.join(self.base_dir, "Manifest.db")) as db:
|
||||||
try:
|
c = db.cursor()
|
||||||
c.execute("SELECT count() FROM Files")
|
try:
|
||||||
c.fetchone() # Execute and fetch to trigger potential errors
|
c.execute("SELECT count() FROM Files")
|
||||||
except (sqlite3.OperationalError, sqlite3.DatabaseError):
|
c.fetchone() # Execute and fetch to trigger potential errors
|
||||||
return True
|
except (sqlite3.OperationalError, sqlite3.DatabaseError):
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
except sqlite3.DatabaseError as e:
|
||||||
|
if str(e) == "authorization denied" and osname == "darwin":
|
||||||
|
logging.error(
|
||||||
|
"You don't have permission to access the backup database. Please"
|
||||||
|
"check your permissions or try moving the backup to somewhere else."
|
||||||
|
)
|
||||||
|
exit(8)
|
||||||
else:
|
else:
|
||||||
return False
|
raise e
|
||||||
|
|
||||||
def _extract_encrypted_backup(self):
|
def _extract_encrypted_backup(self):
|
||||||
"""
|
"""
|
||||||
Handles the extraction of data from an encrypted iOS backup.
|
Handles the extraction of data from an encrypted iOS backup.
|
||||||
"""
|
"""
|
||||||
if not support_encrypted:
|
if not support_encrypted:
|
||||||
print("You don't have the dependencies to handle encrypted backup.")
|
logging.error("You don't have the dependencies to handle encrypted backup."
|
||||||
print("Read more on how to deal with encrypted backup:")
|
"Read more on how to deal with encrypted backup:"
|
||||||
print("https://github.com/KnugiHK/Whatsapp-Chat-Exporter/blob/main/README.md#usage")
|
"https://github.com/KnugiHK/Whatsapp-Chat-Exporter/blob/main/README.md#usage"
|
||||||
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
print("Encryption detected on the backup!")
|
logging.info(f"Encryption detected on the backup!")
|
||||||
password = getpass.getpass("Enter the password for the backup:")
|
password = getpass.getpass("Enter the password for the backup:")
|
||||||
|
sys.stdout.write("\033[F\033[K")
|
||||||
|
sys.stdout.flush()
|
||||||
self._decrypt_backup(password)
|
self._decrypt_backup(password)
|
||||||
self._extract_decrypted_files()
|
self._extract_decrypted_files()
|
||||||
|
|
||||||
@@ -74,7 +92,7 @@ class BackupExtractor:
|
|||||||
Args:
|
Args:
|
||||||
password (str): The password for the encrypted backup.
|
password (str): The password for the encrypted backup.
|
||||||
"""
|
"""
|
||||||
print("Trying to decrypt the iOS backup...", end="")
|
logging.info(f"Trying to open the iOS backup...")
|
||||||
self.backup = EncryptedBackup(
|
self.backup = EncryptedBackup(
|
||||||
backup_directory=self.base_dir,
|
backup_directory=self.base_dir,
|
||||||
passphrase=password,
|
passphrase=password,
|
||||||
@@ -82,7 +100,8 @@ class BackupExtractor:
|
|||||||
check_same_thread=False,
|
check_same_thread=False,
|
||||||
decrypt_chunk_size=self.decrypt_chunk_size,
|
decrypt_chunk_size=self.decrypt_chunk_size,
|
||||||
)
|
)
|
||||||
print("Done\nDecrypting WhatsApp database...", end="")
|
logging.info(f"iOS backup is opened successfully")
|
||||||
|
logging.info("Decrypting WhatsApp database...", extra={"clear": True})
|
||||||
try:
|
try:
|
||||||
self.backup.extract_file(
|
self.backup.extract_file(
|
||||||
relative_path=RelativePath.WHATSAPP_MESSAGES,
|
relative_path=RelativePath.WHATSAPP_MESSAGES,
|
||||||
@@ -100,23 +119,26 @@ class BackupExtractor:
|
|||||||
output_filename=self.identifiers.CALL,
|
output_filename=self.identifiers.CALL,
|
||||||
)
|
)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
print("Failed to decrypt backup: incorrect password?")
|
logging.error("Failed to decrypt backup: incorrect password?")
|
||||||
exit(7)
|
exit(7)
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
print(
|
logging.error(
|
||||||
"Essential WhatsApp files are missing from the iOS backup. "
|
"Essential WhatsApp files are missing from the iOS backup. "
|
||||||
"Perhapse you enabled end-to-end encryption for the backup? "
|
"Perhapse you enabled end-to-end encryption for the backup? "
|
||||||
"See https://wts.knugi.dev/docs.html?dest=iose2e"
|
"See https://wts.knugi.dev/docs.html?dest=iose2e"
|
||||||
)
|
)
|
||||||
exit(6)
|
exit(6)
|
||||||
else:
|
else:
|
||||||
print("Done")
|
logging.info(f"WhatsApp database decrypted successfully")
|
||||||
|
|
||||||
def _extract_decrypted_files(self):
|
def _extract_decrypted_files(self):
|
||||||
"""Extract all WhatsApp files after decryption"""
|
"""Extract all WhatsApp files after decryption"""
|
||||||
|
pbar = tqdm(desc="Decrypting and extracting files", unit="file", leave=False)
|
||||||
def extract_progress_handler(file_id, domain, relative_path, n, total_files):
|
def extract_progress_handler(file_id, domain, relative_path, n, total_files):
|
||||||
if n % 100 == 0:
|
if pbar.total is None:
|
||||||
print(f"Decrypting and extracting files...({n}/{total_files})", end="\r")
|
pbar.total = total_files
|
||||||
|
pbar.n = n
|
||||||
|
pbar.refresh()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
self.backup.extract_files(
|
self.backup.extract_files(
|
||||||
@@ -125,7 +147,9 @@ class BackupExtractor:
|
|||||||
preserve_folders=True,
|
preserve_folders=True,
|
||||||
filter_callback=extract_progress_handler
|
filter_callback=extract_progress_handler
|
||||||
)
|
)
|
||||||
print(f"All required files are decrypted and extracted. ", end="\n")
|
total_time = pbar.format_dict['elapsed']
|
||||||
|
pbar.close()
|
||||||
|
logging.info(f"All required files are decrypted and extracted in {convert_time_unit(total_time)}")
|
||||||
|
|
||||||
def _extract_unencrypted_backup(self):
|
def _extract_unencrypted_backup(self):
|
||||||
"""
|
"""
|
||||||
@@ -144,10 +168,10 @@ class BackupExtractor:
|
|||||||
|
|
||||||
if not os.path.isfile(wts_db_path):
|
if not os.path.isfile(wts_db_path):
|
||||||
if self.identifiers is WhatsAppIdentifier:
|
if self.identifiers is WhatsAppIdentifier:
|
||||||
print("WhatsApp database not found.")
|
logging.error("WhatsApp database not found.")
|
||||||
else:
|
else:
|
||||||
print("WhatsApp Business database not found.")
|
logging.error("WhatsApp Business database not found.")
|
||||||
print(
|
logging.error(
|
||||||
"Essential WhatsApp files are missing from the iOS backup. "
|
"Essential WhatsApp files are missing from the iOS backup. "
|
||||||
"Perhapse you enabled end-to-end encryption for the backup? "
|
"Perhapse you enabled end-to-end encryption for the backup? "
|
||||||
"See https://wts.knugi.dev/docs.html?dest=iose2e"
|
"See https://wts.knugi.dev/docs.html?dest=iose2e"
|
||||||
@@ -157,12 +181,12 @@ class BackupExtractor:
|
|||||||
shutil.copyfile(wts_db_path, self.identifiers.MESSAGE)
|
shutil.copyfile(wts_db_path, self.identifiers.MESSAGE)
|
||||||
|
|
||||||
if not os.path.isfile(contact_db_path):
|
if not os.path.isfile(contact_db_path):
|
||||||
print("Contact database not found. Skipping...")
|
logging.warning(f"Contact database not found. Skipping...")
|
||||||
else:
|
else:
|
||||||
shutil.copyfile(contact_db_path, self.identifiers.CONTACT)
|
shutil.copyfile(contact_db_path, self.identifiers.CONTACT)
|
||||||
|
|
||||||
if not os.path.isfile(call_db_path):
|
if not os.path.isfile(call_db_path):
|
||||||
print("Call database not found. Skipping...")
|
logging.warning(f"Call database not found. Skipping...")
|
||||||
else:
|
else:
|
||||||
shutil.copyfile(call_db_path, self.identifiers.CALL)
|
shutil.copyfile(call_db_path, self.identifiers.CALL)
|
||||||
|
|
||||||
@@ -176,7 +200,6 @@ class BackupExtractor:
|
|||||||
c = manifest.cursor()
|
c = manifest.cursor()
|
||||||
c.execute(f"SELECT count() FROM Files WHERE domain = '{_wts_id}'")
|
c.execute(f"SELECT count() FROM Files WHERE domain = '{_wts_id}'")
|
||||||
total_row_number = c.fetchone()[0]
|
total_row_number = c.fetchone()[0]
|
||||||
print(f"Extracting WhatsApp files...(0/{total_row_number})", end="\r")
|
|
||||||
c.execute(
|
c.execute(
|
||||||
f"""
|
f"""
|
||||||
SELECT fileID, relativePath, flags, file AS metadata,
|
SELECT fileID, relativePath, flags, file AS metadata,
|
||||||
@@ -189,33 +212,30 @@ class BackupExtractor:
|
|||||||
if not os.path.isdir(_wts_id):
|
if not os.path.isdir(_wts_id):
|
||||||
os.mkdir(_wts_id)
|
os.mkdir(_wts_id)
|
||||||
|
|
||||||
row = c.fetchone()
|
with tqdm(total=total_row_number, desc="Extracting WhatsApp files", unit="file", leave=False) as pbar:
|
||||||
while row is not None:
|
while (row := c.fetchone()) is not None:
|
||||||
if not row["relativePath"]: # Skip empty relative paths
|
if not row["relativePath"]: # Skip empty relative paths
|
||||||
row = c.fetchone()
|
continue
|
||||||
continue
|
|
||||||
|
|
||||||
destination = os.path.join(_wts_id, row["relativePath"])
|
destination = os.path.join(_wts_id, row["relativePath"])
|
||||||
hashes = row["fileID"]
|
hashes = row["fileID"]
|
||||||
folder = hashes[:2]
|
folder = hashes[:2]
|
||||||
flags = row["flags"]
|
flags = row["flags"]
|
||||||
|
|
||||||
if flags == 2: # Directory
|
if flags == 2: # Directory
|
||||||
try:
|
try:
|
||||||
os.mkdir(destination)
|
os.mkdir(destination)
|
||||||
except FileExistsError:
|
except FileExistsError:
|
||||||
pass
|
pass
|
||||||
elif flags == 1: # File
|
elif flags == 1: # File
|
||||||
shutil.copyfile(os.path.join(self.base_dir, folder, hashes), destination)
|
shutil.copyfile(os.path.join(self.base_dir, folder, hashes), destination)
|
||||||
metadata = BPListReader(row["metadata"]).parse()
|
metadata = BPListReader(row["metadata"]).parse()
|
||||||
creation = metadata["$objects"][1]["Birth"]
|
_creation = metadata["$objects"][1]["Birth"]
|
||||||
modification = metadata["$objects"][1]["LastModified"]
|
modification = metadata["$objects"][1]["LastModified"]
|
||||||
os.utime(destination, (modification, modification))
|
os.utime(destination, (modification, modification))
|
||||||
|
pbar.update(1)
|
||||||
if row["_index"] % 100 == 0:
|
total_time = pbar.format_dict['elapsed']
|
||||||
print(f"Extracting WhatsApp files...({row['_index']}/{total_row_number})", end="\r")
|
logging.info(f"Extracted {total_row_number} WhatsApp files in {convert_time_unit(total_time)}")
|
||||||
row = c.fetchone()
|
|
||||||
print(f"Extracting WhatsApp files...({total_row_number}/{total_row_number})", end="\n")
|
|
||||||
|
|
||||||
|
|
||||||
def extract_media(base_dir, identifiers, decrypt_chunk_size):
|
def extract_media(base_dir, identifiers, decrypt_chunk_size):
|
||||||
@@ -229,4 +249,3 @@ def extract_media(base_dir, identifiers, decrypt_chunk_size):
|
|||||||
"""
|
"""
|
||||||
extractor = BackupExtractor(base_dir, identifiers, decrypt_chunk_size)
|
extractor = BackupExtractor(base_dir, identifiers, decrypt_chunk_size)
|
||||||
extractor.extract()
|
extractor.extract()
|
||||||
|
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
import logging
|
||||||
import sqlite3
|
import sqlite3
|
||||||
import jinja2
|
import jinja2
|
||||||
import json
|
import json
|
||||||
@@ -5,18 +6,21 @@ import os
|
|||||||
import unicodedata
|
import unicodedata
|
||||||
import re
|
import re
|
||||||
import math
|
import math
|
||||||
|
import shutil
|
||||||
from bleach import clean as sanitize
|
from bleach import clean as sanitize
|
||||||
from markupsafe import Markup
|
from markupsafe import Markup
|
||||||
from datetime import datetime, timedelta
|
from datetime import datetime, timedelta
|
||||||
from enum import IntEnum
|
from enum import IntEnum
|
||||||
from Whatsapp_Chat_Exporter.data_model import ChatStore
|
from tqdm import tqdm
|
||||||
from typing import Dict, List, Optional, Tuple
|
from Whatsapp_Chat_Exporter.data_model import ChatCollection, ChatStore, Timing
|
||||||
|
from typing import Dict, List, Optional, Tuple, Union, Any
|
||||||
try:
|
try:
|
||||||
from enum import StrEnum, IntEnum
|
from enum import StrEnum, IntEnum
|
||||||
except ImportError:
|
except ImportError:
|
||||||
# < Python 3.11
|
# < Python 3.11
|
||||||
# This should be removed when the support for Python 3.10 ends.
|
# This should be removed when the support for Python 3.10 ends. (31 Oct 2026)
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
|
|
||||||
class StrEnum(str, Enum):
|
class StrEnum(str, Enum):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@@ -28,6 +32,7 @@ ROW_SIZE = 0x3D0
|
|||||||
CURRENT_TZ_OFFSET = datetime.now().astimezone().utcoffset().seconds / 3600
|
CURRENT_TZ_OFFSET = datetime.now().astimezone().utcoffset().seconds / 3600
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def convert_time_unit(time_second: int) -> str:
|
def convert_time_unit(time_second: int) -> str:
|
||||||
"""Converts a time duration in seconds to a human-readable string.
|
"""Converts a time duration in seconds to a human-readable string.
|
||||||
|
|
||||||
@@ -37,23 +42,31 @@ def convert_time_unit(time_second: int) -> str:
|
|||||||
Returns:
|
Returns:
|
||||||
str: A human-readable string representing the time duration.
|
str: A human-readable string representing the time duration.
|
||||||
"""
|
"""
|
||||||
time = str(timedelta(seconds=time_second))
|
if time_second < 1:
|
||||||
if "day" not in time:
|
return "less than a second"
|
||||||
if time_second < 1:
|
elif time_second == 1:
|
||||||
time = "less than a second"
|
return "a second"
|
||||||
elif time_second == 1:
|
|
||||||
time = "a second"
|
delta = timedelta(seconds=time_second)
|
||||||
elif time_second < 60:
|
parts = []
|
||||||
time = time[5:][1 if time_second < 10 else 0:] + " seconds"
|
|
||||||
elif time_second == 60:
|
days = delta.days
|
||||||
time = "a minute"
|
if days > 0:
|
||||||
elif time_second < 3600:
|
parts.append(f"{days} day{'s' if days > 1 else ''}")
|
||||||
time = time[2:] + " minutes"
|
|
||||||
elif time_second == 3600:
|
hours = delta.seconds // 3600
|
||||||
time = "an hour"
|
if hours > 0:
|
||||||
else:
|
parts.append(f"{hours} hour{'s' if hours > 1 else ''}")
|
||||||
time += " hour"
|
|
||||||
return time
|
minutes = (delta.seconds % 3600) // 60
|
||||||
|
if minutes > 0:
|
||||||
|
parts.append(f"{minutes} minute{'s' if minutes > 1 else ''}")
|
||||||
|
|
||||||
|
seconds = delta.seconds % 60
|
||||||
|
if seconds > 0:
|
||||||
|
parts.append(f"{seconds} second{'s' if seconds > 1 else ''}")
|
||||||
|
|
||||||
|
return " ".join(parts)
|
||||||
|
|
||||||
|
|
||||||
def bytes_to_readable(size_bytes: int) -> str:
|
def bytes_to_readable(size_bytes: int) -> str:
|
||||||
@@ -70,8 +83,8 @@ def bytes_to_readable(size_bytes: int) -> str:
|
|||||||
Returns:
|
Returns:
|
||||||
A human-readable string representing the file size.
|
A human-readable string representing the file size.
|
||||||
"""
|
"""
|
||||||
if size_bytes == 0:
|
if size_bytes < 1024:
|
||||||
return "0B"
|
return f"{size_bytes} B"
|
||||||
size_name = ("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB")
|
size_name = ("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB")
|
||||||
i = int(math.floor(math.log(size_bytes, 1024)))
|
i = int(math.floor(math.log(size_bytes, 1024)))
|
||||||
p = math.pow(1024, i)
|
p = math.pow(1024, i)
|
||||||
@@ -103,10 +116,15 @@ def readable_to_bytes(size_str: str) -> int:
|
|||||||
'YB': 1024**8
|
'YB': 1024**8
|
||||||
}
|
}
|
||||||
size_str = size_str.upper().strip()
|
size_str = size_str.upper().strip()
|
||||||
number, unit = size_str[:-2].strip(), size_str[-2:].strip()
|
if size_str.isnumeric():
|
||||||
if unit not in SIZE_UNITS or not number.isnumeric():
|
# If the string is purely numeric, assume it's in bytes
|
||||||
raise ValueError("Invalid input for size_str. Example: 1024GB")
|
return int(size_str)
|
||||||
return int(number) * SIZE_UNITS[unit]
|
match = re.fullmatch(r'^(\d+(\.\d*)?)\s*([KMGTPEZY]?B)?$', size_str)
|
||||||
|
if not match:
|
||||||
|
raise ValueError("Invalid size format for size_str. Expected format like '10MB', '1024GB', or '512'.")
|
||||||
|
unit = ''.join(filter(str.isalpha, size_str)).strip()
|
||||||
|
number = ''.join(c for c in size_str if c.isdigit() or c == '.').strip()
|
||||||
|
return int(float(number) * SIZE_UNITS[unit])
|
||||||
|
|
||||||
|
|
||||||
def sanitize_except(html: str) -> Markup:
|
def sanitize_except(html: str) -> Markup:
|
||||||
@@ -139,51 +157,55 @@ def determine_day(last: int, current: int) -> Optional[datetime.date]:
|
|||||||
return current
|
return current
|
||||||
|
|
||||||
|
|
||||||
def check_update():
|
def check_update(include_beta: bool = False) -> int:
|
||||||
import urllib.request
|
import urllib.request
|
||||||
import json
|
import json
|
||||||
import importlib
|
import importlib
|
||||||
from sys import platform
|
from sys import platform
|
||||||
|
from packaging import version
|
||||||
|
|
||||||
PACKAGE_JSON = "https://pypi.org/pypi/whatsapp-chat-exporter/json"
|
PACKAGE_JSON = "https://pypi.org/pypi/whatsapp-chat-exporter/json"
|
||||||
try:
|
try:
|
||||||
raw = urllib.request.urlopen(PACKAGE_JSON)
|
raw = urllib.request.urlopen(PACKAGE_JSON)
|
||||||
except Exception:
|
except Exception:
|
||||||
print("Failed to check for updates.")
|
logging.error("Failed to check for updates.")
|
||||||
return 1
|
return 1
|
||||||
else:
|
else:
|
||||||
with raw:
|
with raw:
|
||||||
package_info = json.load(raw)
|
package_info = json.load(raw)
|
||||||
latest_version = tuple(map(int, package_info["info"]["version"].split(".")))
|
if include_beta:
|
||||||
__version__ = importlib.metadata.version("whatsapp_chat_exporter")
|
all_versions = [version.parse(v) for v in package_info["releases"].keys()]
|
||||||
current_version = tuple(map(int, __version__.split(".")))
|
latest_version = max(all_versions, key=lambda v: (v.release, v.pre))
|
||||||
if current_version < latest_version:
|
|
||||||
print("===============Update===============")
|
|
||||||
print("A newer version of WhatsApp Chat Exporter is available.")
|
|
||||||
print("Current version: " + __version__)
|
|
||||||
print("Latest version: " + package_info["info"]["version"])
|
|
||||||
if platform == "win32":
|
|
||||||
print("Update with: pip install --upgrade whatsapp-chat-exporter")
|
|
||||||
else:
|
|
||||||
print("Update with: pip3 install --upgrade whatsapp-chat-exporter")
|
|
||||||
print("====================================")
|
|
||||||
else:
|
else:
|
||||||
print("You are using the latest version of WhatsApp Chat Exporter.")
|
latest_version = version.parse(package_info["info"]["version"])
|
||||||
|
current_version = version.parse(importlib.metadata.version("whatsapp_chat_exporter"))
|
||||||
|
if current_version < latest_version:
|
||||||
|
logging.info(
|
||||||
|
"===============Update===============\n"
|
||||||
|
"A newer version of WhatsApp Chat Exporter is available.\n"
|
||||||
|
f"Current version: {current_version}\n"
|
||||||
|
f"Latest version: {latest_version}"
|
||||||
|
)
|
||||||
|
pip_cmd = "pip" if platform == "win32" else "pip3"
|
||||||
|
logging.info(f"Update with: {pip_cmd} install --upgrade whatsapp-chat-exporter {'--pre' if include_beta else ''}")
|
||||||
|
logging.info("====================================")
|
||||||
|
else:
|
||||||
|
logging.info("You are using the latest version of WhatsApp Chat Exporter.")
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
|
||||||
def rendering(
|
def rendering(
|
||||||
output_file_name,
|
output_file_name,
|
||||||
template,
|
template,
|
||||||
name,
|
name,
|
||||||
msgs,
|
msgs,
|
||||||
contact,
|
contact,
|
||||||
w3css,
|
w3css,
|
||||||
chat,
|
chat,
|
||||||
headline,
|
headline,
|
||||||
next=False,
|
next=False,
|
||||||
previous=False
|
previous=False
|
||||||
):
|
):
|
||||||
if chat.their_avatar_thumb is None and chat.their_avatar is not None:
|
if chat.their_avatar_thumb is None and chat.their_avatar is not None:
|
||||||
their_avatar_thumb = chat.their_avatar
|
their_avatar_thumb = chat.their_avatar
|
||||||
else:
|
else:
|
||||||
@@ -215,59 +237,250 @@ class Device(StrEnum):
|
|||||||
EXPORTED = "exported"
|
EXPORTED = "exported"
|
||||||
|
|
||||||
|
|
||||||
def import_from_json(json_file: str, data: Dict[str, ChatStore]):
|
def import_from_json(json_file: str, data: ChatCollection):
|
||||||
"""Imports chat data from a JSON file into the data dictionary.
|
"""Imports chat data from a JSON file into the data dictionary.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
json_file: The path to the JSON file.
|
json_file: The path to the JSON file.
|
||||||
data: The dictionary to store the imported chat data.
|
data: The dictionary to store the imported chat data.
|
||||||
"""
|
"""
|
||||||
from Whatsapp_Chat_Exporter.data_model import ChatStore, Message
|
|
||||||
with open(json_file, "r") as f:
|
with open(json_file, "r") as f:
|
||||||
temp_data = json.loads(f.read())
|
temp_data = json.loads(f.read())
|
||||||
total_row_number = len(tuple(temp_data.keys()))
|
total_row_number = len(tuple(temp_data.keys()))
|
||||||
print(f"Importing chats from JSON...(0/{total_row_number})", end="\r")
|
with tqdm(total=total_row_number, desc="Importing chats from JSON", unit="chat", leave=False) as pbar:
|
||||||
for index, (jid, chat_data) in enumerate(temp_data.items()):
|
for jid, chat_data in temp_data.items():
|
||||||
chat = ChatStore(chat_data.get("type"), chat_data.get("name"))
|
chat = ChatStore.from_json(chat_data)
|
||||||
chat.my_avatar = chat_data.get("my_avatar")
|
data.add_chat(jid, chat)
|
||||||
chat.their_avatar = chat_data.get("their_avatar")
|
pbar.update(1)
|
||||||
chat.their_avatar_thumb = chat_data.get("their_avatar_thumb")
|
total_time = pbar.format_dict['elapsed']
|
||||||
chat.status = chat_data.get("status")
|
logging.info(f"Imported {total_row_number} chats from JSON in {convert_time_unit(total_time)}")
|
||||||
for id, msg in chat_data.get("messages").items():
|
|
||||||
message = Message(
|
|
||||||
from_me=msg["from_me"],
|
class IncrementalMerger:
|
||||||
timestamp=msg["timestamp"],
|
"""Handles incremental merging of WhatsApp chat exports."""
|
||||||
time=msg["time"],
|
|
||||||
key_id=msg["key_id"],
|
def __init__(self, pretty_print_json: int, avoid_encoding_json: bool):
|
||||||
received_timestamp=msg.get("received_timestamp"),
|
"""Initialize the merger with JSON formatting options.
|
||||||
read_timestamp=msg.get("read_timestamp")
|
|
||||||
|
Args:
|
||||||
|
pretty_print_json: JSON indentation level.
|
||||||
|
avoid_encoding_json: Whether to avoid ASCII encoding.
|
||||||
|
"""
|
||||||
|
self.pretty_print_json = pretty_print_json
|
||||||
|
self.avoid_encoding_json = avoid_encoding_json
|
||||||
|
|
||||||
|
def _get_json_files(self, source_dir: str) -> List[str]:
|
||||||
|
"""Get list of JSON files from source directory.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
source_dir: Path to the source directory.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of JSON filenames.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
SystemExit: If no JSON files are found.
|
||||||
|
"""
|
||||||
|
json_files = [f for f in os.listdir(source_dir) if f.endswith('.json')]
|
||||||
|
if not json_files:
|
||||||
|
logging.error("No JSON files found in the source directory.")
|
||||||
|
raise SystemExit(1)
|
||||||
|
|
||||||
|
logging.debug("JSON files found:", json_files)
|
||||||
|
return json_files
|
||||||
|
|
||||||
|
def _copy_new_file(self, source_path: str, target_path: str, target_dir: str, json_file: str) -> None:
|
||||||
|
"""Copy a new JSON file to target directory.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
source_path: Path to source file.
|
||||||
|
target_path: Path to target file.
|
||||||
|
target_dir: Target directory path.
|
||||||
|
json_file: Name of the JSON file.
|
||||||
|
"""
|
||||||
|
logging.info(f"Copying '{json_file}' to target directory...")
|
||||||
|
os.makedirs(target_dir, exist_ok=True)
|
||||||
|
shutil.copy2(source_path, target_path)
|
||||||
|
|
||||||
|
def _load_chat_data(self, file_path: str) -> Dict[str, Any]:
|
||||||
|
"""Load JSON data from file.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_path: Path to JSON file.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Loaded JSON data.
|
||||||
|
"""
|
||||||
|
with open(file_path, 'r') as file:
|
||||||
|
return json.load(file)
|
||||||
|
|
||||||
|
def _parse_chats_from_json(self, data: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
|
"""Parse JSON data into ChatStore objects.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
data: Raw JSON data.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary of JID to ChatStore objects.
|
||||||
|
"""
|
||||||
|
return {jid: ChatStore.from_json(chat) for jid, chat in data.items()}
|
||||||
|
|
||||||
|
def _merge_chat_stores(self, source_chats: Dict[str, Any], target_chats: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
|
"""Merge source chats into target chats.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
source_chats: Source ChatStore objects.
|
||||||
|
target_chats: Target ChatStore objects.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Merged ChatStore objects.
|
||||||
|
"""
|
||||||
|
for jid, chat in source_chats.items():
|
||||||
|
if jid in target_chats:
|
||||||
|
target_chats[jid].merge_with(chat)
|
||||||
|
else:
|
||||||
|
target_chats[jid] = chat
|
||||||
|
return target_chats
|
||||||
|
|
||||||
|
def _serialize_chats(self, chats: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
|
"""Serialize ChatStore objects to JSON format.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
chats: Dictionary of ChatStore objects.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Serialized JSON data.
|
||||||
|
"""
|
||||||
|
return {jid: chat.to_json() for jid, chat in chats.items()}
|
||||||
|
|
||||||
|
def _has_changes(self, merged_data: Dict[str, Any], original_data: Dict[str, Any]) -> bool:
|
||||||
|
"""Check if merged data differs from original data.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
merged_data: Merged JSON data.
|
||||||
|
original_data: Original JSON data.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if changes detected, False otherwise.
|
||||||
|
"""
|
||||||
|
return json.dumps(merged_data, sort_keys=True) != json.dumps(original_data, sort_keys=True)
|
||||||
|
|
||||||
|
def _save_merged_data(self, target_path: str, merged_data: Dict[str, Any]) -> None:
|
||||||
|
"""Save merged data to target file.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
target_path: Path to target file.
|
||||||
|
merged_data: Merged JSON data.
|
||||||
|
"""
|
||||||
|
with open(target_path, 'w') as merged_file:
|
||||||
|
json.dump(
|
||||||
|
merged_data,
|
||||||
|
merged_file,
|
||||||
|
indent=self.pretty_print_json,
|
||||||
|
ensure_ascii=not self.avoid_encoding_json,
|
||||||
)
|
)
|
||||||
message.media = msg.get("media")
|
|
||||||
message.meta = msg.get("meta")
|
def _merge_json_file(self, source_path: str, target_path: str, json_file: str) -> None:
|
||||||
message.data = msg.get("data")
|
"""Merge a single JSON file.
|
||||||
message.sender = msg.get("sender")
|
|
||||||
message.safe = msg.get("safe")
|
Args:
|
||||||
message.mime = msg.get("mime")
|
source_path: Path to source file.
|
||||||
message.reply = msg.get("reply")
|
target_path: Path to target file.
|
||||||
message.quoted_data = msg.get("quoted_data")
|
json_file: Name of the JSON file.
|
||||||
message.caption = msg.get("caption")
|
"""
|
||||||
message.thumb = msg.get("thumb")
|
logging.info(f"Merging '{json_file}' with existing file in target directory...", extra={"clear": True})
|
||||||
message.sticker = msg.get("sticker")
|
|
||||||
chat.add_message(id, message)
|
source_data = self._load_chat_data(source_path)
|
||||||
data[jid] = chat
|
target_data = self._load_chat_data(target_path)
|
||||||
print(f"Importing chats from JSON...({index + 1}/{total_row_number})", end="\r")
|
|
||||||
|
source_chats = self._parse_chats_from_json(source_data)
|
||||||
|
target_chats = self._parse_chats_from_json(target_data)
|
||||||
|
|
||||||
|
merged_chats = self._merge_chat_stores(source_chats, target_chats)
|
||||||
|
merged_data = self._serialize_chats(merged_chats)
|
||||||
|
|
||||||
|
if self._has_changes(merged_data, target_data):
|
||||||
|
logging.info(f"Changes detected in '{json_file}', updating target file...")
|
||||||
|
self._save_merged_data(target_path, merged_data)
|
||||||
|
else:
|
||||||
|
logging.info(f"No changes detected in '{json_file}', skipping update.")
|
||||||
|
|
||||||
|
def _should_copy_media_file(self, source_file: str, target_file: str) -> bool:
|
||||||
|
"""Check if media file should be copied.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
source_file: Path to source media file.
|
||||||
|
target_file: Path to target media file.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if file should be copied, False otherwise.
|
||||||
|
"""
|
||||||
|
return not os.path.exists(target_file) or os.path.getmtime(source_file) > os.path.getmtime(target_file)
|
||||||
|
|
||||||
|
def _merge_media_directories(self, source_dir: str, target_dir: str, media_dir: str) -> None:
|
||||||
|
"""Merge media directories from source to target.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
source_dir: Source directory path.
|
||||||
|
target_dir: Target directory path.
|
||||||
|
media_dir: Media directory name.
|
||||||
|
"""
|
||||||
|
source_media_path = os.path.join(source_dir, media_dir)
|
||||||
|
target_media_path = os.path.join(target_dir, media_dir)
|
||||||
|
|
||||||
|
logging.info(f"Merging media directories. Source: {source_media_path}, target: {target_media_path}")
|
||||||
|
|
||||||
|
if not os.path.exists(source_media_path):
|
||||||
|
return
|
||||||
|
|
||||||
|
for root, _, files in os.walk(source_media_path):
|
||||||
|
relative_path = os.path.relpath(root, source_media_path)
|
||||||
|
target_root = os.path.join(target_media_path, relative_path)
|
||||||
|
os.makedirs(target_root, exist_ok=True)
|
||||||
|
|
||||||
|
for file in files:
|
||||||
|
source_file = os.path.join(root, file)
|
||||||
|
target_file = os.path.join(target_root, file)
|
||||||
|
|
||||||
|
if self._should_copy_media_file(source_file, target_file):
|
||||||
|
logging.debug(f"Copying '{source_file}' to '{target_file}'...")
|
||||||
|
shutil.copy2(source_file, target_file)
|
||||||
|
|
||||||
|
def merge(self, source_dir: str, target_dir: str, media_dir: str) -> None:
|
||||||
|
"""Merge JSON files and media from source to target directory.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
source_dir: The path to the source directory containing JSON files.
|
||||||
|
target_dir: The path to the target directory to merge into.
|
||||||
|
media_dir: The path to the media directory.
|
||||||
|
"""
|
||||||
|
json_files = self._get_json_files(source_dir)
|
||||||
|
|
||||||
|
logging.info("Starting incremental merge process...")
|
||||||
|
for json_file in json_files:
|
||||||
|
source_path = os.path.join(source_dir, json_file)
|
||||||
|
target_path = os.path.join(target_dir, json_file)
|
||||||
|
|
||||||
|
if not os.path.exists(target_path):
|
||||||
|
self._copy_new_file(source_path, target_path, target_dir, json_file)
|
||||||
|
else:
|
||||||
|
self._merge_json_file(source_path, target_path, json_file)
|
||||||
|
|
||||||
|
self._merge_media_directories(source_dir, target_dir, media_dir)
|
||||||
|
|
||||||
|
|
||||||
def sanitize_filename(file_name: str) -> str:
|
def incremental_merge(source_dir: str, target_dir: str, media_dir: str, pretty_print_json: int, avoid_encoding_json: bool) -> None:
|
||||||
"""Sanitizes a filename by removing invalid and unsafe characters.
|
"""Wrapper for merging JSON files from the source directory into the target directory.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
file_name: The filename to sanitize.
|
source_dir: The path to the source directory containing JSON files.
|
||||||
|
target_dir: The path to the target directory to merge into.
|
||||||
Returns:
|
media_dir: The path to the media directory.
|
||||||
The sanitized filename.
|
pretty_print_json: JSON indentation level.
|
||||||
|
avoid_encoding_json: Whether to avoid ASCII encoding.
|
||||||
"""
|
"""
|
||||||
return "".join(x for x in file_name if x.isalnum() or x in "- ")
|
merger = IncrementalMerger(pretty_print_json, avoid_encoding_json)
|
||||||
|
merger.merge(source_dir, target_dir, media_dir)
|
||||||
|
|
||||||
|
|
||||||
def get_file_name(contact: str, chat: ChatStore) -> Tuple[str, str]:
|
def get_file_name(contact: str, chat: ChatStore) -> Tuple[str, str]:
|
||||||
@@ -299,7 +512,7 @@ def get_file_name(contact: str, chat: ChatStore) -> Tuple[str, str]:
|
|||||||
else:
|
else:
|
||||||
name = phone_number
|
name = phone_number
|
||||||
|
|
||||||
return sanitize_filename(file_name), name
|
return safe_name(file_name), name
|
||||||
|
|
||||||
|
|
||||||
def get_cond_for_empty(enable: bool, jid_field: str, broadcast_field: str) -> str:
|
def get_cond_for_empty(enable: bool, jid_field: str, broadcast_field: str) -> str:
|
||||||
@@ -316,9 +529,41 @@ def get_cond_for_empty(enable: bool, jid_field: str, broadcast_field: str) -> st
|
|||||||
return f"AND (chat.hidden=0 OR {jid_field}='status@broadcast' OR {broadcast_field}>0)" if enable else ""
|
return f"AND (chat.hidden=0 OR {jid_field}='status@broadcast' OR {broadcast_field}>0)" if enable else ""
|
||||||
|
|
||||||
|
|
||||||
def get_chat_condition(filter: Optional[List[str]], include: bool, columns: List[str], jid: Optional[str] = None, platform: Optional[str] = None) -> str:
|
def _get_group_condition(jid: str, platform: str) -> str:
|
||||||
|
"""Generate platform-specific group identification condition.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
jid: The JID column name.
|
||||||
|
platform: The platform ("android" or "ios").
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
SQL condition string for group identification.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError: If platform is not supported.
|
||||||
|
"""
|
||||||
|
if platform == "android":
|
||||||
|
return f"{jid}.type == 1"
|
||||||
|
elif platform == "ios":
|
||||||
|
return f"{jid} IS NOT NULL"
|
||||||
|
else:
|
||||||
|
raise ValueError(
|
||||||
|
"Only android and ios are supported for argument platform if jid is not None")
|
||||||
|
|
||||||
|
|
||||||
|
def get_chat_condition(
|
||||||
|
filter: Optional[List[str]],
|
||||||
|
include: bool,
|
||||||
|
columns: List[str],
|
||||||
|
jid: Optional[str] = None,
|
||||||
|
platform: Optional[str] = None
|
||||||
|
) -> str:
|
||||||
"""Generates a SQL condition for filtering chats based on inclusion or exclusion criteria.
|
"""Generates a SQL condition for filtering chats based on inclusion or exclusion criteria.
|
||||||
|
|
||||||
|
SQL injection risks from chat filters were evaluated during development and deemed negligible
|
||||||
|
due to the tool's offline, trusted-input model (user running this tool on WhatsApp
|
||||||
|
backups/databases on their own device).
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
filter: A list of phone numbers to include or exclude.
|
filter: A list of phone numbers to include or exclude.
|
||||||
include: True to include chats that match the filter, False to exclude them.
|
include: True to include chats that match the filter, False to exclude them.
|
||||||
@@ -332,30 +577,40 @@ def get_chat_condition(filter: Optional[List[str]], include: bool, columns: List
|
|||||||
Raises:
|
Raises:
|
||||||
ValueError: If the column count is invalid or an unsupported platform is provided.
|
ValueError: If the column count is invalid or an unsupported platform is provided.
|
||||||
"""
|
"""
|
||||||
if filter is not None:
|
if not filter:
|
||||||
conditions = []
|
|
||||||
if len(columns) < 2 and jid is not None:
|
|
||||||
raise ValueError("There must be at least two elements in argument columns if jid is not None")
|
|
||||||
if jid is not None:
|
|
||||||
if platform == "android":
|
|
||||||
is_group = f"{jid}.type == 1"
|
|
||||||
elif platform == "ios":
|
|
||||||
is_group = f"{jid} IS NOT NULL"
|
|
||||||
else:
|
|
||||||
raise ValueError("Only android and ios are supported for argument platform if jid is not None")
|
|
||||||
for index, chat in enumerate(filter):
|
|
||||||
if include:
|
|
||||||
conditions.append(f"{' OR' if index > 0 else ''} {columns[0]} LIKE '%{chat}%'")
|
|
||||||
if len(columns) > 1:
|
|
||||||
conditions.append(f" OR ({columns[1]} LIKE '%{chat}%' AND {is_group})")
|
|
||||||
else:
|
|
||||||
conditions.append(f"{' AND' if index > 0 else ''} {columns[0]} NOT LIKE '%{chat}%'")
|
|
||||||
if len(columns) > 1:
|
|
||||||
conditions.append(f" AND ({columns[1]} NOT LIKE '%{chat}%' AND {is_group})")
|
|
||||||
return f"AND ({' '.join(conditions)})"
|
|
||||||
else:
|
|
||||||
return ""
|
return ""
|
||||||
|
|
||||||
|
if jid is not None and len(columns) < 2:
|
||||||
|
raise ValueError(
|
||||||
|
"There must be at least two elements in argument columns if jid is not None")
|
||||||
|
|
||||||
|
# Get group condition if needed
|
||||||
|
is_group_condition = None
|
||||||
|
if jid is not None:
|
||||||
|
is_group_condition = _get_group_condition(jid, platform)
|
||||||
|
|
||||||
|
# Build conditions for each chat filter
|
||||||
|
conditions = []
|
||||||
|
for index, chat in enumerate(filter):
|
||||||
|
# Add connector for subsequent conditions (with double space)
|
||||||
|
connector = " OR" if include else " AND"
|
||||||
|
prefix = connector if index > 0 else ""
|
||||||
|
|
||||||
|
# Primary column condition
|
||||||
|
operator = "LIKE" if include else "NOT LIKE"
|
||||||
|
conditions.append(f"{prefix} {columns[0]} {operator} '%{chat}%'")
|
||||||
|
|
||||||
|
# Secondary column condition for groups
|
||||||
|
if len(columns) > 1 and is_group_condition:
|
||||||
|
if include:
|
||||||
|
group_condition = f" OR ({columns[1]} {operator} '%{chat}%' AND {is_group_condition})"
|
||||||
|
else:
|
||||||
|
group_condition = f" AND ({columns[1]} {operator} '%{chat}%' AND {is_group_condition})"
|
||||||
|
conditions.append(group_condition)
|
||||||
|
|
||||||
|
combined_conditions = "".join(conditions)
|
||||||
|
return f"AND ({combined_conditions})"
|
||||||
|
|
||||||
|
|
||||||
# Android Specific
|
# Android Specific
|
||||||
CRYPT14_OFFSETS = (
|
CRYPT14_OFFSETS = (
|
||||||
@@ -365,6 +620,7 @@ CRYPT14_OFFSETS = (
|
|||||||
{"iv": 67, "db": 193},
|
{"iv": 67, "db": 193},
|
||||||
{"iv": 67, "db": 194},
|
{"iv": 67, "db": 194},
|
||||||
{"iv": 67, "db": 158},
|
{"iv": 67, "db": 158},
|
||||||
|
{"iv": 67, "db": 196},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -446,7 +702,7 @@ def determine_metadata(content: sqlite3.Row, init_msg: Optional[str]) -> Optiona
|
|||||||
else:
|
else:
|
||||||
msg = f"{old} changed their number to {new}"
|
msg = f"{old} changed their number to {new}"
|
||||||
elif content["action_type"] == 46:
|
elif content["action_type"] == 46:
|
||||||
return # Voice message in PM??? Seems no need to handle.
|
return # Voice message in PM??? Seems no need to handle.
|
||||||
elif content["action_type"] == 47:
|
elif content["action_type"] == 47:
|
||||||
msg = "The contact is an official business account"
|
msg = "The contact is an official business account"
|
||||||
elif content["action_type"] == 50:
|
elif content["action_type"] == 50:
|
||||||
@@ -459,11 +715,12 @@ def determine_metadata(content: sqlite3.Row, init_msg: Optional[str]) -> Optiona
|
|||||||
else:
|
else:
|
||||||
msg = "The security code in this chat changed"
|
msg = "The security code in this chat changed"
|
||||||
elif content["action_type"] == 58:
|
elif content["action_type"] == 58:
|
||||||
msg = "You blocked this contact"
|
msg = "You blocked/unblocked this contact"
|
||||||
elif content["action_type"] == 67:
|
elif content["action_type"] == 67:
|
||||||
return # (PM) this contact use secure service from Facebook???
|
return # (PM) this contact use secure service from Facebook???
|
||||||
elif content["action_type"] == 69:
|
elif content["action_type"] == 69:
|
||||||
return # (PM) this contact use secure service from Facebook??? What's the difference with 67????
|
# (PM) this contact use secure service from Facebook??? What's the difference with 67????
|
||||||
|
return
|
||||||
else:
|
else:
|
||||||
return # Unsupported
|
return # Unsupported
|
||||||
return msg
|
return msg
|
||||||
@@ -490,8 +747,73 @@ def get_status_location(output_folder: str, offline_static: str) -> str:
|
|||||||
w3css_path = os.path.join(static_folder, "w3.css")
|
w3css_path = os.path.join(static_folder, "w3.css")
|
||||||
if not os.path.isfile(w3css_path):
|
if not os.path.isfile(w3css_path):
|
||||||
with urllib.request.urlopen(w3css) as resp:
|
with urllib.request.urlopen(w3css) as resp:
|
||||||
with open(w3css_path, "wb") as f: f.write(resp.read())
|
with open(w3css_path, "wb") as f:
|
||||||
|
f.write(resp.read())
|
||||||
w3css = os.path.join(offline_static, "w3.css")
|
w3css = os.path.join(offline_static, "w3.css")
|
||||||
|
return w3css
|
||||||
|
|
||||||
|
|
||||||
|
def check_jid_map(db: sqlite3.Connection) -> bool:
|
||||||
|
"""
|
||||||
|
Checks if the jid_map table exists in the database.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
db (sqlite3.Connection): The SQLite database connection.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if the jid_map table exists, False otherwise.
|
||||||
|
"""
|
||||||
|
cursor = db.cursor()
|
||||||
|
cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='jid_map'")
|
||||||
|
return cursor.fetchone() is not None
|
||||||
|
|
||||||
|
|
||||||
|
def get_jid_map_join(jid_map_exists: bool) -> str:
|
||||||
|
"""
|
||||||
|
Returns the SQL JOIN statements for jid_map table.
|
||||||
|
"""
|
||||||
|
if not jid_map_exists:
|
||||||
|
return ""
|
||||||
|
else:
|
||||||
|
return """LEFT JOIN jid_map as jid_map_global
|
||||||
|
ON chat.jid_row_id = jid_map_global.lid_row_id
|
||||||
|
LEFT JOIN jid lid_global
|
||||||
|
ON jid_map_global.jid_row_id = lid_global._id
|
||||||
|
LEFT JOIN jid_map as jid_map_group
|
||||||
|
ON message.sender_jid_row_id = jid_map_group.lid_row_id
|
||||||
|
LEFT JOIN jid lid_group
|
||||||
|
ON jid_map_group.jid_row_id = lid_group._id"""
|
||||||
|
|
||||||
|
def get_jid_map_selection(jid_map_exists: bool) -> tuple:
|
||||||
|
"""
|
||||||
|
Returns the SQL selection statements for jid_map table.
|
||||||
|
"""
|
||||||
|
if not jid_map_exists:
|
||||||
|
return "jid_global.raw_string", "jid_group.raw_string"
|
||||||
|
else:
|
||||||
|
return (
|
||||||
|
"COALESCE(lid_global.raw_string, jid_global.raw_string)",
|
||||||
|
"COALESCE(lid_group.raw_string, jid_group.raw_string)"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_transcription_selection(db: sqlite3.Connection) -> str:
|
||||||
|
"""
|
||||||
|
Returns the SQL selection statement for transcription text based on the database schema.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
db (sqlite3.Connection): The SQLite database connection.
|
||||||
|
Returns:
|
||||||
|
str: The SQL selection statement for transcription.
|
||||||
|
"""
|
||||||
|
cursor = db.cursor()
|
||||||
|
cursor.execute("PRAGMA table_info(message_media)")
|
||||||
|
columns = [row[1] for row in cursor.fetchall()]
|
||||||
|
|
||||||
|
if "raw_transcription_text" in columns:
|
||||||
|
return "message_media.raw_transcription_text AS transcription_text"
|
||||||
|
else:
|
||||||
|
return "NULL AS transcription_text"
|
||||||
|
|
||||||
|
|
||||||
def setup_template(template: Optional[str], no_avatar: bool, experimental: bool = False) -> jinja2.Template:
|
def setup_template(template: Optional[str], no_avatar: bool, experimental: bool = False) -> jinja2.Template:
|
||||||
@@ -521,44 +843,138 @@ def setup_template(template: Optional[str], no_avatar: bool, experimental: bool
|
|||||||
template_env.filters['sanitize_except'] = sanitize_except
|
template_env.filters['sanitize_except'] = sanitize_except
|
||||||
return template_env.get_template(template_file)
|
return template_env.get_template(template_file)
|
||||||
|
|
||||||
|
|
||||||
# iOS Specific
|
# iOS Specific
|
||||||
APPLE_TIME = 978307200
|
APPLE_TIME = 978307200
|
||||||
|
|
||||||
|
|
||||||
def slugify(value: str, allow_unicode: bool = False) -> str:
|
def safe_name(text: Union[str, bytes]) -> str:
|
||||||
"""
|
"""
|
||||||
Convert text to ASCII-only slugs for URL-safe strings.
|
Sanitize the input text and generates a safe file name.
|
||||||
Taken from https://github.com/django/django/blob/master/django/utils/text.py
|
This function serves a similar purpose to slugify() from
|
||||||
|
Django previously used in this project, but is a clean-room
|
||||||
|
Reimplementation tailored for performance and a narrower
|
||||||
|
Use case for this project. Licensed under the same terms
|
||||||
|
As the project (MIT).
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
value (str): The string to convert to a slug.
|
text (str|bytes): The string to be sanitized.
|
||||||
allow_unicode (bool, optional): Whether to allow Unicode characters. Defaults to False.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
str: The slugified string with only alphanumerics, underscores, or hyphens.
|
str: The sanitized string with only alphanumerics, underscores, or hyphens.
|
||||||
"""
|
"""
|
||||||
value = str(value)
|
if isinstance(text, bytes):
|
||||||
if allow_unicode:
|
text = text.decode("utf-8", "ignore")
|
||||||
value = unicodedata.normalize('NFKC', value)
|
elif not isinstance(text, str):
|
||||||
else:
|
raise TypeError("value must be a string or bytes")
|
||||||
value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii')
|
normalized_text = unicodedata.normalize("NFKC", text)
|
||||||
value = re.sub(r'[^\w\s-]', '', value.lower())
|
safe_chars = [char for char in normalized_text if char.isalnum() or char in "-_ ."]
|
||||||
return re.sub(r'[-\s]+', '-', value).strip('-_')
|
return "-".join(''.join(safe_chars).split())
|
||||||
|
|
||||||
|
|
||||||
|
def get_from_string(msg: Dict, chat_id: str) -> str:
|
||||||
|
"""Return the number or name for the sender"""
|
||||||
|
if msg["from_me"]:
|
||||||
|
return "Me"
|
||||||
|
if msg["sender"]:
|
||||||
|
return str(msg["sender"])
|
||||||
|
return str(chat_id)
|
||||||
|
|
||||||
|
|
||||||
|
def get_chat_type(chat_id: str) -> str:
|
||||||
|
"""Return the chat type based on the whatsapp id"""
|
||||||
|
if chat_id == "000000000000000":
|
||||||
|
return "calls"
|
||||||
|
elif chat_id.endswith("@s.whatsapp.net"):
|
||||||
|
return "personal_chat"
|
||||||
|
elif chat_id.endswith("@g.us"):
|
||||||
|
return "private_group"
|
||||||
|
elif chat_id == "status@broadcast":
|
||||||
|
return "status_broadcast"
|
||||||
|
elif chat_id.endswith("@broadcast"):
|
||||||
|
return "broadcast_channel"
|
||||||
|
logging.warning(f"Unknown chat type for {chat_id}, defaulting to private_group")
|
||||||
|
return "private_group"
|
||||||
|
|
||||||
|
|
||||||
|
def get_from_id(msg: Dict, chat_id: str) -> str:
|
||||||
|
"""Return the user id for the sender"""
|
||||||
|
if msg["from_me"]:
|
||||||
|
return "user00000"
|
||||||
|
if msg["sender"]:
|
||||||
|
return "user" + msg["sender"]
|
||||||
|
return f"user{chat_id}"
|
||||||
|
|
||||||
|
|
||||||
|
def get_reply_id(data: Dict, reply_key: int) -> Optional[int]:
|
||||||
|
"""Get the id of the message corresponding to the reply"""
|
||||||
|
if not reply_key:
|
||||||
|
return None
|
||||||
|
for msg_id, msg in data["messages"].items():
|
||||||
|
if msg["key_id"] == reply_key:
|
||||||
|
return msg_id
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def telegram_json_format(jik: str, data: Dict, timezone_offset) -> Dict:
|
||||||
|
"""Convert the data to the Telegram export format"""
|
||||||
|
timing = Timing(timezone_offset or CURRENT_TZ_OFFSET)
|
||||||
|
try:
|
||||||
|
chat_id = int(''.join([c for c in jik if c.isdigit()]))
|
||||||
|
except ValueError:
|
||||||
|
# not a real chat: e.g. statusbroadcast
|
||||||
|
chat_id = 0
|
||||||
|
json_obj = {
|
||||||
|
"name": data["name"] if data["name"] else jik,
|
||||||
|
"type": get_chat_type(jik),
|
||||||
|
"id": chat_id,
|
||||||
|
"messages": [ {
|
||||||
|
"id": int(msgId),
|
||||||
|
"type": "message",
|
||||||
|
"date": timing.format_timestamp(msg["timestamp"], "%Y-%m-%dT%H:%M:%S"),
|
||||||
|
"date_unixtime": int(msg["timestamp"]),
|
||||||
|
"from": get_from_string(msg, chat_id),
|
||||||
|
"from_id": get_from_id(msg, chat_id),
|
||||||
|
"reply_to_message_id": get_reply_id(data, msg["reply"]),
|
||||||
|
"text": msg["data"],
|
||||||
|
"text_entities": [
|
||||||
|
{
|
||||||
|
# TODO this will lose formatting and different types
|
||||||
|
"type": "plain",
|
||||||
|
"text": msg["data"],
|
||||||
|
}
|
||||||
|
],
|
||||||
|
}
|
||||||
|
for msgId, msg in data["messages"].items()]
|
||||||
|
}
|
||||||
|
# remove empty messages and replies
|
||||||
|
for msg_id, msg in enumerate(json_obj["messages"]):
|
||||||
|
if not msg["reply_to_message_id"]:
|
||||||
|
del json_obj["messages"][msg_id]["reply_to_message_id"]
|
||||||
|
json_obj["messages"] = [m for m in json_obj["messages"] if m["text"]]
|
||||||
|
return json_obj
|
||||||
|
|
||||||
|
|
||||||
class WhatsAppIdentifier(StrEnum):
|
class WhatsAppIdentifier(StrEnum):
|
||||||
MESSAGE = "7c7fba66680ef796b916b067077cc246adacf01d" # AppDomainGroup-group.net.whatsapp.WhatsApp.shared-ChatStorage.sqlite
|
# AppDomainGroup-group.net.whatsapp.WhatsApp.shared-ChatStorage.sqlite
|
||||||
CONTACT = "b8548dc30aa1030df0ce18ef08b882cf7ab5212f" # AppDomainGroup-group.net.whatsapp.WhatsApp.shared-ContactsV2.sqlite
|
MESSAGE = "7c7fba66680ef796b916b067077cc246adacf01d"
|
||||||
CALL = "1b432994e958845fffe8e2f190f26d1511534088" # AppDomainGroup-group.net.whatsapp.WhatsApp.shared-CallHistory.sqlite
|
# AppDomainGroup-group.net.whatsapp.WhatsApp.shared-ContactsV2.sqlite
|
||||||
|
CONTACT = "b8548dc30aa1030df0ce18ef08b882cf7ab5212f"
|
||||||
|
# AppDomainGroup-group.net.whatsapp.WhatsApp.shared-CallHistory.sqlite
|
||||||
|
CALL = "1b432994e958845fffe8e2f190f26d1511534088"
|
||||||
DOMAIN = "AppDomainGroup-group.net.whatsapp.WhatsApp.shared"
|
DOMAIN = "AppDomainGroup-group.net.whatsapp.WhatsApp.shared"
|
||||||
|
|
||||||
|
|
||||||
class WhatsAppBusinessIdentifier(StrEnum):
|
class WhatsAppBusinessIdentifier(StrEnum):
|
||||||
MESSAGE = "724bd3b98b18518b455a87c1f3ac3a0d189c4466" # AppDomainGroup-group.net.whatsapp.WhatsAppSMB.shared-ChatStorage.sqlite
|
# AppDomainGroup-group.net.whatsapp.WhatsAppSMB.shared-ChatStorage.sqlite
|
||||||
CONTACT = "d7246a707f51ddf8b17ee2dddabd9e0a4da5c552" # AppDomainGroup-group.net.whatsapp.WhatsAppSMB.shared-ContactsV2.sqlite
|
MESSAGE = "724bd3b98b18518b455a87c1f3ac3a0d189c4466"
|
||||||
CALL = "b463f7c4365eefc5a8723930d97928d4e907c603" # AppDomainGroup-group.net.whatsapp.WhatsAppSMB.shared-CallHistory.sqlite
|
# AppDomainGroup-group.net.whatsapp.WhatsAppSMB.shared-ContactsV2.sqlite
|
||||||
|
CONTACT = "d7246a707f51ddf8b17ee2dddabd9e0a4da5c552"
|
||||||
|
# AppDomainGroup-group.net.whatsapp.WhatsAppSMB.shared-CallHistory.sqlite
|
||||||
|
CALL = "b463f7c4365eefc5a8723930d97928d4e907c603"
|
||||||
DOMAIN = "AppDomainGroup-group.net.whatsapp.WhatsAppSMB.shared"
|
DOMAIN = "AppDomainGroup-group.net.whatsapp.WhatsAppSMB.shared"
|
||||||
|
|
||||||
|
|
||||||
class JidType(IntEnum):
|
class JidType(IntEnum):
|
||||||
PM = 0
|
PM = 0
|
||||||
GROUP = 1
|
GROUP = 1
|
||||||
|
|||||||
@@ -1,5 +1,11 @@
|
|||||||
import vobject
|
import logging
|
||||||
|
import re
|
||||||
|
import quopri
|
||||||
from typing import List, TypedDict
|
from typing import List, TypedDict
|
||||||
|
from Whatsapp_Chat_Exporter.data_model import ChatStore
|
||||||
|
from Whatsapp_Chat_Exporter.utility import Device
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class ExportedContactNumbers(TypedDict):
|
class ExportedContactNumbers(TypedDict):
|
||||||
@@ -22,31 +28,154 @@ class ContactsFromVCards:
|
|||||||
# short number must be a bad contact, lets skip it
|
# short number must be a bad contact, lets skip it
|
||||||
if len(number) <= 5:
|
if len(number) <= 5:
|
||||||
continue
|
continue
|
||||||
|
chats_search = filter_chats_by_prefix(chats, number).values()
|
||||||
|
if chats_search:
|
||||||
|
for chat in chats_search:
|
||||||
|
if not hasattr(chat, 'name') or (hasattr(chat, 'name') and chat.name is None):
|
||||||
|
setattr(chat, 'name', name)
|
||||||
|
else:
|
||||||
|
chats.add_chat(number + "@s.whatsapp.net", ChatStore(Device.ANDROID, name))
|
||||||
|
|
||||||
for chat in filter_chats_by_prefix(chats, number).values():
|
|
||||||
if not hasattr(chat, 'name') or (hasattr(chat, 'name') and chat.name is None):
|
def decode_quoted_printable(value: str, charset: str) -> str:
|
||||||
setattr(chat, 'name', name)
|
"""Decode a vCard value that may be quoted-printable UTF-8."""
|
||||||
|
try:
|
||||||
|
bytes_val = quopri.decodestring(value)
|
||||||
|
return bytes_val.decode(charset, errors="replace")
|
||||||
|
except Exception:
|
||||||
|
# Fallback: return the original value if decoding fails
|
||||||
|
logging.warning(
|
||||||
|
f"Failed to decode quoted-printable value: {value}, "
|
||||||
|
f"charset: {charset}. Please report this issue."
|
||||||
|
)
|
||||||
|
return value
|
||||||
|
|
||||||
|
def _parse_vcard_line(line: str) -> tuple[str, dict[str, str], str] | None:
|
||||||
|
"""
|
||||||
|
Parses a single vCard property line into its components:
|
||||||
|
Property Name, Parameters (as a dict), and Value.
|
||||||
|
|
||||||
|
Example: 'FN;CHARSET=UTF-8:John Doe' -> ('FN', {'CHARSET': 'UTF-8'}, 'John Doe')
|
||||||
|
"""
|
||||||
|
# Find the first colon, which separates the property/parameters from the value.
|
||||||
|
colon_index = line.find(':')
|
||||||
|
if colon_index == -1:
|
||||||
|
return None # Invalid vCard line format
|
||||||
|
|
||||||
|
prop_and_params = line[:colon_index].strip()
|
||||||
|
value = line[colon_index + 1:].strip()
|
||||||
|
|
||||||
|
# Split property name from parameters
|
||||||
|
parts = prop_and_params.split(';')
|
||||||
|
property_name = parts[0].upper()
|
||||||
|
|
||||||
|
parameters = {}
|
||||||
|
for part in parts[1:]:
|
||||||
|
if '=' in part:
|
||||||
|
key, val = part.split('=', 1)
|
||||||
|
parameters[key.upper()] = val.strip('"') # Remove potential quotes from value
|
||||||
|
|
||||||
|
return property_name, parameters, value
|
||||||
|
|
||||||
|
|
||||||
|
def get_vcard_value(entry: str, field_name: str) -> list[str]:
|
||||||
|
"""
|
||||||
|
Scans the vCard entry for lines starting with the specific field_name
|
||||||
|
and returns a list of its decoded values, handling parameters like
|
||||||
|
ENCODING and CHARSET.
|
||||||
|
"""
|
||||||
|
target_name = field_name.upper()
|
||||||
|
cached_line = ""
|
||||||
|
charset = "utf-8"
|
||||||
|
values = []
|
||||||
|
|
||||||
|
for line in entry.splitlines():
|
||||||
|
line = line.strip()
|
||||||
|
if cached_line:
|
||||||
|
if line.endswith('='):
|
||||||
|
cached_line += line[:-1]
|
||||||
|
continue # Wait for the next line to complete the value
|
||||||
|
values.append(decode_quoted_printable(cached_line + line, charset))
|
||||||
|
cached_line = ""
|
||||||
|
else:
|
||||||
|
# Skip empty lines or lines that don't start with the target field (after stripping)
|
||||||
|
if not line or not line.upper().startswith(target_name):
|
||||||
|
continue
|
||||||
|
|
||||||
|
parsed = _parse_vcard_line(line)
|
||||||
|
if parsed is None:
|
||||||
|
continue
|
||||||
|
|
||||||
|
prop_name, params, raw_value = parsed
|
||||||
|
|
||||||
|
if prop_name != target_name:
|
||||||
|
continue
|
||||||
|
|
||||||
|
encoding = params.get('ENCODING')
|
||||||
|
charset = params.get('CHARSET', 'utf-8')
|
||||||
|
|
||||||
|
# Apply decoding if ENCODING parameter is present
|
||||||
|
if encoding == 'QUOTED-PRINTABLE':
|
||||||
|
if raw_value.endswith('='):
|
||||||
|
# Handle soft line breaks in quoted-printable and cache the line
|
||||||
|
cached_line += raw_value[:-1]
|
||||||
|
continue # Wait for the next line to complete the value
|
||||||
|
values.append(decode_quoted_printable(raw_value, charset))
|
||||||
|
elif encoding:
|
||||||
|
raise NotImplementedError(f"Encoding '{encoding}' not supported yet.")
|
||||||
|
else:
|
||||||
|
values.append(raw_value)
|
||||||
|
return values
|
||||||
|
|
||||||
|
|
||||||
|
def process_vcard_entry(entry: str) -> dict | bool:
|
||||||
|
"""
|
||||||
|
Process a vCard entry using pure string manipulation
|
||||||
|
|
||||||
|
Args:
|
||||||
|
entry: A string containing a single vCard block.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A dictionary of the extracted data or False if required fields are missing.
|
||||||
|
"""
|
||||||
|
|
||||||
|
name = None
|
||||||
|
|
||||||
|
# Extract name in priority: FN -> N -> ORG
|
||||||
|
for field in ("FN", "N", "ORG"):
|
||||||
|
if name_values := get_vcard_value(entry, field):
|
||||||
|
name = name_values[0].replace(';', ' ') # Simple cleanup for structured name
|
||||||
|
break
|
||||||
|
|
||||||
|
if not name:
|
||||||
|
return False
|
||||||
|
|
||||||
|
numbers = get_vcard_value(entry, "TEL")
|
||||||
|
if not numbers:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return {
|
||||||
|
"full_name": name,
|
||||||
|
# Remove duplications
|
||||||
|
"numbers": set(numbers),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
def read_vcards_file(vcf_file_path, default_country_code: str):
|
def read_vcards_file(vcf_file_path, default_country_code: str):
|
||||||
contacts = []
|
contacts = []
|
||||||
with open(vcf_file_path, mode="r", encoding="utf-8") as f:
|
with open(vcf_file_path, "r", encoding="utf-8", errors="ignore") as f:
|
||||||
reader = vobject.readComponents(f)
|
content = f.read()
|
||||||
for row in reader:
|
|
||||||
if hasattr(row, 'fn'):
|
# Split into individual vCards
|
||||||
name = str(row.fn.value)
|
vcards = content.split("BEGIN:VCARD")
|
||||||
elif hasattr(row, 'n'):
|
for vcard in vcards:
|
||||||
name = str(row.n.value)
|
if "END:VCARD" not in vcard:
|
||||||
else:
|
continue
|
||||||
name = None
|
|
||||||
if not hasattr(row, 'tel') or name is None:
|
if contact := process_vcard_entry(vcard):
|
||||||
continue
|
|
||||||
contact: ExportedContactNumbers = {
|
|
||||||
"full_name": name,
|
|
||||||
"numbers": list(map(lambda tel: tel.value, row.tel_list)),
|
|
||||||
}
|
|
||||||
contacts.append(contact)
|
contacts.append(contact)
|
||||||
|
|
||||||
|
logging.info(f"Imported {len(contacts)} contacts/vcards")
|
||||||
return map_number_to_name(contacts, default_country_code)
|
return map_number_to_name(contacts, default_country_code)
|
||||||
|
|
||||||
|
|
||||||
@@ -77,6 +206,6 @@ def normalize_number(number: str, country_code: str):
|
|||||||
return number[len(starting_char):]
|
return number[len(starting_char):]
|
||||||
|
|
||||||
# leading zero should be removed
|
# leading zero should be removed
|
||||||
if starting_char == '0':
|
if number.startswith('0'):
|
||||||
number = number[1:]
|
number = number[1:]
|
||||||
return country_code + number # fall back
|
return country_code + number # fall back
|
||||||
|
|||||||
@@ -1,20 +0,0 @@
|
|||||||
# from contacts_names_from_vcards import readVCardsFile
|
|
||||||
|
|
||||||
from Whatsapp_Chat_Exporter.vcards_contacts import normalize_number, read_vcards_file
|
|
||||||
|
|
||||||
|
|
||||||
def test_readVCardsFile():
|
|
||||||
assert len(read_vcards_file("contacts.vcf", "973")) > 0
|
|
||||||
|
|
||||||
def test_create_number_to_name_dicts():
|
|
||||||
pass
|
|
||||||
|
|
||||||
def test_fuzzy_match_numbers():
|
|
||||||
pass
|
|
||||||
|
|
||||||
def test_normalize_number():
|
|
||||||
assert normalize_number('0531234567', '1') == '1531234567'
|
|
||||||
assert normalize_number('001531234567', '2') == '1531234567'
|
|
||||||
assert normalize_number('+1531234567', '34') == '1531234567'
|
|
||||||
assert normalize_number('053(123)4567', '34') == '34531234567'
|
|
||||||
assert normalize_number('0531-234-567', '58') == '58531234567'
|
|
||||||
@@ -1,329 +1,657 @@
|
|||||||
<!DOCTYPE html>
|
<!DOCTYPE html>
|
||||||
<html>
|
<html>
|
||||||
<head>
|
<head>
|
||||||
<title>Whatsapp - {{ name }}</title>
|
<title>Whatsapp - {{ name }}</title>
|
||||||
<meta charset="UTF-8">
|
<meta charset="UTF-8">
|
||||||
<link rel="stylesheet" href="{{w3css}}">
|
<script src="https://cdn.tailwindcss.com"></script>
|
||||||
<style>
|
<script>
|
||||||
html, body {
|
tailwind.config = {
|
||||||
font-size: 12px;
|
theme: {
|
||||||
scroll-behavior: smooth;
|
extend: {
|
||||||
}
|
colors: {
|
||||||
header {
|
whatsapp: {
|
||||||
position: fixed;
|
light: '#e7ffdb',
|
||||||
z-index: 20;
|
DEFAULT: '#25D366',
|
||||||
border-bottom: 2px solid #e3e6e7;
|
dark: '#075E54',
|
||||||
font-size: 2em;
|
chat: '#efeae2',
|
||||||
font-weight: bolder;
|
'chat-light': '#f0f2f5',
|
||||||
background-color: white;
|
}
|
||||||
padding: 20px 0 20px 0;
|
}
|
||||||
}
|
}
|
||||||
footer {
|
}
|
||||||
border-top: 2px solid #e3e6e7;
|
}
|
||||||
padding: 20px 0 20px 0;
|
</script>
|
||||||
}
|
<style>
|
||||||
article {
|
body, html {
|
||||||
width:500px;
|
height: 100%;
|
||||||
margin:100px auto;
|
margin: 0;
|
||||||
z-index:10;
|
padding: 0;
|
||||||
font-size: 15px;
|
scroll-behavior: smooth !important;
|
||||||
word-wrap: break-word;
|
}
|
||||||
}
|
.chat-list {
|
||||||
img, video {
|
height: calc(100vh - 120px);
|
||||||
max-width:100%;
|
overflow-y: auto;
|
||||||
}
|
}
|
||||||
div.reply{
|
.message-list {
|
||||||
font-size: 13px;
|
height: calc(100vh - 90px);
|
||||||
text-decoration: none;
|
overflow-y: auto;
|
||||||
}
|
}
|
||||||
div:target::before {
|
@media (max-width: 640px) {
|
||||||
content: '';
|
.chat-list, .message-list {
|
||||||
display: block;
|
height: calc(100vh - 108px);
|
||||||
height: 115px;
|
}
|
||||||
margin-top: -115px;
|
}
|
||||||
visibility: hidden;
|
header {
|
||||||
}
|
position: fixed;
|
||||||
div:target {
|
z-index: 20;
|
||||||
border-style: solid;
|
border-bottom: 2px solid #e3e6e7;
|
||||||
border-width: 2px;
|
font-size: 2em;
|
||||||
animation: border-blink 0.5s steps(1) 5;
|
font-weight: bolder;
|
||||||
border-color: rgba(0,0,0,0)
|
background-color: white;
|
||||||
}
|
padding: 20px 0 20px 0;
|
||||||
table {
|
}
|
||||||
width: 100%;
|
footer {
|
||||||
}
|
margin-top: 10px;
|
||||||
@keyframes border-blink {
|
border-top: 2px solid #e3e6e7;
|
||||||
0% {
|
padding: 20px 0 20px 0;
|
||||||
border-color: #2196F3;
|
}
|
||||||
}
|
article {
|
||||||
50% {
|
width:430px;
|
||||||
border-color: rgba(0,0,0,0);
|
margin: auto;
|
||||||
}
|
z-index:10;
|
||||||
}
|
font-size: 15px;
|
||||||
.avatar {
|
word-wrap: break-word;
|
||||||
border-radius:50%;
|
}
|
||||||
overflow:hidden;
|
img, video, audio{
|
||||||
max-width: 64px;
|
max-width:100%;
|
||||||
max-height: 64px;
|
box-sizing: border-box;
|
||||||
}
|
}
|
||||||
.name {
|
div.reply{
|
||||||
color: #3892da;
|
font-size: 13px;
|
||||||
}
|
text-decoration: none;
|
||||||
.pad-left-10 {
|
}
|
||||||
padding-left: 10px;
|
div:target::before {
|
||||||
}
|
content: '';
|
||||||
.pad-right-10 {
|
display: block;
|
||||||
padding-right: 10px;
|
height: 115px;
|
||||||
}
|
margin-top: -115px;
|
||||||
.reply_link {
|
visibility: hidden;
|
||||||
color: #168acc;
|
}
|
||||||
}
|
div:target {
|
||||||
.blue {
|
animation: 3s highlight;
|
||||||
color: #70777a;
|
}
|
||||||
}
|
.avatar {
|
||||||
.sticker {
|
border-radius:50%;
|
||||||
max-width: 100px !important;
|
overflow:hidden;
|
||||||
max-height: 100px !important;
|
max-width: 64px;
|
||||||
}
|
max-height: 64px;
|
||||||
</style>
|
}
|
||||||
<base href="{{ media_base }}" target="_blank">
|
.name {
|
||||||
</head>
|
color: #3892da;
|
||||||
<body>
|
}
|
||||||
<header class="w3-center w3-top">
|
.pad-left-10 {
|
||||||
{{ headline }}
|
padding-left: 10px;
|
||||||
{% if status is not none %}
|
}
|
||||||
<br>
|
.pad-right-10 {
|
||||||
<span class="w3-small">{{ status }}</span>
|
padding-right: 10px;
|
||||||
{% endif %}
|
}
|
||||||
</header>
|
.reply_link {
|
||||||
<article class="w3-container">
|
color: #168acc;
|
||||||
<div class="table">
|
}
|
||||||
{% set last = {'last': 946688461.001} %}
|
.blue {
|
||||||
{% for msg in msgs -%}
|
color: #70777a;
|
||||||
<div class="w3-row w3-padding-small w3-margin-bottom" id="{{ msg.key_id }}">
|
}
|
||||||
{% if determine_day(last.last, msg.timestamp) is not none %}
|
.sticker {
|
||||||
<div class="w3-center w3-padding-16 blue">{{ determine_day(last.last, msg.timestamp) }}</div>
|
max-width: 100px !important;
|
||||||
{% if last.update({'last': msg.timestamp}) %}{% endif %}
|
max-height: 100px !important;
|
||||||
{% endif %}
|
}
|
||||||
{% if msg.from_me == true %}
|
@keyframes highlight {
|
||||||
<div class="w3-row">
|
from {
|
||||||
<div class="w3-left blue">{{ msg.time }}</div>
|
background-color: rgba(37, 211, 102, 0.1);
|
||||||
<div class="name w3-right-align pad-left-10">You</div>
|
}
|
||||||
</div>
|
to {
|
||||||
<div class="w3-row">
|
background-color: transparent;
|
||||||
{% if not no_avatar and my_avatar is not none %}
|
}
|
||||||
<div class="w3-col m10 l10">
|
}
|
||||||
{% else %}
|
.search-input {
|
||||||
<div class="w3-col m12 l12">
|
transform: translateY(-100%);
|
||||||
{% endif %}
|
transition: transform 0.3s ease-in-out;
|
||||||
<div class="w3-right-align">
|
}
|
||||||
{% if msg.reply is not none %}
|
.search-input.active {
|
||||||
<div class="reply">
|
transform: translateY(0);
|
||||||
<span class="blue">Replying to </span>
|
}
|
||||||
<a href="#{{msg.reply}}" target="_self" class="reply_link no-base">
|
.reply-box:active {
|
||||||
{% if msg.quoted_data is not none %}
|
background-color:rgb(200 202 205 / var(--tw-bg-opacity, 1));
|
||||||
"{{msg.quoted_data}}"
|
}
|
||||||
{% else %}
|
.info-box-tooltip {
|
||||||
this message
|
--tw-translate-x: -50%;
|
||||||
{% endif %}
|
transform: translate(var(--tw-translate-x), var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));
|
||||||
</a>
|
}
|
||||||
</div>
|
|
||||||
{% endif %}
|
|
||||||
{% if msg.meta == true or msg.media == false and msg.data is none %}
|
|
||||||
<div class="w3-panel w3-border-blue w3-pale-blue w3-rightbar w3-leftbar w3-threequarter w3-center">
|
|
||||||
{% if msg.safe %}
|
|
||||||
<p>{{ msg.data | safe or 'Not supported WhatsApp internal message' }}</p>
|
|
||||||
{% else %}
|
|
||||||
<p>{{ msg.data or 'Not supported WhatsApp internal message' }}</p>
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
|
||||||
{% if msg.caption is not none %}
|
|
||||||
<div class="w3-container">
|
|
||||||
{{ msg.caption | urlize(none, true, '_blank') }}
|
|
||||||
</div>
|
|
||||||
{% endif %}
|
|
||||||
{% else %}
|
|
||||||
{% if msg.media == false %}
|
|
||||||
{{ msg.data | sanitize_except() | urlize(none, true, '_blank') }}
|
|
||||||
{% else %}
|
|
||||||
{% if "image/" in msg.mime %}
|
|
||||||
<a href="{{ msg.data }}">
|
|
||||||
<img src="{{ msg.thumb if msg.thumb is not none else msg.data }}" {{ 'class="sticker"' | safe if msg.sticker }} loading="lazy"/>
|
|
||||||
</a>
|
|
||||||
{% elif "audio/" in msg.mime %}
|
|
||||||
<audio controls="controls" autobuffer="autobuffer">
|
|
||||||
<source src="{{ msg.data }}" />
|
|
||||||
</audio>
|
|
||||||
{% elif "video/" in msg.mime %}
|
|
||||||
<video class="lazy" autobuffer {% if msg.message_type|int == 13 or msg.message_type|int == 11 %}autoplay muted loop playsinline{%else%}controls{% endif %}>
|
|
||||||
<source type="{{ msg.mime }}" data-src="{{ msg.data }}" />
|
|
||||||
</video>
|
|
||||||
{% elif "/" in msg.mime %}
|
|
||||||
<div class="w3-panel w3-border-blue w3-pale-blue w3-rightbar w3-leftbar w3-threequarter w3-center">
|
|
||||||
<p>The file cannot be displayed here, however it should be located at <a href="./{{ msg.data }}">here</a></p>
|
|
||||||
</div>
|
|
||||||
{% else %}
|
|
||||||
{% filter escape %}{{ msg.data }}{% endfilter %}
|
|
||||||
{% endif %}
|
|
||||||
{% if msg.caption is not none %}
|
|
||||||
<div class="w3-container">
|
|
||||||
{{ msg.caption | urlize(none, true, '_blank') }}
|
|
||||||
</div>
|
|
||||||
{% endif %}
|
|
||||||
{% endif %}
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
{% if not no_avatar and my_avatar is not none %}
|
|
||||||
<div class="w3-col m2 l2 pad-left-10">
|
|
||||||
<a href="{{ my_avatar }}">
|
|
||||||
<img src="{{ my_avatar }}" onerror="this.style.display='none'" class="avatar" loading="lazy">
|
|
||||||
</a>
|
|
||||||
</div>
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
|
||||||
{% else %}
|
|
||||||
<div class="w3-row">
|
|
||||||
<div class="w3-left pad-right-10 name">
|
|
||||||
{% if msg.sender is not none %}
|
|
||||||
{{ msg.sender }}
|
|
||||||
{% else %}
|
|
||||||
{{ name }}
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
|
||||||
<div class="w3-right-align blue">{{ msg.time }}</div>
|
|
||||||
</div>
|
|
||||||
<div class="w3-row">
|
|
||||||
{% if not no_avatar %}
|
|
||||||
<div class="w3-col m2 l2">
|
|
||||||
{% if their_avatar is not none %}
|
|
||||||
<a href="{{ their_avatar }}"><img src="{{ their_avatar_thumb or '' }}" onerror="this.style.display='none'" class="avatar" loading="lazy"></a>
|
|
||||||
{% else %}
|
|
||||||
<img src="{{ their_avatar_thumb or '' }}" onerror="this.style.display='none'" class="avatar" loading="lazy">
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
|
||||||
<div class="w3-col m10 l10">
|
|
||||||
{% else %}
|
|
||||||
<div class="w3-col m12 l12">
|
|
||||||
{% endif %}
|
|
||||||
<div class="w3-left-align">
|
|
||||||
{% if msg.reply is not none %}
|
|
||||||
<div class="reply">
|
|
||||||
<span class="blue">Replying to </span>
|
|
||||||
<a href="#{{msg.reply}}" target="_self" class="reply_link no-base">
|
|
||||||
{% if msg.quoted_data is not none %}
|
|
||||||
"{{msg.quoted_data}}"
|
|
||||||
{% else %}
|
|
||||||
this message
|
|
||||||
{% endif %}
|
|
||||||
</a>
|
|
||||||
</div>
|
|
||||||
{% endif %}
|
|
||||||
{% if msg.meta == true or msg.media == false and msg.data is none %}
|
|
||||||
<div class="w3-panel w3-border-blue w3-pale-blue w3-rightbar w3-leftbar w3-threequarter w3-center">
|
|
||||||
{% if msg.safe %}
|
|
||||||
<p>{{ msg.data | safe or 'Not supported WhatsApp internal message' }}</p>
|
|
||||||
{% else %}
|
|
||||||
<p>{{ msg.data or 'Not supported WhatsApp internal message' }}</p>
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
|
||||||
{% if msg.caption is not none %}
|
|
||||||
<div class="w3-container">
|
|
||||||
{{ msg.caption | urlize(none, true, '_blank') }}
|
|
||||||
</div>
|
|
||||||
{% endif %}
|
|
||||||
{% else %}
|
|
||||||
{% if msg.media == false %}
|
|
||||||
{{ msg.data | sanitize_except() | urlize(none, true, '_blank') }}
|
|
||||||
{% else %}
|
|
||||||
{% if "image/" in msg.mime %}
|
|
||||||
<a href="{{ msg.data }}">
|
|
||||||
<img src="{{ msg.thumb if msg.thumb is not none else msg.data }}" {{ 'class="sticker"' | safe if msg.sticker }} loading="lazy"/>
|
|
||||||
</a>
|
|
||||||
{% elif "audio/" in msg.mime %}
|
|
||||||
<audio controls="controls" autobuffer="autobuffer">
|
|
||||||
<source src="{{ msg.data }}" />
|
|
||||||
</audio>
|
|
||||||
{% elif "video/" in msg.mime %}
|
|
||||||
<video class="lazy" autobuffer {% if msg.message_type|int == 13 or msg.message_type|int == 11 %}autoplay muted loop playsinline{%else%}controls{% endif %}>
|
|
||||||
<source type="{{ msg.mime }}" data-src="{{ msg.data }}" />
|
|
||||||
</video>
|
|
||||||
{% elif "/" in msg.mime %}
|
|
||||||
<div class="w3-panel w3-border-blue w3-pale-blue w3-rightbar w3-leftbar w3-threequarter w3-center">
|
|
||||||
<p>The file cannot be displayed here, however it should be located at <a href="./{{ msg.data }}">here</a></p>
|
|
||||||
</div>
|
|
||||||
{% else %}
|
|
||||||
{% filter escape %}{{ msg.data }}{% endfilter %}
|
|
||||||
{% endif %}
|
|
||||||
{% if msg.caption is not none %}
|
|
||||||
<div class="w3-container">
|
|
||||||
{{ msg.caption | urlize(none, true, '_blank') }}
|
|
||||||
</div>
|
|
||||||
{% endif %}
|
|
||||||
{% endif %}
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
|
||||||
{% endfor %}
|
|
||||||
</div>
|
|
||||||
</article>
|
|
||||||
<footer class="w3-center">
|
|
||||||
<h2>
|
|
||||||
{% if previous %}
|
|
||||||
<a href="./{{ previous }}" target="_self">Previous</a>
|
|
||||||
{% endif %}
|
|
||||||
<h2>
|
|
||||||
{% if next %}
|
|
||||||
<a href="./{{ next }}" target="_self">Next</a>
|
|
||||||
{% else %}
|
|
||||||
End of History
|
|
||||||
{% endif %}
|
|
||||||
</h2>
|
|
||||||
<br>
|
|
||||||
Portions of this page are reproduced from <a href="https://web.dev/articles/lazy-loading-video">work</a> created and <a href="https://developers.google.com/readme/policies">shared by Google</a> and used according to terms described in the <a href="https://www.apache.org/licenses/LICENSE-2.0">Apache 2.0 License</a>.
|
|
||||||
</footer>
|
|
||||||
<script>
|
|
||||||
document.addEventListener("DOMContentLoaded", function() {
|
|
||||||
var lazyVideos = [].slice.call(document.querySelectorAll("video.lazy"));
|
|
||||||
|
|
||||||
if ("IntersectionObserver" in window) {
|
.status-indicator {
|
||||||
var lazyVideoObserver = new IntersectionObserver(function(entries, observer) {
|
display: inline-block;
|
||||||
entries.forEach(function(video) {
|
margin-left: 4px;
|
||||||
if (video.isIntersecting) {
|
font-size: 0.8em;
|
||||||
for (var source in video.target.children) {
|
color: #8c8c8c;
|
||||||
var videoSource = video.target.children[source];
|
}
|
||||||
if (typeof videoSource.tagName === "string" && videoSource.tagName === "SOURCE") {
|
|
||||||
videoSource.src = videoSource.dataset.src;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
video.target.load();
|
.status-indicator.read {
|
||||||
video.target.classList.remove("lazy");
|
color: #34B7F1;
|
||||||
lazyVideoObserver.unobserve(video.target);
|
}
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
lazyVideos.forEach(function(lazyVideo) {
|
.play-icon {
|
||||||
lazyVideoObserver.observe(lazyVideo);
|
width: 0;
|
||||||
});
|
height: 0;
|
||||||
}
|
border-left: 8px solid white;
|
||||||
});
|
border-top: 5px solid transparent;
|
||||||
</script>
|
border-bottom: 5px solid transparent;
|
||||||
<script>
|
filter: drop-shadow(0 1px 2px rgba(0, 0, 0, 0.3));
|
||||||
// Prevent the <base> tag from affecting links with the class "no-base"
|
}
|
||||||
document.querySelectorAll('.no-base').forEach(link => {
|
|
||||||
link.addEventListener('click', function(event) {
|
.speaker-icon {
|
||||||
const href = this.getAttribute('href');
|
position: relative;
|
||||||
if (href.startsWith('#')) {
|
width: 8px;
|
||||||
window.location.hash = href;
|
height: 6px;
|
||||||
event.preventDefault();
|
background: #666;
|
||||||
}
|
border-radius: 1px 0 0 1px;
|
||||||
});
|
}
|
||||||
});
|
|
||||||
</script>
|
.speaker-icon::before {
|
||||||
</body>
|
content: '';
|
||||||
|
position: absolute;
|
||||||
|
right: -4px;
|
||||||
|
top: -1px;
|
||||||
|
width: 0;
|
||||||
|
height: 0;
|
||||||
|
border-left: 4px solid #666;
|
||||||
|
border-top: 4px solid transparent;
|
||||||
|
border-bottom: 4px solid transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
.speaker-icon::after {
|
||||||
|
content: '';
|
||||||
|
position: absolute;
|
||||||
|
right: -8px;
|
||||||
|
top: -3px;
|
||||||
|
width: 8px;
|
||||||
|
height: 12px;
|
||||||
|
border: 2px solid #666;
|
||||||
|
border-left: none;
|
||||||
|
border-radius: 0 8px 8px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.search-icon {
|
||||||
|
width: 20px;
|
||||||
|
height: 20px;
|
||||||
|
position: relative;
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
|
||||||
|
.search-icon::before {
|
||||||
|
content: '';
|
||||||
|
position: absolute;
|
||||||
|
width: 12px;
|
||||||
|
height: 12px;
|
||||||
|
border: 2px solid #aebac1;
|
||||||
|
border-radius: 50%;
|
||||||
|
top: 2px;
|
||||||
|
left: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.search-icon::after {
|
||||||
|
content: '';
|
||||||
|
position: absolute;
|
||||||
|
width: 2px;
|
||||||
|
height: 6px;
|
||||||
|
background: #aebac1;
|
||||||
|
transform: rotate(45deg);
|
||||||
|
top: 12px;
|
||||||
|
left: 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.arrow-left {
|
||||||
|
width: 0;
|
||||||
|
height: 0;
|
||||||
|
border-top: 6px solid transparent;
|
||||||
|
border-bottom: 6px solid transparent;
|
||||||
|
border-right: 8px solid #aebac1;
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
|
||||||
|
.arrow-right {
|
||||||
|
width: 0;
|
||||||
|
height: 0;
|
||||||
|
border-top: 6px solid transparent;
|
||||||
|
border-bottom: 6px solid transparent;
|
||||||
|
border-left: 8px solid #aebac1;
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
|
||||||
|
.info-icon {
|
||||||
|
width: 20px;
|
||||||
|
height: 20px;
|
||||||
|
border: 2px solid currentColor;
|
||||||
|
border-radius: 50%;
|
||||||
|
position: relative;
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
|
||||||
|
.info-icon::before {
|
||||||
|
content: 'i';
|
||||||
|
position: absolute;
|
||||||
|
top: 50%;
|
||||||
|
left: 50%;
|
||||||
|
transform: translate(-50%, -50%);
|
||||||
|
font-size: 12px;
|
||||||
|
font-weight: bold;
|
||||||
|
font-style: normal;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
<script>
|
||||||
|
function search(event) {
|
||||||
|
keywords = document.getElementById("mainHeaderSearchInput").value;
|
||||||
|
hits = [];
|
||||||
|
document.querySelectorAll(".message-text").forEach(elem => {
|
||||||
|
if (elem.innerText.trim().includes(keywords)){
|
||||||
|
hits.push(elem.parentElement.parentElement.id);
|
||||||
|
}
|
||||||
|
})
|
||||||
|
console.log(hits);
|
||||||
|
}
|
||||||
|
</script>
|
||||||
|
<base href="{{ media_base }}" target="_blank">
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<article class="h-screen bg-whatsapp-chat-light">
|
||||||
|
<div class="w-full flex flex-col">
|
||||||
|
<div class="p-3 bg-whatsapp-dark flex items-center justify-between border-l border-[#d1d7db]">
|
||||||
|
<div class="flex items-center">
|
||||||
|
{% if not no_avatar %}
|
||||||
|
<div class="w3-col m2 l2">
|
||||||
|
{% if their_avatar is not none %}
|
||||||
|
<a href="{{ their_avatar }}"><img src="{{ their_avatar_thumb or '' }}" onerror="this.style.display='none'" class="w-10 h-10 rounded-full mr-3" loading="lazy"></a>
|
||||||
|
{% else %}
|
||||||
|
<img src="{{ their_avatar_thumb or '' }}" onerror="this.style.display='none'" class="w-10 h-10 rounded-full mr-3" loading="lazy">
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
<div>
|
||||||
|
<h2 class="text-white font-medium">{{ headline }}</h2>
|
||||||
|
{% if status is not none %}<p class="text-[#8696a0] text-xs">{{ status }}</p>{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="flex space-x-4">
|
||||||
|
<!-- <button id="searchButton">
|
||||||
|
<span class="search-icon"></span>
|
||||||
|
</button> -->
|
||||||
|
<!-- <span class="arrow-left"></span> -->
|
||||||
|
{% if previous %}
|
||||||
|
<a href="./{{ previous }}" target="_self">
|
||||||
|
<span class="arrow-left"></span>
|
||||||
|
</a>
|
||||||
|
{% endif %}
|
||||||
|
{% if next %}
|
||||||
|
<a href="./{{ next }}" target="_self">
|
||||||
|
<span class="arrow-right"></span>
|
||||||
|
</a>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
<!-- Search Input Overlay -->
|
||||||
|
<div id="mainSearchInput" class="search-input absolute article top-0 bg-whatsapp-dark p-3 flex items-center space-x-3">
|
||||||
|
<button id="closeMainSearch" class="text-[#aebac1]">
|
||||||
|
<span class="arrow-left"></span>
|
||||||
|
</button>
|
||||||
|
<input type="text" placeholder="Search..." class="flex-1 bg-[#1f2c34] text-white rounded-lg px-3 py-1 focus:outline-none" id="mainHeaderSearchInput" onkeyup="search(event)">
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="flex-1 p-5 message-list">
|
||||||
|
<div class="flex flex-col space-y-2">
|
||||||
|
<!--Date-->
|
||||||
|
{% set last = {'last': 946688461.001} %}
|
||||||
|
{% for msg in msgs -%}
|
||||||
|
{% if determine_day(last.last, msg.timestamp) is not none %}
|
||||||
|
<div class="flex justify-center">
|
||||||
|
<div class="bg-[#e1f2fb] rounded-lg px-2 py-1 text-xs text-[#54656f]">
|
||||||
|
{{ determine_day(last.last, msg.timestamp) }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% if last.update({'last': msg.timestamp}) %}{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
<!--Actual messages-->
|
||||||
|
{% if msg.from_me == true %}
|
||||||
|
<div class="flex justify-end items-center group" id="{{ msg.key_id }}">
|
||||||
|
<div class="opacity-0 group-hover:opacity-100 transition-opacity duration-200 relative mr-2">
|
||||||
|
<div class="relative">
|
||||||
|
<div class="relative group/tooltip">
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" class="h-5 w-5 text-[#8696a0] hover:text-[#54656f] cursor-pointer" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
||||||
|
<use href="#info-icon"></use>
|
||||||
|
</svg>
|
||||||
|
<div class="absolute bottom-full info-box-tooltip mb-2 hidden group-hover/tooltip:block z-50">
|
||||||
|
<div class="bg-black text-white text-xs rounded py-1 px-2 whitespace-nowrap">
|
||||||
|
Delivered at {{msg.received_timestamp or 'unknown'}}
|
||||||
|
{% if msg.read_timestamp is not none %}
|
||||||
|
<br>Read at {{ msg.read_timestamp }}
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
<div class="absolute top-full right-3 -mt-1 border-4 border-transparent border-t-black"></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="bg-whatsapp-light rounded-lg p-2 max-w-[80%] shadow-sm relative {% if msg.reactions %}mb-2{% endif %}">
|
||||||
|
{% if msg.reply is not none %}
|
||||||
|
<a href="#{{msg.reply}}" target="_self" class="no-base">
|
||||||
|
<div
|
||||||
|
class="mb-2 p-1 bg-whatsapp-chat-light rounded border-l-4 border-whatsapp text-sm reply-box">
|
||||||
|
<div class="flex items-center gap-2">
|
||||||
|
<div class="flex-1 overflow-hidden">
|
||||||
|
<p class="text-whatsapp font-medium text-xs">Replying to</p>
|
||||||
|
<p class="text-[#111b21] text-xs truncate">
|
||||||
|
{% if msg.quoted_data is not none %}
|
||||||
|
"{{msg.quoted_data}}"
|
||||||
|
{% else %}
|
||||||
|
this message
|
||||||
|
{% endif %}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
{% set replied_msg = msgs | selectattr('key_id', 'equalto', msg.reply) | first %}
|
||||||
|
{% if replied_msg and replied_msg.media == true %}
|
||||||
|
<div class="flex-shrink-0">
|
||||||
|
{% if "image/" in replied_msg.mime %}
|
||||||
|
<img src="{{ replied_msg.thumb if replied_msg.thumb is not none else replied_msg.data }}"
|
||||||
|
class="w-8 h-8 rounded object-cover" loading="lazy" />
|
||||||
|
{% elif "video/" in replied_msg.mime %}
|
||||||
|
<div class="relative w-8 h-8 rounded overflow-hidden bg-gray-200">
|
||||||
|
<img src="{{ replied_msg.thumb if replied_msg.thumb is not none else replied_msg.data }}"
|
||||||
|
class="w-full h-full object-cover" loading="lazy" />
|
||||||
|
<div class="absolute inset-0 flex items-center justify-center">
|
||||||
|
<div class="play-icon"></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% elif "audio/" in replied_msg.mime %}
|
||||||
|
<div class="w-8 h-8 rounded bg-gray-200 flex items-center justify-center">
|
||||||
|
<div class="speaker-icon"></div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</a>
|
||||||
|
{% endif %}
|
||||||
|
<p class="text-[#111b21] text-sm message-text">
|
||||||
|
{% if msg.meta == true or msg.media == false and msg.data is none %}
|
||||||
|
<div class="flex justify-center mb-2">
|
||||||
|
<div class="bg-[#FFF3C5] rounded-lg px-3 py-2 text-sm text-[#856404] flex items-center">
|
||||||
|
{% if msg.safe %}
|
||||||
|
{{ msg.data | safe or 'Not supported WhatsApp internal message' }}
|
||||||
|
{% else %}
|
||||||
|
{{ msg.data or 'Not supported WhatsApp internal message' }}
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% if msg.caption is not none %}
|
||||||
|
<p>{{ msg.caption | urlize(none, true, '_blank') }}</p>
|
||||||
|
{% endif %}
|
||||||
|
{% else %}
|
||||||
|
{% if msg.media == false %}
|
||||||
|
{{ msg.data | sanitize_except() | urlize(none, true, '_blank') }}
|
||||||
|
{% else %}
|
||||||
|
{% if "image/" in msg.mime %}
|
||||||
|
<a href="{{ msg.data }}">
|
||||||
|
<img src="{{ msg.thumb if msg.thumb is not none else msg.data }}" {{ 'class="sticker"' | safe if msg.sticker }} loading="lazy"/>
|
||||||
|
</a>
|
||||||
|
{% elif "audio/" in msg.mime %}
|
||||||
|
<audio controls="controls" autobuffer="autobuffer">
|
||||||
|
<source src="{{ msg.data }}" />
|
||||||
|
</audio>
|
||||||
|
{% elif "video/" in msg.mime %}
|
||||||
|
<video class="lazy" autobuffer {% if msg.message_type|int == 13 or msg.message_type|int == 11 %}autoplay muted loop playsinline{%else%}controls{% endif %}>
|
||||||
|
<source type="{{ msg.mime }}" data-src="{{ msg.data }}" />
|
||||||
|
</video>
|
||||||
|
{% elif "/" in msg.mime %}
|
||||||
|
The file cannot be displayed here, however it should be located at <a href="./{{ msg.data }}">here</a>
|
||||||
|
{% else %}
|
||||||
|
{% filter escape %}{{ msg.data }}{% endfilter %}
|
||||||
|
{% endif %}
|
||||||
|
{% if msg.caption is not none %}
|
||||||
|
<p class='mt-1 {% if "audio/" in msg.mime %}text-[#808080]{% endif %}'>
|
||||||
|
{{ msg.caption | urlize(none, true, '_blank') }}
|
||||||
|
</p>
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
</p>
|
||||||
|
<p class="text-[10px] text-[#667781] text-right mt-1">{{ msg.time }}
|
||||||
|
<span class="status-indicator{% if msg.read_timestamp %} read{% endif %}">
|
||||||
|
{% if msg.received_timestamp %}
|
||||||
|
✓✓
|
||||||
|
{% else %}
|
||||||
|
✓
|
||||||
|
{% endif %}
|
||||||
|
</span>
|
||||||
|
</p>
|
||||||
|
{% if msg.reactions %}
|
||||||
|
<div class="flex flex-wrap gap-1 mt-1 justify-end absolute -bottom-3 -right-2">
|
||||||
|
{% for sender, emoji in msg.reactions.items() %}
|
||||||
|
<div class="bg-white rounded-full px-1.5 py-0.5 text-xs shadow-sm border border-gray-200 cursor-help" title="{{ sender }}">
|
||||||
|
{{ emoji }}
|
||||||
|
</div>
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
<div class="flex justify-start items-center group" id="{{ msg.key_id }}">
|
||||||
|
<div class="bg-white rounded-lg p-2 max-w-[80%] shadow-sm relative {% if msg.reactions %}mb-2{% endif %}">
|
||||||
|
{% if msg.reply is not none %}
|
||||||
|
<a href="#{{msg.reply}}" target="_self" class="no-base">
|
||||||
|
<div
|
||||||
|
class="mb-2 p-1 bg-whatsapp-chat-light rounded border-l-4 border-whatsapp text-sm reply-box">
|
||||||
|
<div class="flex items-center gap-2">
|
||||||
|
<div class="flex-1 overflow-hidden">
|
||||||
|
<p class="text-whatsapp font-medium text-xs">Replying to</p>
|
||||||
|
<p class="text-[#808080] text-xs truncate">
|
||||||
|
{% if msg.quoted_data is not none %}
|
||||||
|
{{msg.quoted_data}}
|
||||||
|
{% else %}
|
||||||
|
this message
|
||||||
|
{% endif %}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
{% set replied_msg = msgs | selectattr('key_id', 'equalto', msg.reply) | first %}
|
||||||
|
{% if replied_msg and replied_msg.media == true %}
|
||||||
|
<div class="flex-shrink-0">
|
||||||
|
{% if "image/" in replied_msg.mime %}
|
||||||
|
<img src="{{ replied_msg.thumb if replied_msg.thumb is not none else replied_msg.data }}"
|
||||||
|
class="w-8 h-8 rounded object-cover" loading="lazy" />
|
||||||
|
{% elif "video/" in replied_msg.mime %}
|
||||||
|
<div class="relative w-8 h-8 rounded overflow-hidden bg-gray-200">
|
||||||
|
<img src="{{ replied_msg.thumb if replied_msg.thumb is not none else replied_msg.data }}"
|
||||||
|
class="w-full h-full object-cover" loading="lazy" />
|
||||||
|
<div class="absolute inset-0 flex items-center justify-center">
|
||||||
|
<div class="play-icon"></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% elif "audio/" in replied_msg.mime %}
|
||||||
|
<div class="w-8 h-8 rounded bg-gray-200 flex items-center justify-center">
|
||||||
|
<div class="speaker-icon"></div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</a>
|
||||||
|
{% endif %}
|
||||||
|
<p class="text-[#111b21] text-sm">
|
||||||
|
{% if msg.meta == true or msg.media == false and msg.data is none %}
|
||||||
|
<div class="flex justify-center mb-2">
|
||||||
|
<div class="bg-[#FFF3C5] rounded-lg px-3 py-2 text-sm text-[#856404] flex items-center">
|
||||||
|
{% if msg.safe %}
|
||||||
|
{{ msg.data | safe or 'Not supported WhatsApp internal message' }}
|
||||||
|
{% else %}
|
||||||
|
{{ msg.data or 'Not supported WhatsApp internal message' }}
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% if msg.caption is not none %}
|
||||||
|
<p>{{ msg.caption | urlize(none, true, '_blank') }}</p>
|
||||||
|
{% endif %}
|
||||||
|
{% else %}
|
||||||
|
{% if msg.media == false %}
|
||||||
|
{{ msg.data | sanitize_except() | urlize(none, true, '_blank') }}
|
||||||
|
{% else %}
|
||||||
|
{% if "image/" in msg.mime %}
|
||||||
|
<a href="{{ msg.data }}">
|
||||||
|
<img src="{{ msg.thumb if msg.thumb is not none else msg.data }}" {{ 'class="sticker"' | safe if msg.sticker }} loading="lazy"/>
|
||||||
|
</a>
|
||||||
|
{% elif "audio/" in msg.mime %}
|
||||||
|
<audio controls="controls" autobuffer="autobuffer">
|
||||||
|
<source src="{{ msg.data }}" />
|
||||||
|
</audio>
|
||||||
|
{% elif "video/" in msg.mime %}
|
||||||
|
<video class="lazy" autobuffer {% if msg.message_type|int == 13 or msg.message_type|int == 11 %}autoplay muted loop playsinline{%else%}controls{% endif %}>
|
||||||
|
<source type="{{ msg.mime }}" data-src="{{ msg.data }}" />
|
||||||
|
</video>
|
||||||
|
{% elif "/" in msg.mime %}
|
||||||
|
The file cannot be displayed here, however it should be located at <a href="./{{ msg.data }}">here</a>
|
||||||
|
{% else %}
|
||||||
|
{% filter escape %}{{ msg.data }}{% endfilter %}
|
||||||
|
{% endif %}
|
||||||
|
{% if msg.caption is not none %}
|
||||||
|
<p class='mt-1 {% if "audio/" in msg.mime %}text-[#808080]{% endif %}'>
|
||||||
|
{{ msg.caption | urlize(none, true, '_blank') }}
|
||||||
|
</p>
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
</p>
|
||||||
|
<div class="flex items-baseline text-[10px] text-[#667781] mt-1 gap-2">
|
||||||
|
<span class="flex-shrink-0">
|
||||||
|
{% if msg.sender is not none %}
|
||||||
|
{{ msg.sender }}
|
||||||
|
{% endif %}
|
||||||
|
</span>
|
||||||
|
<span class="flex-grow min-w-[4px]"></span>
|
||||||
|
<span class="flex-shrink-0">{{ msg.time }}</span>
|
||||||
|
</div>
|
||||||
|
{% if msg.reactions %}
|
||||||
|
<div class="flex flex-wrap gap-1 mt-1 justify-start absolute -bottom-3 -left-2">
|
||||||
|
{% for sender, emoji in msg.reactions.items() %}
|
||||||
|
<div class="bg-gray-100 rounded-full px-1.5 py-0.5 text-xs shadow-sm border border-gray-200 cursor-help" title="{{ sender }}">
|
||||||
|
{{ emoji }}
|
||||||
|
</div>
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
<!-- <div class="opacity-0 group-hover:opacity-100 transition-opacity duration-200 relative ml-2">
|
||||||
|
<div class="relative">
|
||||||
|
<div class="relative group/tooltip">
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" class="h-5 w-5 text-[#8696a0] hover:text-[#54656f] cursor-pointer" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
||||||
|
<use href="#info-icon"></use>
|
||||||
|
</svg>
|
||||||
|
<div class="absolute bottom-full info-box-tooltip mb-2 hidden group-hover/tooltip:block z-50">
|
||||||
|
<div class="bg-black text-white text-xs rounded py-1 px-2 whitespace-nowrap">
|
||||||
|
Received at {{msg.received_timestamp or 'unknown'}}
|
||||||
|
</div>
|
||||||
|
<div class="absolute top-full right-3 ml-1 border-4 border-transparent border-t-black"></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div> -->
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
<footer>
|
||||||
|
{% if not next %}
|
||||||
|
<div class="flex justify-center mb-6">
|
||||||
|
<div class="bg-[#e1f2fb] rounded-lg px-3 py-2 text-sm text-[#54656f]">
|
||||||
|
End of History
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
<br>
|
||||||
|
Portions of this page are reproduced from <a href="https://web.dev/articles/lazy-loading-video">work</a>
|
||||||
|
created and <a href="https://developers.google.com/readme/policies">shared by Google</a> and used
|
||||||
|
according to terms described in the <a href="https://www.apache.org/licenses/LICENSE-2.0">Apache 2.0
|
||||||
|
License</a>.
|
||||||
|
</footer>
|
||||||
|
</div>
|
||||||
|
</article>
|
||||||
|
</body>
|
||||||
|
<script>
|
||||||
|
// Search functionality
|
||||||
|
const searchButton = document.getElementById('searchButton');
|
||||||
|
const mainSearchInput = document.getElementById('mainSearchInput');
|
||||||
|
const closeMainSearch = document.getElementById('closeMainSearch');
|
||||||
|
const mainHeaderSearchInput = document.getElementById('mainHeaderSearchInput');
|
||||||
|
|
||||||
|
// Function to show search input
|
||||||
|
const showSearch = () => {
|
||||||
|
mainSearchInput.classList.add('active');
|
||||||
|
mainHeaderSearchInput.focus();
|
||||||
|
};
|
||||||
|
|
||||||
|
// Function to hide search input
|
||||||
|
const hideSearch = () => {
|
||||||
|
mainSearchInput.classList.remove('active');
|
||||||
|
mainHeaderSearchInput.value = '';
|
||||||
|
};
|
||||||
|
|
||||||
|
// Event listeners
|
||||||
|
searchButton.addEventListener('click', showSearch);
|
||||||
|
closeMainSearch.addEventListener('click', hideSearch);
|
||||||
|
|
||||||
|
// Handle ESC key
|
||||||
|
document.addEventListener('keydown', (event) => {
|
||||||
|
if (event.key === 'Escape' && mainSearchInput.classList.contains('active')) {
|
||||||
|
hideSearch();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
<script>
|
||||||
|
document.addEventListener("DOMContentLoaded", function() {
|
||||||
|
var lazyVideos = [].slice.call(document.querySelectorAll("video.lazy"));
|
||||||
|
|
||||||
|
if ("IntersectionObserver" in window) {
|
||||||
|
var lazyVideoObserver = new IntersectionObserver(function(entries, observer) {
|
||||||
|
entries.forEach(function(video) {
|
||||||
|
if (video.isIntersecting) {
|
||||||
|
for (var source in video.target.children) {
|
||||||
|
var videoSource = video.target.children[source];
|
||||||
|
if (typeof videoSource.tagName === "string" && videoSource.tagName === "SOURCE") {
|
||||||
|
videoSource.src = videoSource.dataset.src;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
video.target.load();
|
||||||
|
video.target.classList.remove("lazy");
|
||||||
|
lazyVideoObserver.unobserve(video.target);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
lazyVideos.forEach(function(lazyVideo) {
|
||||||
|
lazyVideoObserver.observe(lazyVideo);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
<script>
|
||||||
|
// Prevent the <base> tag from affecting links with the class "no-base"
|
||||||
|
document.querySelectorAll('.no-base').forEach(link => {
|
||||||
|
link.addEventListener('click', function(event) {
|
||||||
|
const href = this.getAttribute('href');
|
||||||
|
if (href.startsWith('#')) {
|
||||||
|
window.location.hash = href;
|
||||||
|
event.preventDefault();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
</script>
|
||||||
</html>
|
</html>
|
||||||
@@ -1,467 +0,0 @@
|
|||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<title>Whatsapp - {{ name }}</title>
|
|
||||||
<meta charset="UTF-8">
|
|
||||||
<script src="https://cdn.tailwindcss.com"></script>
|
|
||||||
<script>
|
|
||||||
tailwind.config = {
|
|
||||||
theme: {
|
|
||||||
extend: {
|
|
||||||
colors: {
|
|
||||||
whatsapp: {
|
|
||||||
light: '#e7ffdb',
|
|
||||||
DEFAULT: '#25D366',
|
|
||||||
dark: '#075E54',
|
|
||||||
chat: '#efeae2',
|
|
||||||
'chat-light': '#f0f2f5',
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
</script>
|
|
||||||
<style>
|
|
||||||
body, html {
|
|
||||||
height: 100%;
|
|
||||||
margin: 0;
|
|
||||||
padding: 0;
|
|
||||||
scroll-behavior: smooth !important;
|
|
||||||
}
|
|
||||||
.chat-list {
|
|
||||||
height: calc(100vh - 120px);
|
|
||||||
overflow-y: auto;
|
|
||||||
}
|
|
||||||
.message-list {
|
|
||||||
height: calc(100vh - 90px);
|
|
||||||
overflow-y: auto;
|
|
||||||
}
|
|
||||||
@media (max-width: 640px) {
|
|
||||||
.chat-list, .message-list {
|
|
||||||
height: calc(100vh - 108px);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
header {
|
|
||||||
position: fixed;
|
|
||||||
z-index: 20;
|
|
||||||
border-bottom: 2px solid #e3e6e7;
|
|
||||||
font-size: 2em;
|
|
||||||
font-weight: bolder;
|
|
||||||
background-color: white;
|
|
||||||
padding: 20px 0 20px 0;
|
|
||||||
}
|
|
||||||
footer {
|
|
||||||
margin-top: 10px;
|
|
||||||
border-top: 2px solid #e3e6e7;
|
|
||||||
padding: 20px 0 20px 0;
|
|
||||||
}
|
|
||||||
article {
|
|
||||||
width:430px;
|
|
||||||
margin: auto;
|
|
||||||
z-index:10;
|
|
||||||
font-size: 15px;
|
|
||||||
word-wrap: break-word;
|
|
||||||
}
|
|
||||||
img, video, audio{
|
|
||||||
max-width:100%;
|
|
||||||
box-sizing: border-box;
|
|
||||||
}
|
|
||||||
div.reply{
|
|
||||||
font-size: 13px;
|
|
||||||
text-decoration: none;
|
|
||||||
}
|
|
||||||
div:target::before {
|
|
||||||
content: '';
|
|
||||||
display: block;
|
|
||||||
height: 115px;
|
|
||||||
margin-top: -115px;
|
|
||||||
visibility: hidden;
|
|
||||||
}
|
|
||||||
div:target {
|
|
||||||
animation: 3s highlight;
|
|
||||||
}
|
|
||||||
.avatar {
|
|
||||||
border-radius:50%;
|
|
||||||
overflow:hidden;
|
|
||||||
max-width: 64px;
|
|
||||||
max-height: 64px;
|
|
||||||
}
|
|
||||||
.name {
|
|
||||||
color: #3892da;
|
|
||||||
}
|
|
||||||
.pad-left-10 {
|
|
||||||
padding-left: 10px;
|
|
||||||
}
|
|
||||||
.pad-right-10 {
|
|
||||||
padding-right: 10px;
|
|
||||||
}
|
|
||||||
.reply_link {
|
|
||||||
color: #168acc;
|
|
||||||
}
|
|
||||||
.blue {
|
|
||||||
color: #70777a;
|
|
||||||
}
|
|
||||||
.sticker {
|
|
||||||
max-width: 100px !important;
|
|
||||||
max-height: 100px !important;
|
|
||||||
}
|
|
||||||
@keyframes highlight {
|
|
||||||
from {
|
|
||||||
background-color: rgba(37, 211, 102, 0.1);
|
|
||||||
}
|
|
||||||
to {
|
|
||||||
background-color: transparent;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
.search-input {
|
|
||||||
transform: translateY(-100%);
|
|
||||||
transition: transform 0.3s ease-in-out;
|
|
||||||
}
|
|
||||||
.search-input.active {
|
|
||||||
transform: translateY(0);
|
|
||||||
}
|
|
||||||
.reply-box:active {
|
|
||||||
background-color:rgb(200 202 205 / var(--tw-bg-opacity, 1));
|
|
||||||
}
|
|
||||||
.info-box-tooltip {
|
|
||||||
--tw-translate-x: -50%;
|
|
||||||
transform: translate(var(--tw-translate-x), var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
<script>
|
|
||||||
function search(event) {
|
|
||||||
keywords = document.getElementById("mainHeaderSearchInput").value;
|
|
||||||
hits = [];
|
|
||||||
document.querySelectorAll(".message-text").forEach(elem => {
|
|
||||||
if (elem.innerText.trim().includes(keywords)){
|
|
||||||
hits.push(elem.parentElement.parentElement.id);
|
|
||||||
}
|
|
||||||
})
|
|
||||||
console.log(hits);
|
|
||||||
}
|
|
||||||
</script>
|
|
||||||
<base href="{{ media_base }}" target="_blank">
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<article class="h-screen bg-whatsapp-chat-light">
|
|
||||||
<div class="w-full flex flex-col">
|
|
||||||
<div class="p-3 bg-whatsapp-dark flex items-center justify-between border-l border-[#d1d7db]">
|
|
||||||
<div class="flex items-center">
|
|
||||||
{% if not no_avatar %}
|
|
||||||
<div class="w3-col m2 l2">
|
|
||||||
{% if their_avatar is not none %}
|
|
||||||
<a href="{{ their_avatar }}"><img src="{{ their_avatar_thumb or '' }}" onerror="this.style.display='none'" class="w-10 h-10 rounded-full mr-3" loading="lazy"></a>
|
|
||||||
{% else %}
|
|
||||||
<img src="{{ their_avatar_thumb or '' }}" onerror="this.style.display='none'" class="w-10 h-10 rounded-full mr-3" loading="lazy">
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
|
||||||
{% endif %}
|
|
||||||
<div>
|
|
||||||
<h2 class="text-white font-medium">{{ headline }}</h2>
|
|
||||||
{% if status is not none %}<p class="text-[#8696a0] text-xs">{{ status }}</p>{% endif %}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="flex space-x-4">
|
|
||||||
<!-- <button id="searchButton">
|
|
||||||
<svg xmlns="http://www.w3.org/2000/svg" class="h-5 w-5 text-[#aebac1]" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
|
||||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z" />
|
|
||||||
</svg>
|
|
||||||
</button> -->
|
|
||||||
<!-- <svg xmlns="http://www.w3.org/2000/svg" class="h-5 w-5 text-[#aebac1]" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
|
||||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15 19l-7-7 7-7" />
|
|
||||||
</svg> -->
|
|
||||||
{% if previous %}
|
|
||||||
<a href="./{{ previous }}" target="_self">
|
|
||||||
<svg xmlns="http://www.w3.org/2000/svg" class="h-5 w-5 text-[#aebac1]" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
|
||||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15 5l-7 7 7 7" />
|
|
||||||
</svg>
|
|
||||||
</a>
|
|
||||||
{% endif %}
|
|
||||||
{% if next %}
|
|
||||||
<a href="./{{ next }}" target="_self">
|
|
||||||
<svg xmlns="http://www.w3.org/2000/svg" class="h-5 w-5 text-[#aebac1]" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
|
||||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 5l7 7-7 7" />
|
|
||||||
</svg>
|
|
||||||
</a>
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
|
||||||
<!-- Search Input Overlay -->
|
|
||||||
<div id="mainSearchInput" class="search-input absolute article top-0 bg-whatsapp-dark p-3 flex items-center space-x-3">
|
|
||||||
<button id="closeMainSearch" class="text-[#aebac1]">
|
|
||||||
<svg xmlns="http://www.w3.org/2000/svg" class="h-6 w-6" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
|
||||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15 19l-7-7 7-7" />
|
|
||||||
</svg>
|
|
||||||
</button>
|
|
||||||
<input type="text" placeholder="Search..." class="flex-1 bg-[#1f2c34] text-white rounded-lg px-3 py-1 focus:outline-none" id="mainHeaderSearchInput" onkeyup="search(event)">
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="flex-1 p-5 message-list">
|
|
||||||
<div class="flex flex-col space-y-2">
|
|
||||||
<!--Date-->
|
|
||||||
{% set last = {'last': 946688461.001} %}
|
|
||||||
{% for msg in msgs -%}
|
|
||||||
{% if determine_day(last.last, msg.timestamp) is not none %}
|
|
||||||
<div class="flex justify-center">
|
|
||||||
<div class="bg-[#e1f2fb] rounded-lg px-2 py-1 text-xs text-[#54656f]">
|
|
||||||
{{ determine_day(last.last, msg.timestamp) }}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
{% if last.update({'last': msg.timestamp}) %}{% endif %}
|
|
||||||
{% endif %}
|
|
||||||
<!--Actual messages-->
|
|
||||||
{% if msg.from_me == true %}
|
|
||||||
<div class="flex justify-end items-center group" id="{{ msg.key_id }}">
|
|
||||||
<div class="opacity-0 group-hover:opacity-100 transition-opacity duration-200 relative mr-2">
|
|
||||||
<div class="relative">
|
|
||||||
<div class="relative group/tooltip">
|
|
||||||
<svg xmlns="http://www.w3.org/2000/svg" class="h-5 w-5 text-[#8696a0] hover:text-[#54656f] cursor-pointer" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
|
||||||
<use href="#info-icon"></use>
|
|
||||||
</svg>
|
|
||||||
<div class="absolute bottom-full info-box-tooltip mb-2 hidden group-hover/tooltip:block z-50">
|
|
||||||
<div class="bg-black text-white text-xs rounded py-1 px-2 whitespace-nowrap">
|
|
||||||
Delivered at {{msg.received_timestamp or 'unknown'}}
|
|
||||||
{% if msg.read_timestamp is not none %}
|
|
||||||
<br>Read at {{ msg.read_timestamp }}
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
|
||||||
<div class="absolute top-full right-3 -mt-1 border-4 border-transparent border-t-black"></div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="bg-whatsapp-light rounded-lg p-2 max-w-[80%] shadow-sm">
|
|
||||||
{% if msg.reply is not none %}
|
|
||||||
<a href="#{{msg.reply}}" target="_self" class="no-base">
|
|
||||||
<div class="mb-2 p-1 bg-whatsapp-chat-light rounded border-l-4 border-whatsapp text-sm reply-box">
|
|
||||||
<p class="text-whatsapp font-medium text-xs">Replying to</p>
|
|
||||||
<p class="text-[#111b21] text-xs truncate">
|
|
||||||
{% if msg.quoted_data is not none %}
|
|
||||||
"{{msg.quoted_data}}"
|
|
||||||
{% else %}
|
|
||||||
this message
|
|
||||||
{% endif %}
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</a>
|
|
||||||
{% endif %}
|
|
||||||
<p class="text-[#111b21] text-sm message-text">
|
|
||||||
{% if msg.meta == true or msg.media == false and msg.data is none %}
|
|
||||||
<div class="flex justify-center mb-2">
|
|
||||||
<div class="bg-[#FFF3C5] rounded-lg px-3 py-2 text-sm text-[#856404] flex items-center">
|
|
||||||
{% if msg.safe %}
|
|
||||||
{{ msg.data | safe or 'Not supported WhatsApp internal message' }}
|
|
||||||
{% else %}
|
|
||||||
{{ msg.data or 'Not supported WhatsApp internal message' }}
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
{% if msg.caption is not none %}
|
|
||||||
<p>{{ msg.caption | urlize(none, true, '_blank') }}</p>
|
|
||||||
{% endif %}
|
|
||||||
{% else %}
|
|
||||||
{% if msg.media == false %}
|
|
||||||
{{ msg.data | sanitize_except() | urlize(none, true, '_blank') }}
|
|
||||||
{% else %}
|
|
||||||
{% if "image/" in msg.mime %}
|
|
||||||
<a href="{{ msg.data }}">
|
|
||||||
<img src="{{ msg.thumb if msg.thumb is not none else msg.data }}" {{ 'class="sticker"' | safe if msg.sticker }} loading="lazy"/>
|
|
||||||
</a>
|
|
||||||
{% elif "audio/" in msg.mime %}
|
|
||||||
<audio controls="controls" autobuffer="autobuffer">
|
|
||||||
<source src="{{ msg.data }}" />
|
|
||||||
</audio>
|
|
||||||
{% elif "video/" in msg.mime %}
|
|
||||||
<video class="lazy" autobuffer {% if msg.message_type|int == 13 or msg.message_type|int == 11 %}autoplay muted loop playsinline{%else%}controls{% endif %}>
|
|
||||||
<source type="{{ msg.mime }}" data-src="{{ msg.data }}" />
|
|
||||||
</video>
|
|
||||||
{% elif "/" in msg.mime %}
|
|
||||||
The file cannot be displayed here, however it should be located at <a href="./{{ msg.data }}">here</a>
|
|
||||||
{% else %}
|
|
||||||
{% filter escape %}{{ msg.data }}{% endfilter %}
|
|
||||||
{% endif %}
|
|
||||||
{% if msg.caption is not none %}
|
|
||||||
{{ msg.caption | urlize(none, true, '_blank') }}
|
|
||||||
{% endif %}
|
|
||||||
{% endif %}
|
|
||||||
{% endif %}
|
|
||||||
</p>
|
|
||||||
<p class="text-[10px] text-[#667781] text-right mt-1">{{ msg.time }}</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
{% else %}
|
|
||||||
<div class="flex justify-start items-center group" id="{{ msg.key_id }}">
|
|
||||||
<div class="bg-white rounded-lg p-2 max-w-[80%] shadow-sm">
|
|
||||||
{% if msg.reply is not none %}
|
|
||||||
<a href="#{{msg.reply}}" target="_self" class="no-base">
|
|
||||||
<div class="mb-2 p-1 bg-whatsapp-chat-light rounded border-l-4 border-whatsapp text-sm reply-box">
|
|
||||||
<p class="text-whatsapp font-medium text-xs">Replying to</p>
|
|
||||||
<p class="text-[#808080] text-xs truncate">
|
|
||||||
{% if msg.quoted_data is not none %}
|
|
||||||
{{msg.quoted_data}}
|
|
||||||
{% else %}
|
|
||||||
this message
|
|
||||||
{% endif %}
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</a>
|
|
||||||
{% endif %}
|
|
||||||
<p class="text-[#111b21] text-sm">
|
|
||||||
{% if msg.meta == true or msg.media == false and msg.data is none %}
|
|
||||||
<div class="flex justify-center mb-2">
|
|
||||||
<div class="bg-[#FFF3C5] rounded-lg px-3 py-2 text-sm text-[#856404] flex items-center">
|
|
||||||
{% if msg.safe %}
|
|
||||||
{{ msg.data | safe or 'Not supported WhatsApp internal message' }}
|
|
||||||
{% else %}
|
|
||||||
{{ msg.data or 'Not supported WhatsApp internal message' }}
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
{% if msg.caption is not none %}
|
|
||||||
<p>{{ msg.caption | urlize(none, true, '_blank') }}</p>
|
|
||||||
{% endif %}
|
|
||||||
{% else %}
|
|
||||||
{% if msg.media == false %}
|
|
||||||
{{ msg.data | sanitize_except() | urlize(none, true, '_blank') }}
|
|
||||||
{% else %}
|
|
||||||
{% if "image/" in msg.mime %}
|
|
||||||
<a href="{{ msg.data }}">
|
|
||||||
<img src="{{ msg.thumb if msg.thumb is not none else msg.data }}" {{ 'class="sticker"' | safe if msg.sticker }} loading="lazy"/>
|
|
||||||
</a>
|
|
||||||
{% elif "audio/" in msg.mime %}
|
|
||||||
<audio controls="controls" autobuffer="autobuffer">
|
|
||||||
<source src="{{ msg.data }}" />
|
|
||||||
</audio>
|
|
||||||
{% elif "video/" in msg.mime %}
|
|
||||||
<video class="lazy" autobuffer {% if msg.message_type|int == 13 or msg.message_type|int == 11 %}autoplay muted loop playsinline{%else%}controls{% endif %}>
|
|
||||||
<source type="{{ msg.mime }}" data-src="{{ msg.data }}" />
|
|
||||||
</video>
|
|
||||||
{% elif "/" in msg.mime %}
|
|
||||||
The file cannot be displayed here, however it should be located at <a href="./{{ msg.data }}">here</a>
|
|
||||||
{% else %}
|
|
||||||
{% filter escape %}{{ msg.data }}{% endfilter %}
|
|
||||||
{% endif %}
|
|
||||||
{% if msg.caption is not none %}
|
|
||||||
{{ msg.caption | urlize(none, true, '_blank') }}
|
|
||||||
{% endif %}
|
|
||||||
{% endif %}
|
|
||||||
{% endif %}
|
|
||||||
</p>
|
|
||||||
<div class="flex items-baseline text-[10px] text-[#667781] mt-1 gap-2">
|
|
||||||
<span class="flex-shrink-0">
|
|
||||||
{% if msg.sender is not none %}
|
|
||||||
{{ msg.sender }}
|
|
||||||
{% endif %}
|
|
||||||
</span>
|
|
||||||
<span class="flex-grow min-w-[4px]"></span>
|
|
||||||
<span class="flex-shrink-0">{{ msg.time }}</span>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<!-- <div class="opacity-0 group-hover:opacity-100 transition-opacity duration-200 relative ml-2">
|
|
||||||
<div class="relative">
|
|
||||||
<div class="relative group/tooltip">
|
|
||||||
<svg xmlns="http://www.w3.org/2000/svg" class="h-5 w-5 text-[#8696a0] hover:text-[#54656f] cursor-pointer" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
|
||||||
<use href="#info-icon"></use>
|
|
||||||
</svg>
|
|
||||||
<div class="absolute bottom-full info-box-tooltip mb-2 hidden group-hover/tooltip:block z-50">
|
|
||||||
<div class="bg-black text-white text-xs rounded py-1 px-2 whitespace-nowrap">
|
|
||||||
Received at {{msg.received_timestamp or 'unknown'}}
|
|
||||||
</div>
|
|
||||||
<div class="absolute top-full right-3 ml-1 border-4 border-transparent border-t-black"></div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div> -->
|
|
||||||
</div>
|
|
||||||
{% endif %}
|
|
||||||
{% endfor %}
|
|
||||||
</div>
|
|
||||||
<footer>
|
|
||||||
<h2 class="text-center">
|
|
||||||
{% if not next %}
|
|
||||||
End of History
|
|
||||||
{% endif %}
|
|
||||||
</h2>
|
|
||||||
<br>
|
|
||||||
Portions of this page are reproduced from <a href="https://web.dev/articles/lazy-loading-video">work</a> created and <a href="https://developers.google.com/readme/policies">shared by Google</a> and used according to terms described in the <a href="https://www.apache.org/licenses/LICENSE-2.0">Apache 2.0 License</a>.
|
|
||||||
</footer>
|
|
||||||
<svg style="display: none;">
|
|
||||||
<!-- Tooltip info icon -->
|
|
||||||
<symbol id="info-icon" viewBox="0 0 24 24">
|
|
||||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M13 16h-1v-4h-1m1-4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z" />
|
|
||||||
</symbol>
|
|
||||||
</svg>
|
|
||||||
</div>
|
|
||||||
</article>
|
|
||||||
</body>
|
|
||||||
<script>
|
|
||||||
// Search functionality
|
|
||||||
const searchButton = document.getElementById('searchButton');
|
|
||||||
const mainSearchInput = document.getElementById('mainSearchInput');
|
|
||||||
const closeMainSearch = document.getElementById('closeMainSearch');
|
|
||||||
const mainHeaderSearchInput = document.getElementById('mainHeaderSearchInput');
|
|
||||||
|
|
||||||
// Function to show search input
|
|
||||||
const showSearch = () => {
|
|
||||||
mainSearchInput.classList.add('active');
|
|
||||||
mainHeaderSearchInput.focus();
|
|
||||||
};
|
|
||||||
|
|
||||||
// Function to hide search input
|
|
||||||
const hideSearch = () => {
|
|
||||||
mainSearchInput.classList.remove('active');
|
|
||||||
mainHeaderSearchInput.value = '';
|
|
||||||
};
|
|
||||||
|
|
||||||
// Event listeners
|
|
||||||
searchButton.addEventListener('click', showSearch);
|
|
||||||
closeMainSearch.addEventListener('click', hideSearch);
|
|
||||||
|
|
||||||
// Handle ESC key
|
|
||||||
document.addEventListener('keydown', (event) => {
|
|
||||||
if (event.key === 'Escape' && mainSearchInput.classList.contains('active')) {
|
|
||||||
hideSearch();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
</script>
|
|
||||||
<script>
|
|
||||||
document.addEventListener("DOMContentLoaded", function() {
|
|
||||||
var lazyVideos = [].slice.call(document.querySelectorAll("video.lazy"));
|
|
||||||
|
|
||||||
if ("IntersectionObserver" in window) {
|
|
||||||
var lazyVideoObserver = new IntersectionObserver(function(entries, observer) {
|
|
||||||
entries.forEach(function(video) {
|
|
||||||
if (video.isIntersecting) {
|
|
||||||
for (var source in video.target.children) {
|
|
||||||
var videoSource = video.target.children[source];
|
|
||||||
if (typeof videoSource.tagName === "string" && videoSource.tagName === "SOURCE") {
|
|
||||||
videoSource.src = videoSource.dataset.src;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
video.target.load();
|
|
||||||
video.target.classList.remove("lazy");
|
|
||||||
lazyVideoObserver.unobserve(video.target);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
lazyVideos.forEach(function(lazyVideo) {
|
|
||||||
lazyVideoObserver.observe(lazyVideo);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
</script>
|
|
||||||
<script>
|
|
||||||
// Prevent the <base> tag from affecting links with the class "no-base"
|
|
||||||
document.querySelectorAll('.no-base').forEach(link => {
|
|
||||||
link.addEventListener('click', function(event) {
|
|
||||||
const href = this.getAttribute('href');
|
|
||||||
if (href.startsWith('#')) {
|
|
||||||
window.location.hash = href;
|
|
||||||
event.preventDefault();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
</script>
|
|
||||||
</html>
|
|
||||||
329
Whatsapp_Chat_Exporter/whatsapp_old.html
Normal file
329
Whatsapp_Chat_Exporter/whatsapp_old.html
Normal file
@@ -0,0 +1,329 @@
|
|||||||
|
<!DOCTYPE html>
|
||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
<title>Whatsapp - {{ name }}</title>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<link rel="stylesheet" href="{{w3css}}">
|
||||||
|
<style>
|
||||||
|
html, body {
|
||||||
|
font-size: 12px;
|
||||||
|
scroll-behavior: smooth;
|
||||||
|
}
|
||||||
|
header {
|
||||||
|
position: fixed;
|
||||||
|
z-index: 20;
|
||||||
|
border-bottom: 2px solid #e3e6e7;
|
||||||
|
font-size: 2em;
|
||||||
|
font-weight: bolder;
|
||||||
|
background-color: white;
|
||||||
|
padding: 20px 0 20px 0;
|
||||||
|
}
|
||||||
|
footer {
|
||||||
|
border-top: 2px solid #e3e6e7;
|
||||||
|
padding: 20px 0 20px 0;
|
||||||
|
}
|
||||||
|
article {
|
||||||
|
width:500px;
|
||||||
|
margin:100px auto;
|
||||||
|
z-index:10;
|
||||||
|
font-size: 15px;
|
||||||
|
word-wrap: break-word;
|
||||||
|
}
|
||||||
|
img, video {
|
||||||
|
max-width:100%;
|
||||||
|
}
|
||||||
|
div.reply{
|
||||||
|
font-size: 13px;
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
div:target::before {
|
||||||
|
content: '';
|
||||||
|
display: block;
|
||||||
|
height: 115px;
|
||||||
|
margin-top: -115px;
|
||||||
|
visibility: hidden;
|
||||||
|
}
|
||||||
|
div:target {
|
||||||
|
border-style: solid;
|
||||||
|
border-width: 2px;
|
||||||
|
animation: border-blink 0.5s steps(1) 5;
|
||||||
|
border-color: rgba(0,0,0,0)
|
||||||
|
}
|
||||||
|
table {
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
@keyframes border-blink {
|
||||||
|
0% {
|
||||||
|
border-color: #2196F3;
|
||||||
|
}
|
||||||
|
50% {
|
||||||
|
border-color: rgba(0,0,0,0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.avatar {
|
||||||
|
border-radius:50%;
|
||||||
|
overflow:hidden;
|
||||||
|
max-width: 64px;
|
||||||
|
max-height: 64px;
|
||||||
|
}
|
||||||
|
.name {
|
||||||
|
color: #3892da;
|
||||||
|
}
|
||||||
|
.pad-left-10 {
|
||||||
|
padding-left: 10px;
|
||||||
|
}
|
||||||
|
.pad-right-10 {
|
||||||
|
padding-right: 10px;
|
||||||
|
}
|
||||||
|
.reply_link {
|
||||||
|
color: #168acc;
|
||||||
|
}
|
||||||
|
.blue {
|
||||||
|
color: #70777a;
|
||||||
|
}
|
||||||
|
.sticker {
|
||||||
|
max-width: 100px !important;
|
||||||
|
max-height: 100px !important;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
<base href="{{ media_base }}" target="_blank">
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<header class="w3-center w3-top">
|
||||||
|
{{ headline }}
|
||||||
|
{% if status is not none %}
|
||||||
|
<br>
|
||||||
|
<span class="w3-small">{{ status }}</span>
|
||||||
|
{% endif %}
|
||||||
|
</header>
|
||||||
|
<article class="w3-container">
|
||||||
|
<div class="table">
|
||||||
|
{% set last = {'last': 946688461.001} %}
|
||||||
|
{% for msg in msgs -%}
|
||||||
|
<div class="w3-row w3-padding-small w3-margin-bottom" id="{{ msg.key_id }}">
|
||||||
|
{% if determine_day(last.last, msg.timestamp) is not none %}
|
||||||
|
<div class="w3-center w3-padding-16 blue">{{ determine_day(last.last, msg.timestamp) }}</div>
|
||||||
|
{% if last.update({'last': msg.timestamp}) %}{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
{% if msg.from_me == true %}
|
||||||
|
<div class="w3-row">
|
||||||
|
<div class="w3-left blue">{{ msg.time }}</div>
|
||||||
|
<div class="name w3-right-align pad-left-10">You</div>
|
||||||
|
</div>
|
||||||
|
<div class="w3-row">
|
||||||
|
{% if not no_avatar and my_avatar is not none %}
|
||||||
|
<div class="w3-col m10 l10">
|
||||||
|
{% else %}
|
||||||
|
<div class="w3-col m12 l12">
|
||||||
|
{% endif %}
|
||||||
|
<div class="w3-right-align">
|
||||||
|
{% if msg.reply is not none %}
|
||||||
|
<div class="reply">
|
||||||
|
<span class="blue">Replying to </span>
|
||||||
|
<a href="#{{msg.reply}}" target="_self" class="reply_link no-base">
|
||||||
|
{% if msg.quoted_data is not none %}
|
||||||
|
"{{msg.quoted_data}}"
|
||||||
|
{% else %}
|
||||||
|
this message
|
||||||
|
{% endif %}
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
{% if msg.meta == true or msg.media == false and msg.data is none %}
|
||||||
|
<div class="w3-panel w3-border-blue w3-pale-blue w3-rightbar w3-leftbar w3-threequarter w3-center">
|
||||||
|
{% if msg.safe %}
|
||||||
|
<p>{{ msg.data | safe or 'Not supported WhatsApp internal message' }}</p>
|
||||||
|
{% else %}
|
||||||
|
<p>{{ msg.data or 'Not supported WhatsApp internal message' }}</p>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
{% if msg.caption is not none %}
|
||||||
|
<div class="w3-container">
|
||||||
|
{{ msg.caption | urlize(none, true, '_blank') }}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
{% else %}
|
||||||
|
{% if msg.media == false %}
|
||||||
|
{{ msg.data | sanitize_except() | urlize(none, true, '_blank') }}
|
||||||
|
{% else %}
|
||||||
|
{% if "image/" in msg.mime %}
|
||||||
|
<a href="{{ msg.data }}">
|
||||||
|
<img src="{{ msg.thumb if msg.thumb is not none else msg.data }}" {{ 'class="sticker"' | safe if msg.sticker }} loading="lazy"/>
|
||||||
|
</a>
|
||||||
|
{% elif "audio/" in msg.mime %}
|
||||||
|
<audio controls="controls" autobuffer="autobuffer">
|
||||||
|
<source src="{{ msg.data }}" />
|
||||||
|
</audio>
|
||||||
|
{% elif "video/" in msg.mime %}
|
||||||
|
<video class="lazy" autobuffer {% if msg.message_type|int == 13 or msg.message_type|int == 11 %}autoplay muted loop playsinline{%else%}controls{% endif %}>
|
||||||
|
<source type="{{ msg.mime }}" data-src="{{ msg.data }}" />
|
||||||
|
</video>
|
||||||
|
{% elif "/" in msg.mime %}
|
||||||
|
<div class="w3-panel w3-border-blue w3-pale-blue w3-rightbar w3-leftbar w3-threequarter w3-center">
|
||||||
|
<p>The file cannot be displayed here, however it should be located at <a href="./{{ msg.data }}">here</a></p>
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
{% filter escape %}{{ msg.data }}{% endfilter %}
|
||||||
|
{% endif %}
|
||||||
|
{% if msg.caption is not none %}
|
||||||
|
<div class="w3-container">
|
||||||
|
{{ msg.caption | urlize(none, true, '_blank') }}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% if not no_avatar and my_avatar is not none %}
|
||||||
|
<div class="w3-col m2 l2 pad-left-10">
|
||||||
|
<a href="{{ my_avatar }}">
|
||||||
|
<img src="{{ my_avatar }}" onerror="this.style.display='none'" class="avatar" loading="lazy">
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
<div class="w3-row">
|
||||||
|
<div class="w3-left pad-right-10 name">
|
||||||
|
{% if msg.sender is not none %}
|
||||||
|
{{ msg.sender }}
|
||||||
|
{% else %}
|
||||||
|
{{ name }}
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
<div class="w3-right-align blue">{{ msg.time }}</div>
|
||||||
|
</div>
|
||||||
|
<div class="w3-row">
|
||||||
|
{% if not no_avatar %}
|
||||||
|
<div class="w3-col m2 l2">
|
||||||
|
{% if their_avatar is not none %}
|
||||||
|
<a href="{{ their_avatar }}"><img src="{{ their_avatar_thumb or '' }}" onerror="this.style.display='none'" class="avatar" loading="lazy"></a>
|
||||||
|
{% else %}
|
||||||
|
<img src="{{ their_avatar_thumb or '' }}" onerror="this.style.display='none'" class="avatar" loading="lazy">
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
<div class="w3-col m10 l10">
|
||||||
|
{% else %}
|
||||||
|
<div class="w3-col m12 l12">
|
||||||
|
{% endif %}
|
||||||
|
<div class="w3-left-align">
|
||||||
|
{% if msg.reply is not none %}
|
||||||
|
<div class="reply">
|
||||||
|
<span class="blue">Replying to </span>
|
||||||
|
<a href="#{{msg.reply}}" target="_self" class="reply_link no-base">
|
||||||
|
{% if msg.quoted_data is not none %}
|
||||||
|
"{{msg.quoted_data}}"
|
||||||
|
{% else %}
|
||||||
|
this message
|
||||||
|
{% endif %}
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
{% if msg.meta == true or msg.media == false and msg.data is none %}
|
||||||
|
<div class="w3-panel w3-border-blue w3-pale-blue w3-rightbar w3-leftbar w3-threequarter w3-center">
|
||||||
|
{% if msg.safe %}
|
||||||
|
<p>{{ msg.data | safe or 'Not supported WhatsApp internal message' }}</p>
|
||||||
|
{% else %}
|
||||||
|
<p>{{ msg.data or 'Not supported WhatsApp internal message' }}</p>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
{% if msg.caption is not none %}
|
||||||
|
<div class="w3-container">
|
||||||
|
{{ msg.caption | urlize(none, true, '_blank') }}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
{% else %}
|
||||||
|
{% if msg.media == false %}
|
||||||
|
{{ msg.data | sanitize_except() | urlize(none, true, '_blank') }}
|
||||||
|
{% else %}
|
||||||
|
{% if "image/" in msg.mime %}
|
||||||
|
<a href="{{ msg.data }}">
|
||||||
|
<img src="{{ msg.thumb if msg.thumb is not none else msg.data }}" {{ 'class="sticker"' | safe if msg.sticker }} loading="lazy"/>
|
||||||
|
</a>
|
||||||
|
{% elif "audio/" in msg.mime %}
|
||||||
|
<audio controls="controls" autobuffer="autobuffer">
|
||||||
|
<source src="{{ msg.data }}" />
|
||||||
|
</audio>
|
||||||
|
{% elif "video/" in msg.mime %}
|
||||||
|
<video class="lazy" autobuffer {% if msg.message_type|int == 13 or msg.message_type|int == 11 %}autoplay muted loop playsinline{%else%}controls{% endif %}>
|
||||||
|
<source type="{{ msg.mime }}" data-src="{{ msg.data }}" />
|
||||||
|
</video>
|
||||||
|
{% elif "/" in msg.mime %}
|
||||||
|
<div class="w3-panel w3-border-blue w3-pale-blue w3-rightbar w3-leftbar w3-threequarter w3-center">
|
||||||
|
<p>The file cannot be displayed here, however it should be located at <a href="./{{ msg.data }}">here</a></p>
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
{% filter escape %}{{ msg.data }}{% endfilter %}
|
||||||
|
{% endif %}
|
||||||
|
{% if msg.caption is not none %}
|
||||||
|
<div class="w3-container">
|
||||||
|
{{ msg.caption | urlize(none, true, '_blank') }}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
</article>
|
||||||
|
<footer class="w3-center">
|
||||||
|
<h2>
|
||||||
|
{% if previous %}
|
||||||
|
<a href="./{{ previous }}" target="_self">Previous</a>
|
||||||
|
{% endif %}
|
||||||
|
<h2>
|
||||||
|
{% if next %}
|
||||||
|
<a href="./{{ next }}" target="_self">Next</a>
|
||||||
|
{% else %}
|
||||||
|
End of History
|
||||||
|
{% endif %}
|
||||||
|
</h2>
|
||||||
|
<br>
|
||||||
|
Portions of this page are reproduced from <a href="https://web.dev/articles/lazy-loading-video">work</a> created and <a href="https://developers.google.com/readme/policies">shared by Google</a> and used according to terms described in the <a href="https://www.apache.org/licenses/LICENSE-2.0">Apache 2.0 License</a>.
|
||||||
|
</footer>
|
||||||
|
<script>
|
||||||
|
document.addEventListener("DOMContentLoaded", function() {
|
||||||
|
var lazyVideos = [].slice.call(document.querySelectorAll("video.lazy"));
|
||||||
|
|
||||||
|
if ("IntersectionObserver" in window) {
|
||||||
|
var lazyVideoObserver = new IntersectionObserver(function(entries, observer) {
|
||||||
|
entries.forEach(function(video) {
|
||||||
|
if (video.isIntersecting) {
|
||||||
|
for (var source in video.target.children) {
|
||||||
|
var videoSource = video.target.children[source];
|
||||||
|
if (typeof videoSource.tagName === "string" && videoSource.tagName === "SOURCE") {
|
||||||
|
videoSource.src = videoSource.dataset.src;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
video.target.load();
|
||||||
|
video.target.classList.remove("lazy");
|
||||||
|
lazyVideoObserver.unobserve(video.target);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
lazyVideos.forEach(function(lazyVideo) {
|
||||||
|
lazyVideoObserver.observe(lazyVideo);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
<script>
|
||||||
|
// Prevent the <base> tag from affecting links with the class "no-base"
|
||||||
|
document.querySelectorAll('.no-base').forEach(link => {
|
||||||
|
link.addEventListener('click', function(event) {
|
||||||
|
const href = this.getAttribute('href');
|
||||||
|
if (href.startsWith('#')) {
|
||||||
|
window.location.hash = href;
|
||||||
|
event.preventDefault();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
BIN
imgs/group.png
BIN
imgs/group.png
Binary file not shown.
|
Before Width: | Height: | Size: 15 KiB |
BIN
imgs/pm.png
BIN
imgs/pm.png
Binary file not shown.
|
Before Width: | Height: | Size: 126 KiB After Width: | Height: | Size: 116 KiB |
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
|||||||
|
|
||||||
[project]
|
[project]
|
||||||
name = "whatsapp-chat-exporter"
|
name = "whatsapp-chat-exporter"
|
||||||
version = "0.12.0"
|
version = "0.13.0"
|
||||||
description = "A Whatsapp database parser that provides history of your Whatsapp conversations in HTML and JSON. Android, iOS, iPadOS, Crypt12, Crypt14, Crypt15 supported."
|
description = "A Whatsapp database parser that provides history of your Whatsapp conversations in HTML and JSON. Android, iOS, iPadOS, Crypt12, Crypt14, Crypt15 supported."
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
authors = [
|
authors = [
|
||||||
@@ -19,10 +19,11 @@ keywords = [
|
|||||||
]
|
]
|
||||||
classifiers = [
|
classifiers = [
|
||||||
"Programming Language :: Python :: 3 :: Only",
|
"Programming Language :: Python :: 3 :: Only",
|
||||||
"Programming Language :: Python :: 3.9",
|
|
||||||
"Programming Language :: Python :: 3.10",
|
"Programming Language :: Python :: 3.10",
|
||||||
"Programming Language :: Python :: 3.11",
|
"Programming Language :: Python :: 3.11",
|
||||||
"Programming Language :: Python :: 3.12",
|
"Programming Language :: Python :: 3.12",
|
||||||
|
"Programming Language :: Python :: 3.13",
|
||||||
|
"Programming Language :: Python :: 3.14",
|
||||||
"License :: OSI Approved :: MIT License",
|
"License :: OSI Approved :: MIT License",
|
||||||
"Operating System :: OS Independent",
|
"Operating System :: OS Independent",
|
||||||
"Development Status :: 4 - Beta",
|
"Development Status :: 4 - Beta",
|
||||||
@@ -32,10 +33,11 @@ classifiers = [
|
|||||||
"Topic :: Utilities",
|
"Topic :: Utilities",
|
||||||
"Topic :: Database"
|
"Topic :: Database"
|
||||||
]
|
]
|
||||||
requires-python = ">=3.9"
|
requires-python = ">=3.10"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"jinja2",
|
"jinja2",
|
||||||
"bleach"
|
"bleach",
|
||||||
|
"tqdm"
|
||||||
]
|
]
|
||||||
|
|
||||||
[project.optional-dependencies]
|
[project.optional-dependencies]
|
||||||
@@ -43,10 +45,9 @@ android_backup = ["pycryptodome", "javaobj-py3"]
|
|||||||
crypt12 = ["pycryptodome"]
|
crypt12 = ["pycryptodome"]
|
||||||
crypt14 = ["pycryptodome"]
|
crypt14 = ["pycryptodome"]
|
||||||
crypt15 = ["pycryptodome", "javaobj-py3"]
|
crypt15 = ["pycryptodome", "javaobj-py3"]
|
||||||
all = ["pycryptodome", "javaobj-py3", "vobject"]
|
all = ["pycryptodome", "javaobj-py3"]
|
||||||
everything = ["pycryptodome", "javaobj-py3", "vobject"]
|
everything = ["pycryptodome", "javaobj-py3"]
|
||||||
backup = ["pycryptodome", "javaobj-py3"]
|
backup = ["pycryptodome", "javaobj-py3"]
|
||||||
vcards = ["vobject", "pycryptodome", "javaobj-py3"]
|
|
||||||
|
|
||||||
[project.scripts]
|
[project.scripts]
|
||||||
wtsexporter = "Whatsapp_Chat_Exporter.__main__:main"
|
wtsexporter = "Whatsapp_Chat_Exporter.__main__:main"
|
||||||
@@ -59,3 +60,8 @@ include = ["Whatsapp_Chat_Exporter"]
|
|||||||
|
|
||||||
[tool.setuptools.package-data]
|
[tool.setuptools.package-data]
|
||||||
Whatsapp_Chat_Exporter = ["*.html"]
|
Whatsapp_Chat_Exporter = ["*.html"]
|
||||||
|
|
||||||
|
[dependency-groups]
|
||||||
|
dev = [
|
||||||
|
"pytest>=8.3.5",
|
||||||
|
]
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ Contributed by @magpires https://github.com/KnugiHK/WhatsApp-Chat-Exporter/issue
|
|||||||
import re
|
import re
|
||||||
import argparse
|
import argparse
|
||||||
|
|
||||||
|
|
||||||
def process_phone_number(raw_phone):
|
def process_phone_number(raw_phone):
|
||||||
"""
|
"""
|
||||||
Process the raw phone string from the VCARD and return two formatted numbers:
|
Process the raw phone string from the VCARD and return two formatted numbers:
|
||||||
@@ -70,6 +71,7 @@ def process_phone_number(raw_phone):
|
|||||||
|
|
||||||
return original_formatted, modified_formatted
|
return original_formatted, modified_formatted
|
||||||
|
|
||||||
|
|
||||||
def process_vcard(input_vcard, output_vcard):
|
def process_vcard(input_vcard, output_vcard):
|
||||||
"""
|
"""
|
||||||
Process a VCARD file to standardize telephone entries and add a second TEL line
|
Process a VCARD file to standardize telephone entries and add a second TEL line
|
||||||
@@ -103,6 +105,7 @@ def process_vcard(input_vcard, output_vcard):
|
|||||||
with open(output_vcard, 'w', encoding='utf-8') as file:
|
with open(output_vcard, 'w', encoding='utf-8') as file:
|
||||||
file.writelines(output_lines)
|
file.writelines(output_lines)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
parser = argparse.ArgumentParser(
|
parser = argparse.ArgumentParser(
|
||||||
description="Process a VCARD file to standardize telephone entries and add a second TEL line with the modified number (removing the extra ninth digit) for contacts with 9-digit subscribers."
|
description="Process a VCARD file to standardize telephone entries and add a second TEL line with the modified number (removing the extra ninth digit) for contacts with 9-digit subscribers."
|
||||||
|
|||||||
@@ -27,23 +27,24 @@ def _extract_encrypted_key(keyfile):
|
|||||||
return _generate_hmac_of_hmac(key_stream)
|
return _generate_hmac_of_hmac(key_stream)
|
||||||
|
|
||||||
|
|
||||||
key = open("encrypted_backup.key", "rb").read()
|
if __name__ == "__main__":
|
||||||
database = open("wa.db.crypt15", "rb").read()
|
key = open("encrypted_backup.key", "rb").read()
|
||||||
main_key, hex_key = _extract_encrypted_key(key)
|
database = open("wa.db.crypt15", "rb").read()
|
||||||
for i in range(100):
|
main_key, hex_key = _extract_encrypted_key(key)
|
||||||
iv = database[i:i+16]
|
for i in range(100):
|
||||||
for j in range(100):
|
iv = database[i:i+16]
|
||||||
cipher = AES.new(main_key, AES.MODE_GCM, iv)
|
for j in range(100):
|
||||||
db_ciphertext = database[j:]
|
cipher = AES.new(main_key, AES.MODE_GCM, iv)
|
||||||
db_compressed = cipher.decrypt(db_ciphertext)
|
db_ciphertext = database[j:]
|
||||||
try:
|
db_compressed = cipher.decrypt(db_ciphertext)
|
||||||
db = zlib.decompress(db_compressed)
|
try:
|
||||||
except zlib.error:
|
db = zlib.decompress(db_compressed)
|
||||||
...
|
except zlib.error:
|
||||||
else:
|
...
|
||||||
if db[0:6] == b"SQLite":
|
else:
|
||||||
print(f"Found!\nIV: {i}\nOffset: {j}")
|
if db[0:6] == b"SQLite":
|
||||||
print(db_compressed[:10])
|
print(f"Found!\nIV: {i}\nOffset: {j}")
|
||||||
exit()
|
print(db_compressed[:10])
|
||||||
|
exit()
|
||||||
|
|
||||||
print("Not found! Try to increase maximum search.")
|
print("Not found! Try to increase maximum search.")
|
||||||
|
|||||||
0
tests/__init__.py
Normal file
0
tests/__init__.py
Normal file
27
tests/conftest.py
Normal file
27
tests/conftest.py
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
import pytest
|
||||||
|
import os
|
||||||
|
|
||||||
|
def pytest_collection_modifyitems(config, items):
|
||||||
|
"""
|
||||||
|
Moves test_nuitka_binary.py to the end and fails if the file is missing.
|
||||||
|
"""
|
||||||
|
target_file = "test_nuitka_binary.py"
|
||||||
|
|
||||||
|
# Sanity Check: Ensure the file actually exists in the tests directory
|
||||||
|
test_dir = os.path.join(config.rootdir, "tests")
|
||||||
|
file_path = os.path.join(test_dir, target_file)
|
||||||
|
|
||||||
|
if not os.path.exists(file_path):
|
||||||
|
pytest.exit(f"\n[FATAL] Required test file '{target_file}' not found in {test_dir}. "
|
||||||
|
f"Order enforcement failed!", returncode=1)
|
||||||
|
|
||||||
|
nuitka_tests = []
|
||||||
|
remaining_tests = []
|
||||||
|
|
||||||
|
for item in items:
|
||||||
|
if target_file in item.nodeid:
|
||||||
|
nuitka_tests.append(item)
|
||||||
|
else:
|
||||||
|
remaining_tests.append(item)
|
||||||
|
|
||||||
|
items[:] = remaining_tests + nuitka_tests
|
||||||
44
tests/data/contacts.vcf
Normal file
44
tests/data/contacts.vcf
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
BEGIN:VCARD
|
||||||
|
VERSION:3.0
|
||||||
|
FN:Sample Contact
|
||||||
|
TEL;TYPE=CELL:+85288888888
|
||||||
|
END:VCARD
|
||||||
|
|
||||||
|
BEGIN:VCARD
|
||||||
|
VERSION:2.1
|
||||||
|
N:Lopez;Yard Lawn Guy;Jose;;
|
||||||
|
FN:Yard Lawn Guy, Jose Lopez
|
||||||
|
TEL;HOME:5673334444
|
||||||
|
END:VCARD
|
||||||
|
|
||||||
|
BEGIN:VCARD
|
||||||
|
VERSION:2.1
|
||||||
|
N;CHARSET=UTF-8;ENCODING=QUOTED-PRINTABLE:;=4A=6F=68=6E=20=42=75=74=6C=65=72=20=F0=9F=8C=9F=
|
||||||
|
=F0=9F=92=AB=F0=9F=8C=9F;;;
|
||||||
|
FN;CHARSET=UTF-8;ENCODING=QUOTED-PRINTABLE:=4A=6F=68=6E=20=42=75=74=6C=65=72=20=F0=9F=8C=9F=
|
||||||
|
=F0=9F=92=AB=F0=9F=8C=9F
|
||||||
|
TEL;PREF:5556667777
|
||||||
|
END:VCARD
|
||||||
|
|
||||||
|
BEGIN:VCARD
|
||||||
|
VERSION:2.1
|
||||||
|
TEL;WORK;PREF:1234567890
|
||||||
|
ORG:Airline Contact #'s
|
||||||
|
NOTE;ENCODING=QUOTED-PRINTABLE:=53=70=69=72=69=74=20=41=69=72=6C=69=
|
||||||
|
=6E=65=73=20=38=30=30=2D=37=37=32=2D=37=31=31=37=55=6E=69=74=65=64=
|
||||||
|
=20=41=69=72=6C=69=6E=65=73=20=38=30=30=2D=32=34=31=2D=36=35=32=32
|
||||||
|
END:VCARD
|
||||||
|
|
||||||
|
BEGIN:VCARD
|
||||||
|
VERSION:2.1
|
||||||
|
TEL;WORK;PREF:3451112222
|
||||||
|
X-SAMSUNGADR;ENCODING=QUOTED-PRINTABLE:;;=31=31=31=31=32=20=4E=6F=72=74=68=20=45=6C=64=72=
|
||||||
|
=69=64=67=65=20=50=61=72=6B=77=61=79;=44=61=6C=6C=61=73;=54=58;=32=32=32=32=32
|
||||||
|
ORG:James Peacock Elementary
|
||||||
|
END:VCARD
|
||||||
|
|
||||||
|
BEGIN:VCARD
|
||||||
|
VERSION:2.1
|
||||||
|
TEL;CELL:8889990001
|
||||||
|
ORG:AAA Car Service
|
||||||
|
END:VCARD
|
||||||
@@ -4,7 +4,8 @@ import tempfile
|
|||||||
import os
|
import os
|
||||||
from unittest.mock import patch
|
from unittest.mock import patch
|
||||||
|
|
||||||
from brazilian_number_processing import process_phone_number, process_vcard
|
from scripts.brazilian_number_processing import process_phone_number, process_vcard
|
||||||
|
|
||||||
|
|
||||||
class TestVCardProcessor(unittest.TestCase):
|
class TestVCardProcessor(unittest.TestCase):
|
||||||
|
|
||||||
@@ -248,7 +249,8 @@ END:VCARD
|
|||||||
output_path = input_path + '.out'
|
output_path = input_path + '.out'
|
||||||
|
|
||||||
try:
|
try:
|
||||||
test_args = ['python' if os.name == 'nt' else 'python3', 'brazilian_number_processing.py', input_path, output_path]
|
test_args = ['python' if os.name == 'nt' else 'python3',
|
||||||
|
'scripts/brazilian_number_processing.py', input_path, output_path]
|
||||||
# We're just testing that the argument parsing works
|
# We're just testing that the argument parsing works
|
||||||
subprocess.call(
|
subprocess.call(
|
||||||
test_args,
|
test_args,
|
||||||
@@ -265,5 +267,6 @@ END:VCARD
|
|||||||
if os.path.exists(output_path):
|
if os.path.exists(output_path):
|
||||||
os.unlink(output_path)
|
os.unlink(output_path)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
unittest.main()
|
unittest.main()
|
||||||
50
tests/test_exporter.py
Normal file
50
tests/test_exporter.py
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
import subprocess
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def command_runner():
|
||||||
|
"""
|
||||||
|
A pytest fixture to simplify running commands. This is a helper
|
||||||
|
function that you can use in multiple tests.
|
||||||
|
"""
|
||||||
|
def _run_command(command_list, check=True):
|
||||||
|
"""
|
||||||
|
Runs a command and returns the result.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
command_list (list): A list of strings representing the command
|
||||||
|
and its arguments (e.g., ["python", "my_script.py", "arg1"]).
|
||||||
|
check (bool, optional): If True, raise an exception if the
|
||||||
|
command returns a non-zero exit code. Defaults to True.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
subprocess.CompletedProcess: The result of the command.
|
||||||
|
"""
|
||||||
|
return subprocess.run(
|
||||||
|
command_list,
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
check=check,
|
||||||
|
)
|
||||||
|
return _run_command
|
||||||
|
|
||||||
|
|
||||||
|
def test_sanity_check(command_runner):
|
||||||
|
"""
|
||||||
|
This is a basic sanity check to make sure all modules can be imported
|
||||||
|
This runs the exporter without any arguments. It should fail with a
|
||||||
|
message about missing arguments.
|
||||||
|
"""
|
||||||
|
result = command_runner(["wtsexporter"], False)
|
||||||
|
expected_stderr = "You must define the device type"
|
||||||
|
assert expected_stderr in result.stderr, f"STDERR was: {result.stderr}"
|
||||||
|
assert result.returncode == 2
|
||||||
|
|
||||||
|
|
||||||
|
def test_android(command_runner):
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
def test_ios(command_runner):
|
||||||
|
...
|
||||||
344
tests/test_incremental_merge.py
Normal file
344
tests/test_incremental_merge.py
Normal file
@@ -0,0 +1,344 @@
|
|||||||
|
import os
|
||||||
|
import json
|
||||||
|
import pytest
|
||||||
|
from unittest.mock import patch, mock_open, call, MagicMock
|
||||||
|
from Whatsapp_Chat_Exporter.utility import incremental_merge
|
||||||
|
from Whatsapp_Chat_Exporter.data_model import ChatStore
|
||||||
|
|
||||||
|
# Test data setup
|
||||||
|
BASE_PATH = "AppDomainGroup-group.net.whatsapp.WhatsApp.shared"
|
||||||
|
chat_data_1 = {
|
||||||
|
"12345678@s.whatsapp.net": {
|
||||||
|
"name": "Friend",
|
||||||
|
"type": "ios",
|
||||||
|
"my_avatar": os.path.join(BASE_PATH, "Media", "Profile", "Photo.jpg"),
|
||||||
|
"their_avatar": os.path.join(BASE_PATH, "Media", "Profile", "12345678-1709851420.thumb"),
|
||||||
|
"their_avatar_thumb": None,
|
||||||
|
"status": None,
|
||||||
|
"messages": {
|
||||||
|
"24690": {
|
||||||
|
"from_me": True,
|
||||||
|
"timestamp": 1463926635.571629,
|
||||||
|
"time": "10:17",
|
||||||
|
"media": False,
|
||||||
|
"key_id": "34B5EF10FBCA37B7E",
|
||||||
|
"meta": False,
|
||||||
|
"data": "I'm here",
|
||||||
|
"safe": False,
|
||||||
|
"sticker": False
|
||||||
|
},
|
||||||
|
"24691": { # This message only exists in target
|
||||||
|
"from_me": False,
|
||||||
|
"timestamp": 1463926641.571629,
|
||||||
|
"time": "10:17",
|
||||||
|
"media": False,
|
||||||
|
"key_id": "34B5EF10FBCA37B8E",
|
||||||
|
"meta": False,
|
||||||
|
"data": "Great to see you",
|
||||||
|
"safe": False,
|
||||||
|
"sticker": False
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
chat_data_2 = {
|
||||||
|
"12345678@s.whatsapp.net": {
|
||||||
|
"name": "Friend",
|
||||||
|
"type": "ios",
|
||||||
|
"my_avatar": os.path.join(BASE_PATH, "Media", "Profile", "Photo.jpg"),
|
||||||
|
"their_avatar": os.path.join(BASE_PATH, "Media", "Profile", "12345678-1709851420.thumb"),
|
||||||
|
"their_avatar_thumb": None,
|
||||||
|
"status": None,
|
||||||
|
"messages": {
|
||||||
|
"24690": {
|
||||||
|
"from_me": True,
|
||||||
|
"timestamp": 1463926635.571629,
|
||||||
|
"time": "10:17",
|
||||||
|
"media": False,
|
||||||
|
"key_id": "34B5EF10FBCA37B7E",
|
||||||
|
"meta": False,
|
||||||
|
"data": "I'm here",
|
||||||
|
"safe": False,
|
||||||
|
"sticker": False
|
||||||
|
},
|
||||||
|
"24692": { # This message only exists in source
|
||||||
|
"from_me": False,
|
||||||
|
"timestamp": 1463926642.571629,
|
||||||
|
"time": "10:17",
|
||||||
|
"media": False,
|
||||||
|
"key_id": "34B5EF10FBCA37B9E",
|
||||||
|
"meta": False,
|
||||||
|
"data": "Hi there!",
|
||||||
|
"safe": False,
|
||||||
|
"sticker": False
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Expected merged data - should contain all messages with all fields initialized as they would be by Message class
|
||||||
|
chat_data_merged = {
|
||||||
|
"12345678@s.whatsapp.net": {
|
||||||
|
"name": "Friend",
|
||||||
|
"type": "ios",
|
||||||
|
"my_avatar": os.path.join(BASE_PATH, "Media", "Profile", "Photo.jpg"),
|
||||||
|
"their_avatar": os.path.join(BASE_PATH, "Media", "Profile", "12345678-1709851420.thumb"),
|
||||||
|
"their_avatar_thumb": None,
|
||||||
|
"status": None,
|
||||||
|
"media_base": "",
|
||||||
|
"messages": {
|
||||||
|
"24690": {
|
||||||
|
"from_me": True,
|
||||||
|
"timestamp": 1463926635.571629,
|
||||||
|
"time": "10:17",
|
||||||
|
"media": False,
|
||||||
|
"key_id": "34B5EF10FBCA37B7E",
|
||||||
|
"meta": False,
|
||||||
|
"data": "I'm here",
|
||||||
|
"sender": None,
|
||||||
|
"safe": False,
|
||||||
|
"mime": None,
|
||||||
|
"reply": None,
|
||||||
|
"quoted_data": None,
|
||||||
|
'reactions': {},
|
||||||
|
"caption": None,
|
||||||
|
"thumb": None,
|
||||||
|
"sticker": False,
|
||||||
|
"message_type": None,
|
||||||
|
"received_timestamp": None,
|
||||||
|
"read_timestamp": None
|
||||||
|
},
|
||||||
|
"24691": {
|
||||||
|
"from_me": False,
|
||||||
|
"timestamp": 1463926641.571629,
|
||||||
|
"time": "10:17",
|
||||||
|
"media": False,
|
||||||
|
"key_id": "34B5EF10FBCA37B8E",
|
||||||
|
"meta": False,
|
||||||
|
"data": "Great to see you",
|
||||||
|
"sender": None,
|
||||||
|
"safe": False,
|
||||||
|
"mime": None,
|
||||||
|
"reply": None,
|
||||||
|
"quoted_data": None,
|
||||||
|
'reactions': {},
|
||||||
|
"caption": None,
|
||||||
|
"thumb": None,
|
||||||
|
"sticker": False,
|
||||||
|
"message_type": None,
|
||||||
|
"received_timestamp": None,
|
||||||
|
"read_timestamp": None
|
||||||
|
},
|
||||||
|
"24692": {
|
||||||
|
"from_me": False,
|
||||||
|
"timestamp": 1463926642.571629,
|
||||||
|
"time": "10:17",
|
||||||
|
"media": False,
|
||||||
|
"key_id": "34B5EF10FBCA37B9E",
|
||||||
|
"meta": False,
|
||||||
|
"data": "Hi there!",
|
||||||
|
"sender": None,
|
||||||
|
"safe": False,
|
||||||
|
"mime": None,
|
||||||
|
"reply": None,
|
||||||
|
"quoted_data": None,
|
||||||
|
'reactions': {},
|
||||||
|
"caption": None,
|
||||||
|
"thumb": None,
|
||||||
|
"sticker": False,
|
||||||
|
"message_type": None,
|
||||||
|
"received_timestamp": None,
|
||||||
|
"read_timestamp": None
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_filesystem():
|
||||||
|
with (
|
||||||
|
patch("os.path.exists") as mock_exists,
|
||||||
|
patch("os.makedirs") as mock_makedirs,
|
||||||
|
patch("os.path.getmtime") as mock_getmtime,
|
||||||
|
patch("os.listdir") as mock_listdir,
|
||||||
|
patch("os.walk") as mock_walk,
|
||||||
|
patch("shutil.copy2") as mock_copy2,
|
||||||
|
):
|
||||||
|
yield {
|
||||||
|
"exists": mock_exists,
|
||||||
|
"makedirs": mock_makedirs,
|
||||||
|
"getmtime": mock_getmtime,
|
||||||
|
"listdir": mock_listdir,
|
||||||
|
"walk": mock_walk,
|
||||||
|
"copy2": mock_copy2,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_incremental_merge_new_file(mock_filesystem):
|
||||||
|
"""Test merging when target file doesn't exist"""
|
||||||
|
source_dir = "/source"
|
||||||
|
target_dir = "/target"
|
||||||
|
media_dir = "media"
|
||||||
|
|
||||||
|
# Setup mock filesystem
|
||||||
|
mock_filesystem["exists"].side_effect = lambda x: x == "/source"
|
||||||
|
mock_filesystem["listdir"].return_value = ["chat.json"]
|
||||||
|
|
||||||
|
# Run the function
|
||||||
|
incremental_merge(source_dir, target_dir, media_dir, 2, True)
|
||||||
|
|
||||||
|
# Verify the operations
|
||||||
|
mock_filesystem["makedirs"].assert_called_once_with(target_dir, exist_ok=True)
|
||||||
|
mock_filesystem["copy2"].assert_called_once_with(
|
||||||
|
os.path.join(source_dir, "chat.json"),
|
||||||
|
os.path.join(target_dir, "chat.json")
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_incremental_merge_existing_file_with_changes(mock_filesystem):
|
||||||
|
"""Test merging when target file exists and has changes"""
|
||||||
|
source_dir = "source"
|
||||||
|
target_dir = "target"
|
||||||
|
media_dir = "media"
|
||||||
|
|
||||||
|
# Setup mock filesystem
|
||||||
|
mock_filesystem["exists"].side_effect = lambda x: True
|
||||||
|
mock_filesystem["listdir"].return_value = ["chat.json"]
|
||||||
|
|
||||||
|
# Mock file operations with consistent path separators
|
||||||
|
source_file = os.path.join(source_dir, "chat.json")
|
||||||
|
target_file = os.path.join(target_dir, "chat.json")
|
||||||
|
mock_file_content = {
|
||||||
|
source_file: json.dumps(chat_data_2),
|
||||||
|
target_file: json.dumps(chat_data_1),
|
||||||
|
}
|
||||||
|
|
||||||
|
written_chunks = []
|
||||||
|
|
||||||
|
def mock_file_write(data):
|
||||||
|
written_chunks.append(data)
|
||||||
|
|
||||||
|
mock_write = MagicMock(side_effect=mock_file_write)
|
||||||
|
|
||||||
|
with patch("builtins.open", mock_open()) as mock_file:
|
||||||
|
def mock_file_read(filename, mode="r"):
|
||||||
|
if mode == 'w':
|
||||||
|
file_mock = mock_open().return_value
|
||||||
|
file_mock.write.side_effect = mock_write
|
||||||
|
return file_mock
|
||||||
|
else:
|
||||||
|
# Use normalized path for lookup
|
||||||
|
norm_filename = os.path.normpath(filename)
|
||||||
|
content = mock_file_content.get(norm_filename, '')
|
||||||
|
file_mock = mock_open(read_data=content).return_value
|
||||||
|
return file_mock
|
||||||
|
|
||||||
|
mock_file.side_effect = mock_file_read
|
||||||
|
|
||||||
|
# Run the function
|
||||||
|
incremental_merge(source_dir, target_dir, media_dir, 2, True)
|
||||||
|
|
||||||
|
# Verify file operations using os.path.join
|
||||||
|
mock_file.assert_any_call(source_file, "r")
|
||||||
|
mock_file.assert_any_call(target_file, "r")
|
||||||
|
mock_file.assert_any_call(target_file, "w")
|
||||||
|
|
||||||
|
# Rest of verification code...
|
||||||
|
assert mock_write.called, "Write method was never called"
|
||||||
|
written_data = json.loads(''.join(written_chunks))
|
||||||
|
assert written_data is not None, "No data was written"
|
||||||
|
assert written_data == chat_data_merged, "Merged data does not match expected result"
|
||||||
|
|
||||||
|
messages = written_data["12345678@s.whatsapp.net"]["messages"]
|
||||||
|
assert "24690" in messages, "Common message should be present"
|
||||||
|
assert "24691" in messages, "Target-only message should be preserved"
|
||||||
|
assert "24692" in messages, "Source-only message should be added"
|
||||||
|
assert len(messages) == 3, "Should have exactly 3 messages"
|
||||||
|
|
||||||
|
|
||||||
|
def test_incremental_merge_existing_file_no_changes(mock_filesystem):
|
||||||
|
"""Test merging when target file exists but has no changes"""
|
||||||
|
source_dir = "source"
|
||||||
|
target_dir = "target"
|
||||||
|
media_dir = "media"
|
||||||
|
|
||||||
|
# Setup mock filesystem
|
||||||
|
mock_filesystem["exists"].side_effect = lambda x: True
|
||||||
|
mock_filesystem["listdir"].return_value = ["chat.json"]
|
||||||
|
|
||||||
|
# Mock file operations with consistent path separators
|
||||||
|
source_file = os.path.join(source_dir, "chat.json")
|
||||||
|
target_file = os.path.join(target_dir, "chat.json")
|
||||||
|
mock_file_content = {
|
||||||
|
source_file: json.dumps(chat_data_1),
|
||||||
|
target_file: json.dumps(chat_data_1),
|
||||||
|
}
|
||||||
|
|
||||||
|
with patch("builtins.open", mock_open()) as mock_file:
|
||||||
|
def mock_file_read(filename, mode="r"):
|
||||||
|
if mode == 'w':
|
||||||
|
file_mock = mock_open().return_value
|
||||||
|
return file_mock
|
||||||
|
else:
|
||||||
|
# Use normalized path for lookup
|
||||||
|
norm_filename = os.path.normpath(filename)
|
||||||
|
content = mock_file_content.get(norm_filename, '')
|
||||||
|
file_mock = mock_open(read_data=content).return_value
|
||||||
|
return file_mock
|
||||||
|
|
||||||
|
mock_file.side_effect = mock_file_read
|
||||||
|
|
||||||
|
# Run the function
|
||||||
|
incremental_merge(source_dir, target_dir, media_dir, 2, True)
|
||||||
|
|
||||||
|
# Verify no write operations occurred on target file
|
||||||
|
write_calls = [
|
||||||
|
call for call in mock_file.mock_calls if call[0] == "().write"]
|
||||||
|
assert len(write_calls) == 0
|
||||||
|
|
||||||
|
|
||||||
|
def test_incremental_merge_media_copy(mock_filesystem):
|
||||||
|
"""Test media file copying during merge"""
|
||||||
|
source_dir = "source"
|
||||||
|
target_dir = "target"
|
||||||
|
media_dir = "media"
|
||||||
|
|
||||||
|
# Setup mock filesystem
|
||||||
|
mock_filesystem["exists"].side_effect = lambda x: True
|
||||||
|
mock_filesystem["listdir"].return_value = ["chat.json"]
|
||||||
|
mock_filesystem["walk"].return_value = [
|
||||||
|
(os.path.join(source_dir, "media"), ["subfolder"], ["file1.jpg"]),
|
||||||
|
(os.path.join(source_dir, "media", "subfolder"), [], ["file2.jpg"]),
|
||||||
|
]
|
||||||
|
mock_filesystem["getmtime"].side_effect = lambda x: 1000 if "source" in x else 500
|
||||||
|
|
||||||
|
# Mock file operations with consistent path separators
|
||||||
|
source_file = os.path.join(source_dir, "chat.json")
|
||||||
|
target_file = os.path.join(target_dir, "chat.json")
|
||||||
|
mock_file_content = {
|
||||||
|
source_file: json.dumps(chat_data_1),
|
||||||
|
target_file: json.dumps(chat_data_1),
|
||||||
|
}
|
||||||
|
|
||||||
|
with patch("builtins.open", mock_open()) as mock_file:
|
||||||
|
def mock_file_read(filename, mode="r"):
|
||||||
|
if mode == 'w':
|
||||||
|
file_mock = mock_open().return_value
|
||||||
|
return file_mock
|
||||||
|
else:
|
||||||
|
# Use normalized path for lookup
|
||||||
|
norm_filename = os.path.normpath(filename)
|
||||||
|
content = mock_file_content.get(norm_filename, '')
|
||||||
|
file_mock = mock_open(read_data=content).return_value
|
||||||
|
return file_mock
|
||||||
|
|
||||||
|
mock_file.side_effect = mock_file_read
|
||||||
|
|
||||||
|
# Run the function
|
||||||
|
incremental_merge(source_dir, target_dir, media_dir, 2, True)
|
||||||
|
|
||||||
|
# Verify media file operations
|
||||||
|
assert mock_filesystem["makedirs"].call_count >= 2 # At least target dir and media dir
|
||||||
|
assert mock_filesystem["copy2"].call_count == 2 # Two media files copied
|
||||||
76
tests/test_nuitka_binary.py
Normal file
76
tests/test_nuitka_binary.py
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import pytest
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def command_runner():
|
||||||
|
"""
|
||||||
|
A pytest fixture to simplify running commands. This is a helper
|
||||||
|
function that you can use in multiple tests.
|
||||||
|
"""
|
||||||
|
def _run_command(command_list, check=True):
|
||||||
|
"""
|
||||||
|
Runs a command and returns the result.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
command_list (list): A list of strings representing the command
|
||||||
|
and its arguments (e.g., ["python", "my_script.py", "arg1"]).
|
||||||
|
check (bool, optional): If True, raise an exception if the
|
||||||
|
command returns a non-zero exit code. Defaults to True.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
subprocess.CompletedProcess: The result of the command.
|
||||||
|
"""
|
||||||
|
return subprocess.run(
|
||||||
|
command_list,
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
check=check,
|
||||||
|
)
|
||||||
|
return _run_command
|
||||||
|
|
||||||
|
|
||||||
|
def test_nuitka_binary():
|
||||||
|
"""
|
||||||
|
Tests the creation and execution of a Nuitka-compiled binary.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 14):
|
||||||
|
print("Skipping Nuitka test: Python 3.14 is not yet fully supported by Nuitka.")
|
||||||
|
return
|
||||||
|
|
||||||
|
nuitka_command = [
|
||||||
|
"python", "-m", "nuitka", "--onefile", "--assume-yes-for-downloads",
|
||||||
|
"--include-data-file=./Whatsapp_Chat_Exporter/whatsapp.html=./Whatsapp_Chat_Exporter/whatsapp.html",
|
||||||
|
"Whatsapp_Chat_Exporter",
|
||||||
|
"--output-filename=wtsexporter.exe" # use .exe on all platforms for compatibility
|
||||||
|
]
|
||||||
|
|
||||||
|
compile_result = subprocess.run(
|
||||||
|
nuitka_command,
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
check=True
|
||||||
|
)
|
||||||
|
print(f"Nuitka compilation output: {compile_result.stdout}")
|
||||||
|
|
||||||
|
binary_path = "./wtsexporter.exe"
|
||||||
|
assert os.path.exists(binary_path), f"Binary {binary_path} was not created."
|
||||||
|
|
||||||
|
try:
|
||||||
|
execute_result = subprocess.run(
|
||||||
|
[binary_path, "--help"],
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
check=True,
|
||||||
|
)
|
||||||
|
print(f"Binary execution output: {execute_result.stdout}")
|
||||||
|
assert "usage:" in execute_result.stdout.lower(), "Binary did not produce expected help output."
|
||||||
|
except subprocess.CalledProcessError as e:
|
||||||
|
print(f"Binary execution failed with error: {e.stderr}")
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
if os.path.exists(binary_path):
|
||||||
|
os.remove(binary_path)
|
||||||
352
tests/test_utility.py
Normal file
352
tests/test_utility.py
Normal file
@@ -0,0 +1,352 @@
|
|||||||
|
import pytest
|
||||||
|
import random
|
||||||
|
import string
|
||||||
|
from unittest.mock import patch, mock_open, MagicMock
|
||||||
|
from Whatsapp_Chat_Exporter.utility import *
|
||||||
|
|
||||||
|
|
||||||
|
def test_convert_time_unit():
|
||||||
|
assert convert_time_unit(0) == "less than a second"
|
||||||
|
assert convert_time_unit(1) == "a second"
|
||||||
|
assert convert_time_unit(10) == "10 seconds"
|
||||||
|
assert convert_time_unit(60) == "1 minute"
|
||||||
|
assert convert_time_unit(61) == "1 minute 1 second"
|
||||||
|
assert convert_time_unit(122) == "2 minutes 2 seconds"
|
||||||
|
assert convert_time_unit(3600) == "1 hour"
|
||||||
|
assert convert_time_unit(3661) == "1 hour 1 minute 1 second"
|
||||||
|
assert convert_time_unit(3720) == "1 hour 2 minutes"
|
||||||
|
assert convert_time_unit(3660) == "1 hour 1 minute"
|
||||||
|
assert convert_time_unit(7263) == "2 hours 1 minute 3 seconds"
|
||||||
|
assert convert_time_unit(86400) == "1 day"
|
||||||
|
assert convert_time_unit(86461) == "1 day 1 minute 1 second"
|
||||||
|
assert convert_time_unit(172805) == "2 days 5 seconds"
|
||||||
|
|
||||||
|
|
||||||
|
class TestBytesToReadable:
|
||||||
|
assert bytes_to_readable(0) == "0 B"
|
||||||
|
assert bytes_to_readable(500) == "500 B"
|
||||||
|
assert bytes_to_readable(1024) == "1.0 KB"
|
||||||
|
assert bytes_to_readable(2048) == "2.0 KB"
|
||||||
|
assert bytes_to_readable(1536) == "1.5 KB"
|
||||||
|
assert bytes_to_readable(1024**2) == "1.0 MB"
|
||||||
|
assert bytes_to_readable(5 * 1024**2) == "5.0 MB"
|
||||||
|
assert bytes_to_readable(1024**3) == "1.0 GB"
|
||||||
|
assert bytes_to_readable(1024**4) == "1.0 TB"
|
||||||
|
assert bytes_to_readable(1024**5) == "1.0 PB"
|
||||||
|
assert bytes_to_readable(1024**6) == "1.0 EB"
|
||||||
|
assert bytes_to_readable(1024**7) == "1.0 ZB"
|
||||||
|
assert bytes_to_readable(1024**8) == "1.0 YB"
|
||||||
|
|
||||||
|
|
||||||
|
class TestReadableToBytes:
|
||||||
|
def test_conversion(self):
|
||||||
|
assert readable_to_bytes("0B") == 0
|
||||||
|
assert readable_to_bytes("100B") == 100
|
||||||
|
assert readable_to_bytes("50 B") == 50
|
||||||
|
assert readable_to_bytes("1KB") == 1024
|
||||||
|
assert readable_to_bytes("2.5 KB") == 2560
|
||||||
|
assert readable_to_bytes("2.0 KB") == 2048
|
||||||
|
assert readable_to_bytes("1MB") == 1024**2
|
||||||
|
assert readable_to_bytes("0.5 MB") == 524288
|
||||||
|
assert readable_to_bytes("1. MB") == 1048576
|
||||||
|
assert readable_to_bytes("1GB") == 1024**3
|
||||||
|
assert readable_to_bytes("1.GB") == 1024**3
|
||||||
|
assert readable_to_bytes("1TB") == 1024**4
|
||||||
|
assert readable_to_bytes("1PB") == 1024**5
|
||||||
|
assert readable_to_bytes("1EB") == 1024**6
|
||||||
|
assert readable_to_bytes("1ZB") == 1024**7
|
||||||
|
assert readable_to_bytes("1YB") == 1024**8
|
||||||
|
|
||||||
|
def test_case_insensitivity(self):
|
||||||
|
assert readable_to_bytes("1kb") == 1024
|
||||||
|
assert readable_to_bytes("2mB") == 2 * 1024**2
|
||||||
|
|
||||||
|
def test_whitespace(self):
|
||||||
|
assert readable_to_bytes(" 10 KB ") == 10 * 1024
|
||||||
|
assert readable_to_bytes(" 1 MB") == 1024**2
|
||||||
|
|
||||||
|
def test_invalid_unit(self):
|
||||||
|
with pytest.raises(ValueError, match="Invalid size format for size_str"):
|
||||||
|
readable_to_bytes("100X")
|
||||||
|
readable_to_bytes("A100")
|
||||||
|
readable_to_bytes("100$$$$$")
|
||||||
|
|
||||||
|
def test_invalid_number(self):
|
||||||
|
with pytest.raises(ValueError, match="Invalid size format for size_str"):
|
||||||
|
readable_to_bytes("ABC KB")
|
||||||
|
|
||||||
|
def test_missing_unit(self):
|
||||||
|
assert readable_to_bytes("100") == 100
|
||||||
|
|
||||||
|
|
||||||
|
class TestSanitizeExcept:
|
||||||
|
def test_no_tags(self):
|
||||||
|
html = "This is plain text."
|
||||||
|
assert sanitize_except(html) == Markup("This is plain text.")
|
||||||
|
|
||||||
|
def test_allowed_br_tag(self):
|
||||||
|
html = "Line 1<br>Line 2"
|
||||||
|
assert sanitize_except(html) == Markup("Line 1<br>Line 2")
|
||||||
|
html = "<br/>Line"
|
||||||
|
assert sanitize_except(html) == Markup("<br>Line")
|
||||||
|
html = "Line<br />"
|
||||||
|
assert sanitize_except(html) == Markup("Line<br>")
|
||||||
|
|
||||||
|
def test_mixed_tags(self):
|
||||||
|
html = "<b>Bold</b><br><i>Italic</i><img src='evil.gif'><script>alert('XSS')</script>"
|
||||||
|
assert sanitize_except(html) == Markup(
|
||||||
|
"<b>Bold</b><br><i>Italic</i><img src='evil.gif'><script>alert('XSS')</script>")
|
||||||
|
|
||||||
|
def test_attribute_stripping(self):
|
||||||
|
html = "<br class='someclass'>"
|
||||||
|
assert sanitize_except(html) == Markup("<br>")
|
||||||
|
|
||||||
|
|
||||||
|
class TestDetermineDay:
|
||||||
|
def test_same_day(self):
|
||||||
|
timestamp1 = 1678838400 # March 15, 2023 00:00:00 GMT
|
||||||
|
timestamp2 = 1678881600 # March 15, 2023 12:00:00 GMT
|
||||||
|
assert determine_day(timestamp1, timestamp2) is None
|
||||||
|
|
||||||
|
def test_different_day(self):
|
||||||
|
timestamp1 = 1678886400 # March 15, 2023 00:00:00 GMT
|
||||||
|
timestamp2 = 1678972800 # March 16, 2023 00:00:00 GMT
|
||||||
|
assert determine_day(timestamp1, timestamp2) == datetime(2023, 3, 16).date()
|
||||||
|
|
||||||
|
def test_crossing_month(self):
|
||||||
|
timestamp1 = 1680220800 # March 31, 2023 00:00:00 GMT
|
||||||
|
timestamp2 = 1680307200 # April 1, 2023 00:00:00 GMT
|
||||||
|
assert determine_day(timestamp1, timestamp2) == datetime(2023, 4, 1).date()
|
||||||
|
|
||||||
|
def test_crossing_year(self):
|
||||||
|
timestamp1 = 1703980800 # December 31, 2023 00:00:00 GMT
|
||||||
|
timestamp2 = 1704067200 # January 1, 2024 00:00:00 GMT
|
||||||
|
assert determine_day(timestamp1, timestamp2) == datetime(2024, 1, 1).date()
|
||||||
|
|
||||||
|
|
||||||
|
class TestGetFileName:
    """Tests for get_file_name(), which maps a contact identifier plus its
    ChatStore to a (file name, display name) pair."""

    def test_valid_contact_phone_number_no_chat_name(self):
        """A WhatsApp JID with no chat name falls back to the phone number."""
        store = ChatStore(Device.ANDROID, name=None)
        filename, display_name = get_file_name("1234567890@s.whatsapp.net", store)
        assert filename == "1234567890"
        assert display_name == "1234567890"

    def test_valid_contact_phone_number_with_chat_name(self):
        """A JID plus a chat name yields a hyphen-joined file name."""
        store = ChatStore(Device.IOS, name="My Chat Group")
        filename, display_name = get_file_name("1234567890@s.whatsapp.net", store)
        assert filename == "1234567890-My-Chat-Group"
        assert display_name == "My Chat Group"

    def test_valid_contact_exported_chat(self):
        """An exported-chat identifier keeps its literal prefix."""
        store = ChatStore(Device.ANDROID, name="Testing")
        filename, display_name = get_file_name("ExportedChat", store)
        assert filename == "ExportedChat-Testing"
        assert display_name == "Testing"

    def test_valid_contact_special_ids(self):
        """All-zero style special identifiers are still accepted as contacts."""
        store = ChatStore(Device.ANDROID, name="Special Chat")

        zeros_filename, zeros_name = get_file_name("000000000000000", store)
        assert zeros_filename == "000000000000000-Special-Chat"
        assert zeros_name == "Special Chat"

        ones_filename, ones_name = get_file_name("000000000000001", store)
        assert ones_filename == "000000000000001-Special-Chat"
        assert ones_name == "Special Chat"

    def test_unexpected_contact_format(self):
        """An identifier matching no known pattern raises ValueError."""
        store = ChatStore(Device.ANDROID, name="Some Chat")
        with pytest.raises(ValueError, match="Unexpected contact format: invalid-contact"):
            get_file_name("invalid-contact", store)

    def test_contact_with_hyphen_and_chat_name(self):
        """A hyphenated group JID with a chat name uses only the chat name."""
        store = ChatStore(Device.ANDROID, name="Another Chat")
        filename, display_name = get_file_name("123-456-7890@g.us", store)
        assert filename == "Another-Chat"
        assert display_name == "Another Chat"

    def test_contact_with_hyphen_no_chat_name(self):
        """A hyphenated group JID without a chat name keeps the raw id."""
        store = ChatStore(Device.ANDROID, name=None)
        filename, display_name = get_file_name("123-456-7890@g.us", store)
        assert filename == "123-456-7890"
        assert display_name == "123-456-7890"
class TestGetCondForEmpty:
    """Tests for get_cond_for_empty(), which builds the SQL fragment that
    filters out empty/hidden chats when enabled."""

    def test_enable_true(self):
        """Enabling the filter yields the hidden/broadcast SQL condition."""
        clause = get_cond_for_empty(True, "c.jid", "c.broadcast")
        assert clause == "AND (chat.hidden=0 OR c.jid='status@broadcast' OR c.broadcast>0)"

    def test_enable_false(self):
        """Disabling the filter yields an empty fragment."""
        clause = get_cond_for_empty(False, "other_jid", "other_broadcast")
        assert clause == ""
class TestGetChatCondition:
    # NOTE(review): empty placeholder. A complete TestGetChatCondition class
    # is defined later in this module and shadows this one at import time,
    # so these (nonexistent) tests never run — consider deleting this stub.
    ...
class TestGetStatusLocation:
    """Tests for get_status_location(), which resolves the W3.CSS stylesheet
    location and, in offline mode, downloads a local copy of it."""

    # Decorators are applied bottom-up, so the mock parameters below are
    # listed in the reverse order of the @patch lines.
    @patch('os.path.isdir')
    @patch('os.path.isfile')
    @patch('os.mkdir')
    @patch('urllib.request.urlopen')
    @patch('builtins.open', new_callable=mock_open)
    def test_offline_static_set(self, mock_open_file, mock_urlopen, mock_mkdir, mock_isfile, mock_isdir):
        """With an offline-static folder set, the CSS is fetched once and
        written to disk, and the local path is returned."""
        # Pretend neither the static folder nor the cached file exists yet.
        mock_isdir.return_value = False
        mock_isfile.return_value = False

        fake_response = MagicMock()
        fake_response.read.return_value = b'W3.CSS Content'
        mock_urlopen.return_value.__enter__.return_value = fake_response

        output_folder = "output_folder"
        offline_static = "offline_static"

        result = get_status_location(output_folder, offline_static)

        # Returned path is relative to the static folder...
        assert result == os.path.join(offline_static, "w3.css")
        # ...and the folder was created, the stylesheet fetched and written.
        mock_mkdir.assert_called_once_with(os.path.join(output_folder, offline_static))
        mock_urlopen.assert_called_once_with("https://www.w3schools.com/w3css/4/w3.css")
        mock_open_file.assert_called_once_with(os.path.join(output_folder, offline_static, "w3.css"), "wb")
        mock_open_file().write.assert_called_once_with(b'W3.CSS Content')

    def test_offline_static_not_set(self):
        """Without an offline-static folder, the hosted CDN URL is returned."""
        result = get_status_location("output_folder", "")
        assert result == "https://www.w3schools.com/w3css/4/w3.css"
class TestSafeName:
    """Tests for safe_name(), which converts arbitrary text into a
    filesystem-safe name (spaces become hyphens, unsafe symbols dropped,
    Unicode letters preserved)."""

    def generate_random_string(length=50):
        """Return a deterministic pseudo-random string: the seed is fixed, so
        repeated calls with the same length produce identical output (the
        test-case list below relies on this)."""
        random.seed(10)
        return ''.join(random.choice(string.ascii_letters + string.digits + "äöüß") for _ in range(length))

    # Pairs of (raw input, expected safe_name() output).
    safe_name_test_cases = [
        ("This is a test string", "This-is-a-test-string"),
        ("This is a test string with special characters!@#$%^&*()",
         "This-is-a-test-string-with-special-characters"),
        ("This is a test string with numbers 1234567890", "This-is-a-test-string-with-numbers-1234567890"),
        ("This is a test string with mixed case ThisIsATestString",
         "This-is-a-test-string-with-mixed-case-ThisIsATestString"),
        ("This is a test string with extra spaces \u00A0 \u00A0 \u00A0 ThisIsATestString",
         "This-is-a-test-string-with-extra-spaces-ThisIsATestString"),
        ("This is a test string with unicode characters äöüß",
         "This-is-a-test-string-with-unicode-characters-äöüß"),
        ("這是一個包含中文的測試字符串", "這是一個包含中文的測試字符串"),  # Chinese characters, should stay as is
        (
            f"This is a test string with long length {generate_random_string(1000)}",
            f"This-is-a-test-string-with-long-length-{generate_random_string(1000)}",
        ),
        ("", ""),  # Empty string
        (" ", ""),  # String with only space
        ("---", "---"),  # String with only hyphens
        ("___", "___"),  # String with only underscores
        ("a" * 100, "a" * 100),  # Long string with single character
        ("a-b-c-d-e", "a-b-c-d-e"),  # String with hyphen
        ("a_b_c_d_e", "a_b_c_d_e"),  # String with underscore
        ("a b c d e", "a-b-c-d-e"),  # String with spaces
        # FIXME(review): the '&' before "param2" had been mangled into '¶'
        # (an HTML-entity round-trip artifact); restored here so the input
        # matches the expected output, which contains "param2".
        ("test.com/path/to/resource?param1=value1&param2=value2",
         "test.compathtoresourceparam1value1param2value2"),  # Test with URL
        ("filename.txt", "filename.txt"),  # Test with filename
        ("Αυτή είναι μια δοκιμαστική συμβολοσειρά με ελληνικούς χαρακτήρες.",
         "Αυτή-είναι-μια-δοκιμαστική-συμβολοσειρά-με-ελληνικούς-χαρακτήρες."),  # Greek characters
        ("This is a test with комбинированные знаки ̆ example",
         "This-is-a-test-with-комбинированные-знаки-example")  # Mixed with unicode
    ]

    @pytest.mark.parametrize("input_text, expected_output", safe_name_test_cases)
    def test_safe_name(self, input_text, expected_output):
        """Every raw input must map exactly to its expected safe name."""
        result = safe_name(input_text)
        assert result == expected_output
class TestGetChatCondition:
    """Tests for get_chat_condition(), which builds the SQL WHERE fragment
    used to include or exclude specific chats by column matching."""

    def test_no_filter(self):
        """A None filter produces no SQL fragment at all."""
        assert get_chat_condition(None, True, ["column1", "column2"]) == ""
        assert get_chat_condition(None, False, ["column1"]) == ""

    def test_include_single_chat_single_column(self):
        """Including one chat over one column yields a single LIKE clause."""
        cond = get_chat_condition(["1234567890"], True, ["phone"])
        assert cond == "AND ( phone LIKE '%1234567890%')"

    def test_include_multiple_chats_single_column(self):
        """Multiple included chats are joined with OR."""
        cond = get_chat_condition(["1234567890", "0987654321"], True, ["phone"])
        assert cond == "AND ( phone LIKE '%1234567890%' OR phone LIKE '%0987654321%')"

    def test_exclude_single_chat_single_column(self):
        """Excluding one chat yields a single NOT LIKE clause."""
        cond = get_chat_condition(["1234567890"], False, ["phone"])
        assert cond == "AND ( phone NOT LIKE '%1234567890%')"

    def test_exclude_multiple_chats_single_column(self):
        """Multiple excluded chats are joined with AND."""
        cond = get_chat_condition(["1234567890", "0987654321"], False, ["phone"])
        assert cond == "AND ( phone NOT LIKE '%1234567890%' AND phone NOT LIKE '%0987654321%')"

    def test_include_with_jid_android(self):
        """On Android the name column is additionally guarded by jid.type."""
        cond = get_chat_condition(["1234567890"], True, ["phone", "name"], "jid", "android")
        assert cond == "AND ( phone LIKE '%1234567890%' OR (name LIKE '%1234567890%' AND jid.type == 1))"

    def test_include_with_jid_ios(self):
        """On iOS the name column is guarded by a jid NULL check instead."""
        cond = get_chat_condition(["1234567890"], True, ["phone", "name"], "jid", "ios")
        assert cond == "AND ( phone LIKE '%1234567890%' OR (name LIKE '%1234567890%' AND jid IS NOT NULL))"

    def test_exclude_with_jid_android(self):
        """Exclusion on Android combines NOT LIKE with the jid.type guard."""
        cond = get_chat_condition(["1234567890"], False, ["phone", "name"], "jid", "android")
        assert cond == "AND ( phone NOT LIKE '%1234567890%' AND (name NOT LIKE '%1234567890%' AND jid.type == 1))"

    def test_exclude_with_jid_ios(self):
        """Exclusion on iOS combines NOT LIKE with the jid NULL check."""
        cond = get_chat_condition(["1234567890"], False, ["phone", "name"], "jid", "ios")
        assert cond == "AND ( phone NOT LIKE '%1234567890%' AND (name NOT LIKE '%1234567890%' AND jid IS NOT NULL))"

    def test_multiple_chats_with_jid_android(self):
        """Each included chat contributes its own phone + name clause pair."""
        cond = get_chat_condition(["1234567890", "0987654321"], True, ["phone", "name"], "jid", "android")
        expected_sql = "AND ( phone LIKE '%1234567890%' OR (name LIKE '%1234567890%' AND jid.type == 1) OR phone LIKE '%0987654321%' OR (name LIKE '%0987654321%' AND jid.type == 1))"
        assert cond == expected_sql

    def test_multiple_chats_exclude_with_jid_android(self):
        """Each excluded chat contributes its own phone + name clause pair."""
        cond = get_chat_condition(["1234567890", "0987654321"], False, ["phone", "name"], "jid", "android")
        expected_sql = "AND ( phone NOT LIKE '%1234567890%' AND (name NOT LIKE '%1234567890%' AND jid.type == 1) AND phone NOT LIKE '%0987654321%' AND (name NOT LIKE '%0987654321%' AND jid.type == 1))"
        assert cond == expected_sql

    def test_invalid_column_count_with_jid(self):
        """A jid argument requires at least two columns."""
        with pytest.raises(ValueError, match="There must be at least two elements in argument columns if jid is not None"):
            get_chat_condition(["1234567890"], True, ["phone"], "jid", "android")

    def test_unsupported_platform(self):
        """Only android and ios are accepted as platform when jid is given."""
        with pytest.raises(ValueError, match="Only android and ios are supported for argument platform if jid is not None"):
            get_chat_condition(["1234567890"], True, ["phone", "name"], "jid", "windows")

    def test_empty_filter_list(self):
        """An empty filter list produces no SQL fragment."""
        assert get_chat_condition([], True, ["phone"]) == ""
        assert get_chat_condition([], False, ["phone"]) == ""

    def test_filter_with_empty_strings(self):
        """Empty strings in the filter become match-everything LIKE '%%'."""
        cond = get_chat_condition(["", "1234567890"], True, ["phone"])
        assert cond == "AND ( phone LIKE '%%' OR phone LIKE '%1234567890%')"

        cond = get_chat_condition([""], True, ["phone"])
        assert cond == "AND ( phone LIKE '%%')"

    def test_special_characters_in_filter(self):
        """Characters like '@' and '-' pass through into the LIKE pattern."""
        cond = get_chat_condition(["test@example.com"], True, ["email"])
        assert cond == "AND ( email LIKE '%test@example.com%')"

        cond = get_chat_condition(["user-name"], True, ["username"])
        assert cond == "AND ( username LIKE '%user-name%')"
48
tests/test_vcards_contacts.py
Normal file
48
tests/test_vcards_contacts.py
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
# from contacts_names_from_vcards import readVCardsFile
|
||||||
|
|
||||||
|
import os
|
||||||
|
from Whatsapp_Chat_Exporter.vcards_contacts import normalize_number, read_vcards_file
|
||||||
|
|
||||||
|
|
||||||
|
def test_readVCardsFile():
    """read_vcards_file() should parse the bundled vCard fixture and extract
    a display name for each of its six entries."""
    data_dir = os.path.join(os.path.dirname(__file__), "data")
    data = read_vcards_file(os.path.join(data_dir, "contacts.vcf"), "852")

    if data:
        print("Found Names")
        print("-----------------------")
        for count, entry in enumerate(data, start=1):
            # The name is the second element of each tuple (index 1).
            name = entry[1]
            print(f"{count}. {name}")
    print(data)

    assert len(data) == 6
    assert data[0][1] == "Sample Contact"             # simple contact name
    assert data[1][1] == "Yard Lawn Guy, Jose Lopez"  # complex name
    assert data[2][1] == "John Butler 🌟💫🌟"           # name containing emoji
    assert data[3][1] == "Airline Contact #'s"        # note with multi-line encoding
    assert data[4][1] == "James Peacock Elementary"   # address with multi-line encoding
    assert data[5][1] == "AAA Car Service"            # business entry using ORG but not N/FN
def test_create_number_to_name_dicts():
    """TODO: exercise the number-to-name dictionary builder (stub)."""
    pass
def test_fuzzy_match_numbers():
    """TODO: exercise fuzzy phone-number matching (stub)."""
    pass
def test_normalize_number():
    """normalize_number() should strip punctuation/prefixes and apply the
    country code to a leading 0."""
    cases = [
        # Leading 0 is replaced by the given country code.
        (('0531234567', '1'), '1531234567'),
        # International 00 prefix is dropped; the code is not re-applied.
        (('001531234567', '2'), '1531234567'),
        # A + prefix is dropped; the code is not re-applied.
        (('+1531234567', '34'), '1531234567'),
        # Parentheses and hyphens are removed.
        (('053(123)4567', '34'), '34531234567'),
        (('0531-234-567', '58'), '58531234567'),
    ]
    for (raw, country_code), expected in cases:
        assert normalize_number(raw, country_code) == expected
||||||
Reference in New Issue
Block a user