
Commit 6b440c4

rxpha3lqkaiser authored and committed

feat(handler): add par2 directory handler

1 parent f2918be · commit 6b440c4

File tree

11 files changed: +120 −0 lines changed


docs/handlers.md

Lines changed: 17 additions & 0 deletions
@@ -45,6 +45,7 @@
 | [`NETGEAR TRX V1`](#netgear-trx-v1) | ARCHIVE | :octicons-check-16: |
 | [`NETGEAR TRX V2`](#netgear-trx-v2) | ARCHIVE | :octicons-check-16: |
 | [`NTFS`](#ntfs) | FILESYSTEM | :octicons-check-16: |
+| [`PAR2 (MULTI-VOLUME)`](#par2-multi-volume) | ARCHIVE | :octicons-check-16: |
 | [`PARTCLONE`](#partclone) | ARCHIVE | :octicons-check-16: |
 | [`QNAP NAS`](#qnap-nas) | ARCHIVE | :octicons-check-16: |
 | [`RAR`](#rar) | ARCHIVE | :octicons-alert-fill-12: |
@@ -793,6 +794,22 @@
 === "References"
 
     - [NTFS Wikipedia](https://en.wikipedia.org/wiki/NTFS){ target="_blank" }
+## PAR2 (multi-volume)
+
+!!! success "Fully supported"
+
+=== "Description"
+
+    Parchive, or PAR2, is a format for creating redundant data that helps detect and repair corrupted files. These archives typically accompany split-file sets (like multi-volume RAR or ZIP archives). Each PAR2 file is composed of multiple 'packets'.
+
+    ---
+
+    - **Handler type:** Archive
+
+
+=== "References"
+
+    - [Parchive Documentation](https://parchive.github.io/){ target="_blank" }
 ## Partclone
 
 !!! success "Fully supported"
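The 'packets' mentioned in the description share a fixed 64-byte header: an 8-byte magic, a little-endian 64-bit packet length (which includes the header itself), a 16-byte MD5 of the packet body, a 16-byte recovery set ID, and a 16-byte type string. A minimal standalone sketch of walking the packets in one PAR2 file, using only the standard library (the sample file name is hypothetical):

```python
import struct
from pathlib import Path

PAR2_MAGIC = b"PAR2\x00PKT"


def iter_packets(path: Path):
    """Yield (packet_type, body_length) for each packet in a PAR2 file."""
    data = path.read_bytes()
    offset = 0
    while offset + 64 <= len(data):
        magic, length, _md5, _set_id, ptype = struct.unpack_from(
            "<8sQ16s16s16s", data, offset
        )
        if magic != PAR2_MAGIC or length < 64:
            break  # not at a packet boundary; stop scanning
        yield ptype.rstrip(b"\x00"), length - 64
        offset += length  # packet_length counts the 64-byte header too


for ptype, body_len in iter_packets(Path("archive.par2")):  # hypothetical file
    print(ptype, body_len)
```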

python/unblob/handlers/__init__.py

Lines changed: 2 additions & 0 deletions
@@ -6,6 +6,7 @@
     cab,
     cpio,
     dmg,
+    par2,
     partclone,
     rar,
     sevenzip,
@@ -126,4 +127,5 @@
 BUILTIN_DIR_HANDLERS: DirectoryHandlers = (
     sevenzip.MultiVolumeSevenZipHandler,
     gzip.MultiVolumeGzipHandler,
+    par2.MultiVolumePAR2Handler,
 )
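With the module imported and the class appended to `BUILTIN_DIR_HANDLERS`, unblob's directory-handler pass matches the `Glob` pattern against extracted directories and asks the handler to group volumes. A sketch of driving the handler directly, assuming the new module lands at `unblob.handlers.archive.par2` like the sibling archive handlers, with a hypothetical sample path:

```python
from pathlib import Path

# Assumed module path, mirroring the other archive handlers in this package.
from unblob.handlers.archive.par2 import MultiVolumePAR2Handler

handler = MultiVolumePAR2Handler()
# Returns a MultiFile grouping all volumes of the set, or None if the set
# has a single file or any volume fails header validation.
multi = handler.calculate_multifile(Path("samples/archive.par2"))  # hypothetical
if multi:
    print(multi.name, [p.name for p in multi.paths])
```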
Lines changed: 80 additions & 0 deletions
@@ -0,0 +1,80 @@
import hashlib
import io
from pathlib import Path
from typing import Optional

from unblob.file_utils import Endian, StructParser
from unblob.models import (
    DirectoryHandler,
    Glob,
    HandlerDoc,
    HandlerType,
    MultiFile,
    Reference,
)

C_DEFINITIONS = r"""
typedef struct par2_header{
    char magic[8];
    uint64 packet_length;
    char md5_hash[16];
    char recovery_set_id[16];
    char type[16];
} par2_header_t;
"""

PAR2_MAGIC = b"PAR2\x00PKT"
HEADER_STRUCT = "par2_header_t"
HEADER_PARSER = StructParser(C_DEFINITIONS)


class MultiVolumePAR2Handler(DirectoryHandler):
    NAME = "multi-par2"
    PATTERN = Glob("*.par2")
    EXTRACTOR = None

    DOC = HandlerDoc(
        name="PAR2 (multi-volume)",
        description="Parchive, or PAR2, is a format for creating redundant data that helps detect and repair corrupted files. These archives typically accompany split-file sets (like multi-volume RAR or ZIP archives). Each PAR2 file is composed of multiple 'packets'.",
        handler_type=HandlerType.ARCHIVE,
        vendor=None,
        references=[
            Reference(
                title="Parchive Documentation",
                url="https://parchive.github.io/",
            ),
        ],
        limitations=[],
    )

    def is_valid_header(self, file_paths: list) -> bool:
        for path in file_paths:
            with path.open("rb") as f:
                header = HEADER_PARSER.parse(HEADER_STRUCT, f, Endian.LITTLE)
                if header.magic != PAR2_MAGIC:
                    return False

                # The stored MD5 covers the packet from the recovery set ID
                # (offset 32) to the end, i.e. packet_length - 32 bytes.
                offset_to_recovery_id = 32
                # seek to beginning of recovery set ID
                f.seek(offset_to_recovery_id, io.SEEK_SET)
                packet_content = f.read(
                    header.packet_length - len(header) + offset_to_recovery_id
                )
                packet_checksum = hashlib.md5(packet_content).digest()  # noqa: S324

                if packet_checksum != header.md5_hash:
                    return False
        return True

    def calculate_multifile(self, file: Path) -> Optional[MultiFile]:
        # Group sibling volumes that share the matched file's stem.
        paths = sorted(
            [p for p in file.parent.glob(f"{file.stem}.*") if p.resolve().exists()]
        )

        if len(paths) <= 1 or not self.is_valid_header(paths):
            return None

        return MultiFile(
            name=file.stem,
            paths=paths,
        )
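The arithmetic in `is_valid_header` is easiest to see with a hand-built packet: the header is 64 bytes, the MD5 covers everything from the recovery set ID at offset 32 to the end of the packet, so after seeking to offset 32 the handler reads `packet_length - 64 + 32` bytes. A self-contained sketch verifying that logic (the Main packet type string follows the PAR2 spec; the empty body and zeroed set ID are assumptions to keep it minimal):

```python
import hashlib
import struct

MAGIC = b"PAR2\x00PKT"
set_id = bytes(16)                           # arbitrary recovery set ID
ptype = b"PAR 2.0\x00Main\x00\x00\x00\x00"   # 16-byte packet type field
body = b""                                   # minimal: no packet body
packet_length = 64 + len(body)

# The stored MD5 covers bytes 32..packet_length: set ID + type + body.
digest = hashlib.md5(set_id + ptype + body).digest()
packet = MAGIC + struct.pack("<Q", packet_length) + digest + set_id + ptype + body
assert len(packet) == packet_length

# Re-check the same way the handler does: hash from offset 32 to the end.
assert hashlib.md5(packet[32:packet_length]).digest() == packet[16:32]
```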
Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ec6b3ce93cd3dbecf78e67740c4c7592bd2563004f319d63606e984deaadd816
size 20892

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:84ded5e3d5a6676b65f735637532d7d2aa408215603b7044bdf9ee2b8deb3a1a
size 20964

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:456d8113f08c9a6e3a98eb0df3fe0f069461435acc727527b932ee8e9e1e9c69
size 41824

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2b64b4c8eb00f579ad3370661307073f685bbc3ce2a8ebed413e530f87690090
size 62756

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:acee373d6b04ccacd7414361d3e19a6c42fa4cf6d9cf6bc867060b8777de5b32
size 83832

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:162a4b25d4dba77bf4b059c79c2c361a620dd1cc2e4e46696fb1e97e3009fa48
size 105196

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f44677b7f3f4b3b811efaf8a4d8646107d1f0c4e8ab35aea854f8090c124e921
size 105484
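The test fixtures above are stored with Git LFS, so only three-line pointer files live in the repository; the actual PAR2 volumes are fetched by content hash. A sketch of reading one pointer, following the key-value layout of the linked LFS spec (the fixture path is hypothetical):

```python
from pathlib import Path


def parse_lfs_pointer(path: Path) -> dict:
    """Parse a 'key value' Git LFS pointer file into a dict."""
    fields = dict(
        line.split(" ", 1) for line in path.read_text().splitlines() if line
    )
    return {
        "version": fields["version"],
        "oid": fields["oid"].removeprefix("sha256:"),
        "size": int(fields["size"]),
    }


info = parse_lfs_pointer(Path("tests/integration/sample.par2"))  # hypothetical
print(info["oid"], info["size"])
```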
