
Commit

Partially annotated files for static checks
Fixed a corner case when unpacking vromfs
Updated dependencies
Slightly changed the packaged files structure
klensy committed Dec 31, 2017
1 parent 82f2f70 commit 3e8435e
Showing 6 changed files with 41 additions and 32 deletions.
5 changes: 5 additions & 0 deletions .flake8
@@ -0,0 +1,5 @@
[flake8]
ignore =
max-line-length = 120
exclude = tests/*
max-complexity = 10
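
The new .flake8 configuration enforces a 120-character line limit and a McCabe complexity ceiling of 10, and skips everything under tests/. A minimal sketch of running the check locally (an assumption for illustration; the commit only adds the config file and does not show how it is invoked):

    import subprocess
    import sys

    # flake8 reads the .flake8 file from the working directory automatically.
    result = subprocess.run([sys.executable, "-m", "flake8", "blk_unpack.py"])
    print("clean" if result.returncode == 0 else "style issues found")
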
44 changes: 23 additions & 21 deletions blk_unpack.py
@@ -5,6 +5,8 @@
import argparse
import os.path

+ from typing import Tuple, List, Iterable, Any, Dict

type_list = {
0x0: 'size', 0x1: 'str', 0x2: 'int', 0x3: 'float', 0x4: 'vec2f',
0x5: 'vec3f', 0x6: 'vec4f', 0x7: 'vec2i', 0x8: 'typex8', 0x9: 'bool',
@@ -72,10 +74,10 @@ class BLK:
def __init__(self, data):
self.data = data
self.num_of_units_in_file = 0
- self.ids_w_names = dict()  # {key_id: key_string} for keys
+ self.ids_w_names: Dict[int, str] = dict()  # {key_id: key_string} for keys
self.blk_version = 0 # 2 for 1.45 and lower, 3 for 1.47

- def unpack(self, out_type=output_type['json']):
+ def unpack(self, out_type=output_type['json']) -> str:
# check file header and version
# TODO: error handle
if struct.unpack_from('4s', self.data, 0)[0].decode("utf-8") != BLK.bbf_magic:
@@ -107,7 +109,7 @@ def unpack(self, out_type=output_type['json']):

# print '\nnum of sub units: ' + str(total_sub_units)
cur_p += 0x4
- sub_units_size = []
+ sub_units_size: List[int] = []
while len(sub_units_size) < total_sub_units:
sub_units_size.append(struct.unpack_from('H', self.data, cur_p)[0])
cur_p += 2
@@ -236,7 +238,7 @@ def get_unit_sizes_and_ids(self):
else:
raise TypeError('Unknown block = {:x}'.format(header_type))

- def parse_data(self, cur_p, sub_units_names, out_type):
+ def parse_data(self, cur_p: int, sub_units_names, out_type):
"""
Read main block of data and parse it.
@@ -252,14 +254,14 @@ def parse_data(self, cur_p, sub_units_names, out_type):
full_data, cur_p = self.parse_inner_v3(cur_p, b_size, sub_units_names, out_type)
return full_data

- def read_first_header(self, offset):
+ def read_first_header(self, offset: int):
linear_units, group_num = struct.unpack_from('HH', self.data, offset)
return (linear_units, group_num), True

def parse_inner(self, cur_p, b_size, sub_units_names, out_type):
# TODO: make class from it, drop ids_w_names, sub_units_names refs
if out_type == BLK.output_type['strict_blk']:
- curr_block = []
+ curr_block: Iterable = []
else:
curr_block = OrderedDict()
not_list = True # flag for group_num == 0
@@ -295,22 +297,22 @@ def parse_inner(self, cur_p, b_size, sub_units_names, out_type):
break
return curr_block, cur_p

- def parse_inner_v3(self, cur_p, b_size, sub_units_names, out_type):
+ def parse_inner_v3(self, cur_p: int, b_size, sub_units_names, out_type):
# TODO: make class from it, drop ids_w_names, sub_units_names refs
if out_type == BLK.output_type['strict_blk']:
- curr_block = []
+ curr_block: Iterable = []
else:
curr_block = OrderedDict()
not_list = True # flag for group_num == 0
# print 'b_size, cur_p =', b_size, cur_p
while cur_p < len(self.data):
flat_num, group_num = b_size
if flat_num > 0:
- id_list = [None] * flat_num
+ id_list: List[Tuple] = []
for i in range(flat_num):
b_id, b_type = self.get_block_id_w_type(cur_p)
b_value, b_off = self.get_block_value(cur_p, b_type)
- id_list[i] = ((b_id, b_type, b_value))
+ id_list.append((b_id, b_type, b_value))
cur_p += 4
# print id_list
# print 'cur_p start 2th cycle: %d' % cur_p
@@ -350,7 +352,7 @@ def parse_inner_v3(self, cur_p, b_size, sub_units_names, out_type):
break
return curr_block, cur_p

- def parse_inner_detect_take(self, is_not_list, str_id, val_type, value, block, out_type):
+ def parse_inner_detect_take(self, is_not_list: bool, str_id, val_type, value, block, out_type) -> Tuple[Any, bool]:
"""
Check if str_id not already in block as key, and change it type
to list if necessary(duplicated), and return block and is_not_list state
@@ -369,11 +371,11 @@ def parse_inner_detect_take(self, is_not_list, str_id, val_type, value, block, o
return block, is_not_list

# return block id with type
- def get_block_id_w_type(self, offset):
+ def get_block_id_w_type(self, offset: int) -> Tuple[int, int]:
block_id, block_type = struct.unpack_from('HxB', self.data, offset)
return block_id, block_type

- def from_id_to_str(self, id, type, value, sub_units_names):
+ def from_id_to_str(self, id: int, type: int, value, sub_units_names) -> Tuple[str, Any]:
item_id = self.ids_w_names[id]
item_type = type_list[type]
if item_type != 'size':
@@ -383,7 +385,7 @@ def from_id_to_str(self, id, type, value, sub_units_names):
return item_id, []

# return value, next offset
- def get_block_value(self, id_offset, block_type):
+ def get_block_value(self, id_offset: int, block_type: int) -> Tuple[Any, int]:
if block_type not in type_list:
raise TypeError("Unknown type = {:x}, position = {:x}".format(block_type, id_offset))
block_type_from_list = type_list[block_type]
@@ -423,7 +425,7 @@ def get_block_value(self, id_offset, block_type):
value, offset = struct.unpack_from('III', self.data, id_offset + 0x4), 0x10
return value, offset

- def print_item(self, item_type, item_data, sub_units_names):
+ def print_item(self, item_type: str, item_data, sub_units_names):
if item_type == 'str':
return sub_units_names[item_data].decode("utf-8")
elif item_type == 'float':
@@ -449,7 +451,7 @@ def print_item(self, item_type, item_data, sub_units_names):

# format output for strict blk type
def print_item_for_strict_blk(self, item_str_id, item_type, item_data,
- indent_level):
+ indent_level: int) -> str:
# check if item_str_id is string with spaces, then add quotes
if ' ' in item_str_id:
item_str_id = '"' + item_str_id + '"'
@@ -470,13 +472,13 @@ def print_item_for_strict_blk(self, item_str_id, item_type, item_data,
else:
return ret + str(item_data)

- def print_strict_blk(self, s_data):
+ def print_strict_blk(self, s_data) -> str:
s_data_lines = self.print_strict_blk_inner(s_data)
if s_data_lines[0] == '':
s_data_lines.pop(0)
return '\n'.join(s_data_lines)

- def print_strict_blk_inner(self, s_data, indent_level=0):
+ def print_strict_blk_inner(self, s_data, indent_level=0) -> List[str]:
lines = []
for line in s_data:
id_str_name = line[0]
@@ -490,7 +492,7 @@ def print_strict_blk_inner(self, s_data, indent_level=0):
lines.append('%s}' % (' ' * indent_level))
return lines

- def _hash_key_name(self, key):
+ def _hash_key_name(self, key: str) -> int:
"""
Generate hashcode from 'key' string name.
"""
@@ -500,7 +502,7 @@ def _hash_key_name(self, key):
return key_hash


- def unpack_file(filename, out_type):
+ def unpack_file(filename, out_type: int):
with open(filename, 'rb') as f:
data = f.read()
if len(data) == 0:
@@ -518,7 +520,7 @@ def unpack_file(filename, out_type):
print(' ', e)


- def unpack_dir(dirname, out_type):
+ def unpack_dir(dirname, out_type: int):
"""
Unpack all *.blk files in `dirname` with `out_type` format.
"""
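
The blk_unpack.py changes above are mostly typing annotations rather than behaviour changes. A small standalone sketch of the pattern (hypothetical helper, not project code; a type checker such as mypy is not configured by this commit and would be an optional extra step):

    from typing import Dict, List, Tuple

    ids_w_names: Dict[int, str] = {}  # same shape as BLK.ids_w_names: key id -> key string

    def collect_ids(flat_num: int) -> List[Tuple[int, int, int]]:
        # Preallocating with [None] * flat_num would make a checker infer a list of None,
        # which is why the diff switches to an annotated empty list plus append().
        id_list: List[Tuple[int, int, int]] = []
        for i in range(flat_num):
            id_list.append((i, 0x2, i * 4))  # placeholder (block id, type, value) triples
        return id_list

    print(collect_ids(3))  # [(0, 2, 0), (1, 2, 4), (2, 2, 8)]
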
3 changes: 2 additions & 1 deletion formats/common.py
@@ -1,5 +1,6 @@
import zlib
- from construct import *

+ from construct import Construct, Struct, Tell, Computed, Seek, this


# used for unpacking zlib block and return in context
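
Replacing from construct import * with explicit names lets static checkers report undefined identifiers instead of assuming they come from the star import (pyflakes codes F403/F405). An illustrative fragment using only the names imported above (not taken from the project):

    from construct import Struct, Tell, Computed, this

    # Tell records the current stream offset; Computed derives a value from it.
    offset_probe = Struct(
        "start" / Tell,
        "doubled" / Computed(this.start * 2),
    )
    print(offset_probe.parse(b""))  # roughly Container(start=0, doubled=0)
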
11 changes: 6 additions & 5 deletions formats/vromfs_parser.py
@@ -1,7 +1,8 @@
import struct

import zstd
- from construct import *

+ from construct import Construct, Enum, Byte, this, Adapter, Struct, Seek, Int32ul, Array, CString, Tell, If, Bytes, \
+ Computed, Embedded, Switch, Error, Const, Int64ub, Int24ul, Hex

from formats.common import zlib_stream

@@ -25,10 +26,10 @@ def _parse(self, stream, ctx, path):
need_read_size = ctx._._.header.packed_size - (16 if ctx.first_part else 0) - (16 if ctx.second_part else 0)
# ugly: align read size to 4 bytes
need_read_size = need_read_size // 4 * 4
- deobfs_compressed_data = (ctx.first_part if ctx.first_part else '') + \
+ deobfs_compressed_data = (ctx.first_part if ctx.first_part else b'') + \
stream.getvalue()[ctx.middle_data_offset:ctx.middle_data_offset + need_read_size] + \
- (ctx.second_part.data if ctx.second_part.data else '') + \
- (ctx.align_tail if ctx.align_tail else '')
+ (ctx.second_part.data if ctx.second_part.data else b'') + \
+ (ctx.align_tail if ctx.align_tail else b'')
dctx = zstd.ZstdDecompressor()
decompressed_data = dctx.decompress(deobfs_compressed_data, max_output_size=ctx._._.header.original_size)
ctx.parsed_data = vromfs_not_packed_body.parse(decompressed_data)
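
The vromfs fix swaps the '' fallbacks for b'' so the concatenation stays bytes-only when an optional chunk is absent; in Python 3, adding str to bytes raises TypeError, which is the corner case mentioned in the commit message. A self-contained illustration with placeholder data (not the real vromfs layout):

    first_part = None                  # optional chunk missing, the corner case
    middle_data = b"\x28\xb5\x2f\xfd"  # stand-in for the compressed payload

    try:
        blob = (first_part if first_part else '') + middle_data   # old fallback: str + bytes
    except TypeError:
        blob = (first_part if first_part else b'') + middle_data  # fixed fallback: bytes + bytes

    assert blob == middle_data
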
3 changes: 1 addition & 2 deletions requirements.txt
@@ -8,5 +8,4 @@ git+https://github.com/indygreg/[email protected] --install-option="--l
pylzma==0.4.9

# for building wt-tools
- # cx_Freeze v5.x branch commit
- git+https://github.com/anthony-tuininga/cx_Freeze.git@e9272a53624da9da7a2cd0dc7b55ed5fce655251
+ cx_Freeze==6.0b1
7 changes: 4 additions & 3 deletions setup.py
@@ -1,9 +1,10 @@
from cx_Freeze import setup, Executable

packages = ["construct", "zstd", "pylzma"]
packages = []
includes = []
excludes = ["socket", "unittest", "http", "email", "pydoc", "construct.examples"]
includefiles = []
+ zip_include_packages = ["collections", "construct", "ctypes", "encodings", "json", "logging", "importlib", "formats"]

blk_unpack = Executable(
script="blk_unpack.py",
@@ -31,10 +32,10 @@

setup(
name="wt-tools",
version="0.2.1-dev",
version="0.2.1.1-dev",
author='klensy',
description="War Thunder resource extraction tools",
options={"build_exe": {"includes": includes, "excludes": excludes, "include_files": includefiles,
"packages": packages}},
"packages": packages, "zip_include_packages": zip_include_packages}},
executables=[blk_unpack, clog_unpack, ddsx_unpack, dxp_unpack, vromfs_unpacker, wrpl_unpacker]
)
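
With cx_Freeze 6, packages listed in zip_include_packages are bundled inside the frozen application's library.zip instead of being copied as loose directories, which is presumably the "slightly changed packaged files structure" noted in the commit message. A sketch of the resulting build options, with values copied from the diff above (an assumption for illustration, not taken from a build log):

    build_exe_options = {
        "includes": [],
        "excludes": ["socket", "unittest", "http", "email", "pydoc", "construct.examples"],
        "include_files": [],
        "packages": [],
        # these packages end up inside lib/library.zip in the build output
        "zip_include_packages": ["collections", "construct", "ctypes", "encodings",
                                 "json", "logging", "importlib", "formats"],
    }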
