ovl - fixes for PC2 saving
HENDRIX-ZT2 committed Dec 3, 2024
1 parent b7a27af · commit 975ae94
Showing 10 changed files with 66 additions and 46 deletions.
2 changes: 1 addition & 1 deletion __init__.py
@@ -3,7 +3,7 @@
     "name": "Frontier's Cobra Engine Formats",
     "author": "Harlequinz Ego, HENDRIX et al.",
     "blender": (4, 0, 0),
-    "version": (2024, 11, 27),
+    "version": (2024, 11, 28),
     "location": "File > Import-Export",
     "description": "Import-Export models, skeletons and animations",
     "warning": "",
6 changes: 3 additions & 3 deletions __version__.py
@@ -1,4 +1,4 @@
 # this file is auto-generated by the pre-commit hook increment_version.py
-VERSION = "2024.11.27"
-COMMIT_HASH = "1e9d7086b"
-COMMIT_TIME = "Wed Nov 27 18:54:31 2024 +0100"
+VERSION = "2024.11.28"
+COMMIT_HASH = "b7a27aff9"
+COMMIT_TIME = "Thu Nov 28 08:46:23 2024 +0100"
7 changes: 5 additions & 2 deletions generated/formats/base/compounds/ZStringBuffer.py
@@ -49,17 +49,20 @@ def _get_str_at(self, pos):
     def update_strings(self, list_of_strs):
         """Updates this name buffer with a list of names"""
         # logging.debug(f"Updating name buffer, data = {len(self.data)} bytes {self.data}")
-        self.strings = sorted(set(list_of_strs))
+        self.strings = []
         self.offset_dic = {}
         self.offset_2_str = {}
         with BytesIO() as stream:
-            for name in self.strings:
+            for name in list_of_strs:
                 if not isinstance(name, str):
                     logging.warning(f"Name '{name}' is not a string, skipping")
                     continue
+                if name in self.offset_dic:
+                    continue
                 # store offset and write zstring
                 self.offset_dic[name] = stream.tell()
                 self.offset_2_str[stream.tell()] = name
+                self.strings.append(name)
                 ZString.to_stream(name, stream, self.context)
         # get the actual result buffer
         buffer_bytes = stream.getvalue()
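
The net effect: update_strings no longer sorts its input. It now deduplicates while preserving the caller's order, so the byte layout of the name buffer follows the order in which names are passed in, and any desired ordering becomes the caller's responsibility. A minimal, self-contained sketch of the new behavior (write_zstring_buffer is a made-up helper, not the library API):

    from io import BytesIO

    def write_zstring_buffer(names):
        # dedupe while keeping first-seen order; map each name to its buffer offset
        offset_dic = {}
        with BytesIO() as stream:
            for name in names:
                if not isinstance(name, str) or name in offset_dic:
                    continue
                offset_dic[name] = stream.tell()
                stream.write(name.encode() + b"\x00")  # zstring = bytes + null terminator
            return stream.getvalue(), offset_dic

    buf, offsets = write_zstring_buffer(["b", "a", "b"])
    assert list(offsets) == ["b", "a"]  # first-seen order kept, duplicate written once
    assert buf == b"b\x00a\x00"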
2 changes: 1 addition & 1 deletion generated/formats/ms2/compounds/JointData.py
@@ -236,7 +236,7 @@ def write_fields(cls, stream, instance):
         instance.context.joint_names = instance.joint_names
         # if instance.context.version <= 32:
         #     set arg = instance.joint_names
-        strings = list(instance.get_strings())
+        strings = sorted(instance.get_strings())
         instance.joint_names.update_strings(strings)
         # at least PC+PZ store the length without the 8 byte alignment padding at the end
         # however the end of ZStringBuffer is aligned to 8 and may be padded additionally
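
With the sorting removed from ZStringBuffer, this caller now sorts explicitly to keep the joint name buffer deterministic. If get_strings() yields a set-like collection (an assumption here), list() would preserve whatever iteration order the set happens to have, which can differ between runs due to hash randomization, while sorted() always produces the same order:

    names = {"def_c_head_joint", "def_c_spine_joint"}  # hypothetical joint names
    list(names)    # arbitrary set order, may vary between interpreter runs
    sorted(names)  # always ['def_c_head_joint', 'def_c_spine_joint']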
23 changes: 14 additions & 9 deletions generated/formats/ovl/__init__.py
@@ -328,7 +328,7 @@ def rebuild_pools(self):
         logging.info("Updating pool names, deleting unused pools")
         # map the pool types to pools
         pools_by_type = {}
-        for pool_index, pool in enumerate(self.ovl.reporter.iter_progress(self.pools, "Updating pools")):
+        for pool_index, pool in enumerate(self.ovl.reporter.iter_progress(self.pools, "Rebuilding pools", cond=len(self.pools) > 1)):
             if pool.offsets:
                 # store pool in pool_groups map
                 if pool.type not in pools_by_type:
@@ -1097,14 +1097,15 @@ def get_dep_hash(name):
         if UNK_HASH in name:
             logging.warning(f"Won't update hash {name}")
             return int(name.replace(f"{UNK_HASH}_", ""))
+        elif UNK_HASH.lower() in name:
+            logging.warning(f"Won't update hash {name}")
+            return int(name.replace(f"{UNK_HASH.lower()}_", ""))
         return djb2(name)

     def rebuild_ovl_arrays(self):
         """Call this if any file names have changed and hashes or indices have to be recomputed"""

         # update file hashes and extend entries per loader
-        # self.files.sort(key=lambda x: (x.ext, x.file_hash))
-        # sorted_loaders = sorted(self.loaders, key=lambda x: (x.ext, x.file_hash))
         loaders_by_extension = self.get_loaders_by_ext()
         mimes_ext = sorted(loaders_by_extension)
         mimes_triplets = [self.get_mime(ext, "triplets") for ext in mimes_ext]
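
The new elif makes the unknown-hash fallback case-insensitive: names that embed an UNK_HASH placeholder keep their stored hash value instead of being rehashed, now also when the placeholder appears in lowercase. For everything else, djb2 is the classic Bernstein string hash; a common rendering for reference (the exact cobra-tools variant, e.g. its case handling or bit masking, may differ):

    def djb2(s: str) -> int:
        # Bernstein hash: h = h * 33 + byte, truncated to 32 bits
        h = 5381
        for c in s.encode("utf-8"):
            h = (h * 33 + c) & 0xFFFFFFFF
        return h

    print(djb2("cow.ms2"))  # deterministic hash for a file name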
@@ -1138,10 +1139,10 @@ def rebuild_ovl_arrays(self):
         deps_ext = [ext.replace(".", ":") for ext in deps_ext]
         aux_suffices = [aux_suffix for aux_suffix, loader in loaders_and_aux]
         names_list = [
-            *aux_suffices,
             *sorted(set(deps_ext)),
             *ovl_includes,
             *mimes_name,
+            *aux_suffices,
             *sorted(loader.basename for loader in self.loaders.values())]
         self.names.update_strings(names_list)
         # create the mimes
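
Because update_strings now preserves input order, a name's position in names_list determines its byte offset in the names buffer; moving *aux_suffices below *mimes_name therefore changes the written file layout, presumably to match what PC2 expects. A toy illustration of position determining offset (names are made up):

    def zstring_offsets(names):
        out, pos = {}, 0
        for n in names:
            if n not in out:
                out[n] = pos
                pos += len(n) + 1  # null terminator
        return out

    print(zstring_offsets(["_lod0", "cow.ms2"]))  # {'_lod0': 0, 'cow.ms2': 6}
    print(zstring_offsets(["cow.ms2", "_lod0"]))  # {'cow.ms2': 0, '_lod0': 8}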
@@ -1199,7 +1200,7 @@ def rebuild_ovl_arrays(self):
         # update all pools before indexing anything that points into pools
         pools_offset = 0
         self.archives.sort(key=lambda a: a.name)
-        for archive in self.archives:
+        for archive in self.reporter.iter_progress(self.archives, "Rebuilding pools"):
             ovs = archive.content
             ovs.clear_ovs_arrays()
             ovs.rebuild_pools()
@@ -1246,7 +1247,7 @@ def rebuild_ovs_arrays(self, flat_sorted_loaders, ext_lut):
             logging.exception(f"Couldn't map loader {loader.name} to ovs {loader.ovs_name}")
             raise
         # remove all entries to rebuild them from the loaders
-        for archive in self.archives:
+        for archive in self.reporter.iter_progress(self.archives, "Updating headers"):
             ovs = archive.content
             loaders = archive_name_to_loaders[archive.name]
             archive.num_root_entries = len(loaders)
@@ -1307,7 +1308,7 @@ def resolve(pool, offset):

         pools_byte_offset = 0
         # make a temporary copy so we can delete archive if needed
-        for archive in tuple(self.archives):
+        for archive in self.reporter.iter_progress(tuple(self.archives), "Updating archives"):

             logging.debug(f"Sorting pools for {archive.name}")
             ovs = archive.content
@@ -1441,8 +1442,12 @@ def save(self, filepath, use_threads=True):
         self.rebuild_ovl_arrays()
         # these need to be done after the rest
         self.update_stream_files()
-        ovs_types = {archive.name for archive in self.archives if "Textures_L" not in archive.name}
-        ovs_types.discard("STATIC")
+        ovs_types = set()
+        for loader in self.loaders.values():
+            if loader.ovs.arg.name != "STATIC" and loader.ext not in (".tex", ".texturestream"):
+                ovs_types.add(loader.ovs.arg.name)
+        # ovs_types = {archive.name for archive in self.archives if "Textures_L" not in archive.name}
+        # ovs_types.discard("STATIC")
         self.num_ovs_types = len(ovs_types)
         ovl_compressed = b""
         self.reset_field("archives_meta")
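
num_ovs_types is now derived from the loaders that actually reside in each ovs rather than from archive names, so STATIC and texture archives no longer inflate the count even when they exist. A toy check with stand-in objects (attribute names follow the diff; the data is invented):

    from types import SimpleNamespace as NS

    loaders = {
        "cow.ms2": NS(ext=".ms2", ovs=NS(arg=NS(name="Models"))),
        "cow.tex": NS(ext=".tex", ovs=NS(arg=NS(name="Textures_L0"))),
        "cow.fdb": NS(ext=".fdb", ovs=NS(arg=NS(name="STATIC"))),
    }
    ovs_types = set()
    for loader in loaders.values():
        if loader.ovs.arg.name != "STATIC" and loader.ext not in (".tex", ".texturestream"):
            ovs_types.add(loader.ovs.arg.name)
    print(len(ovs_types))  # 1 -> only "Models" counts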
38 changes: 21 additions & 17 deletions modules/formats/shared.py
@@ -73,25 +73,29 @@ class DummyReporter:
     progress_total = DummySignal()  # type: ignore
     current_action = DummySignal()  # type: ignore

-    def iter_progress(self, iterable, message):
-        self.current_action.emit(message)
-        self._percentage = -1
-        if len(iterable) > 1:
-            self.progress_total.emit(100)
-        else:
-            self.progress_total.emit(0)
-        for i, item in enumerate(iterable):
-            p = round(i / len(iterable) * 100)
-            if p != self._percentage:
-                self.progress_percentage.emit(p)
-                self._percentage = p
-            yield item
-        # clear both to also make indeterminate processes appear complete
-        self.progress_percentage.emit(100)
-        self.progress_total.emit(100)
-        msg = f"Finished {message}"
-        self.current_action.emit(msg)
-        # logging.success(msg)
+    def iter_progress(self, iterable, message, cond=True):
+        if cond:
+            self.current_action.emit(message)
+            self._percentage = -1
+            if len(iterable) > 1:
+                self.progress_total.emit(100)
+            else:
+                self.progress_total.emit(0)
+            for i, item in enumerate(iterable):
+                p = round(i / len(iterable) * 100)
+                if p != self._percentage:
+                    self.progress_percentage.emit(p)
+                    self._percentage = p
+                yield item
+            # clear both to also make indeterminate processes appear complete
+            self.progress_percentage.emit(100)
+            self.progress_total.emit(100)
+            msg = f"Finished {message}"
+            self.current_action.emit(msg)
+            # logging.success(msg)
+        else:
+            for item in iterable:
+                yield item

     @contextlib.contextmanager
     def report_error_files(self, operation):
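
The new cond flag lets callers keep the iter_progress wrapper in place unconditionally and decide at the call site whether progress should be reported; with a falsy cond, items are simply yielded through and no signals are emitted. Hypothetical usage, assuming DummyReporter can be instantiated as-is:

    reporter = DummyReporter()
    pools = ["pool_0", "pool_1"]
    # signals fire only when there is more than one pool to report on
    for pool in reporter.iter_progress(pools, "Rebuilding pools", cond=len(pools) > 1):
        ...  # process each pool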
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "cobra-tools"
-version = "2024.11.27"
+version = "2024.11.28"
 readme = "README.md"
 license = { file = "LICENSE" }
 requires-python = "==3.11.*"
7 changes: 5 additions & 2 deletions source/formats/base/compounds/ZStringBuffer.py
@@ -37,17 +37,20 @@ def _get_str_at(self, pos):
     def update_strings(self, list_of_strs):
         """Updates this name buffer with a list of names"""
         # logging.debug(f"Updating name buffer, data = {len(self.data)} bytes {self.data}")
-        self.strings = sorted(set(list_of_strs))
+        self.strings = []
         self.offset_dic = {}
         self.offset_2_str = {}
         with BytesIO() as stream:
-            for name in self.strings:
+            for name in list_of_strs:
                 if not isinstance(name, str):
                     logging.warning(f"Name '{name}' is not a string, skipping")
                     continue
+                if name in self.offset_dic:
+                    continue
                 # store offset and write zstring
                 self.offset_dic[name] = stream.tell()
                 self.offset_2_str[stream.tell()] = name
+                self.strings.append(name)
                 ZString.to_stream(name, stream, self.context)
         # get the actual result buffer
         buffer_bytes = stream.getvalue()
2 changes: 1 addition & 1 deletion source/formats/ms2/compounds/JointData.py
@@ -55,7 +55,7 @@ def write_fields(cls, stream, instance):
         instance.context.joint_names = instance.joint_names
         # if instance.context.version <= 32:
         #     set arg = instance.joint_names
-        strings = list(instance.get_strings())
+        strings = sorted(instance.get_strings())
         instance.joint_names.update_strings(strings)
         # at least PC+PZ store the length without the 8 byte alignment padding at the end
         # however the end of ZStringBuffer is aligned to 8 and may be padded additionally
23 changes: 14 additions & 9 deletions source/formats/ovl/__init__.py
@@ -327,7 +327,7 @@ def rebuild_pools(self):
         logging.info("Updating pool names, deleting unused pools")
         # map the pool types to pools
         pools_by_type = {}
-        for pool_index, pool in enumerate(self.ovl.reporter.iter_progress(self.pools, "Updating pools")):
+        for pool_index, pool in enumerate(self.ovl.reporter.iter_progress(self.pools, "Rebuilding pools", cond=len(self.pools) > 1)):
             if pool.offsets:
                 # store pool in pool_groups map
                 if pool.type not in pools_by_type:
@@ -1096,14 +1096,15 @@ def get_dep_hash(name):
         if UNK_HASH in name:
             logging.warning(f"Won't update hash {name}")
             return int(name.replace(f"{UNK_HASH}_", ""))
+        elif UNK_HASH.lower() in name:
+            logging.warning(f"Won't update hash {name}")
+            return int(name.replace(f"{UNK_HASH.lower()}_", ""))
         return djb2(name)

     def rebuild_ovl_arrays(self):
         """Call this if any file names have changed and hashes or indices have to be recomputed"""

         # update file hashes and extend entries per loader
-        # self.files.sort(key=lambda x: (x.ext, x.file_hash))
-        # sorted_loaders = sorted(self.loaders, key=lambda x: (x.ext, x.file_hash))
         loaders_by_extension = self.get_loaders_by_ext()
         mimes_ext = sorted(loaders_by_extension)
         mimes_triplets = [self.get_mime(ext, "triplets") for ext in mimes_ext]
@@ -1137,10 +1138,10 @@ def rebuild_ovl_arrays(self):
         deps_ext = [ext.replace(".", ":") for ext in deps_ext]
         aux_suffices = [aux_suffix for aux_suffix, loader in loaders_and_aux]
         names_list = [
-            *aux_suffices,
             *sorted(set(deps_ext)),
             *ovl_includes,
             *mimes_name,
+            *aux_suffices,
             *sorted(loader.basename for loader in self.loaders.values())]
         self.names.update_strings(names_list)
         # create the mimes
@@ -1198,7 +1199,7 @@ def rebuild_ovl_arrays(self):
         # update all pools before indexing anything that points into pools
         pools_offset = 0
         self.archives.sort(key=lambda a: a.name)
-        for archive in self.archives:
+        for archive in self.reporter.iter_progress(self.archives, "Rebuilding pools"):
             ovs = archive.content
             ovs.clear_ovs_arrays()
             ovs.rebuild_pools()
@@ -1245,7 +1246,7 @@ def rebuild_ovs_arrays(self, flat_sorted_loaders, ext_lut):
             logging.exception(f"Couldn't map loader {loader.name} to ovs {loader.ovs_name}")
             raise
         # remove all entries to rebuild them from the loaders
-        for archive in self.archives:
+        for archive in self.reporter.iter_progress(self.archives, "Updating headers"):
             ovs = archive.content
             loaders = archive_name_to_loaders[archive.name]
             archive.num_root_entries = len(loaders)
@@ -1306,7 +1307,7 @@ def resolve(pool, offset):

         pools_byte_offset = 0
         # make a temporary copy so we can delete archive if needed
-        for archive in tuple(self.archives):
+        for archive in self.reporter.iter_progress(tuple(self.archives), "Updating archives"):

             logging.debug(f"Sorting pools for {archive.name}")
             ovs = archive.content
@@ -1440,8 +1441,12 @@ def save(self, filepath, use_threads=True):
         self.rebuild_ovl_arrays()
         # these need to be done after the rest
         self.update_stream_files()
-        ovs_types = {archive.name for archive in self.archives if "Textures_L" not in archive.name}
-        ovs_types.discard("STATIC")
+        ovs_types = set()
+        for loader in self.loaders.values():
+            if loader.ovs.arg.name != "STATIC" and loader.ext not in (".tex", ".texturestream"):
+                ovs_types.add(loader.ovs.arg.name)
+        # ovs_types = {archive.name for archive in self.archives if "Textures_L" not in archive.name}
+        # ovs_types.discard("STATIC")
         self.num_ovs_types = len(ovs_types)
         ovl_compressed = b""
         self.reset_field("archives_meta")
