This repository has been archived by the owner on Apr 21, 2023. It is now read-only.

Reuse pools, fix pack merging, Cemu GFX fixes
NiceneNerd committed Feb 9, 2020
1 parent aac48e8 commit 561d7a8
Showing 11 changed files with 120 additions and 59 deletions.
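
The recurring change across these files is a pool-reuse pattern: functions that used to create and destroy their own multiprocessing pool now accept an optional `original_pool` argument, fall back to a private pool only when none is given, and skip `close()`/`join()` on a pool they did not create. A minimal sketch of the pattern follows; the `process_items`/`work` names are illustrative and not part of the repository:

from multiprocessing import Pool, cpu_count


def work(item):
    # Placeholder worker; stands in for the real per-item task.
    return item


def process_items(items, original_pool: Pool = None):
    # Reuse the caller's pool when one is supplied; otherwise make our own.
    pool = original_pool or Pool(processes=cpu_count())
    results = pool.map(work, items)
    # Only tear down a pool we created; a shared pool stays open
    # so the caller can keep dispatching work to it.
    if not original_pool:
        pool.close()
        pool.join()
    return results

This lets a top-level operation such as a full mod install spin up one pool and share it across every step instead of paying process start-up costs repeatedly.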
5 changes: 3 additions & 2 deletions bcml/__init__.py
@@ -441,9 +441,10 @@ def full_rstb():
 
 
 def MergePacks_Clicked(self):
+    merger = pack.PackMerger()
     self.PerformOperation(
-        pack.merge_installed_packs,
-        (False, None, False, True),
+        pack.PackMerger.perform_merge,
+        (merger),
         title='Remerging Packs'
     )
 
2 changes: 1 addition & 1 deletion bcml/__version__.py
@@ -1 +1 @@
-VERSION = "2.7.4"
+VERSION = "2.7.5"
25 changes: 14 additions & 11 deletions bcml/data.py
@@ -152,24 +152,25 @@ def _bgdata_from_bytes(file: str, game_dict: dict) -> {}:
     return byml.Byml(game_dict[file]).parse()
 
 
-def consolidate_gamedata(gamedata: sarc.SARC) -> {}:
+def consolidate_gamedata(gamedata: sarc.SARC, original_pool: Pool = None) -> {}:
     """
     Consolidates all game data in a game data SARC
     :return: Returns a dict of all game data entries in a SARC
     :rtype: dict of str: list
     """
     data = {}
-    pool = Pool(processes=cpu_count())
+    pool = original_pool or Pool(processes=cpu_count())
     game_dict = {}
     for file in gamedata.list_files():
         game_dict[file] = gamedata.get_file_data(file).tobytes()
     results = pool.map(
         partial(_bgdata_from_bytes, game_dict=game_dict),
         gamedata.list_files()
     )
-    pool.close()
-    pool.join()
+    if not original_pool:
+        pool.close()
+        pool.join()
     del game_dict
     del gamedata
     for result in results:
@@ -190,7 +191,7 @@ def diff_gamedata_type(data_type: str, mod_data: dict, stock_data: dict) -> {}:
     return {data_type: diffs}
 
 
-def get_modded_gamedata_entries(gamedata: sarc.SARC) -> {}:
+def get_modded_gamedata_entries(gamedata: sarc.SARC, original_pool: Pool = None) -> {}:
     """
     Gets all of the modified gamedata entries in a dict of modded gamedata contents.
@@ -199,20 +200,21 @@ def get_modded_gamedata_entries(gamedata: sarc.SARC) -> {}:
     :returns: Returns a dictionary with each data type and the modified entries for it.
     :rtype: dict of str: dict of str: dict
     """
-    stock_data = consolidate_gamedata(get_stock_gamedata())
-    mod_data = consolidate_gamedata(gamedata)
+    stock_data = consolidate_gamedata(get_stock_gamedata(), original_pool)
+    mod_data = consolidate_gamedata(gamedata, original_pool)
     diffs = {}
-    pool = Pool(cpu_count())
+    pool = original_pool or Pool(cpu_count())
     results = pool.map(
         partial(diff_gamedata_type, mod_data=mod_data, stock_data=stock_data),
         list(mod_data.keys())
     )
-    pool.close()
-    pool.join()
     for result in results:
         _, entries = list(result.items())[0]
         if entries:
             diffs.update(result)
+    if not original_pool:
+        pool.close()
+        pool.join()
     return diffs


@@ -546,7 +548,8 @@ def generate_diff(self, mod_dir: Path, modded_files: List[Union[Path, str]]):
                     util.decompress(
                        bootup_sarc.get_file_data('GameData/gamedata.ssarc').tobytes()
                    )
-                )
+                ),
+                original_pool=self._pool
             )
         else:
             return {}
47 changes: 34 additions & 13 deletions bcml/install.py
@@ -75,7 +75,7 @@ def threaded_aamp_diffs(file_info: tuple, tmp_dir: Path):
     return (file_info[0], None)
 
 
-def find_modded_files(tmp_dir: Path, verbose: bool = False) -> List[Union[Path, str]]:
+def find_modded_files(tmp_dir: Path, verbose: bool = False, original_pool: Pool = None) -> List[Union[Path, str]]:
     """
     Detects all of the modified files in an extracted mod
@@ -137,16 +137,17 @@ def find_modded_files(tmp_dir: Path, verbose: bool = False,
     if sarc_files:
         print(f'Scanning files packed in SARCs...')
         num_threads = min(len(sarc_files), cpu_count() - 1)
-        pool = Pool(processes=num_threads)
+        pool = original_pool or Pool(processes=num_threads)
         modded_sarc_files = pool.map(
             partial(find_modded_sarc_files, tmp_dir=tmp_dir, verbose=verbose),
             sarc_files
         )
-        pool.close()
-        pool.join()
         for files in modded_sarc_files:
             total += len(files)
             modded_files.extend(files)
+        if not original_pool:
+            pool.close()
+            pool.join()
     print(f'Found {total} modified packed file{"s" if total > 1 else ""}')
     return modded_files

@@ -213,7 +214,7 @@ def find_modded_sarc_files(mod_sarc: Union[Path, sarc.SARC], tmp_dir: Path, name
     return modded_files
 
 
-def generate_logs(tmp_dir: Path, verbose: bool = False, options: dict = None) -> List[Path]:
+def generate_logs(tmp_dir: Path, verbose: bool = False, options: dict = None, original_pool: Pool = None) -> List[Path]:
     """Analyzes a mod and generates BCML log files containing its changes"""
     if isinstance(tmp_dir, str):
         tmp_dir = Path(tmp_dir)
@@ -225,19 +226,23 @@ def generate_logs(tmp_dir: Path, verbose: bool = False, options: dict = None) ->
     if 'disable' not in options:
         options['disable'] = []
 
+    pool = original_pool or Pool(cpu_count())
     print('Scanning for modified files...')
-    modded_files = find_modded_files(tmp_dir, verbose=verbose)
+    modded_files = find_modded_files(tmp_dir, verbose=verbose, original_pool=original_pool)
     if not modded_files:
         raise RuntimeError('No modified files were found. Very unusual.')
 
     (tmp_dir / 'logs').mkdir(parents=True, exist_ok=True)
     for merger_class in [merger_class for merger_class in mergers.get_mergers() \
                          if merger_class.NAME not in options['disable']]:
         merger = merger_class()
+        merger.set_pool(pool)
         if options is not None and merger.NAME in options:
             merger.set_options(options[merger.NAME])
         merger.log_diff(tmp_dir, modded_files)
-
+    if not original_pool:
+        pool.close()
+        pool.join()
     return modded_files


@@ -269,8 +274,11 @@ def refresh_cemu_mods():
            if 'BCML' in entry.getAttribute('filename'):
                gpack.removeChild(entry)
        else:
-            if 'BCML' in entry.getElementsByTagName('filename')[0].childNodes[0].data:
-                gpack.removeChild(entry)
+            try:
+                if 'BCML' in entry.getElementsByTagName('filename')[0].childNodes[0].data:
+                    gpack.removeChild(entry)
+            except IndexError:
+                pass
     bcmlentry = create_settings_mod_node(settings, new_cemu_version)
     # Issue #33 - end BCML node
     gpack.appendChild(bcmlentry)
@@ -294,6 +302,7 @@ def create_settings_mod_node(settings, new_cemu: bool, mod=None) -> minidom.Elem
     else:
         entryfile = settings.createElement('filename')
         entryfile.appendChild(settings.createTextNode(modpath))
+    modentry.appendChild(entryfile)
     entrypresethead = settings.createElement('Preset')
     entrypreset = settings.createElement('preset')
     entrypreset.appendChild(settings.createTextNode(''))
@@ -338,6 +347,7 @@ def install_mod(mod: Path, verbose: bool = False, options: dict = None, wait_mer
         print(f'Error: {str(mod)} is neither a valid file nor a directory')
         return
 
+    pool: Pool
     try:
         rules = util.RulesParser()
         rules.read(tmp_dir / 'rules.txt')
@@ -352,7 +362,8 @@ def install_mod(mod: Path, verbose: bool = False, options: dict = None, wait_mer
                if merger.is_mod_logged(BcmlMod('', 0, tmp_dir)):
                    (tmp_dir / 'logs' / merger.log_name()).unlink()
        else:
-            generate_logs(tmp_dir=tmp_dir, verbose=verbose, options=options)
+            pool = Pool(cpu_count())
+            generate_logs(tmp_dir=tmp_dir, verbose=verbose, options=options, original_pool=pool)
     except Exception as e: # pylint: disable=broad-except
         if hasattr(e, 'error_text'):
             raise e
@@ -447,6 +458,7 @@ def install_mod(mod: Path, verbose: bool = False, options: dict = None, wait_mer
         options['disable'] = []
     for merger in mergers.sort_mergers([cls() for cls in mergers.get_mergers() \
                                         if cls.NAME not in options['disable']]):
+        merger.set_pool(pool)
         if merger.NAME in options:
             merger.set_options(options[merger.NAME])
         if merger.is_mod_logged(output_mod):
@@ -467,6 +479,8 @@ def install_mod(mod: Path, verbose: bool = False, options: dict = None, wait_mer
            except FileNotFoundError:
                pass
            raise clean_error
+    pool.close()
+    pool.join()
     return output_mod
 
 
@@ -543,10 +557,14 @@ def uninstall_mod(mod: Union[Path, BcmlMod, str], wait_merge: bool = False, verb
     print()
 
     if not wait_merge:
+        pool = Pool(cpu_count())
         for merger in mergers.sort_mergers(remergers):
+            merger.set_pool(pool)
             if merger.NAME in partials:
                 merger.set_options({'only_these': partials[merger.NAME]})
             merger.perform_merge()
+        pool.close()
+        pool.join()
     print(f'{mod_name} has been uninstalled.')


@@ -632,8 +650,12 @@ def refresh_merges(verbose: bool = False):
         print('Cleansing old merges...')
         shutil.rmtree(util.get_master_modpack_dir())
     print('Refreshing merged mods...')
+    pool = Pool(cpu_count())
     for merger in mergers.sort_mergers([merger_class() for merger_class in mergers.get_mergers()]):
+        merger.set_pool(pool)
         merger.perform_merge()
+    pool.close()
+    pool.join()
 
 
 def _clean_sarc(file: Path, hashes: dict, tmp_dir: Path):
@@ -725,7 +747,8 @@ def create_bnp_mod(mod: Path, output: Path, options: dict = None):
     if not options:
         options = {}
     options['texts'] = {'user_only': False}
-    logged_files = generate_logs(tmp_dir, options=options)
+    pool = Pool(cpu_count())
+    logged_files = generate_logs(tmp_dir, options=options, original_pool=pool)
 
     print('Removing unnecessary files...')
     if (tmp_dir / 'logs' / 'map.yml').exists():
@@ -758,8 +781,6 @@ def create_bnp_mod(mod: Path, output: Path, options: dict = None):
     print('Creating partial packs...')
     sarc_files = {file for file in tmp_dir.rglob('**/*') if file.suffix in util.SARC_EXTS}
     if sarc_files:
-        num_threads = min(len(sarc_files), cpu_count())
-        pool = Pool(processes=num_threads)
         pool.map(partial(_clean_sarc, hashes=hashes, tmp_dir=tmp_dir), sarc_files)
         pool.close()
         pool.join()
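
The try/except added to refresh_cemu_mods above guards against graphic pack entries whose <Entry> node presumably lacks a <filename> child, where indexing getElementsByTagName('filename')[0] raises IndexError. A small illustration of that failure mode with minidom; the XML snippet is invented for demonstration and not taken from Cemu's actual settings file:

from xml.dom import minidom

# One entry carries a <filename> child, the other does not.
doc = minidom.parseString(
    '<GraphicPack>'
    '<Entry><filename>graphicPacks/BreathOfTheWild_BCML/rules.txt</filename></Entry>'
    '<Entry/>'
    '</GraphicPack>'
)

for entry in doc.getElementsByTagName('Entry'):
    try:
        name = entry.getElementsByTagName('filename')[0].childNodes[0].data
    except IndexError:
        continue  # no <filename> child: skip the entry instead of crashing
    print('BCML' in name, name)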
18 changes: 12 additions & 6 deletions bcml/merge.py
@@ -282,7 +282,8 @@ def threaded_merge(item, verbose: bool) -> (str, dict):
     return util.get_canon_name(file), failures
 
 
-def deep_merge(verbose: bool = False, wait_rstb: bool = False, only_these: List[str] = None):
+def deep_merge(verbose: bool = False, wait_rstb: bool = False, only_these: List[str] = None,
+               original_pool: multiprocessing.Pool = None):
     """Performs deep merge on all installed AAMP files"""
     mods = get_deepmerge_mods()
     if not mods:
@@ -305,10 +306,11 @@ def deep_merge(verbose: bool = False, wait_rstb: bool = False, only_these: List[
     if not diffs:
         return
     num_threads = min(multiprocessing.cpu_count(), len(diffs))
-    pool = multiprocessing.Pool(processes=num_threads)
+    pool = original_pool or multiprocessing.Pool(processes=num_threads)
     pool.map(partial(threaded_merge, verbose=verbose), diffs.items())
-    pool.close()
-    pool.join()
+    if not original_pool:
+        pool.close()
+        pool.join()
 
     if not wait_rstb:
         bcml.rstable.generate_master_rstb()
@@ -381,9 +383,13 @@ def consolidate_diffs(self, diffs: list):
 
     def perform_merge(self):
         if 'only_these' in self._options:
-            deep_merge(wait_rstb=True, only_these=self._options['only_these'])
+            deep_merge(
+                wait_rstb=True,
+                only_these=self._options['only_these'],
+                original_pool=self._pool
+            )
         else:
-            deep_merge(wait_rstb=True)
+            deep_merge(wait_rstb=True, original_pool=self._pool)
 
     def get_checkbox_options(self):
         return []
8 changes: 7 additions & 1 deletion bcml/mergers.py
@@ -1,5 +1,6 @@
 """ Provides abstracted merging objects """
 from abc import ABCMeta
+from multiprocessing import Pool
 from pathlib import Path
 from typing import List, Union
 from bcml import util
@@ -14,16 +15,18 @@ class Merger(metaclass=ABCMeta):
     _description: str
     _log_name: str
     _options: dict
+    _pool: Pool
 
     def __init__(self, friendly_name: str, description: str, log_name: str,
-                 options: dict = None):
+                 options: dict = None, pool: Pool = None):
         self._friendly_name = friendly_name
         self._description = description
         self._log_name = log_name
         if options:
             self._options = options
         else:
             self._options = {}
+        self._pool = pool
 
     def friendly_name(self) -> str:
         """ The name of this merger in the UI """
@@ -41,6 +44,9 @@ def set_options(self, options: dict):
         """ Sets custom options for this merger """
         self._options = options
 
+    def set_pool(self, pool: Pool):
+        self._pool = pool
+
     def generate_diff(self, mod_dir: Path, modded_files: List[Union[str, Path]]):
         """ Detects changes made to a modded file or files from the base game """
         raise NotImplementedError
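
With `_pool` and `set_pool()` on the Merger base class, a caller can build a single pool, hand it to every merger, and tear it down once, which is what install.py and refresh_merges() now do. A rough usage sketch along those lines, assuming the bcml package is importable; the try/finally wrapper is added here for safety and is not how the repository code is written:

from multiprocessing import Pool, cpu_count

from bcml import mergers

pool = Pool(cpu_count())
try:
    # Every merger shares one worker pool instead of spawning its own.
    for merger in mergers.sort_mergers([cls() for cls in mergers.get_mergers()]):
        merger.set_pool(pool)
        merger.perform_merge()
finally:
    pool.close()
    pool.join()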
16 changes: 11 additions & 5 deletions bcml/mubin.py
@@ -345,7 +345,8 @@ def merge_map(map_pair: tuple, rstb_calc: rstb.SizeCalculator, no_del: bool = Fa
     }
 
 
-def merge_maps(no_del: bool = False, link_del: bool = False, verbose: bool = False):
+def merge_maps(no_del: bool = False, link_del: bool = False, verbose: bool = False,
+               original_pool: Pool = None):
     """Merges all installed modifications to mainfield maps"""
     aoc_pack = util.get_master_modpack_dir() / 'aoc' / '0010' / \
                'Pack' / 'AocMainField.pack'
@@ -370,14 +371,15 @@ def merge_maps(no_del: bool = False, link_del: bool = False, verbose: bool = Fal
     rstb_calc = rstb.SizeCalculator()
     print('Merging modded map units...')
     num_threads = min(cpu_count() - 1, len(map_diffs))
-    pool = Pool(processes=num_threads)
+    pool = original_pool or Pool(processes=num_threads)
     rstb_results = pool.map(partial(merge_map, rstb_calc=rstb_calc, no_del=no_del,
                                     link_del=link_del, verbose=verbose), list(map_diffs.items()))
-    pool.close()
-    pool.join()
     for result in rstb_results:
         rstb_vals[result['aoc'][0]] = result['aoc'][1]
         rstb_vals[result['main'][0]] = result['main'][1]
+    if not original_pool:
+        pool.close()
+        pool.join()
 
     print('Adjusting RSTB...')
     with log_path.open('w', encoding='utf-8') as l_file:
@@ -509,7 +511,11 @@ def consolidate_diffs(self, diffs: list):
         return get_all_map_diffs()
 
     def perform_merge(self):
-        merge_maps(no_del=self._options['no_del'], link_del=self._options['link_del'])
+        merge_maps(
+            no_del=self._options['no_del'],
+            link_del=self._options['link_del'],
+            original_pool=self._pool
+        )
 
     def get_checkbox_options(self):
         return [