From 373c46512839317ff111644dc023cb3037a84af0 Mon Sep 17 00:00:00 2001
From: Brandon Victor
Date: Thu, 13 Feb 2020 00:53:20 +1100
Subject: [PATCH] Allow the user to provide a folder for 'target', creating a
 separate file for each of the dat files. Additionally, don't crash if a dat
 file cannot be read.

---
 PyPoE/cli/exporter/dat/handler.py      |  10 +++++++---
 PyPoE/cli/exporter/dat/parsers/json.py | 121 ++++++++++++++-----------
 2 files changed, 73 insertions(+), 58 deletions(-)

diff --git a/PyPoE/cli/exporter/dat/handler.py b/PyPoE/cli/exporter/dat/handler.py
index ae05afec..179635b1 100644
--- a/PyPoE/cli/exporter/dat/handler.py
+++ b/PyPoE/cli/exporter/dat/handler.py
@@ -127,10 +127,14 @@ def _read_dat_files(self, args, prefix=''):
                 remove.append(name)
                 continue
 
-            df = dat.DatFile(name)
-            df.read(file_path_or_raw=node.record.extract(), use_dat_value=False)
+            try:
+                df = dat.DatFile(name)
+                df.read(file_path_or_raw=node.record.extract(), use_dat_value=False)
 
-            dat_files[name] = df
+                dat_files[name] = df
+            except Exception:
+                print('Error occurred for %s' % name)
+                remove.append(name)
 
         for file_name in remove:
             args.files.remove(file_name)
diff --git a/PyPoE/cli/exporter/dat/parsers/json.py b/PyPoE/cli/exporter/dat/parsers/json.py
index f39817eb..d8e0a23f 100644
--- a/PyPoE/cli/exporter/dat/parsers/json.py
+++ b/PyPoE/cli/exporter/dat/parsers/json.py
@@ -31,6 +31,7 @@
 
 # Python
 import argparse
+from pathlib import Path
 from json import dump
 
 # self
@@ -101,67 +102,77 @@ def handle(self, args):
 
         dict_spec = args.spec.as_dict()
 
-        with open(
-            args.target,
-            mode='w',
-            encoding='ascii' if args.ascii else 'utf-8'
-        ) as f:
-            dat_files = self._read_dat_files(args)
-
-            console('Building data object...')
-            out = []
-
-            for file_name in args.files:
-                dat_file = dat_files[file_name]
-
-                header = [
-                    dict({ 'name': name, 'rowid': index }, **props)
-                    for index, (name, props)
-                    in enumerate(dict_spec[file_name]['fields'].items())
-                ]
-
-                virtual_header = [
-                    dict({ 'name': name, 'rowid': index }, **props)
-                    for index, (name, props)
-                    in enumerate(dict_spec[file_name]['virtual_fields'].items())
-                ]
-
-                if args.use_object_format:
-                    out_obj = {
-                        'filename': file_name,
-                        'header': {row['name']: row for row in header},
-                        'data': [{
-                            cid: row[i] for i, cid in enumerate(
-                                dat_file.reader.columns_data
-                            )
-                        } for row in dat_file.reader.table_data
-                        ],
-                    }
-
-                    virtual_header = (
-                        {row['name']: row for row in virtual_header}
-                    )
-                else:
-                    out_obj = {
-                        'filename': file_name,
-                        'header': header,
-                        'data': dat_file.reader.table_data,
-                    }
-
-                if args.include_virtual_fields:
-                    out_obj['virtual_header'] = virtual_header
-
-                if args.include_record_length:
-                    out_obj['record_length'] = dat_files[file_name].reader.table_record_length
-
+        dat_files = self._read_dat_files(args)
+
+        console('Building data object...')
+        out = []
+        out_path = Path(args.target)
+
+        for file_name in args.files:
+            dat_file = dat_files[file_name]
+
+            header = [
+                dict({ 'name': name, 'rowid': index }, **props)
+                for index, (name, props)
+                in enumerate(dict_spec[file_name]['fields'].items())
+            ]
+
+            virtual_header = [
+                dict({ 'name': name, 'rowid': index }, **props)
+                for index, (name, props)
+                in enumerate(dict_spec[file_name]['virtual_fields'].items())
+            ]
+
+            if args.use_object_format:
+                out_obj = {
+                    'filename': file_name,
+                    'header': {row['name']: row for row in header},
+                    'data': [{
+                        cid: row[i] for i, cid in enumerate(
+                            dat_file.reader.columns_data
+                        )
+                    } for row in dat_file.reader.table_data
+                    ],
+                }
+
+                virtual_header = (
+                    {row['name']: row for row in virtual_header}
+                )
+            else:
+                out_obj = {
+                    'filename': file_name,
+                    'header': header,
+                    'data': dat_file.reader.table_data,
+                }
+
+            if args.include_virtual_fields:
+                out_obj['virtual_header'] = virtual_header
+
+            if args.include_record_length:
+                out_obj['record_length'] = dat_files[file_name].reader.table_record_length
+
+            if out_path.is_dir():
+                file_path = (out_path / file_name).with_suffix('.json')
+                console('Dumping data to "%s"...' % file_path)
+                with file_path.open(
+                    mode='w',
+                    encoding='ascii' if args.ascii else 'utf-8'
+                ) as f:
+                    dump(out_obj, f, ensure_ascii=args.ascii, indent=4)
+            else:
                 out.append(out_obj)
 
+        if not out_path.is_dir():
             console('Dumping data to "%s"...' % args.target)
-
-            dump(out, f, ensure_ascii=args.ascii, indent=4)
+            with out_path.open(
+                mode='w',
+                encoding='ascii' if args.ascii else 'utf-8'
+            ) as f:
+                dump(out, f, ensure_ascii=args.ascii, indent=4)
 
         console('Done.')
 
+
 # =============================================================================
 # Functions
 # =============================================================================
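
Note on the new behaviour: when 'target' is an existing directory, each dat
file is dumped to its own <name>.json inside it; otherwise 'target' is treated
as a single output file holding a list of all objects, as before. A minimal
standalone sketch of that dispatch (the names `export` and `records` are
hypothetical stand-ins, not PyPoE API; `records` maps dat file names to
JSON-serialisable objects):

    import json
    from pathlib import Path

    def export(target, records, use_ascii=False):
        out_path = Path(target)
        encoding = 'ascii' if use_ascii else 'utf-8'
        if out_path.is_dir():
            # Directory target: one <name>.json per dat file,
            # e.g. a hypothetical 'Mods.dat' -> <target>/Mods.json.
            for name, obj in records.items():
                file_path = (out_path / name).with_suffix('.json')
                with file_path.open(mode='w', encoding=encoding) as f:
                    json.dump(obj, f, ensure_ascii=use_ascii, indent=4)
        else:
            # File target (existing or not yet created): one aggregate
            # JSON list, matching the exporter's previous behaviour.
            with out_path.open(mode='w', encoding=encoding) as f:
                json.dump(list(records.values()), f,
                          ensure_ascii=use_ascii, indent=4)

The patch performs the directory check once per loop iteration rather than up
front, but since the target cannot change mid-export the effect is the same as
this single dispatch.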