From 7009d4286cfbbc9be5e64f30c5a7efa87da7a2a5 Mon Sep 17 00:00:00 2001 From: AmericaMusium <114616689+AmericaMusium@users.noreply.github.com> Date: Wed, 25 Sep 2024 08:34:58 +0300 Subject: [PATCH 1/6] Auto importing images to nodes This is Blender Source Tools 3.3.1 with fixed importing textures for materials. Standart-Stock version: When we importing qc\smd this plugin creates materials with same names as texture images. Without importing image-files. My fix: Automatically importing images in nodes and conecting to material in Node-mode, Roughness 1.0. Tested only on Blender 4.1 with GoldSrc models. custom function is in "Import_smd.py" --- README.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 README.md diff --git a/README.md b/README.md new file mode 100644 index 0000000..ee60296 --- /dev/null +++ b/README.md @@ -0,0 +1,5 @@ +This is Blender Source Tools 3.3.1 with fixed importing textures for materials. +Standart-Stock version: When we importing qc\smd this plugin creates materials with same names as texture images. Without importing image-files. +My fix: Automatically importing images in nodes and conecting to material in Node-mode, Roughness 1.0. +Tested only on Blender 4.1 with GoldSrc models. +custom function is in "Import_smd.py" From 5ee327a3dc6b3ac11032e17a2d7e484530d6f956 Mon Sep 17 00:00:00 2001 From: AmericaMusium <114616689+AmericaMusium@users.noreply.github.com> Date: Wed, 25 Sep 2024 08:40:49 +0300 Subject: [PATCH 2/6] Update import_smd.py This is Blender Source Tools 3.3.1 with fixed importing textures for materials. Standart-Stock version: When we importing qc\smd this plugin creates materials with same names as texture images. Without importing image-files. My fix: Automatically importing images in nodes and conecting to material in Node-mode, Roughness 1.0. Tested only on Blender 4.1 with GoldSrc models. 
custom function is in "Import_smd.py" --- io_scene_valvesource/import_smd.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/io_scene_valvesource/import_smd.py b/io_scene_valvesource/import_smd.py index 2984706..1c55452 100644 --- a/io_scene_valvesource/import_smd.py +++ b/io_scene_valvesource/import_smd.py @@ -627,6 +627,19 @@ def getMeshMaterial(self,mat_name): else: # material does not exist print("- New material: {}".format(mat_name)) mat = bpy.data.materials.new(mat_name) + #new fix importing textures + mat.use_nodes = True + base_node = mat.node_tree.nodes["Principled BSDF"] + if base_node: + base_node.inputs["Roughness"].default_value = 1.0 + texture_node = mat.node_tree.nodes.new("ShaderNodeTexImage") + texture_path = os.path.join(os.path.dirname(self.filepath), mat_name) + try: + texture_node.image = bpy.data.images.load(texture_path) + except RuntimeError: + print(f"Текстура не найдена: {texture_path}") + + mat.node_tree.links.new(texture_node.outputs["Color"], base_node.inputs["Base Color"]) md.materials.append(mat) # Give it a random colour randCol = [] From 99052c6ec75936025d03d39fe18099247a21caa5 Mon Sep 17 00:00:00 2001 From: AmericaMusium <114616689+AmericaMusium@users.noreply.github.com> Date: Thu, 26 Sep 2024 21:15:44 +0300 Subject: [PATCH 3/6] Update README.md --- README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index ee60296..e51ea5a 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ -This is Blender Source Tools 3.3.1 with fixed importing textures for materials. -Standart-Stock version: When we importing qc\smd this plugin creates materials with same names as texture images. Without importing image-files. -My fix: Automatically importing images in nodes and conecting to material in Node-mode, Roughness 1.0. -Tested only on Blender 4.1 with GoldSrc models. 
-custom function is in "Import_smd.py" +The Blender Source Tools add Source engine support to Blender, the free 3D modelling suite. + +The Blender Source Tools is an add-on for Blender that simplifies working with assets used in games built on the Source engine, such as Half-Life 2, Counter-Strike: Source, and Team Fortress 2. This tool streamlines the process of importing and exporting models, animations, textures, and other data between Blender and Source engine projects. + +The Blender Source Tools — это аддон для Blender, который позволяет работать с файлами и ресурсами, используемыми в играх на движке Source (например, Half-Life 2, Counter-Strike: Source, Team Fortress 2 и другие). Этот инструмент значительно упрощает процесс импорта и экспорта моделей, анимаций, текстур и других данных между Blender и игровыми проектами на движке Source. From cbc583b1b12a4eb8d36d5330fe94eb121454345b Mon Sep 17 00:00:00 2001 From: AmericaMusium <114616689+AmericaMusium@users.noreply.github.com> Date: Fri, 3 Jan 2025 14:17:49 +0300 Subject: [PATCH 4/6] Import\export GUI button and GoldSrc orinted 1. When importing qc files , automatically will set export dir and SMD , GoldSrc buttons. 2. 
Add new Buttons for fast import and export on N-panel --- io_scene_valvesource/GUI.py | 1310 ++++----- io_scene_valvesource/__init__.py | 568 ++-- io_scene_valvesource/datamodel.py | 2114 +++++++------- io_scene_valvesource/export_smd.py | 4164 +++++++++++++-------------- io_scene_valvesource/flex.py | 464 +-- io_scene_valvesource/import_smd.py | 3490 +++++++++++----------- io_scene_valvesource/ordered_set.py | 976 +++---- io_scene_valvesource/update.py | 186 +- io_scene_valvesource/utils.py | 1414 ++++----- 9 files changed, 7356 insertions(+), 7330 deletions(-) diff --git a/io_scene_valvesource/GUI.py b/io_scene_valvesource/GUI.py index 26263b0..999577d 100644 --- a/io_scene_valvesource/GUI.py +++ b/io_scene_valvesource/GUI.py @@ -1,655 +1,655 @@ -# Copyright (c) 2014 Tom Edwards contact@steamreview.org -# -# ##### BEGIN GPL LICENSE BLOCK ##### -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
-# -# ##### END GPL LICENSE BLOCK ##### - -import bpy -from .utils import * -from .export_smd import SmdExporter, SMD_OT_Compile -from .update import SmdToolsUpdate # comment this line if you make third-party changes -from .flex import * - -vca_icon = 'EDITMODE_HLT' - -class SMD_MT_ExportChoice(bpy.types.Menu): - bl_label = get_id("exportmenu_title") - - def draw(self, context): - l = self.layout - l.operator_context = 'EXEC_DEFAULT' - - exportables = list(getSelectedExportables()) - if len(exportables): - single_obs = list([ex for ex in exportables if ex.ob_type != 'COLLECTION']) - groups = list([ex for ex in exportables if ex.ob_type == 'COLLECTION']) - groups.sort(key=lambda g: g.name.lower()) - - group_layout = l - for i,group in enumerate(groups): # always display all possible groups, as an object could be part of several - if type(self) == SMD_PT_Scene: - if i == 0: group_col = l.column(align=True) - if i % 2 == 0: group_layout = group_col.row(align=True) - group_layout.operator(SmdExporter.bl_idname, text=group.name, icon='GROUP').collection = group.item.name - - if len(exportables) - len(groups) > 1: - l.operator(SmdExporter.bl_idname, text=get_id("exportmenu_selected", True).format(len(exportables)), icon='OBJECT_DATA') - elif len(single_obs): - l.operator(SmdExporter.bl_idname, text=single_obs[0].name, icon=single_obs[0].icon) - elif len(bpy.context.selected_objects): - row = l.row() - row.operator(SmdExporter.bl_idname, text=get_id("exportmenu_invalid"),icon='BLANK1') - row.enabled = False - - row = l.row() - num_scene_exports = count_exports(context) - row.operator(SmdExporter.bl_idname, text=get_id("exportmenu_scene", True).format(num_scene_exports), icon='SCENE_DATA').export_scene = True - row.enabled = num_scene_exports > 0 - -class SMD_PT_Scene(bpy.types.Panel): - bl_label = get_id("exportpanel_title") - bl_space_type = "PROPERTIES" - bl_region_type = "WINDOW" - bl_context = "scene" - bl_options = {'DEFAULT_CLOSED'} - - def draw(self, context): - l 
= self.layout - scene = context.scene - num_to_export = 0 - - l.operator(SmdExporter.bl_idname,text="Export") - - row = l.row() - row.alert = len(scene.vs.export_path) == 0 - row.prop(scene.vs,"export_path") - - if State.datamodelEncoding != 0: - row = l.row().split(factor=0.33) - row.label(text=get_id("export_format") + ":") - row.row().prop(scene.vs,"export_format",expand=True) - row = l.row().split(factor=0.33) - row.label(text=get_id("up_axis") + ":") - row.row().prop(scene.vs,"up_axis", expand=True) - - if State.exportFormat == ExportFormat.DMX and bpy.app.debug_value > 0 or scene.vs.use_kv2: - l.prop(scene.vs,"use_kv2") - l.separator() - - row = l.row() - row.alert = len(scene.vs.engine_path) > 0 and State.compiler == Compiler.UNKNOWN - row.prop(scene.vs,"engine_path") - - if scene.vs.export_format == 'DMX': - if State.engineBranch is None: - row = l.split(factor=0.33) - row.label(text=get_id("exportpanel_dmxver")) - row = row.row(align=True) - row.prop(scene.vs,"dmx_encoding",text="") - row.prop(scene.vs,"dmx_format",text="") - row.enabled = not row.alert - if State.exportFormat == ExportFormat.DMX: - col = l.column() - col.prop(scene.vs,"material_path") - col.prop(scene.vs,"dmx_weightlink_threshold",slider=True) - else: - row = l.split(factor=0.33) - row.label(text=get_id("smd_format") + ":") - row.row().prop(scene.vs,"smd_format", expand=True) - - col = l.column(align=True) - row = col.row(align=True) - self.HelpButton(row) - row.operator("wm.url_open",text=get_id("exportpanel_steam",True),icon='URL').url = "http://steamcommunity.com/groups/BlenderSourceTools" - if "SmdToolsUpdate" in globals(): - col.operator(SmdToolsUpdate.bl_idname,text=get_id("exportpanel_update",True),icon='URL') - - @staticmethod - def HelpButton(layout): - layout.operator("wm.url_open",text=get_id("help",True),icon='HELP').url = "http://developer.valvesoftware.com/wiki/Blender_Source_Tools_Help#Exporting" - -class SMD_MT_ConfigureScene(bpy.types.Menu): - bl_label = 
get_id("exporter_report_menu") - def draw(self, context): - self.layout.label(text=get_id("exporter_err_unconfigured")) - SMD_PT_Scene.HelpButton(self.layout) - -class SMD_UL_ExportItems(bpy.types.UIList): - def draw_item(self, context, layout, data, exportable, icon, active_data, active_propname, index): - item = exportable.item - enabled = not (type(item) == bpy.types.Collection and item.vs.mute) - - row = layout.row(align=True) - row.alignment = 'LEFT' - row.enabled = enabled - - row.prop(item.vs,"export",icon='CHECKBOX_HLT' if item.vs.export and enabled else 'CHECKBOX_DEHLT',text="",emboss=False) - row.label(text=exportable.name,icon=exportable.icon) - - if not enabled: return - - row = layout.row(align=True) - row.alignment='RIGHT' - - num_shapes, num_correctives = countShapes(item) - num_shapes += num_correctives - if num_shapes > 0: - row.label(text=str(num_shapes),icon='SHAPEKEY_DATA') - - num_vca = len(item.vs.vertex_animations) - if num_vca > 0: - row.label(text=str(num_vca),icon=vca_icon) - -class FilterCache: - def __init__(self): - self.state_objects = State.exportableObjects - - fname = None - filter = None - order = None -gui_cache = {} - -class SMD_UL_GroupItems(bpy.types.UIList): - def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index): - r = layout.row(align=True) - r.prop(item.vs,"export",text="",icon='CHECKBOX_HLT' if item.vs.export else 'CHECKBOX_DEHLT',emboss=False) - r.label(text=item.name,translate=False,icon=MakeObjectIcon(item,suffix="_DATA")) - - def filter_items(self, context, data, propname): - fname = self.filter_name.lower() - cache = gui_cache.get(data) - - if not (cache and cache.fname == fname and cache.state_objects is State.exportableObjects): - cache = FilterCache() - cache.filter = [self.bitflag_filter_item if ob.session_uid in State.exportableObjects and (not fname or fname in ob.name.lower()) else 0 for ob in data.objects] - cache.order = 
bpy.types.UI_UL_list.sort_items_by_name(data.objects) - cache.fname = fname - gui_cache[data] = cache - - return cache.filter, cache.order if self.use_filter_sort_alpha else [] - -class SMD_UL_VertexAnimationItem(bpy.types.UIList): - def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index): - r = layout.row() - r.alignment='LEFT' - r.prop(item,"name",text="",emboss=False) - r = layout.row(align=True) - r.alignment='RIGHT' - r.operator(SMD_OT_PreviewVertexAnimation.bl_idname,text="",icon='PAUSE' if context.screen.is_animation_playing else 'PLAY') - r.prop(item,"start",text="") - r.prop(item,"end",text="") - r.prop(item,"export_sequence",text="",icon='ACTION') - -class SMD_OT_AddVertexAnimation(bpy.types.Operator): - bl_idname = "smd.vertexanim_add" - bl_label = get_id("vca_add") - bl_description = get_id("vca_add_tip") - bl_options = {'INTERNAL'} - - @classmethod - def poll(cls,c): - return type(get_active_exportable(c).item) in [bpy.types.Object, bpy.types.Collection] - - def execute(self,c): - item = get_active_exportable(c).item - item.vs.vertex_animations.add() - item.vs.active_vertex_animation = len(item.vs.vertex_animations) - 1 - return {'FINISHED'} - -class SMD_OT_RemoveVertexAnimation(bpy.types.Operator): - bl_idname = "smd.vertexanim_remove" - bl_label = get_id("vca_remove") - bl_description = get_id("vca_remove_tip") - bl_options = {'INTERNAL'} - - index : bpy.props.IntProperty(min=0) - - @classmethod - def poll(cls,c): - item = get_active_exportable(c).item - return type(item) in [bpy.types.Object, bpy.types.Collection] and len(item.vs.vertex_animations) - - def execute(self,c): - item = get_active_exportable(c).item - item.vs.vertex_animations.remove(self.index) - item.vs.active_vertex_animation -= 1 - return {'FINISHED'} - -class SMD_OT_PreviewVertexAnimation(bpy.types.Operator): - bl_idname = "smd.vertexanim_preview" - bl_label = get_id("vca_preview") - bl_description = get_id("vca_preview_tip") - bl_options = 
{'INTERNAL'} - - def execute(self,c): - item = get_active_exportable(c).item - anim = item.vs.vertex_animations[item.vs.active_vertex_animation] - c.scene.use_preview_range = True - c.scene.frame_preview_start = anim.start - c.scene.frame_preview_end = anim.end - if not c.screen.is_animation_playing: - c.scene.frame_set(anim.start) - bpy.ops.screen.animation_play() - return {'FINISHED'} - -class SMD_OT_GenerateVertexAnimationQCSnippet(bpy.types.Operator): - bl_idname = "smd.vertexanim_generate_qc" - bl_label = get_id("vca_qcgen") - bl_description = get_id("vca_qcgen_tip") - bl_options = {'INTERNAL'} - - @classmethod - def poll(cls,c): - return get_active_exportable(c) is not None - - def execute(self,c): # FIXME: DMX syntax - item = get_active_exportable(c).item - fps = c.scene.render.fps / c.scene.render.fps_base - wm = c.window_manager - wm.clipboard = '$model "merge_me" {0}{1}'.format(item.name,getFileExt()) - if c.scene.vs.export_format == 'SMD': - wm.clipboard += ' {{\n{0}\n}}\n'.format("\n".join(["\tvcafile {0}.vta".format(vca.name) for vca in item.vs.vertex_animations])) - else: wm.clipboard += '\n' - wm.clipboard += "\n// vertex animation block begins\n$upaxis Y\n" - wm.clipboard += "\n".join([''' -$boneflexdriver "vcabone_{0}" tx "{0}" 0 1 -$boneflexdriver "vcabone_{0}" ty "multi_{0}" 0 1 -$sequence "{0}" "vcaanim_{0}{1}" fps {2} -'''.format(vca.name, getFileExt(), fps) for vca in item.vs.vertex_animations if vca.export_sequence]) - wm.clipboard += "\n// vertex animation block ends\n" - self.report({'INFO'},"QC segment copied to clipboard.") - return {'FINISHED'} - -SMD_OT_CreateVertexMap_idname = "smd.vertex_map_create_" -SMD_OT_SelectVertexMap_idname = "smd.vertex_map_select_" -SMD_OT_RemoveVertexMap_idname = "smd.vertex_map_remove_" - -for map_name in vertex_maps: - def is_mesh(ob): - return ob is not None and ob.type == 'MESH' - - class SelectVertexMap(bpy.types.Operator): - bl_idname = SMD_OT_SelectVertexMap_idname + map_name - bl_label = 
bl_description = get_id("vertmap_select") - bl_options = {'INTERNAL'} - vertex_map = map_name - - @classmethod - def poll(cls,c): - if not is_mesh(c.active_object): return False - - vc_loop = c.active_object.data.vertex_colors.get(cls.vertex_map) - return vc_loop and not vc_loop.active - - def execute(self,c): - c.active_object.data.vertex_colors[self.vertex_map].active = True - return {'FINISHED'} - - class CreateVertexMap(bpy.types.Operator): - bl_idname = SMD_OT_CreateVertexMap_idname + map_name - bl_label = bl_description = get_id("vertmap_create") - bl_options = {'INTERNAL'} - vertex_map = map_name - - @classmethod - def poll(cls,c): - return is_mesh(c.active_object) and not cls.vertex_map in c.active_object.data.vertex_colors - - def execute(self,c): - vc = c.active_object.data.vertex_colors.new(name=self.vertex_map) - vc.data.foreach_set("color",[1.0] * len(vc.data) * 4) - SelectVertexMap.execute(self,c) - return {'FINISHED'} - - class RemoveVertexMap(bpy.types.Operator): - bl_idname = SMD_OT_RemoveVertexMap_idname + map_name - bl_label = bl_description = get_id("vertmap_remove") - bl_options = {'INTERNAL'} - vertex_map = map_name - - @classmethod - def poll(cls,c): - return is_mesh(c.active_object) and cls.vertex_map in c.active_object.data.vertex_colors - - def execute(self,c): - vcs = c.active_object.data.vertex_colors - vcs.remove(vcs[self.vertex_map]) - return {'FINISHED'} - - bpy.utils.register_class(SelectVertexMap) - bpy.utils.register_class(CreateVertexMap) - bpy.utils.register_class(RemoveVertexMap) - -class SMD_PT_Object_Config(bpy.types.Panel): - bl_label = get_id('exportables_title') - bl_space_type = "PROPERTIES" - bl_region_type = "WINDOW" - bl_context = "scene" - bl_options = {'DEFAULT_CLOSED'} - - def draw(self,context): - l = self.layout - scene = context.scene - - l.template_list("SMD_UL_ExportItems","",scene.vs,"export_list",scene.vs,"export_list_active",rows=3,maxrows=8) - - active_exportable = get_active_exportable(context) - if not 
active_exportable: - return - - item = active_exportable.item - is_group = type(item) == bpy.types.Collection - - if not (is_group and item.vs.mute): - l.column().prop(item.vs,"subdir",icon='FILE_FOLDER') - -class ExportableConfigurationPanel(bpy.types.Panel): - bl_space_type = "PROPERTIES" - bl_region_type = "WINDOW" - bl_context = "scene" - bl_parent_id = "SMD_PT_Object_Config" - bl_options = {'DEFAULT_CLOSED'} - vs_icon = "" - - @classmethod - def get_item(cls, context): - active_exportable = get_active_exportable(context) - if not active_exportable: - return - - return active_exportable.item - - @classmethod - def poll(cls, context): - return (cls.get_item(context) is not None) - - @classmethod - def is_collection(cls, item): - return isinstance(item, bpy.types.Collection) - - @classmethod - def get_active_object(cls, context): - item = cls.get_item(context) - - if not cls.is_collection(item): - return item - - ob = context.active_object - if ob and ob.name in item.objects: - return ob - - @classmethod - def unpack_collection(cls, context): - item = cls.get_item(context) - return [ob for ob in item.objects if ob.session_uid in State.exportableObjects] if cls.is_collection(item) else [item] - - def draw_header(self, context): - if self.vs_icon: - self.layout.label(icon=self.vs_icon) - - -class SMD_PT_VertexAnimation(ExportableConfigurationPanel): - bl_label = get_id("vca_group_props") - vs_icon = vca_icon - - @classmethod - def poll(cls, context): - item = cls.get_item(context) - return item and (cls.is_collection(item) or item.type in mesh_compatible) - - def draw(self, context): - item = self.get_item(context) - r = self.layout.row(align=True) - r.operator(SMD_OT_AddVertexAnimation.bl_idname,icon="ADD",text="Add") - op = r.operator(SMD_OT_RemoveVertexAnimation.bl_idname,icon="REMOVE",text="Remove") - r.operator("wm.url_open", text=get_id("help",True), icon='HELP').url = "http://developer.valvesoftware.com/wiki/Vertex_animation" - - if 
item.vs.vertex_animations: - op.index = item.vs.active_vertex_animation - self.layout.template_list("SMD_UL_VertexAnimationItem","",item.vs,"vertex_animations",item.vs,"active_vertex_animation",rows=2,maxrows=4) - self.layout.operator(SMD_OT_GenerateVertexAnimationQCSnippet.bl_idname,icon='SCRIPT') - -class SMD_PT_Group(ExportableConfigurationPanel): - bl_label = get_id("exportables_group_props") - bl_options = set() # override - vs_icon = 'GROUP' - - @classmethod - def poll(cls, context): - item = cls.get_item(context) - return item and cls.is_collection(item) - - def draw(self, context): - item = self.get_item(context) - if not item.vs.mute: - self.layout.template_list("SMD_UL_GroupItems",item.name,item,"objects",item.vs,"selected_item",type='GRID',columns=2,rows=2,maxrows=10) - - r = self.layout.row() - r.alignment = 'CENTER' - r.prop(item.vs,"mute") - if item.vs.mute: - return - elif State.exportFormat == ExportFormat.DMX: - r.prop(item.vs,"automerge") - - -class SMD_PT_Armature(ExportableConfigurationPanel): - bl_label = " " - bl_options = set() # override - - @classmethod - def poll(cls, context): - item = cls.get_active_object(context) - return item and (not cls.is_collection(item)) and (item.type == 'ARMATURE' or item.find_armature()) - - def get_armature(self, context): - item = self.get_active_object(context) - if item is None: return None - return item if item.type == 'ARMATURE' else item.find_armature() - - def draw_header(self, context): - armature = self.get_armature(context) - self.bl_label = get_id("exportables_armature_props", True).format(armature.name if armature else "NONE") - self.layout.label(icon='OUTLINER_OB_ARMATURE') - - def draw(self, context): - item = self.get_item(context) - armature = self.get_armature(context) - col = self.layout - if armature == item: # only display action stuff if the user has actually selected the armature - col.row().prop(armature.data.vs,"action_selection",expand=True) - if armature.data.vs.action_selection == 
'FILTERED': - col.prop(armature.vs,"action_filter") - - if State.exportFormat == ExportFormat.SMD: - col.prop(armature.data.vs,"implicit_zero_bone") - col.prop(armature.data.vs,"legacy_rotation") - - if armature.animation_data and not 'ActLib' in dir(bpy.types): - col.template_ID(armature.animation_data, "action", new="action.new") - -class SMD_PT_ShapeKeys(ExportableConfigurationPanel): - bl_label = get_id("exportables_flex_props") - vs_icon = 'SHAPEKEY_DATA' - - @classmethod - def poll(cls, context): - item = cls.get_item(context) - return item and item.vs.export and hasShapes(item) and context.scene.vs.export_format == 'DMX' - - def draw(self, context): - item = self.get_item(context) - objects = self.unpack_collection(context) - - col = self.layout - col.row().prop(item.vs,"flex_controller_mode",expand=True) - - def insertCorrectiveUi(parent): - col = parent.column(align=True) - col.operator(AddCorrectiveShapeDrivers.bl_idname, icon='DRIVER',text=get_id("gen_drivers",True)) - col.operator(RenameShapesToMatchCorrectiveDrivers.bl_idname, icon='SYNTAX_OFF',text=get_id("apply_drivers",True)) - - if item.vs.flex_controller_mode == 'ADVANCED': - controller_source = col.row() - controller_source.alert = hasFlexControllerSource(item.vs.flex_controller_source) == False - controller_source.prop(item.vs,"flex_controller_source",text=get_id("exportables_flex_src"),icon = 'TEXT' if item.vs.flex_controller_source in bpy.data.texts else 'NONE') - - row = col.row(align=True) - row.operator(DmxWriteFlexControllers.bl_idname,icon='TEXT',text=get_id("exportables_flex_generate", True)) - row.operator("wm.url_open",text=get_id("exportables_flex_help", True),icon='HELP').url = "http://developer.valvesoftware.com/wiki/Blender_SMD_Tools_Help#Flex_properties" - - insertCorrectiveUi(col) - - datablocks_dispayed = [] - - for ob in [ob for ob in objects if ob.vs.export and ob.type in shape_types and ob.active_shape_key and ob.data not in datablocks_dispayed]: - if not 
len(datablocks_dispayed): - col.label(text=get_id("exportables_flex_split")) - sharpness_col = col.column(align=True) - r = sharpness_col.split(factor=0.33,align=True) - r.label(text=ob.data.name + ":",icon=MakeObjectIcon(ob,suffix='_DATA'),translate=False) - r2 = r.split(factor=0.7,align=True) - if ob.data.vs.flex_stereo_mode == 'VGROUP': - r2.alert = ob.vertex_groups.get(ob.data.vs.flex_stereo_vg) is None - r2.prop_search(ob.data.vs,"flex_stereo_vg",ob,"vertex_groups",text="") - else: - r2.prop(ob.data.vs,"flex_stereo_sharpness",text="Sharpness") - r2.prop(ob.data.vs,"flex_stereo_mode",text="") - datablocks_dispayed.append(ob.data) - else: - insertCorrectiveUi(col) - - num_shapes, num_correctives = countShapes(objects) - - col.separator() - row = col.row() - row.alignment = 'CENTER' - row.label(icon='SHAPEKEY_DATA',text = get_id("exportables_flex_count", True).format(num_shapes)) - row.label(icon='SHAPEKEY_DATA',text = get_id("exportables_flex_count_corrective", True).format(num_correctives)) - -class SMD_PT_VertexMaps(ExportableConfigurationPanel): - bl_label = " " - - @classmethod - def poll(cls, context): - item = cls.get_active_object(context) - return item and item.type == 'MESH' - - def draw_header(self, context): - title = get_id("vertmap_group_props") - item = self.get_item(context) - is_collection = type(item) == bpy.types.Collection - if is_collection: - member = self.get_active_object(context) - if member: - title += " ({})".format(member.data.name) - self.bl_label = title - self.layout.label(icon='VPAINT_HLT') - - def draw(self, context): - l = self.layout - for map_name in vertex_maps: - r = l.row().split(factor=0.55) - r.label(text=get_id(map_name),icon='GROUP_VCOL') - - r = r.row() - add_remove = r.row(align=True) - add_remove.operator(SMD_OT_CreateVertexMap_idname + map_name,icon='ADD',text="") - add_remove.operator(SMD_OT_RemoveVertexMap_idname + map_name,icon='REMOVE',text="") - r.operator(SMD_OT_SelectVertexMap_idname + 
map_name,text="Activate") - -class SMD_PT_Curves(ExportableConfigurationPanel): - bl_label = get_id("exportables_curve_props") - vs_icon = 'OUTLINER_OB_CURVE' - - @classmethod - def poll(cls, context): - item = cls.get_item(context) - return item and hasCurves(item) - - def draw(self, context): - self.layout.label(text=get_id("exportables_curve_polyside")) - done = set() - for ob in [ob for ob in self.unpack_collection(context) if hasCurves(ob) and not ob.data in done]: - row = self.layout.split(factor=0.33) - row.label(text=ob.data.name + ":",icon=MakeObjectIcon(ob,suffix='_DATA'),translate=False) - row.prop(ob.data.vs,"faces",text="") - done.add(ob.data) - -class SMD_PT_Scene_QC_Complie(bpy.types.Panel): - bl_label = get_id("qc_title") - bl_space_type = "PROPERTIES" - bl_region_type = "WINDOW" - bl_context = "scene" - bl_options = {'DEFAULT_CLOSED'} - - searchPath = None - lastPathRow = None - qcFiles = None - lastUpdate = 0.0 - - def draw(self,context): - l = self.layout - scene = context.scene - - if State.compiler == Compiler.UNKNOWN: - if len(scene.vs.engine_path): - l.label(icon='ERROR',text=get_id("qc_bad_enginepath")) - else: - l.label(icon='INFO',text=get_id("qc_no_enginepath")) - return - - if State.compiler > Compiler.STUDIOMDL: - l.enabled = False - l.label(icon='INFO',text=get_id("qc_invalid_source2")) - return - - row = l.row() - row.alert = len(scene.vs.game_path) and State.gamePath is None - row.prop(scene.vs,"game_path") - - if not len(scene.vs.game_path) and State.gamePath is None: - row = l.row() - row.label(icon='ERROR',text=get_id("qc_nogamepath")) - row.enabled = False - return - - # QCs - filesRow = l.row() - if scene.vs.qc_path != self.searchPath or self.qcFiles is None or time.time() > self.lastUpdate + 2: - self.qcFiles = SMD_OT_Compile.getQCs() - self.searchPath = scene.vs.qc_path - self.lastUpdate = time.time() - - if self.qcFiles: - c = l.column_flow(columns=2) - c.operator_context = 'EXEC_DEFAULT' - for path in self.qcFiles: - 
c.operator(SMD_OT_Compile.bl_idname,text=os.path.basename(path),translate=False).filepath = path - - compile_row = l.row() - compile_row.prop(scene.vs,"qc_compile") - compile_row.operator_context = 'EXEC_DEFAULT' - compile_row.operator(SMD_OT_Compile.bl_idname,text=get_id("qc_compilenow", True),icon='SCRIPT').filepath="*" - - if not self.qcFiles: - if scene.vs.qc_path: - filesRow.alert = True - compile_row.enabled = False - filesRow.prop(scene.vs,"qc_path") # can't add this until the above test completes! - - l.operator(SMD_OT_LaunchHLMV.bl_idname,icon='PREFERENCES',text=get_id("launch_hlmv",True)) +# Copyright (c) 2014 Tom Edwards contact@steamreview.org +# +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+# +# ##### END GPL LICENSE BLOCK ##### + +import bpy +from .utils import * +from .export_smd import SmdExporter, SMD_OT_Compile +from .update import SmdToolsUpdate # comment this line if you make third-party changes +from .flex import * + +vca_icon = 'EDITMODE_HLT' + +class SMD_MT_ExportChoice(bpy.types.Menu): + bl_label = get_id("exportmenu_title") + + def draw(self, context): + l = self.layout + l.operator_context = 'EXEC_DEFAULT' + + exportables = list(getSelectedExportables()) + if len(exportables): + single_obs = list([ex for ex in exportables if ex.ob_type != 'COLLECTION']) + groups = list([ex for ex in exportables if ex.ob_type == 'COLLECTION']) + groups.sort(key=lambda g: g.name.lower()) + + group_layout = l + for i,group in enumerate(groups): # always display all possible groups, as an object could be part of several + if type(self) == SMD_PT_Scene: + if i == 0: group_col = l.column(align=True) + if i % 2 == 0: group_layout = group_col.row(align=True) + group_layout.operator(SmdExporter.bl_idname, text=group.name, icon='GROUP').collection = group.item.name + + if len(exportables) - len(groups) > 1: + l.operator(SmdExporter.bl_idname, text=get_id("exportmenu_selected", True).format(len(exportables)), icon='OBJECT_DATA') + elif len(single_obs): + l.operator(SmdExporter.bl_idname, text=single_obs[0].name, icon=single_obs[0].icon) + elif len(bpy.context.selected_objects): + row = l.row() + row.operator(SmdExporter.bl_idname, text=get_id("exportmenu_invalid"),icon='BLANK1') + row.enabled = False + + row = l.row() + num_scene_exports = count_exports(context) + row.operator(SmdExporter.bl_idname, text=get_id("exportmenu_scene", True).format(num_scene_exports), icon='SCENE_DATA').export_scene = True + row.enabled = num_scene_exports > 0 + +class SMD_PT_Scene(bpy.types.Panel): + bl_label = get_id("exportpanel_title") + bl_space_type = "PROPERTIES" + bl_region_type = "WINDOW" + bl_context = "scene" + bl_options = {'DEFAULT_CLOSED'} + + def draw(self, context): + l 
= self.layout + scene = context.scene + num_to_export = 0 + + l.operator(SmdExporter.bl_idname,text="Export") + + row = l.row() + row.alert = len(scene.vs.export_path) == 0 + row.prop(scene.vs,"export_path") + + if State.datamodelEncoding != 0: + row = l.row().split(factor=0.33) + row.label(text=get_id("export_format") + ":") + row.row().prop(scene.vs,"export_format",expand=True) + row = l.row().split(factor=0.33) + row.label(text=get_id("up_axis") + ":") + row.row().prop(scene.vs,"up_axis", expand=True) + + if State.exportFormat == ExportFormat.DMX and bpy.app.debug_value > 0 or scene.vs.use_kv2: + l.prop(scene.vs,"use_kv2") + l.separator() + + row = l.row() + row.alert = len(scene.vs.engine_path) > 0 and State.compiler == Compiler.UNKNOWN + row.prop(scene.vs,"engine_path") + + if scene.vs.export_format == 'DMX': + if State.engineBranch is None: + row = l.split(factor=0.33) + row.label(text=get_id("exportpanel_dmxver")) + row = row.row(align=True) + row.prop(scene.vs,"dmx_encoding",text="") + row.prop(scene.vs,"dmx_format",text="") + row.enabled = not row.alert + if State.exportFormat == ExportFormat.DMX: + col = l.column() + col.prop(scene.vs,"material_path") + col.prop(scene.vs,"dmx_weightlink_threshold",slider=True) + else: + row = l.split(factor=0.33) + row.label(text=get_id("smd_format") + ":") + row.row().prop(scene.vs,"smd_format", expand=True) + + col = l.column(align=True) + row = col.row(align=True) + self.HelpButton(row) + row.operator("wm.url_open",text=get_id("exportpanel_steam",True),icon='URL').url = "http://steamcommunity.com/groups/BlenderSourceTools" + if "SmdToolsUpdate" in globals(): + col.operator(SmdToolsUpdate.bl_idname,text=get_id("exportpanel_update",True),icon='URL') + + @staticmethod + def HelpButton(layout): + layout.operator("wm.url_open",text=get_id("help",True),icon='HELP').url = "http://developer.valvesoftware.com/wiki/Blender_Source_Tools_Help#Exporting" + +class SMD_MT_ConfigureScene(bpy.types.Menu): + bl_label = 
get_id("exporter_report_menu") + def draw(self, context): + self.layout.label(text=get_id("exporter_err_unconfigured")) + SMD_PT_Scene.HelpButton(self.layout) + +class SMD_UL_ExportItems(bpy.types.UIList): + def draw_item(self, context, layout, data, exportable, icon, active_data, active_propname, index): + item = exportable.item + enabled = not (type(item) == bpy.types.Collection and item.vs.mute) + + row = layout.row(align=True) + row.alignment = 'LEFT' + row.enabled = enabled + + row.prop(item.vs,"export",icon='CHECKBOX_HLT' if item.vs.export and enabled else 'CHECKBOX_DEHLT',text="",emboss=False) + row.label(text=exportable.name,icon=exportable.icon) + + if not enabled: return + + row = layout.row(align=True) + row.alignment='RIGHT' + + num_shapes, num_correctives = countShapes(item) + num_shapes += num_correctives + if num_shapes > 0: + row.label(text=str(num_shapes),icon='SHAPEKEY_DATA') + + num_vca = len(item.vs.vertex_animations) + if num_vca > 0: + row.label(text=str(num_vca),icon=vca_icon) + +class FilterCache: + def __init__(self): + self.state_objects = State.exportableObjects + + fname = None + filter = None + order = None +gui_cache = {} + +class SMD_UL_GroupItems(bpy.types.UIList): + def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index): + r = layout.row(align=True) + r.prop(item.vs,"export",text="",icon='CHECKBOX_HLT' if item.vs.export else 'CHECKBOX_DEHLT',emboss=False) + r.label(text=item.name,translate=False,icon=MakeObjectIcon(item,suffix="_DATA")) + + def filter_items(self, context, data, propname): + fname = self.filter_name.lower() + cache = gui_cache.get(data) + + if not (cache and cache.fname == fname and cache.state_objects is State.exportableObjects): + cache = FilterCache() + cache.filter = [self.bitflag_filter_item if ob.session_uid in State.exportableObjects and (not fname or fname in ob.name.lower()) else 0 for ob in data.objects] + cache.order = 
bpy.types.UI_UL_list.sort_items_by_name(data.objects) + cache.fname = fname + gui_cache[data] = cache + + return cache.filter, cache.order if self.use_filter_sort_alpha else [] + +class SMD_UL_VertexAnimationItem(bpy.types.UIList): + def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index): + r = layout.row() + r.alignment='LEFT' + r.prop(item,"name",text="",emboss=False) + r = layout.row(align=True) + r.alignment='RIGHT' + r.operator(SMD_OT_PreviewVertexAnimation.bl_idname,text="",icon='PAUSE' if context.screen.is_animation_playing else 'PLAY') + r.prop(item,"start",text="") + r.prop(item,"end",text="") + r.prop(item,"export_sequence",text="",icon='ACTION') + +class SMD_OT_AddVertexAnimation(bpy.types.Operator): + bl_idname = "smd.vertexanim_add" + bl_label = get_id("vca_add") + bl_description = get_id("vca_add_tip") + bl_options = {'INTERNAL'} + + @classmethod + def poll(cls,c): + return type(get_active_exportable(c).item) in [bpy.types.Object, bpy.types.Collection] + + def execute(self,c): + item = get_active_exportable(c).item + item.vs.vertex_animations.add() + item.vs.active_vertex_animation = len(item.vs.vertex_animations) - 1 + return {'FINISHED'} + +class SMD_OT_RemoveVertexAnimation(bpy.types.Operator): + bl_idname = "smd.vertexanim_remove" + bl_label = get_id("vca_remove") + bl_description = get_id("vca_remove_tip") + bl_options = {'INTERNAL'} + + index : bpy.props.IntProperty(min=0) + + @classmethod + def poll(cls,c): + item = get_active_exportable(c).item + return type(item) in [bpy.types.Object, bpy.types.Collection] and len(item.vs.vertex_animations) + + def execute(self,c): + item = get_active_exportable(c).item + item.vs.vertex_animations.remove(self.index) + item.vs.active_vertex_animation -= 1 + return {'FINISHED'} + +class SMD_OT_PreviewVertexAnimation(bpy.types.Operator): + bl_idname = "smd.vertexanim_preview" + bl_label = get_id("vca_preview") + bl_description = get_id("vca_preview_tip") + bl_options = 
{'INTERNAL'} + + def execute(self,c): + item = get_active_exportable(c).item + anim = item.vs.vertex_animations[item.vs.active_vertex_animation] + c.scene.use_preview_range = True + c.scene.frame_preview_start = anim.start + c.scene.frame_preview_end = anim.end + if not c.screen.is_animation_playing: + c.scene.frame_set(anim.start) + bpy.ops.screen.animation_play() + return {'FINISHED'} + +class SMD_OT_GenerateVertexAnimationQCSnippet(bpy.types.Operator): + bl_idname = "smd.vertexanim_generate_qc" + bl_label = get_id("vca_qcgen") + bl_description = get_id("vca_qcgen_tip") + bl_options = {'INTERNAL'} + + @classmethod + def poll(cls,c): + return get_active_exportable(c) is not None + + def execute(self,c): # FIXME: DMX syntax + item = get_active_exportable(c).item + fps = c.scene.render.fps / c.scene.render.fps_base + wm = c.window_manager + wm.clipboard = '$model "merge_me" {0}{1}'.format(item.name,getFileExt()) + if c.scene.vs.export_format == 'SMD': + wm.clipboard += ' {{\n{0}\n}}\n'.format("\n".join(["\tvcafile {0}.vta".format(vca.name) for vca in item.vs.vertex_animations])) + else: wm.clipboard += '\n' + wm.clipboard += "\n// vertex animation block begins\n$upaxis Y\n" + wm.clipboard += "\n".join([''' +$boneflexdriver "vcabone_{0}" tx "{0}" 0 1 +$boneflexdriver "vcabone_{0}" ty "multi_{0}" 0 1 +$sequence "{0}" "vcaanim_{0}{1}" fps {2} +'''.format(vca.name, getFileExt(), fps) for vca in item.vs.vertex_animations if vca.export_sequence]) + wm.clipboard += "\n// vertex animation block ends\n" + self.report({'INFO'},"QC segment copied to clipboard.") + return {'FINISHED'} + +SMD_OT_CreateVertexMap_idname = "smd.vertex_map_create_" +SMD_OT_SelectVertexMap_idname = "smd.vertex_map_select_" +SMD_OT_RemoveVertexMap_idname = "smd.vertex_map_remove_" + +for map_name in vertex_maps: + def is_mesh(ob): + return ob is not None and ob.type == 'MESH' + + class SelectVertexMap(bpy.types.Operator): + bl_idname = SMD_OT_SelectVertexMap_idname + map_name + bl_label = 
bl_description = get_id("vertmap_select") + bl_options = {'INTERNAL'} + vertex_map = map_name + + @classmethod + def poll(cls,c): + if not is_mesh(c.active_object): return False + + vc_loop = c.active_object.data.vertex_colors.get(cls.vertex_map) + return vc_loop and not vc_loop.active + + def execute(self,c): + c.active_object.data.vertex_colors[self.vertex_map].active = True + return {'FINISHED'} + + class CreateVertexMap(bpy.types.Operator): + bl_idname = SMD_OT_CreateVertexMap_idname + map_name + bl_label = bl_description = get_id("vertmap_create") + bl_options = {'INTERNAL'} + vertex_map = map_name + + @classmethod + def poll(cls,c): + return is_mesh(c.active_object) and not cls.vertex_map in c.active_object.data.vertex_colors + + def execute(self,c): + vc = c.active_object.data.vertex_colors.new(name=self.vertex_map) + vc.data.foreach_set("color",[1.0] * len(vc.data) * 4) + SelectVertexMap.execute(self,c) + return {'FINISHED'} + + class RemoveVertexMap(bpy.types.Operator): + bl_idname = SMD_OT_RemoveVertexMap_idname + map_name + bl_label = bl_description = get_id("vertmap_remove") + bl_options = {'INTERNAL'} + vertex_map = map_name + + @classmethod + def poll(cls,c): + return is_mesh(c.active_object) and cls.vertex_map in c.active_object.data.vertex_colors + + def execute(self,c): + vcs = c.active_object.data.vertex_colors + vcs.remove(vcs[self.vertex_map]) + return {'FINISHED'} + + bpy.utils.register_class(SelectVertexMap) + bpy.utils.register_class(CreateVertexMap) + bpy.utils.register_class(RemoveVertexMap) + +class SMD_PT_Object_Config(bpy.types.Panel): + bl_label = get_id('exportables_title') + bl_space_type = "PROPERTIES" + bl_region_type = "WINDOW" + bl_context = "scene" + bl_options = {'DEFAULT_CLOSED'} + + def draw(self,context): + l = self.layout + scene = context.scene + + l.template_list("SMD_UL_ExportItems","",scene.vs,"export_list",scene.vs,"export_list_active",rows=3,maxrows=8) + + active_exportable = get_active_exportable(context) + if not 
active_exportable: + return + + item = active_exportable.item + is_group = type(item) == bpy.types.Collection + + if not (is_group and item.vs.mute): + l.column().prop(item.vs,"subdir",icon='FILE_FOLDER') + +class ExportableConfigurationPanel(bpy.types.Panel): + bl_space_type = "PROPERTIES" + bl_region_type = "WINDOW" + bl_context = "scene" + bl_parent_id = "SMD_PT_Object_Config" + bl_options = {'DEFAULT_CLOSED'} + vs_icon = "" + + @classmethod + def get_item(cls, context): + active_exportable = get_active_exportable(context) + if not active_exportable: + return + + return active_exportable.item + + @classmethod + def poll(cls, context): + return (cls.get_item(context) is not None) + + @classmethod + def is_collection(cls, item): + return isinstance(item, bpy.types.Collection) + + @classmethod + def get_active_object(cls, context): + item = cls.get_item(context) + + if not cls.is_collection(item): + return item + + ob = context.active_object + if ob and ob.name in item.objects: + return ob + + @classmethod + def unpack_collection(cls, context): + item = cls.get_item(context) + return [ob for ob in item.objects if ob.session_uid in State.exportableObjects] if cls.is_collection(item) else [item] + + def draw_header(self, context): + if self.vs_icon: + self.layout.label(icon=self.vs_icon) + + +class SMD_PT_VertexAnimation(ExportableConfigurationPanel): + bl_label = get_id("vca_group_props") + vs_icon = vca_icon + + @classmethod + def poll(cls, context): + item = cls.get_item(context) + return item and (cls.is_collection(item) or item.type in mesh_compatible) + + def draw(self, context): + item = self.get_item(context) + r = self.layout.row(align=True) + r.operator(SMD_OT_AddVertexAnimation.bl_idname,icon="ADD",text="Add") + op = r.operator(SMD_OT_RemoveVertexAnimation.bl_idname,icon="REMOVE",text="Remove") + r.operator("wm.url_open", text=get_id("help",True), icon='HELP').url = "http://developer.valvesoftware.com/wiki/Vertex_animation" + + if 
item.vs.vertex_animations: + op.index = item.vs.active_vertex_animation + self.layout.template_list("SMD_UL_VertexAnimationItem","",item.vs,"vertex_animations",item.vs,"active_vertex_animation",rows=2,maxrows=4) + self.layout.operator(SMD_OT_GenerateVertexAnimationQCSnippet.bl_idname,icon='SCRIPT') + +class SMD_PT_Group(ExportableConfigurationPanel): + bl_label = get_id("exportables_group_props") + bl_options = set() # override + vs_icon = 'GROUP' + + @classmethod + def poll(cls, context): + item = cls.get_item(context) + return item and cls.is_collection(item) + + def draw(self, context): + item = self.get_item(context) + if not item.vs.mute: + self.layout.template_list("SMD_UL_GroupItems",item.name,item,"objects",item.vs,"selected_item",type='GRID',columns=2,rows=2,maxrows=10) + + r = self.layout.row() + r.alignment = 'CENTER' + r.prop(item.vs,"mute") + if item.vs.mute: + return + elif State.exportFormat == ExportFormat.DMX: + r.prop(item.vs,"automerge") + + +class SMD_PT_Armature(ExportableConfigurationPanel): + bl_label = " " + bl_options = set() # override + + @classmethod + def poll(cls, context): + item = cls.get_active_object(context) + return item and (not cls.is_collection(item)) and (item.type == 'ARMATURE' or item.find_armature()) + + def get_armature(self, context): + item = self.get_active_object(context) + if item is None: return None + return item if item.type == 'ARMATURE' else item.find_armature() + + def draw_header(self, context): + armature = self.get_armature(context) + self.bl_label = get_id("exportables_armature_props", True).format(armature.name if armature else "NONE") + self.layout.label(icon='OUTLINER_OB_ARMATURE') + + def draw(self, context): + item = self.get_item(context) + armature = self.get_armature(context) + col = self.layout + if armature == item: # only display action stuff if the user has actually selected the armature + col.row().prop(armature.data.vs,"action_selection",expand=True) + if armature.data.vs.action_selection == 
'FILTERED': + col.prop(armature.vs,"action_filter") + + if State.exportFormat == ExportFormat.SMD: + col.prop(armature.data.vs,"implicit_zero_bone") + col.prop(armature.data.vs,"legacy_rotation") + + if armature.animation_data and not 'ActLib' in dir(bpy.types): + col.template_ID(armature.animation_data, "action", new="action.new") + +class SMD_PT_ShapeKeys(ExportableConfigurationPanel): + bl_label = get_id("exportables_flex_props") + vs_icon = 'SHAPEKEY_DATA' + + @classmethod + def poll(cls, context): + item = cls.get_item(context) + return item and item.vs.export and hasShapes(item) and context.scene.vs.export_format == 'DMX' + + def draw(self, context): + item = self.get_item(context) + objects = self.unpack_collection(context) + + col = self.layout + col.row().prop(item.vs,"flex_controller_mode",expand=True) + + def insertCorrectiveUi(parent): + col = parent.column(align=True) + col.operator(AddCorrectiveShapeDrivers.bl_idname, icon='DRIVER',text=get_id("gen_drivers",True)) + col.operator(RenameShapesToMatchCorrectiveDrivers.bl_idname, icon='SYNTAX_OFF',text=get_id("apply_drivers",True)) + + if item.vs.flex_controller_mode == 'ADVANCED': + controller_source = col.row() + controller_source.alert = hasFlexControllerSource(item.vs.flex_controller_source) == False + controller_source.prop(item.vs,"flex_controller_source",text=get_id("exportables_flex_src"),icon = 'TEXT' if item.vs.flex_controller_source in bpy.data.texts else 'NONE') + + row = col.row(align=True) + row.operator(DmxWriteFlexControllers.bl_idname,icon='TEXT',text=get_id("exportables_flex_generate", True)) + row.operator("wm.url_open",text=get_id("exportables_flex_help", True),icon='HELP').url = "http://developer.valvesoftware.com/wiki/Blender_SMD_Tools_Help#Flex_properties" + + insertCorrectiveUi(col) + + datablocks_dispayed = [] + + for ob in [ob for ob in objects if ob.vs.export and ob.type in shape_types and ob.active_shape_key and ob.data not in datablocks_dispayed]: + if not 
len(datablocks_dispayed): + col.label(text=get_id("exportables_flex_split")) + sharpness_col = col.column(align=True) + r = sharpness_col.split(factor=0.33,align=True) + r.label(text=ob.data.name + ":",icon=MakeObjectIcon(ob,suffix='_DATA'),translate=False) + r2 = r.split(factor=0.7,align=True) + if ob.data.vs.flex_stereo_mode == 'VGROUP': + r2.alert = ob.vertex_groups.get(ob.data.vs.flex_stereo_vg) is None + r2.prop_search(ob.data.vs,"flex_stereo_vg",ob,"vertex_groups",text="") + else: + r2.prop(ob.data.vs,"flex_stereo_sharpness",text="Sharpness") + r2.prop(ob.data.vs,"flex_stereo_mode",text="") + datablocks_dispayed.append(ob.data) + else: + insertCorrectiveUi(col) + + num_shapes, num_correctives = countShapes(objects) + + col.separator() + row = col.row() + row.alignment = 'CENTER' + row.label(icon='SHAPEKEY_DATA',text = get_id("exportables_flex_count", True).format(num_shapes)) + row.label(icon='SHAPEKEY_DATA',text = get_id("exportables_flex_count_corrective", True).format(num_correctives)) + +class SMD_PT_VertexMaps(ExportableConfigurationPanel): + bl_label = " " + + @classmethod + def poll(cls, context): + item = cls.get_active_object(context) + return item and item.type == 'MESH' + + def draw_header(self, context): + title = get_id("vertmap_group_props") + item = self.get_item(context) + is_collection = type(item) == bpy.types.Collection + if is_collection: + member = self.get_active_object(context) + if member: + title += " ({})".format(member.data.name) + self.bl_label = title + self.layout.label(icon='VPAINT_HLT') + + def draw(self, context): + l = self.layout + for map_name in vertex_maps: + r = l.row().split(factor=0.55) + r.label(text=get_id(map_name),icon='GROUP_VCOL') + + r = r.row() + add_remove = r.row(align=True) + add_remove.operator(SMD_OT_CreateVertexMap_idname + map_name,icon='ADD',text="") + add_remove.operator(SMD_OT_RemoveVertexMap_idname + map_name,icon='REMOVE',text="") + r.operator(SMD_OT_SelectVertexMap_idname + 
map_name,text="Activate") + +class SMD_PT_Curves(ExportableConfigurationPanel): + bl_label = get_id("exportables_curve_props") + vs_icon = 'OUTLINER_OB_CURVE' + + @classmethod + def poll(cls, context): + item = cls.get_item(context) + return item and hasCurves(item) + + def draw(self, context): + self.layout.label(text=get_id("exportables_curve_polyside")) + done = set() + for ob in [ob for ob in self.unpack_collection(context) if hasCurves(ob) and not ob.data in done]: + row = self.layout.split(factor=0.33) + row.label(text=ob.data.name + ":",icon=MakeObjectIcon(ob,suffix='_DATA'),translate=False) + row.prop(ob.data.vs,"faces",text="") + done.add(ob.data) + +class SMD_PT_Scene_QC_Complie(bpy.types.Panel): + bl_label = get_id("qc_title") + bl_space_type = "PROPERTIES" + bl_region_type = "WINDOW" + bl_context = "scene" + bl_options = {'DEFAULT_CLOSED'} + + searchPath = None + lastPathRow = None + qcFiles = None + lastUpdate = 0.0 + + def draw(self,context): + l = self.layout + scene = context.scene + + if State.compiler == Compiler.UNKNOWN: + if len(scene.vs.engine_path): + l.label(icon='ERROR',text=get_id("qc_bad_enginepath")) + else: + l.label(icon='INFO',text=get_id("qc_no_enginepath")) + return + + if State.compiler > Compiler.STUDIOMDL: + l.enabled = False + l.label(icon='INFO',text=get_id("qc_invalid_source2")) + return + + row = l.row() + row.alert = len(scene.vs.game_path) and State.gamePath is None + row.prop(scene.vs,"game_path") + + if not len(scene.vs.game_path) and State.gamePath is None: + row = l.row() + row.label(icon='ERROR',text=get_id("qc_nogamepath")) + row.enabled = False + return + + # QCs + filesRow = l.row() + if scene.vs.qc_path != self.searchPath or self.qcFiles is None or time.time() > self.lastUpdate + 2: + self.qcFiles = SMD_OT_Compile.getQCs() + self.searchPath = scene.vs.qc_path + self.lastUpdate = time.time() + + if self.qcFiles: + c = l.column_flow(columns=2) + c.operator_context = 'EXEC_DEFAULT' + for path in self.qcFiles: + 
c.operator(SMD_OT_Compile.bl_idname,text=os.path.basename(path),translate=False).filepath = path + + compile_row = l.row() + compile_row.prop(scene.vs,"qc_compile") + compile_row.operator_context = 'EXEC_DEFAULT' + compile_row.operator(SMD_OT_Compile.bl_idname,text=get_id("qc_compilenow", True),icon='SCRIPT').filepath="*" + + if not self.qcFiles: + if scene.vs.qc_path: + filesRow.alert = True + compile_row.enabled = False + filesRow.prop(scene.vs,"qc_path") # can't add this until the above test completes! + + l.operator(SMD_OT_LaunchHLMV.bl_idname,icon='PREFERENCES',text=get_id("launch_hlmv",True)) diff --git a/io_scene_valvesource/__init__.py b/io_scene_valvesource/__init__.py index 8db79c8..2c242fb 100644 --- a/io_scene_valvesource/__init__.py +++ b/io_scene_valvesource/__init__.py @@ -1,275 +1,293 @@ -# Copyright (c) 2014 Tom Edwards contact@steamreview.org -# -# ##### BEGIN GPL LICENSE BLOCK ##### -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
-# -# ##### END GPL LICENSE BLOCK ##### - -bl_info = { - "name": "Blender Source Tools", - "author": "Tom Edwards (translators: Grigory Revzin)", - "version": (3, 3, 1), - "blender": (4, 1, 0), - "category": "Import-Export", - "location": "File > Import/Export, Scene properties", - "wiki_url": "http://steamcommunity.com/groups/BlenderSourceTools", - "tracker_url": "http://steamcommunity.com/groups/BlenderSourceTools/discussions/0/", - "description": "Importer and exporter for Valve Software's Source Engine. Supports SMD\VTA, DMX and QC." -} - -import bpy, os -from bpy.props import StringProperty, BoolProperty, EnumProperty, IntProperty, CollectionProperty, FloatProperty, PointerProperty - -# Python doesn't reload package sub-modules at the same time as __init__.py! -import importlib, sys -for filename in [ f for f in os.listdir(os.path.dirname(os.path.realpath(__file__))) if f.endswith(".py") ]: - if filename == os.path.basename(__file__): continue - module = sys.modules.get("{}.{}".format(__name__,filename[:-3])) - if module: importlib.reload(module) - -# clear out any scene update funcs hanging around, e.g. after a script reload -for collection in [bpy.app.handlers.depsgraph_update_post, bpy.app.handlers.load_post]: - for func in collection: - if func.__module__.startswith(__name__): - collection.remove(func) - -from . 
import datamodel, import_smd, export_smd, flex, GUI, update -from .utils import * - -class ValveSource_Exportable(bpy.types.PropertyGroup): - ob_type : StringProperty() - icon : StringProperty() - obj : PointerProperty(type=bpy.types.Object) - collection : PointerProperty(type=bpy.types.Collection) - - @property - def item(self): return self.obj or self.collection - - @property - def session_uid(self): return self.item.session_uid - -def menu_func_import(self, context): - self.layout.operator(import_smd.SmdImporter.bl_idname, text=get_id("import_menuitem", True)) - -def menu_func_export(self, context): - self.layout.menu("SMD_MT_ExportChoice", text=get_id("export_menuitem")) - -def menu_func_shapekeys(self,context): - self.layout.operator(flex.ActiveDependencyShapes.bl_idname, text=get_id("activate_dependency_shapes",True), icon='SHAPEKEY_DATA') - -def menu_func_textedit(self,context): - self.layout.operator(flex.InsertUUID.bl_idname) - -def export_active_changed(self, context): - if not context.scene.vs.export_list_active < len(context.scene.vs.export_list): - context.scene.vs.export_list_active = len(context.scene.vs.export_list) - 1 - return - - item = get_active_exportable(context).item - - if type(item) == bpy.types.Collection and item.vs.mute: return - for ob in context.scene.objects: ob.select_set(False) - - if type(item) == bpy.types.Collection: - context.view_layer.objects.active = item.objects[0] - for ob in item.objects: ob.select_set(True) - else: - item.select_set(True) - context.view_layer.objects.active = item -# -# Property Groups -# -from bpy.types import PropertyGroup - -encodings = [] -for enc in datamodel.list_support()['binary']: encodings.append( (str(enc), f"Binary {enc}", '' ) ) -formats = [] -for version in set(x for x in [*dmx_versions_source1.values(), *dmx_versions_source2.values()] if x.format != 0): - formats.append((version.format_enum, version.format_title, '')) -formats.sort(key = lambda f: f[0]) - -class 
ValveSource_SceneProps(PropertyGroup): - export_path : StringProperty(name=get_id("exportroot"),description=get_id("exportroot_tip"), subtype='DIR_PATH') - qc_compile : BoolProperty(name=get_id("qc_compileall"),description=get_id("qc_compileall_tip"),default=False) - qc_path : StringProperty(name=get_id("qc_path"),description=get_id("qc_path_tip"),default="//*.qc",subtype="FILE_PATH") - engine_path : StringProperty(name=get_id("engine_path"),description=get_id("engine_path_tip"), subtype='DIR_PATH',update=State.onEnginePathChanged) - - dmx_encoding : EnumProperty(name=get_id("dmx_encoding"),description=get_id("dmx_encoding_tip"),items=tuple(encodings),default='2') - dmx_format : EnumProperty(name=get_id("dmx_format"),description=get_id("dmx_format_tip"),items=tuple(formats),default='1') - - export_format : EnumProperty(name=get_id("export_format"),items=( ('SMD', "SMD", "Studiomdl Data" ), ('DMX', "DMX", "Datamodel Exchange" ) ),default='DMX') - up_axis : EnumProperty(name=get_id("up_axis"),items=axes,default='Z',description=get_id("up_axis_tip")) - material_path : StringProperty(name=get_id("dmx_mat_path"),description=get_id("dmx_mat_path_tip")) - export_list_active : IntProperty(name=get_id("active_exportable"),default=0,min=0,update=export_active_changed) - export_list : CollectionProperty(type=ValveSource_Exportable,options={'SKIP_SAVE','HIDDEN'}) - use_kv2 : BoolProperty(name="Write KeyValues2",description="Write ASCII DMX files",default=False) - game_path : StringProperty(name=get_id("game_path"),description=get_id("game_path_tip"),subtype='DIR_PATH',update=State.onGamePathChanged) - dmx_weightlink_threshold : FloatProperty(name=get_id("dmx_weightlinkcull"),description=get_id("dmx_weightlinkcull_tip"),max=1,min=0) - smd_format : EnumProperty(name=get_id("smd_format"), items=(('SOURCE', "Source", "Source Engine (Half-Life 2)") , ("GOLDSOURCE", "GoldSrc", "GoldSrc engine (Half-Life 1)")), default="SOURCE") - -class ValveSource_VertexAnimation(PropertyGroup): - 
name : StringProperty(name="Name",default="VertexAnim") - start : IntProperty(name="Start",description=get_id("vca_start_tip"),default=0) - end : IntProperty(name="End",description=get_id("vca_end_tip"),default=250) - export_sequence : BoolProperty(name=get_id("vca_sequence"),description=get_id("vca_sequence_tip"),default=True) - -class ExportableProps(): - flex_controller_modes = ( - ('SIMPLE',"Simple",get_id("controllers_simple_tip")), - ('ADVANCED',"Advanced",get_id("controllers_advanced_tip")) - ) - - export : BoolProperty(name=get_id("scene_export"),description=get_id("use_scene_export_tip"),default=True) - subdir : StringProperty(name=get_id("subdir"),description=get_id("subdir_tip")) - flex_controller_mode : EnumProperty(name=get_id("controllers_mode"),description=get_id("controllers_mode_tip"),items=flex_controller_modes,default='SIMPLE') - flex_controller_source : StringProperty(name=get_id("controller_source"),description=get_id("controllers_source_tip"),subtype='FILE_PATH') - - vertex_animations : CollectionProperty(name=get_id("vca_group_props"),type=ValveSource_VertexAnimation) - active_vertex_animation : IntProperty(default=-1) - -class ValveSource_ObjectProps(ExportableProps,PropertyGroup): - action_filter : StringProperty(name=get_id("action_filter"),description=get_id("action_filter_tip")) - triangulate : BoolProperty(name=get_id("triangulate"),description=get_id("triangulate_tip"),default=False) - -class ValveSource_ArmatureProps(PropertyGroup): - implicit_zero_bone : BoolProperty(name=get_id("dummy_bone"),default=True,description=get_id("dummy_bone_tip")) - arm_modes = ( - ('CURRENT',get_id("action_selection_current"),get_id("action_selection_current_tip")), - ('FILTERED',get_id("action_filter"),get_id("action_selection_filter_tip")) - ) - action_selection : EnumProperty(name=get_id("action_selection_mode"), items=arm_modes,description=get_id("action_selection_mode_tip"),default='CURRENT') - legacy_rotation : 
BoolProperty(name=get_id("bone_rot_legacy"),description=get_id("bone_rot_legacy_tip"),default=False) - -class ValveSource_CollectionProps(ExportableProps,PropertyGroup): - mute : BoolProperty(name=get_id("group_suppress"),description=get_id("group_suppress_tip"),default=False) - selected_item : IntProperty(default=-1, max=-1, min=-1) - automerge : BoolProperty(name=get_id("group_merge_mech"),description=get_id("group_merge_mech_tip"),default=False) - -class ShapeTypeProps(): - flex_stereo_sharpness : FloatProperty(name=get_id("shape_stereo_sharpness"),description=get_id("shape_stereo_sharpness_tip"),default=90,min=0,max=100,subtype='PERCENTAGE') - flex_stereo_mode : EnumProperty(name=get_id("shape_stereo_mode"),description=get_id("shape_stereo_mode_tip"), - items=tuple(list(axes) + [('VGROUP','Vertex Group',get_id("shape_stereo_mode_vgroup"))]), default='X') - flex_stereo_vg : StringProperty(name=get_id("shape_stereo_vgroup"),description=get_id("shape_stereo_vgroup_tip")) - -class CurveTypeProps(): - faces : EnumProperty(name=get_id("curve_poly_side"),description=get_id("curve_poly_side_tip"),default='FORWARD',items=( - ('FORWARD', get_id("curve_poly_side_fwd"), ''), - ('BACKWARD', get_id("curve_poly_side_back"), ''), - ('BOTH', get_id("curve_poly_side_both"), '')) ) - -class ValveSource_MeshProps(ShapeTypeProps,PropertyGroup): - pass -class ValveSource_SurfaceProps(ShapeTypeProps,CurveTypeProps,PropertyGroup): - pass -class ValveSource_CurveProps(ShapeTypeProps,CurveTypeProps,PropertyGroup): - pass -class ValveSource_TextProps(CurveTypeProps,PropertyGroup): - pass - -_classes = ( - ValveSource_Exportable, - ValveSource_SceneProps, - ValveSource_VertexAnimation, - ValveSource_ObjectProps, - ValveSource_ArmatureProps, - ValveSource_CollectionProps, - ValveSource_MeshProps, - ValveSource_SurfaceProps, - ValveSource_CurveProps, - ValveSource_TextProps, - GUI.SMD_MT_ExportChoice, - GUI.SMD_PT_Scene, - GUI.SMD_MT_ConfigureScene, - GUI.SMD_UL_ExportItems, - 
GUI.SMD_UL_GroupItems, - GUI.SMD_UL_VertexAnimationItem, - GUI.SMD_OT_AddVertexAnimation, - GUI.SMD_OT_RemoveVertexAnimation, - GUI.SMD_OT_PreviewVertexAnimation, - GUI.SMD_OT_GenerateVertexAnimationQCSnippet, - GUI.SMD_OT_LaunchHLMV, - GUI.SMD_PT_Object_Config, - GUI.SMD_PT_Group, - GUI.SMD_PT_VertexAnimation, - GUI.SMD_PT_Armature, - GUI.SMD_PT_ShapeKeys, - GUI.SMD_PT_VertexMaps, - GUI.SMD_PT_Curves, - GUI.SMD_PT_Scene_QC_Complie, - flex.DmxWriteFlexControllers, - flex.AddCorrectiveShapeDrivers, - flex.RenameShapesToMatchCorrectiveDrivers, - flex.ActiveDependencyShapes, - flex.InsertUUID, - update.SmdToolsUpdate, - update.SMD_MT_Updated, - export_smd.SMD_OT_Compile, - export_smd.SmdExporter, - import_smd.SmdImporter) - -def register(): - for cls in _classes: - bpy.utils.register_class(cls) - - from . import translations - bpy.app.translations.register(__name__,translations.translations) - - bpy.types.TOPBAR_MT_file_import.append(menu_func_import) - bpy.types.TOPBAR_MT_file_export.append(menu_func_export) - bpy.types.MESH_MT_shape_key_context_menu.append(menu_func_shapekeys) - bpy.types.TEXT_MT_edit.append(menu_func_textedit) - - try: bpy.ops.wm.addon_disable('EXEC_SCREEN',module="io_smd_tools") - except: pass - - def make_pointer(prop_type): - return PointerProperty(name=get_id("settings_prop"),type=prop_type) - - bpy.types.Scene.vs = make_pointer(ValveSource_SceneProps) - bpy.types.Object.vs = make_pointer(ValveSource_ObjectProps) - bpy.types.Armature.vs = make_pointer(ValveSource_ArmatureProps) - bpy.types.Collection.vs = make_pointer(ValveSource_CollectionProps) - bpy.types.Mesh.vs = make_pointer(ValveSource_MeshProps) - bpy.types.SurfaceCurve.vs = make_pointer(ValveSource_SurfaceProps) - bpy.types.Curve.vs = make_pointer(ValveSource_CurveProps) - bpy.types.Text.vs = make_pointer(ValveSource_TextProps) - - State.hook_events() - -def unregister(): - State.unhook_events() - - bpy.types.TOPBAR_MT_file_import.remove(menu_func_import) - 
bpy.types.TOPBAR_MT_file_export.remove(menu_func_export) - bpy.types.MESH_MT_shape_key_context_menu.remove(menu_func_shapekeys) - bpy.types.TEXT_MT_edit.remove(menu_func_textedit) - - bpy.app.translations.unregister(__name__) - - for cls in reversed(_classes): - bpy.utils.unregister_class(cls) - - del bpy.types.Scene.vs - del bpy.types.Object.vs - del bpy.types.Armature.vs - del bpy.types.Collection.vs - del bpy.types.Mesh.vs - del bpy.types.SurfaceCurve.vs - del bpy.types.Curve.vs - del bpy.types.Text.vs - -if __name__ == "__main__": - register() \ No newline at end of file +# Copyright (c) 2014 Tom Edwards contact@steamreview.org +# +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### + +bl_info = { + "name": "(fixed) Blender Source Tools SMD", + "author": "Tom Edwards (translators: Grigory Revzin)", + "version": (3, 3, 1), + "blender": (4, 1, 0), + "category": "Import-Export", + "location": "File > Import/Export, Scene properties", + "wiki_url": "http://steamcommunity.com/groups/BlenderSourceTools", + "tracker_url": "http://steamcommunity.com/groups/BlenderSourceTools/discussions/0/", + "description": "Importer and exporter for Valve Software's Source Engine. Supports SMD\VTA, DMX and QC." 
+} + +import bpy, os +from bpy.props import StringProperty, BoolProperty, EnumProperty, IntProperty, CollectionProperty, FloatProperty, PointerProperty + +# Python doesn't reload package sub-modules at the same time as __init__.py! +import importlib, sys +for filename in [ f for f in os.listdir(os.path.dirname(os.path.realpath(__file__))) if f.endswith(".py") ]: + if filename == os.path.basename(__file__): continue + module = sys.modules.get("{}.{}".format(__name__,filename[:-3])) + if module: importlib.reload(module) + +# clear out any scene update funcs hanging around, e.g. after a script reload +for collection in [bpy.app.handlers.depsgraph_update_post, bpy.app.handlers.load_post]: + for func in collection: + if func.__module__.startswith(__name__): + collection.remove(func) + +from . import datamodel, import_smd, export_smd, flex, GUI, update +from .utils import * + +class ValveSource_Exportable(bpy.types.PropertyGroup): + ob_type : StringProperty() + icon : StringProperty() + obj : PointerProperty(type=bpy.types.Object) + collection : PointerProperty(type=bpy.types.Collection) + + @property + def item(self): return self.obj or self.collection + + @property + def session_uid(self): return self.item.session_uid + +def menu_func_import(self, context): + self.layout.operator(import_smd.SmdImporter.bl_idname, text=get_id("import_menuitem", True)) + +def menu_func_export(self, context): + self.layout.menu("SMD_MT_ExportChoice", text=get_id("export_menuitem")) + +def menu_func_shapekeys(self,context): + self.layout.operator(flex.ActiveDependencyShapes.bl_idname, text=get_id("activate_dependency_shapes",True), icon='SHAPEKEY_DATA') + +def menu_func_textedit(self,context): + self.layout.operator(flex.InsertUUID.bl_idname) + +def export_active_changed(self, context): + if not context.scene.vs.export_list_active < len(context.scene.vs.export_list): + context.scene.vs.export_list_active = len(context.scene.vs.export_list) - 1 + return + + item = 
get_active_exportable(context).item + + if type(item) == bpy.types.Collection and item.vs.mute: return + for ob in context.scene.objects: ob.select_set(False) + + if type(item) == bpy.types.Collection: + context.view_layer.objects.active = item.objects[0] + for ob in item.objects: ob.select_set(True) + else: + item.select_set(True) + context.view_layer.objects.active = item +# +# Property Groups +# +from bpy.types import PropertyGroup + +encodings = [] +for enc in datamodel.list_support()['binary']: encodings.append( (str(enc), f"Binary {enc}", '' ) ) +formats = [] +for version in set(x for x in [*dmx_versions_source1.values(), *dmx_versions_source2.values()] if x.format != 0): + formats.append((version.format_enum, version.format_title, '')) +formats.sort(key = lambda f: f[0]) + +class ValveSource_SceneProps(PropertyGroup): + export_path : StringProperty(name=get_id("exportroot"),description=get_id("exportroot_tip"), subtype='DIR_PATH') + qc_compile : BoolProperty(name=get_id("qc_compileall"),description=get_id("qc_compileall_tip"),default=False) + qc_path : StringProperty(name=get_id("qc_path"),description=get_id("qc_path_tip"),default="//*.qc",subtype="FILE_PATH") + engine_path : StringProperty(name=get_id("engine_path"),description=get_id("engine_path_tip"), subtype='DIR_PATH',update=State.onEnginePathChanged) + + dmx_encoding : EnumProperty(name=get_id("dmx_encoding"),description=get_id("dmx_encoding_tip"),items=tuple(encodings),default='2') + dmx_format : EnumProperty(name=get_id("dmx_format"),description=get_id("dmx_format_tip"),items=tuple(formats),default='1') + + export_format : EnumProperty(name=get_id("export_format"),items=( ('SMD', "SMD", "Studiomdl Data" ), ('DMX', "DMX", "Datamodel Exchange" ) ),default='SMD') + up_axis : EnumProperty(name=get_id("up_axis"),items=axes,default='Z',description=get_id("up_axis_tip")) + material_path : StringProperty(name=get_id("dmx_mat_path"),description=get_id("dmx_mat_path_tip")) + export_list_active : 
IntProperty(name=get_id("active_exportable"),default=0,min=0,update=export_active_changed) + export_list : CollectionProperty(type=ValveSource_Exportable,options={'SKIP_SAVE','HIDDEN'}) + use_kv2 : BoolProperty(name="Write KeyValues2",description="Write ASCII DMX files",default=False) + game_path : StringProperty(name=get_id("game_path"),description=get_id("game_path_tip"),subtype='DIR_PATH',update=State.onGamePathChanged) + dmx_weightlink_threshold : FloatProperty(name=get_id("dmx_weightlinkcull"),description=get_id("dmx_weightlinkcull_tip"),max=1,min=0) + smd_format : EnumProperty(name=get_id("smd_format"), items=(('SOURCE', "Source", "Source Engine (Half-Life 2)") , ("GOLDSOURCE", "GoldSrc", "GoldSrc engine (Half-Life 1)")), default="GOLDSOURCE") + +class ValveSource_VertexAnimation(PropertyGroup): + name : StringProperty(name="Name",default="VertexAnim") + start : IntProperty(name="Start",description=get_id("vca_start_tip"),default=0) + end : IntProperty(name="End",description=get_id("vca_end_tip"),default=250) + export_sequence : BoolProperty(name=get_id("vca_sequence"),description=get_id("vca_sequence_tip"),default=True) + +class ExportableProps(): + flex_controller_modes = ( + ('SIMPLE',"Simple",get_id("controllers_simple_tip")), + ('ADVANCED',"Advanced",get_id("controllers_advanced_tip")) + ) + + export : BoolProperty(name=get_id("scene_export"),description=get_id("use_scene_export_tip"),default=True) + subdir : StringProperty(name=get_id("subdir"),description=get_id("subdir_tip")) + flex_controller_mode : EnumProperty(name=get_id("controllers_mode"),description=get_id("controllers_mode_tip"),items=flex_controller_modes,default='SIMPLE') + flex_controller_source : StringProperty(name=get_id("controller_source"),description=get_id("controllers_source_tip"),subtype='FILE_PATH') + + vertex_animations : CollectionProperty(name=get_id("vca_group_props"),type=ValveSource_VertexAnimation) + active_vertex_animation : IntProperty(default=-1) + +class 
ValveSource_ObjectProps(ExportableProps,PropertyGroup): + action_filter : StringProperty(name=get_id("action_filter"),description=get_id("action_filter_tip")) + triangulate : BoolProperty(name=get_id("triangulate"),description=get_id("triangulate_tip"),default=False) + +class ValveSource_ArmatureProps(PropertyGroup): + implicit_zero_bone : BoolProperty(name=get_id("dummy_bone"),default=True,description=get_id("dummy_bone_tip")) + arm_modes = ( + ('CURRENT',get_id("action_selection_current"),get_id("action_selection_current_tip")), + ('FILTERED',get_id("action_filter"),get_id("action_selection_filter_tip")) + ) + action_selection : EnumProperty(name=get_id("action_selection_mode"), items=arm_modes,description=get_id("action_selection_mode_tip"),default='CURRENT') + legacy_rotation : BoolProperty(name=get_id("bone_rot_legacy"),description=get_id("bone_rot_legacy_tip"),default=False) + +class ValveSource_CollectionProps(ExportableProps,PropertyGroup): + mute : BoolProperty(name=get_id("group_suppress"),description=get_id("group_suppress_tip"),default=False) + selected_item : IntProperty(default=-1, max=-1, min=-1) + automerge : BoolProperty(name=get_id("group_merge_mech"),description=get_id("group_merge_mech_tip"),default=False) + +class ShapeTypeProps(): + flex_stereo_sharpness : FloatProperty(name=get_id("shape_stereo_sharpness"),description=get_id("shape_stereo_sharpness_tip"),default=90,min=0,max=100,subtype='PERCENTAGE') + flex_stereo_mode : EnumProperty(name=get_id("shape_stereo_mode"),description=get_id("shape_stereo_mode_tip"), + items=tuple(list(axes) + [('VGROUP','Vertex Group',get_id("shape_stereo_mode_vgroup"))]), default='X') + flex_stereo_vg : StringProperty(name=get_id("shape_stereo_vgroup"),description=get_id("shape_stereo_vgroup_tip")) + +class CurveTypeProps(): + faces : EnumProperty(name=get_id("curve_poly_side"),description=get_id("curve_poly_side_tip"),default='FORWARD',items=( + ('FORWARD', get_id("curve_poly_side_fwd"), ''), + ('BACKWARD', 
get_id("curve_poly_side_back"), ''), + ('BOTH', get_id("curve_poly_side_both"), '')) ) + +class ValveSource_MeshProps(ShapeTypeProps,PropertyGroup): + pass +class ValveSource_SurfaceProps(ShapeTypeProps,CurveTypeProps,PropertyGroup): + pass +class ValveSource_CurveProps(ShapeTypeProps,CurveTypeProps,PropertyGroup): + pass +class ValveSource_TextProps(CurveTypeProps,PropertyGroup): + pass + +#### Import and Export button s +class SMD_PT_ImportExport(bpy.types.Panel): + bl_label = "SMD Import/Export" + bl_space_type = 'VIEW_3D' + bl_region_type = 'UI' + bl_category = "SMD" + + def draw(self, context): + layout = self.layout + layout.operator("import_scene.smd", text="Import SMD") + layout.operator("export_scene.smd", text="Export SMD") +#### Import and Export button f + +_classes = ( + ValveSource_Exportable, + ValveSource_SceneProps, + ValveSource_VertexAnimation, + ValveSource_ObjectProps, + ValveSource_ArmatureProps, + ValveSource_CollectionProps, + ValveSource_MeshProps, + ValveSource_SurfaceProps, + ValveSource_CurveProps, + ValveSource_TextProps, + GUI.SMD_MT_ExportChoice, + GUI.SMD_PT_Scene, + GUI.SMD_MT_ConfigureScene, + GUI.SMD_UL_ExportItems, + GUI.SMD_UL_GroupItems, + GUI.SMD_UL_VertexAnimationItem, + GUI.SMD_OT_AddVertexAnimation, + GUI.SMD_OT_RemoveVertexAnimation, + GUI.SMD_OT_PreviewVertexAnimation, + GUI.SMD_OT_GenerateVertexAnimationQCSnippet, + GUI.SMD_OT_LaunchHLMV, + GUI.SMD_PT_Object_Config, + GUI.SMD_PT_Group, + GUI.SMD_PT_VertexAnimation, + GUI.SMD_PT_Armature, + GUI.SMD_PT_ShapeKeys, + GUI.SMD_PT_VertexMaps, + GUI.SMD_PT_Curves, + GUI.SMD_PT_Scene_QC_Complie, + flex.DmxWriteFlexControllers, + flex.AddCorrectiveShapeDrivers, + flex.RenameShapesToMatchCorrectiveDrivers, + flex.ActiveDependencyShapes, + flex.InsertUUID, + update.SmdToolsUpdate, + update.SMD_MT_Updated, + export_smd.SMD_OT_Compile, + export_smd.SmdExporter, + import_smd.SmdImporter) + +def register(): + for cls in _classes: + bpy.utils.register_class(cls) + + from . 
import translations + bpy.app.translations.register(__name__,translations.translations) + #### Import and Export button s + bpy.utils.register_class(SMD_PT_ImportExport) + #### Import and Export button f + bpy.types.TOPBAR_MT_file_import.append(menu_func_import) + bpy.types.TOPBAR_MT_file_export.append(menu_func_export) + bpy.types.MESH_MT_shape_key_context_menu.append(menu_func_shapekeys) + bpy.types.TEXT_MT_edit.append(menu_func_textedit) + + try: bpy.ops.wm.addon_disable('EXEC_SCREEN',module="io_smd_tools") + except: pass + + def make_pointer(prop_type): + return PointerProperty(name=get_id("settings_prop"),type=prop_type) + + bpy.types.Scene.vs = make_pointer(ValveSource_SceneProps) + bpy.types.Object.vs = make_pointer(ValveSource_ObjectProps) + bpy.types.Armature.vs = make_pointer(ValveSource_ArmatureProps) + bpy.types.Collection.vs = make_pointer(ValveSource_CollectionProps) + bpy.types.Mesh.vs = make_pointer(ValveSource_MeshProps) + bpy.types.SurfaceCurve.vs = make_pointer(ValveSource_SurfaceProps) + bpy.types.Curve.vs = make_pointer(ValveSource_CurveProps) + bpy.types.Text.vs = make_pointer(ValveSource_TextProps) + + State.hook_events() + +def unregister(): + State.unhook_events() + + bpy.types.TOPBAR_MT_file_import.remove(menu_func_import) + bpy.types.TOPBAR_MT_file_export.remove(menu_func_export) + bpy.types.MESH_MT_shape_key_context_menu.remove(menu_func_shapekeys) + bpy.types.TEXT_MT_edit.remove(menu_func_textedit) + #### Import and Export button s + bpy.utils.unregister_class(SMD_PT_ImportExport) + #### Import and Export button f + bpy.app.translations.unregister(__name__) + + for cls in reversed(_classes): + bpy.utils.unregister_class(cls) + + del bpy.types.Scene.vs + del bpy.types.Object.vs + del bpy.types.Armature.vs + del bpy.types.Collection.vs + del bpy.types.Mesh.vs + del bpy.types.SurfaceCurve.vs + del bpy.types.Curve.vs + del bpy.types.Text.vs + +if __name__ == "__main__": + register() + \ No newline at end of file diff --git 
a/io_scene_valvesource/datamodel.py b/io_scene_valvesource/datamodel.py index 3cd8a5b..d7d7958 100644 --- a/io_scene_valvesource/datamodel.py +++ b/io_scene_valvesource/datamodel.py @@ -1,1057 +1,1057 @@ -# The MIT License (MIT) -# -# Copyright (c) 2014 Tom Edwards contact@steamreview.org -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. 
- -import struct, array, io, binascii, collections, uuid -from struct import unpack,calcsize - -header_format = "" -header_format_regex = header_format.replace("{:d}","([0-9]+)").replace("{:s}","(\S+)") - -header_proto2 = "" -header_proto2_regex = header_proto2.replace("{:d}","([0-9]+)") - -intsize = calcsize("i") -shortsize = calcsize("H") -floatsize = calcsize("f") - -def list_support(): - return { 'binary':[1,2,3,4,5,9], 'keyvalues2':[1,2,3,4],'binary_proto':[2] } - -def check_support(encoding,encoding_ver): - versions = list_support().get(encoding) - if not versions: - raise ValueError("DMX encoding \"{}\" is not supported".format(encoding)) - if encoding_ver not in versions: - raise ValueError("Version {} of {} DMX is not supported".format(encoding_ver,encoding)) - -def _encode_binary_string(string): - return (bytes(string,'utf-8') + bytes(1)) if string else bytes(1) - - -global _kv2_indent -_kv2_indent = "" -def _add_kv2_indent(): - global _kv2_indent - _kv2_indent += "\t" -def _sub_kv2_indent(): - global _kv2_indent - _kv2_indent = _kv2_indent[:-1] - -def _validate_array_list(iterable,array_type): - if not iterable: return None - try: - return list([array_type(i) if type(i) != array_type else i for i in iterable]) - except Exception as e: - raise TypeError("Could not convert all values to {}: {}".format(array_type,e)) from e - -def _quote(str): - return "\"{}\"".format(str) - -def get_bool(file): - return file.read(1) != b'\x00' -def get_byte(file): - return int(unpack("B",file.read(1))[0]) -def get_char(file): - c = file.read(1) - if isinstance(c, str): return c - return unpack("c",c)[0].decode('ASCII') -def get_int(file): - return int( unpack("i",file.read(intsize))[0] ) -def get_short(file, signed = False): - return int( unpack("h" if signed else "H",file.read(shortsize))[0] ) -def get_float(file): - return float( unpack("f",file.read(floatsize))[0] ) -def get_vec(file,dim): - return list( unpack("{}f".format(dim),file.read(floatsize*dim)) ) -def 
get_color(file): - return Color(list(unpack("4B",file.read(4)))) - -def get_str(file): - out = b'' - while True: - b = file.read(1) - if b == b'\x00': break - out += b - return out.decode() if len(out) else None - -def _get_kv2_repr(var): - t = type(var) - if t == float or t == int: # optimisation: very common, so first - return str(var) - elif issubclass(t, (_Array,Matrix)): - return var.to_kv2() - elif t == Element: - return str(var.id) - elif t == bool: - return "1" if var else "0" - elif t == Binary: - return binascii.hexlify(var).decode('ASCII') - elif var == None: - return "" - else: - return str(var) - -class _Array(list): - type = None - type_str = "" - - def __init__(self,l=None): - if l: - return super().__init__(_validate_array_list(l,self.type)) - else: - return super().__init__() - - def to_kv2(self): - if len(self) == 0: - return "[ ]" - if self.type == Element: - - out = "\n{}[\n".format(_kv2_indent) - _add_kv2_indent() - out += _kv2_indent - - out += ",\n{}".format(_kv2_indent).join([item.get_kv2() if item and item._users == 1 else "\"element\" {}".format(_quote(item.id if item else "")) for item in self]) - - _sub_kv2_indent() - return "{}\n{}]".format(out,_kv2_indent) - else: - return "[{}]".format(", ".join([_quote(_get_kv2_repr(item)) for item in self])) - - def frombytes(self,file): - length = get_int(file) - self.extend( unpack( self.type_str*length, file.read( calcsize(self.type_str) * length) ) ) - -class _BoolArray(_Array): - type = bool - type_str = "b" -class _IntArray(_Array): - type = int - type_str = "i" -class _FloatArray(_Array): - type = float - type_str = "f" -class _StrArray(_Array): - type = str - -class _Vector(list): - type = float - type_str = "" - def __init__(self,l): - if len(l) != len(self.type_str): - raise TypeError("Expected {} values".format(len(self.type_str))) - l = _validate_array_list(l,self.type) - super().__init__(l) - - def __repr__(self): - return " ".join([str(self.type(ord)) for ord in self]) - - def 
__hash__(self): - return hash(tuple(self)) - - def __round__(self,n=0): - return type(self)([round(ord,n) for ord in self]) - - def tobytes(self): - return struct.pack(self.type_str,*self) - -class Vector2(_Vector): - type_str = "ff" -class Vector3(_Vector): - type_str = "fff" -class Vector4(_Vector): - type_str = "ffff" -class Quaternion(Vector4): - '''XYZW''' - pass -class Angle(Vector3): - pass -class _VectorArray(_Array): - type = list - def __init__(self,l=None): - l = _validate_array_list(l,self.type) - _Array.__init__(self,l) -class _Vector2Array(_VectorArray): - type = Vector2 -class _Vector3Array(_VectorArray): - type = Vector3 -class _Vector4Array(_VectorArray): - type = Vector4 -class _QuaternionArray(_Vector4Array): - type = Quaternion -class _AngleArray(_Vector3Array): - type = Angle - -class Matrix(list): - type = list - def __init__(self,matrix=None): - if matrix: - attr_error = ValueError("Matrix is row-major and must be initialised with 4 lists of 4 floats, or a single list of 16 floats") - if len(matrix) == 16: - matrix = [matrix[i:i + 4] for i in range(0, len(matrix), 4)] - elif len(matrix) != 4: raise attr_error - - for row in matrix: - if len(row) != 4: raise attr_error - for i in range(4): - if type(row[i]) != float: - row[i] = float(row[i]) - else: - matrix = [[0.0] * 4] * 4 - super().__init__(matrix) - - def __hash__(self): - return hash(tuple(self)) - - def to_kv2(self): - return " ".join([str(f) for row in self for f in row]) - def tobytes(self): - return struct.pack("f" * 16,*[f for row in self for f in row]) - -class _MatrixArray(_Array): - type = Matrix - -class Binary(bytes): - pass -class _BinaryArray(_Array): - type = Binary - type_str = "b" - -class Color(_Vector): - type = int - type_str = "BBBB" - - def __init__(self, l): - if any(b < 0 or b > 255 for b in l): - raise TypeError("Color channel values must be between 0 and 255") - super().__init__(l) - -class _ColorArray(_VectorArray): - type=Color - -class Time(float): - 
@classmethod - def from_int(cls,int_value): - return Time(int_value / 10000) - - def tobytes(self): - return struct.pack("i",int(self * 10000)) - -class _TimeArray(_Array): - type = Time - -def make_array(l,t): - if t not in _dmxtypes_all: - raise TypeError("{} is not a valid datamodel attribute type".format(t)) - at = _get_array_type(t) - return at(l) - -class AttributeError(KeyError): - '''Raised when an attribute is not found on an element. Essentially a KeyError, but subclassed because it's normally an unrecoverable data issue.''' - pass - -class IDCollisionError(Exception): - pass - -_array_types = [list,set,tuple,array.array] -class Element(collections.OrderedDict): - '''Effectively a dictionary, but keys must be str. Also contains a name (str), type (str) and ID (uuid.UUID, can be generated from str).''' - _datamodels = None - _users = 0 - - @property - def name(self): return self._name - @name.setter - def name(self,value): self._name = str(value) if value else None - - @property - def type(self): return self._type - @type.setter - def type(self,value): self._type = str(value) - - @property - def id(self): return self._id - - def __init__(self,datamodel,name,elemtype="DmElement",id=None,_is_placeholder=False): - self.name = name - self.type = elemtype - self._is_placeholder = _is_placeholder - self._datamodels = set() - self._datamodels.add(datamodel) - - if id: - if isinstance(id,uuid.UUID): self._id = id - elif isinstance(id,str): self._id = uuid.uuid3(uuid.UUID('20ba94f8-59f0-4579-9e01-50aac4567d3b'),id) - else: raise ValueError("id must be uuid.UUID or str") - else: - self._id = uuid.uuid4() - - super().__init__() - - def __eq__(self,other): - return isinstance(other,Element) and self.id == other.id - - def __bool__(self): - return True - - def __repr__(self): - return "".format(self.name,self.type) - - def __hash__(self): - return hash(self.id) - - def __getitem__(self,item): - if type(item) != str: raise TypeError("Attribute name must be a string, not 
{}".format(type(item))) - try: - return super().__getitem__(item) - except KeyError as e: - raise AttributeError("No attribute \"{}\" on {}".format(item,self)) from e - - def __setitem__(self,key,item): - key = str(key) - if key in ["name", "id"]: raise KeyError("\"{}\" is a reserved name".format(key)) - - def import_element(elem): - for dm in [dm for dm in self._datamodels if not dm in elem._datamodels]: - dm.validate_element(elem) - dm.elements.append(elem) - elem._datamodels.add(dm) - for attr in elem.values(): - t = type(attr) - if t == Element: - import_element(attr) - if t == _ElementArray: - for arr_elem in attr: - import_element(arr_elem) - - t = type(item) - - if t in _dmxtypes_all or t == type(None): - if t == Element: - import_element(item) - elif t == _ElementArray: - for arr_elem in item: - import_element(arr_elem) - - return super().__setitem__(key,item) - else: - if t in _array_types: - raise ValueError("Cannot create an attribute from a generic Python list. Use make_array() first.") - else: - raise ValueError("Invalid attribute type ({})".format(t)) - - def get(self,k,d=None): - return self[k] if k in self else d - - def get_kv2(self,deep = True): - out = "" - out += _quote(self.type) - out += "\n" + _kv2_indent + "{\n" - _add_kv2_indent() - - def _make_attr_str(name,dm_type,value, is_array = False): - if value is not None: - if is_array: - return "{}\"{}\" \"{}\" {}\n".format(_kv2_indent,name,dm_type,value) - else: - return "{}\"{}\" \"{}\" \"{}\"\n".format(_kv2_indent,name,dm_type,value) - else: - return "{}\"{}\" {}\n".format(_kv2_indent,name,dm_type) - - out += _make_attr_str("id", "elementid", self.id) - out += _make_attr_str("name", "string", self.name) - - for name in self: - attr = self[name] - if attr == None: - out += _make_attr_str(name, "element", None) - continue - - t = type(attr) - - if t == Element and attr._users < 2 and deep: - out += _kv2_indent - out += _quote(name) - out += " {}".format( attr.get_kv2() ) - out += "\n" - else: - 
if issubclass(t,_Array): - if t == _ElementArray: - type_str = "element_array" - else: - type_str = _dmxtypes_str[_dmxtypes_array.index(t)] + "_array" - else: - type_str = _dmxtypes_str[_dmxtypes.index(t)] - - out += _make_attr_str(name, type_str, _get_kv2_repr(attr), issubclass(t,_Array)) - _sub_kv2_indent() - out += _kv2_indent + "}" - return out - - def tobytes(self): - if self._is_placeholder: - if self.encoding_ver < 5: - return b'-1' - else: - return bytes.join(b'',b'-2',bytes.decode(self.id,encoding='ASCII')) - else: - return struct.pack("i",self._index) - -class _ElementArray(_Array): - type = Element - -_dmxtypes = [Element,int,float,bool,str,Binary,Time,Color,Vector2,Vector3,Vector4,Angle,Quaternion,Matrix,int,int] -_dmxtypes_array = [_ElementArray,_IntArray,_FloatArray,_BoolArray,_StrArray,_BinaryArray,_TimeArray,_ColorArray,_Vector2Array,_Vector3Array,_Vector4Array,_AngleArray,_QuaternionArray,_MatrixArray,_IntArray,_IntArray] -_dmxtypes_all = _dmxtypes + _dmxtypes_array -_dmxtypes_str = ["element","int","float","bool","string","binary","time","color","vector2","vector3","vector4","angle","quaternion","matrix","uint64","uint8"] - -attr_list_v1 = [ - None,Element,int,float,bool,str,Binary,"ObjectID",Color,Vector2,Vector3,Vector4,Angle,Quaternion,Matrix, - _ElementArray,_IntArray,_FloatArray,_BoolArray,_StrArray,_BinaryArray,"_ObjectIDArray",_ColorArray,_Vector2Array,_Vector3Array,_Vector4Array,_AngleArray,_QuaternionArray,_MatrixArray -] # ObjectID is an element UUID -attr_list_v2 = [ - None,Element,int,float,bool,str,Binary,Time,Color,Vector2,Vector3,Vector4,Angle,Quaternion,Matrix, - _ElementArray,_IntArray,_FloatArray,_BoolArray,_StrArray,_BinaryArray,_TimeArray,_ColorArray,_Vector2Array,_Vector3Array,_Vector4Array,_AngleArray,_QuaternionArray,_MatrixArray -] -attr_list_v3 = [None,Element,int,float,bool,str,Binary,Time,Color,Vector2,Vector3,Vector4,Angle,Quaternion,Matrix,int,int] # last two are meant to be uint64, uint8 - -def 
_get_type_from_string(type_str): - return _dmxtypes[_dmxtypes_str.index(type_str)] -def _get_array_type(single_type): - if single_type in _dmxtypes_array: raise ValueError("Argument is already an array type") - return _dmxtypes_array[ _dmxtypes.index(single_type) ] -def _get_single_type(array_type): - if array_type in _dmxtypes: raise ValueError("Argument is already a single type") - return _dmxtypes[ _dmxtypes_array.index(array_type) ] - -def _get_dmx_id_type(encoding,version,id): - if encoding in ["binary","binary_proto"]: - if version in [1,2]: - return attr_list_v1[id] - if version in [3,4,5]: - return attr_list_v2[id] - if version in [9]: - if id >= 32: # array - return eval("_" + attr_list_v3[id-32].__name__.capitalize() + "Array") - return attr_list_v3[id] - if encoding == "keyvalues2": - return _dmxtypes[ _dmxtypes_str.index(id) ] - - raise ValueError("Type ID {} invalid in {} {}".format(id,encoding,version)) - -def _get_dmx_type_id(encoding,version,t): - if t == type(None): t = Element - if encoding == "keyvalues2": raise ValueError("Type IDs do not exist in KeyValues2") - try: - if encoding == "binary": - if version in [1,2]: - return attr_list_v1.index(t) - if version in [3,4,5]: - return attr_list_v2.index(t) - if version in [9]: - if issubclass(t,_Array): - return attr_list_v3.index(t.type) + 32 - return attr_list_v3.index(t) - elif encoding == "binary_proto": - return attr_list_v1.index(t) - except ValueError as e: - raise ValueError("Type {} not supported in {} {}".format(t,encoding,version)) from e - - raise ValueError("Encoding {} not recognised".format(encoding)) - -class _StringDictionary(list): - dummy = False - - def __init__(self,encoding,encoding_ver,in_file=None,out_datamodel=None): - if encoding == "binary": - self.indice_size = self.length_size = intsize - - if encoding_ver == 4: - self.indice_size = shortsize - elif encoding_ver in [3,2]: - self.indice_size = self.length_size = shortsize - elif encoding_ver == 1: - self.dummy = True - 
return - elif encoding == "binary_proto": - self.dummy = True - return - - if in_file: - num_strings = get_short(in_file, signed = True) if self.length_size == shortsize else get_int(in_file) - for _ in range(num_strings): - self.append(get_str(in_file)) - - elif out_datamodel: - checked = set() - string_set = set() - def process_element(elem): - checked.add(elem) - if elem.name : string_set.add(elem.name) - string_set.add(elem.type) - for name in elem: - attr = elem[name] - string_set.add(name) - if isinstance(attr, str): string_set.add(attr) - elif isinstance(attr, Element): - if attr not in checked: process_element(attr) - elif type(attr) == _ElementArray: - for item in [item for item in attr if item and item not in checked]: - process_element(item) - process_element(out_datamodel.root) - self.extend(string_set) - self.sort() - - def read_string(self,in_file): - if self.dummy: - return get_str(in_file) - else: - index = get_short(in_file, signed = True) if self.indice_size == shortsize else get_int(in_file) - return self[index] if index >= 0 else None - - def write_string(self,out_file,string): - if self.dummy: - out_file.write( _encode_binary_string(string) ) - else: - assert(string is None or string in self) - out_file.write( struct.pack("h" if self.indice_size == shortsize else "i", self.index(string) if string else -1 ) ) - - def write_dictionary(self,out_file): - if not self.dummy: - out_file.write( struct.pack("h" if self.length_size == shortsize else "i", len(self) ) ) - for string in self: - out_file.write( _encode_binary_string(string) ) - -class DataModel: - '''Container for Element objects. Has a format name (str) and format version (int). 
Can write itself to a string object or a file.''' - - @property - def format(self): return self.__format - @format.setter - def format(self,value): self.__format = str(value) - @property - def format_ver(self): return self.__format_ver - @format_ver.setter - def format_ver(self,value): self.__format_ver = int(value) - - @property - def root(self): return self.__root - @root.setter - def root(self,value): - if not value or isinstance(value, Element): self.__root = value - else: raise ValueError("Root must be an Element object") - @property - def elements(self): return self.__elements - - @property - def prefix_attributes(self): return self.__prefix_attributes - - def __init__(self,format,format_ver): - self.format = format - self.format_ver = format_ver - - self.__elements = [] - self.__prefix_attributes = Element(self,"") - self.root = None - self.allow_random_ids = True - - def __repr__(self): - return "".format(id(self)," (root == \"{}\")".format(self.root.name) if self.root else "") - - def validate_element(self,elem): - if elem._is_placeholder: - return - - try: - collision = self.elements[self.elements.index(elem)] - except ValueError: - return # no match - - if not collision._is_placeholder: - raise IDCollisionError("{} invalid for {}: ID collision with {}. 
ID is {}.".format(elem, self, collision, elem.id)) - - def add_element(self,name,elemtype="DmElement",id=None,_is_placeholder=False): - if id == None and not self.allow_random_ids: - raise ValueError("{} does not allow random IDs.".format(self)) - elem = Element(self,name,elemtype,id,_is_placeholder) - self.validate_element(elem) - self.elements.append(elem) - elem.datamodel = self - if len(self.elements) == 1: self.root = elem - return elem - - def find_elements(self,name=None,id=None,elemtype=None): - out = [] - if isinstance(id, str): id = uuid.UUID(id) - for elem in self.elements: - if elem.id == id: return [elem] - if elem.name == name: out.append(elem) - if elem.type == elemtype: out.append(elem) - if len(out): return out - - def _writeString(self, value, suppress_dict = None): - if suppress_dict == None: - suppress_dict = self.encoding_ver < 4 - - if type(value) == str or value is None: - value = [value] - - if suppress_dict: - self.out.write(bytes.join(b'',[_encode_binary_string(item) for item in value])) - else: - self._string_dict.write_string(self.out,value[0]) - - def _write(self,value): - t = type(value) - is_array = issubclass(t, _Array) - - if is_array: - t = value.type - self.out.write( struct.pack("i",len(value)) ) - else: - value = [value] - - if t in [bytes,Binary]: - for item in value: - if t == Binary: - self.out.write( struct.pack("i",len(item)) ) - self.out.write(item) - - elif t == uuid.UUID: - self.out.write(b''.join([id.bytes_le for id in value])) - elif t == str: - self._writeString(value, is_array) - elif t == Element: - self.out.write(bytes.join(b'',[item.tobytes() if item else struct.pack("i",-1) for item in value])) - elif issubclass(t,(_Vector,Matrix, Time)): - self.out.write(bytes.join(b'',[item.tobytes() for item in value])) - - elif t == bool: - self.out.write( struct.pack("b" * len(value),*value) ) - elif t == int: - self.out.write( struct.pack("i" * len(value),*value) ) - elif t == float: - self.out.write( struct.pack("f" * 
len(value),*value) ) - - else: - raise TypeError("Cannot write attributes of type {}".format(t)) - - def _write_element_index(self,elem): - if elem._is_placeholder or hasattr(elem,"_index"): return - self._writeString(elem.type, suppress_dict = False) - self._writeString(elem.name) - self._write(elem.id) - - elem._index = len(self.elem_chain) - self.elem_chain.append(elem) - - for name in elem: - attr = elem[name] - t = type(attr) - if t == Element: - self._write_element_index(attr) - elif t == _ElementArray: - for item in [item for item in attr if item]: - self._write_element_index(item) - - def _write_element_props(self): - for elem in self.elem_chain: - if elem._is_placeholder: continue - self._write(len(elem)) - for name in elem: - attr = elem[name] - self._write(name) - self._write( struct.pack("b", _get_dmx_type_id(self.encoding, self.encoding_ver, type(attr) )) ) - if attr == None: - self._write(-1) - else: - self._write(attr) - - def echo(self,encoding,encoding_ver): - check_support(encoding, encoding_ver) - - if encoding in ["binary", "binary_proto"]: - self.out = io.BytesIO() - else: - self.out = io.StringIO() - - self.encoding = encoding - self.encoding_ver = encoding_ver - - if self.encoding == 'binary_proto': - self.out.write( _encode_binary_string(header_proto2.format(encoding_ver) + "\n") ) - else: - header = header_format.format(encoding,encoding_ver,self.format,self.format_ver) - if self.encoding == 'binary': - self.out.write( _encode_binary_string(header + "\n") ) - elif self.encoding == 'keyvalues2': - self.out.write(header + "\n") - - if encoding == 'binary': - if encoding_ver >= 9: - self._write(1 if len(self.prefix_attributes) else 0) - if len(self.prefix_attributes): - self._write(len(self.prefix_attributes)) - for name,value in self.prefix_attributes.items(): - self._write(name) - self._write(value) - - self._string_dict = _StringDictionary(encoding,encoding_ver,out_datamodel=self) - self._string_dict.write_dictionary(self.out) - - # count 
elements - out_elems = set() - for elem in self.elements: - elem._users = 0 - def _count_child_elems(elem): - if elem in out_elems: return - - out_elems.add(elem) - for name in elem: - attr = elem[name] - t = type(attr) - if t == Element: - if attr not in out_elems: - _count_child_elems(attr) - attr._users += 1 - elif t == _ElementArray: - for item in [item for item in attr if item]: - if item not in out_elems: - _count_child_elems(item) - item._users += 1 - _count_child_elems(self.root) - - if self.encoding in ["binary", "binary_proto"]: - self._write(len(out_elems)) - self.elem_chain = [] - self._write_element_index(self.root) - self._write_element_props() - - for elem in self.elem_chain: del elem._index - elif self.encoding == 'keyvalues2': - self.out.write(self.root.get_kv2() + "\n\n") - for elem in out_elems: - if elem._users > 1: - self.out.write(elem.get_kv2() + "\n\n") - - self._string_dict = None - return self.out.getvalue() - - def write(self,path,encoding,encoding_ver): - with open(path,'wb') as file: - dm = self.echo(encoding,encoding_ver) - if encoding == 'keyvalues2': dm = dm.encode('utf-8') - file.write(dm) - - -class DatamodelParseError(Exception): - pass - -def parse(parse_string, element_path=None): - return load(in_file=io.StringIO(parse_string),element_path=element_path) - -def load(path = None, in_file = None, element_path = None): - if bool(path) == bool(in_file): - raise ValueError("A path string OR a file object must be provided") - if element_path != None and type(element_path) != list: - raise TypeError("element_path must be a list containing element names") - if not in_file: - in_file = open(path,'rb') - - try: - import re - - try: - header = "" - while True: - header += get_char(in_file) - if header.endswith(">"): break - - matches = re.findall(header_format_regex,header) - - if len(matches) != 1 or len(matches[0]) != 4: - matches = re.findall(header_proto2_regex,header) - if len(matches) == 1 and len(matches[0]) == 1: - encoding = 
"binary_proto" - encoding_ver = int(matches[0][0]) - format = "undefined_format" - format_ver = 0 - else: - raise Exception() - else: - encoding,encoding_ver, format,format_ver = matches[0] - encoding_ver = int(encoding_ver) - format_ver = int(format_ver) - except Exception as e: - raise IOError("Could not read DMX header") from e - - check_support(encoding,encoding_ver) - dm = DataModel(format,format_ver) - - class LineTracker(): - line = 0 - - def __next__(self): - self.line += 1 - - line_tracker = LineTracker() - - max_elem_path = len(element_path) + 1 if element_path else 0 - - if encoding == 'keyvalues2': - class AttributeReference: - def __init__(self,Owner,Name,Index=-1): - self.Owner = Owner - self.Name = Name - self.Index = Index - - def parse_line(line): - return re.findall("\"(.*?)\"",line.strip("\n\t ") ) - - def read_element(elem_type, line_tracker): - name = None - prefix = elem_type == "$prefix_element$" - if prefix: element_chain.append(dm.prefix_attributes) - - def read_value(name,type_str,kv2_value, index=-1): - if type_str == 'element': # make a record; will link everything up once all elements have been read - if not kv2_value: - return None - else: - element_users[kv2_value].append(AttributeReference(element_chain[-1], name, index)) - return dm.add_element("Missing element",id=uuid.UUID(hex=kv2_value),_is_placeholder=True) - - elif type_str == 'string': return kv2_value - elif type_str in ['int',"uint8"]: return int(kv2_value) - elif type_str == "uint64": return int(kv2_value, 0) - elif type_str == 'float': return float(kv2_value) - elif type_str == 'bool': return bool(int(kv2_value)) - elif type_str == 'time': return Time(kv2_value) - elif type_str.startswith('vector') or type_str in ['color','quaternion','angle','matrix']: - return _get_type_from_string(type_str)( [float(i) for i in kv2_value.split(" ")] ) - elif type_str == 'binary': return Binary(binascii.unhexlify(kv2_value)) - - new_elem = None - for line_raw in in_file: - 
next(line_tracker) - if line_raw.strip("\n\t, ").endswith("}"): - #print("{}- {}".format('\t' * (len(element_chain)-1),element_chain[-1].name)) - return element_chain.pop() - - line = parse_line(line_raw) - if len(line) == 0: - continue - - if line[0] == 'id': - if not prefix: - new_elem = dm.add_element(name,elem_type,uuid.UUID(hex=line[2])) - element_chain.append(new_elem) - continue - elif line[0] == 'name': - if len(line) > 2: # unnamed element? - if new_elem: new_elem.name = line[2] - else: name = line[2] - continue - - # don't read elements outside the element path - if max_elem_path and name and len(dm.elements): - if len(element_path): - skip = name.lower() != element_path[0].lower() - else: - skip = len(element_chain) < max_elem_path - if skip: - child_level = 0 - for line_raw in in_file: - next(line_tracker) - if "{" in line_raw: child_level += 1 - if "}" in line_raw: - if child_level == 0: return - else: child_level -= 1 - return - elif len(element_path): - del element_path[0] - - if new_elem == None and not prefix: - continue - - if len(line) >= 2: - if line[1] == "element_array": - arr_name = line[0] - arr = _ElementArray() - - if "[" not in line_raw: # immediate "[" means and empty array; elements must be on separate lines - for line in in_file: - next(line_tracker) - if "[" in line: continue - if "]" in line: break - line = parse_line(line) - - if len(line) == 1: - arr.append( read_element(line[0], line_tracker) ) - elif len(line) == 2: - arr.append( read_value(arr_name,"element",line[1],index=len(arr)) ) - - element_chain[-1][arr_name] = arr - continue - - elif line[1].endswith("_array"): - arr_name = line[0] - arr_type_str = line[1].split("_")[0] - arr = _get_array_type(_get_type_from_string(arr_type_str))() - - if "[" in line_raw: # one-line array - for item in line[2:]: - arr.append(read_value(arr_name,arr_type_str,item)) - element_chain[-1][arr_name] = arr - - else: # multi-line array - for line in in_file: - next(line_tracker) - if "[" in line: 
- continue - if "]" in line: - element_chain[-1][arr_name] = arr - break - - line = parse_line(line) - if line: - arr.append(read_value(arr_name,arr_type_str,line[0])) - - elif len(line) == 2: # inline element or binary - if line[1] == "binary": - num_quotes = 0 - value = Binary() - for line in in_file: - next(line_tracker) - if "\"" in line: - num_quotes += 1 - if num_quotes == 2: break - else: - value = read_value(line[0],line[1], in_file.readline().strip()) - next(line_tracker) - else: - value = read_element(line[1], line_tracker) - element_chain[-1][line[0]] = value - elif len(line) == 3: # ordinary attribute or element ID - element_chain[-1][line[0]] = read_value(line[0],line[1],line[2]) - - raise IOError("Unexpected EOF") - - if hasattr(in_file,'mode') and 'b' in in_file.mode: in_file = io.TextIOWrapper(in_file) - in_file.seek(len(header)) - - element_chain = [] - element_users = collections.defaultdict(list) - for line in in_file: - try: - next(line_tracker) - line = parse_line(line) - if len(line) == 0: continue - - if len(element_chain) == 0 and len(line) == 1: - read_element(line[0], line_tracker) - except Exception as ex: - raise DatamodelParseError("Parsing of {} failed on line {}".format(path, line_tracker.line)) from ex - - for element in dm.elements: - if element._is_placeholder == True: continue - users = element_users[str(element.id)] - for user_info in users: - if user_info.Index == -1: - user_info.Owner[user_info.Name] = element - else: - user_info.Owner[user_info.Name][user_info.Index] = element - - elif encoding in ['binary', 'binary_proto']: - in_file.seek(2,1) # skip header's line break and null terminator - - def get_value(attr_type,from_array = False): - if attr_type == Element: - element_index = get_int(in_file) - if element_index == -1: - return None - elif element_index == -2: - return dm.add_element("Missing element",id=uuid.UUID(hex=get_str(in_file)),_is_placeholder=True) - else: - return dm.elements[element_index] - - elif attr_type 
== str: return get_str(in_file) if encoding_ver < 4 or from_array else dm._string_dict.read_string(in_file) - elif attr_type == int: return get_int(in_file) - elif attr_type == float: return get_float(in_file) - elif attr_type == bool: return get_bool(in_file) - - elif attr_type == Vector2: return Vector2(get_vec(in_file,2)) - elif attr_type == Vector3: return Vector3(get_vec(in_file,3)) - elif attr_type == Angle: return Angle(get_vec(in_file,3)) - elif attr_type == Vector4: return Vector4(get_vec(in_file,4)) - elif attr_type == Quaternion: return Quaternion(get_vec(in_file,4)) - elif attr_type == Matrix: - out = [] - for _ in range(4): out.append(get_vec(in_file,4)) - return Matrix(out) - - elif attr_type == Color: return get_color(in_file) - elif attr_type == Time: return Time.from_int(get_int(in_file)) - elif attr_type == Binary: return Binary(in_file.read(get_int(in_file))) - - else: - raise TypeError("Cannot read attributes of type {}".format(attr_type)) - - def read_element(elem, use_string_dict = True): - #print(elem.name,"@",in_file.tell()) - num_attributes = get_int(in_file) - for _ in range(num_attributes): - #start = in_file.tell() - name = dm._string_dict.read_string(in_file) if use_string_dict else get_str(in_file) - attr_type = _get_dmx_id_type(encoding,encoding_ver,get_byte(in_file)) - #print("\t",name,"@",start,attr_type) - if attr_type in _dmxtypes: - elem[name] = get_value(attr_type) - elif attr_type in _dmxtypes_array: - array_len = get_int(in_file) - arr = elem[name] = attr_type() - arr_item_type = _get_single_type(attr_type) - for _ in range(array_len): - arr.append( get_value(arr_item_type,from_array=True) ) - - # prefix attributes - if encoding_ver >= 9: - for _ in range(get_int(in_file)): - read_element(dm.prefix_attributes, use_string_dict = False) - - dm._string_dict = _StringDictionary(encoding,encoding_ver,in_file=in_file) - num_elements = get_int(in_file) - - # element headers - for _ in range(num_elements): - elemtype = 
dm._string_dict.read_string(in_file) - name = dm._string_dict.read_string(in_file) if encoding_ver >= 4 else get_str(in_file) - id = uuid.UUID(bytes_le = in_file.read(16)) # little-endian - dm.add_element(name,elemtype,id) - - # element bodies - for elem in [elem for elem in dm.elements if not elem._is_placeholder]: - read_element(elem) - - dm._string_dict = None - return dm - finally: - if in_file: in_file.close() +# The MIT License (MIT) +# +# Copyright (c) 2014 Tom Edwards contact@steamreview.org +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+ +import struct, array, io, binascii, collections, uuid +from struct import unpack,calcsize + +header_format = "" +header_format_regex = header_format.replace("{:d}","([0-9]+)").replace("{:s}","(\S+)") + +header_proto2 = "" +header_proto2_regex = header_proto2.replace("{:d}","([0-9]+)") + +intsize = calcsize("i") +shortsize = calcsize("H") +floatsize = calcsize("f") + +def list_support(): + return { 'binary':[1,2,3,4,5,9], 'keyvalues2':[1,2,3,4],'binary_proto':[2] } + +def check_support(encoding,encoding_ver): + versions = list_support().get(encoding) + if not versions: + raise ValueError("DMX encoding \"{}\" is not supported".format(encoding)) + if encoding_ver not in versions: + raise ValueError("Version {} of {} DMX is not supported".format(encoding_ver,encoding)) + +def _encode_binary_string(string): + return (bytes(string,'utf-8') + bytes(1)) if string else bytes(1) + + +global _kv2_indent +_kv2_indent = "" +def _add_kv2_indent(): + global _kv2_indent + _kv2_indent += "\t" +def _sub_kv2_indent(): + global _kv2_indent + _kv2_indent = _kv2_indent[:-1] + +def _validate_array_list(iterable,array_type): + if not iterable: return None + try: + return list([array_type(i) if type(i) != array_type else i for i in iterable]) + except Exception as e: + raise TypeError("Could not convert all values to {}: {}".format(array_type,e)) from e + +def _quote(str): + return "\"{}\"".format(str) + +def get_bool(file): + return file.read(1) != b'\x00' +def get_byte(file): + return int(unpack("B",file.read(1))[0]) +def get_char(file): + c = file.read(1) + if isinstance(c, str): return c + return unpack("c",c)[0].decode('ASCII') +def get_int(file): + return int( unpack("i",file.read(intsize))[0] ) +def get_short(file, signed = False): + return int( unpack("h" if signed else "H",file.read(shortsize))[0] ) +def get_float(file): + return float( unpack("f",file.read(floatsize))[0] ) +def get_vec(file,dim): + return list( unpack("{}f".format(dim),file.read(floatsize*dim)) ) +def 
get_color(file): + return Color(list(unpack("4B",file.read(4)))) + +def get_str(file): + out = b'' + while True: + b = file.read(1) + if b == b'\x00': break + out += b + return out.decode() if len(out) else None + +def _get_kv2_repr(var): + t = type(var) + if t == float or t == int: # optimisation: very common, so first + return str(var) + elif issubclass(t, (_Array,Matrix)): + return var.to_kv2() + elif t == Element: + return str(var.id) + elif t == bool: + return "1" if var else "0" + elif t == Binary: + return binascii.hexlify(var).decode('ASCII') + elif var == None: + return "" + else: + return str(var) + +class _Array(list): + type = None + type_str = "" + + def __init__(self,l=None): + if l: + return super().__init__(_validate_array_list(l,self.type)) + else: + return super().__init__() + + def to_kv2(self): + if len(self) == 0: + return "[ ]" + if self.type == Element: + + out = "\n{}[\n".format(_kv2_indent) + _add_kv2_indent() + out += _kv2_indent + + out += ",\n{}".format(_kv2_indent).join([item.get_kv2() if item and item._users == 1 else "\"element\" {}".format(_quote(item.id if item else "")) for item in self]) + + _sub_kv2_indent() + return "{}\n{}]".format(out,_kv2_indent) + else: + return "[{}]".format(", ".join([_quote(_get_kv2_repr(item)) for item in self])) + + def frombytes(self,file): + length = get_int(file) + self.extend( unpack( self.type_str*length, file.read( calcsize(self.type_str) * length) ) ) + +class _BoolArray(_Array): + type = bool + type_str = "b" +class _IntArray(_Array): + type = int + type_str = "i" +class _FloatArray(_Array): + type = float + type_str = "f" +class _StrArray(_Array): + type = str + +class _Vector(list): + type = float + type_str = "" + def __init__(self,l): + if len(l) != len(self.type_str): + raise TypeError("Expected {} values".format(len(self.type_str))) + l = _validate_array_list(l,self.type) + super().__init__(l) + + def __repr__(self): + return " ".join([str(self.type(ord)) for ord in self]) + + def 
__hash__(self): + return hash(tuple(self)) + + def __round__(self,n=0): + return type(self)([round(ord,n) for ord in self]) + + def tobytes(self): + return struct.pack(self.type_str,*self) + +class Vector2(_Vector): + type_str = "ff" +class Vector3(_Vector): + type_str = "fff" +class Vector4(_Vector): + type_str = "ffff" +class Quaternion(Vector4): + '''XYZW''' + pass +class Angle(Vector3): + pass +class _VectorArray(_Array): + type = list + def __init__(self,l=None): + l = _validate_array_list(l,self.type) + _Array.__init__(self,l) +class _Vector2Array(_VectorArray): + type = Vector2 +class _Vector3Array(_VectorArray): + type = Vector3 +class _Vector4Array(_VectorArray): + type = Vector4 +class _QuaternionArray(_Vector4Array): + type = Quaternion +class _AngleArray(_Vector3Array): + type = Angle + +class Matrix(list): + type = list + def __init__(self,matrix=None): + if matrix: + attr_error = ValueError("Matrix is row-major and must be initialised with 4 lists of 4 floats, or a single list of 16 floats") + if len(matrix) == 16: + matrix = [matrix[i:i + 4] for i in range(0, len(matrix), 4)] + elif len(matrix) != 4: raise attr_error + + for row in matrix: + if len(row) != 4: raise attr_error + for i in range(4): + if type(row[i]) != float: + row[i] = float(row[i]) + else: + matrix = [[0.0] * 4] * 4 + super().__init__(matrix) + + def __hash__(self): + return hash(tuple(self)) + + def to_kv2(self): + return " ".join([str(f) for row in self for f in row]) + def tobytes(self): + return struct.pack("f" * 16,*[f for row in self for f in row]) + +class _MatrixArray(_Array): + type = Matrix + +class Binary(bytes): + pass +class _BinaryArray(_Array): + type = Binary + type_str = "b" + +class Color(_Vector): + type = int + type_str = "BBBB" + + def __init__(self, l): + if any(b < 0 or b > 255 for b in l): + raise TypeError("Color channel values must be between 0 and 255") + super().__init__(l) + +class _ColorArray(_VectorArray): + type=Color + +class Time(float): + 
@classmethod + def from_int(cls,int_value): + return Time(int_value / 10000) + + def tobytes(self): + return struct.pack("i",int(self * 10000)) + +class _TimeArray(_Array): + type = Time + +def make_array(l,t): + if t not in _dmxtypes_all: + raise TypeError("{} is not a valid datamodel attribute type".format(t)) + at = _get_array_type(t) + return at(l) + +class AttributeError(KeyError): + '''Raised when an attribute is not found on an element. Essentially a KeyError, but subclassed because it's normally an unrecoverable data issue.''' + pass + +class IDCollisionError(Exception): + pass + +_array_types = [list,set,tuple,array.array] +class Element(collections.OrderedDict): + '''Effectively a dictionary, but keys must be str. Also contains a name (str), type (str) and ID (uuid.UUID, can be generated from str).''' + _datamodels = None + _users = 0 + + @property + def name(self): return self._name + @name.setter + def name(self,value): self._name = str(value) if value else None + + @property + def type(self): return self._type + @type.setter + def type(self,value): self._type = str(value) + + @property + def id(self): return self._id + + def __init__(self,datamodel,name,elemtype="DmElement",id=None,_is_placeholder=False): + self.name = name + self.type = elemtype + self._is_placeholder = _is_placeholder + self._datamodels = set() + self._datamodels.add(datamodel) + + if id: + if isinstance(id,uuid.UUID): self._id = id + elif isinstance(id,str): self._id = uuid.uuid3(uuid.UUID('20ba94f8-59f0-4579-9e01-50aac4567d3b'),id) + else: raise ValueError("id must be uuid.UUID or str") + else: + self._id = uuid.uuid4() + + super().__init__() + + def __eq__(self,other): + return isinstance(other,Element) and self.id == other.id + + def __bool__(self): + return True + + def __repr__(self): + return "".format(self.name,self.type) + + def __hash__(self): + return hash(self.id) + + def __getitem__(self,item): + if type(item) != str: raise TypeError("Attribute name must be a string, not 
{}".format(type(item))) + try: + return super().__getitem__(item) + except KeyError as e: + raise AttributeError("No attribute \"{}\" on {}".format(item,self)) from e + + def __setitem__(self,key,item): + key = str(key) + if key in ["name", "id"]: raise KeyError("\"{}\" is a reserved name".format(key)) + + def import_element(elem): + for dm in [dm for dm in self._datamodels if not dm in elem._datamodels]: + dm.validate_element(elem) + dm.elements.append(elem) + elem._datamodels.add(dm) + for attr in elem.values(): + t = type(attr) + if t == Element: + import_element(attr) + if t == _ElementArray: + for arr_elem in attr: + import_element(arr_elem) + + t = type(item) + + if t in _dmxtypes_all or t == type(None): + if t == Element: + import_element(item) + elif t == _ElementArray: + for arr_elem in item: + import_element(arr_elem) + + return super().__setitem__(key,item) + else: + if t in _array_types: + raise ValueError("Cannot create an attribute from a generic Python list. Use make_array() first.") + else: + raise ValueError("Invalid attribute type ({})".format(t)) + + def get(self,k,d=None): + return self[k] if k in self else d + + def get_kv2(self,deep = True): + out = "" + out += _quote(self.type) + out += "\n" + _kv2_indent + "{\n" + _add_kv2_indent() + + def _make_attr_str(name,dm_type,value, is_array = False): + if value is not None: + if is_array: + return "{}\"{}\" \"{}\" {}\n".format(_kv2_indent,name,dm_type,value) + else: + return "{}\"{}\" \"{}\" \"{}\"\n".format(_kv2_indent,name,dm_type,value) + else: + return "{}\"{}\" {}\n".format(_kv2_indent,name,dm_type) + + out += _make_attr_str("id", "elementid", self.id) + out += _make_attr_str("name", "string", self.name) + + for name in self: + attr = self[name] + if attr == None: + out += _make_attr_str(name, "element", None) + continue + + t = type(attr) + + if t == Element and attr._users < 2 and deep: + out += _kv2_indent + out += _quote(name) + out += " {}".format( attr.get_kv2() ) + out += "\n" + else: + 
if issubclass(t,_Array): + if t == _ElementArray: + type_str = "element_array" + else: + type_str = _dmxtypes_str[_dmxtypes_array.index(t)] + "_array" + else: + type_str = _dmxtypes_str[_dmxtypes.index(t)] + + out += _make_attr_str(name, type_str, _get_kv2_repr(attr), issubclass(t,_Array)) + _sub_kv2_indent() + out += _kv2_indent + "}" + return out + + def tobytes(self): + if self._is_placeholder: + if self.encoding_ver < 5: + return b'-1' + else: + return bytes.join(b'',b'-2',bytes.decode(self.id,encoding='ASCII')) + else: + return struct.pack("i",self._index) + +class _ElementArray(_Array): + type = Element + +_dmxtypes = [Element,int,float,bool,str,Binary,Time,Color,Vector2,Vector3,Vector4,Angle,Quaternion,Matrix,int,int] +_dmxtypes_array = [_ElementArray,_IntArray,_FloatArray,_BoolArray,_StrArray,_BinaryArray,_TimeArray,_ColorArray,_Vector2Array,_Vector3Array,_Vector4Array,_AngleArray,_QuaternionArray,_MatrixArray,_IntArray,_IntArray] +_dmxtypes_all = _dmxtypes + _dmxtypes_array +_dmxtypes_str = ["element","int","float","bool","string","binary","time","color","vector2","vector3","vector4","angle","quaternion","matrix","uint64","uint8"] + +attr_list_v1 = [ + None,Element,int,float,bool,str,Binary,"ObjectID",Color,Vector2,Vector3,Vector4,Angle,Quaternion,Matrix, + _ElementArray,_IntArray,_FloatArray,_BoolArray,_StrArray,_BinaryArray,"_ObjectIDArray",_ColorArray,_Vector2Array,_Vector3Array,_Vector4Array,_AngleArray,_QuaternionArray,_MatrixArray +] # ObjectID is an element UUID +attr_list_v2 = [ + None,Element,int,float,bool,str,Binary,Time,Color,Vector2,Vector3,Vector4,Angle,Quaternion,Matrix, + _ElementArray,_IntArray,_FloatArray,_BoolArray,_StrArray,_BinaryArray,_TimeArray,_ColorArray,_Vector2Array,_Vector3Array,_Vector4Array,_AngleArray,_QuaternionArray,_MatrixArray +] +attr_list_v3 = [None,Element,int,float,bool,str,Binary,Time,Color,Vector2,Vector3,Vector4,Angle,Quaternion,Matrix,int,int] # last two are meant to be uint64, uint8 + +def 
_get_type_from_string(type_str): + return _dmxtypes[_dmxtypes_str.index(type_str)] +def _get_array_type(single_type): + if single_type in _dmxtypes_array: raise ValueError("Argument is already an array type") + return _dmxtypes_array[ _dmxtypes.index(single_type) ] +def _get_single_type(array_type): + if array_type in _dmxtypes: raise ValueError("Argument is already a single type") + return _dmxtypes[ _dmxtypes_array.index(array_type) ] + +def _get_dmx_id_type(encoding,version,id): + if encoding in ["binary","binary_proto"]: + if version in [1,2]: + return attr_list_v1[id] + if version in [3,4,5]: + return attr_list_v2[id] + if version in [9]: + if id >= 32: # array + return eval("_" + attr_list_v3[id-32].__name__.capitalize() + "Array") + return attr_list_v3[id] + if encoding == "keyvalues2": + return _dmxtypes[ _dmxtypes_str.index(id) ] + + raise ValueError("Type ID {} invalid in {} {}".format(id,encoding,version)) + +def _get_dmx_type_id(encoding,version,t): + if t == type(None): t = Element + if encoding == "keyvalues2": raise ValueError("Type IDs do not exist in KeyValues2") + try: + if encoding == "binary": + if version in [1,2]: + return attr_list_v1.index(t) + if version in [3,4,5]: + return attr_list_v2.index(t) + if version in [9]: + if issubclass(t,_Array): + return attr_list_v3.index(t.type) + 32 + return attr_list_v3.index(t) + elif encoding == "binary_proto": + return attr_list_v1.index(t) + except ValueError as e: + raise ValueError("Type {} not supported in {} {}".format(t,encoding,version)) from e + + raise ValueError("Encoding {} not recognised".format(encoding)) + +class _StringDictionary(list): + dummy = False + + def __init__(self,encoding,encoding_ver,in_file=None,out_datamodel=None): + if encoding == "binary": + self.indice_size = self.length_size = intsize + + if encoding_ver == 4: + self.indice_size = shortsize + elif encoding_ver in [3,2]: + self.indice_size = self.length_size = shortsize + elif encoding_ver == 1: + self.dummy = True + 
return + elif encoding == "binary_proto": + self.dummy = True + return + + if in_file: + num_strings = get_short(in_file, signed = True) if self.length_size == shortsize else get_int(in_file) + for _ in range(num_strings): + self.append(get_str(in_file)) + + elif out_datamodel: + checked = set() + string_set = set() + def process_element(elem): + checked.add(elem) + if elem.name : string_set.add(elem.name) + string_set.add(elem.type) + for name in elem: + attr = elem[name] + string_set.add(name) + if isinstance(attr, str): string_set.add(attr) + elif isinstance(attr, Element): + if attr not in checked: process_element(attr) + elif type(attr) == _ElementArray: + for item in [item for item in attr if item and item not in checked]: + process_element(item) + process_element(out_datamodel.root) + self.extend(string_set) + self.sort() + + def read_string(self,in_file): + if self.dummy: + return get_str(in_file) + else: + index = get_short(in_file, signed = True) if self.indice_size == shortsize else get_int(in_file) + return self[index] if index >= 0 else None + + def write_string(self,out_file,string): + if self.dummy: + out_file.write( _encode_binary_string(string) ) + else: + assert(string is None or string in self) + out_file.write( struct.pack("h" if self.indice_size == shortsize else "i", self.index(string) if string else -1 ) ) + + def write_dictionary(self,out_file): + if not self.dummy: + out_file.write( struct.pack("h" if self.length_size == shortsize else "i", len(self) ) ) + for string in self: + out_file.write( _encode_binary_string(string) ) + +class DataModel: + '''Container for Element objects. Has a format name (str) and format version (int). 
Can write itself to a string object or a file.''' + + @property + def format(self): return self.__format + @format.setter + def format(self,value): self.__format = str(value) + @property + def format_ver(self): return self.__format_ver + @format_ver.setter + def format_ver(self,value): self.__format_ver = int(value) + + @property + def root(self): return self.__root + @root.setter + def root(self,value): + if not value or isinstance(value, Element): self.__root = value + else: raise ValueError("Root must be an Element object") + @property + def elements(self): return self.__elements + + @property + def prefix_attributes(self): return self.__prefix_attributes + + def __init__(self,format,format_ver): + self.format = format + self.format_ver = format_ver + + self.__elements = [] + self.__prefix_attributes = Element(self,"") + self.root = None + self.allow_random_ids = True + + def __repr__(self): + return "".format(id(self)," (root == \"{}\")".format(self.root.name) if self.root else "") + + def validate_element(self,elem): + if elem._is_placeholder: + return + + try: + collision = self.elements[self.elements.index(elem)] + except ValueError: + return # no match + + if not collision._is_placeholder: + raise IDCollisionError("{} invalid for {}: ID collision with {}. 
ID is {}.".format(elem, self, collision, elem.id)) + + def add_element(self,name,elemtype="DmElement",id=None,_is_placeholder=False): + if id == None and not self.allow_random_ids: + raise ValueError("{} does not allow random IDs.".format(self)) + elem = Element(self,name,elemtype,id,_is_placeholder) + self.validate_element(elem) + self.elements.append(elem) + elem.datamodel = self + if len(self.elements) == 1: self.root = elem + return elem + + def find_elements(self,name=None,id=None,elemtype=None): + out = [] + if isinstance(id, str): id = uuid.UUID(id) + for elem in self.elements: + if elem.id == id: return [elem] + if elem.name == name: out.append(elem) + if elem.type == elemtype: out.append(elem) + if len(out): return out + + def _writeString(self, value, suppress_dict = None): + if suppress_dict == None: + suppress_dict = self.encoding_ver < 4 + + if type(value) == str or value is None: + value = [value] + + if suppress_dict: + self.out.write(bytes.join(b'',[_encode_binary_string(item) for item in value])) + else: + self._string_dict.write_string(self.out,value[0]) + + def _write(self,value): + t = type(value) + is_array = issubclass(t, _Array) + + if is_array: + t = value.type + self.out.write( struct.pack("i",len(value)) ) + else: + value = [value] + + if t in [bytes,Binary]: + for item in value: + if t == Binary: + self.out.write( struct.pack("i",len(item)) ) + self.out.write(item) + + elif t == uuid.UUID: + self.out.write(b''.join([id.bytes_le for id in value])) + elif t == str: + self._writeString(value, is_array) + elif t == Element: + self.out.write(bytes.join(b'',[item.tobytes() if item else struct.pack("i",-1) for item in value])) + elif issubclass(t,(_Vector,Matrix, Time)): + self.out.write(bytes.join(b'',[item.tobytes() for item in value])) + + elif t == bool: + self.out.write( struct.pack("b" * len(value),*value) ) + elif t == int: + self.out.write( struct.pack("i" * len(value),*value) ) + elif t == float: + self.out.write( struct.pack("f" * 
len(value),*value) ) + + else: + raise TypeError("Cannot write attributes of type {}".format(t)) + + def _write_element_index(self,elem): + if elem._is_placeholder or hasattr(elem,"_index"): return + self._writeString(elem.type, suppress_dict = False) + self._writeString(elem.name) + self._write(elem.id) + + elem._index = len(self.elem_chain) + self.elem_chain.append(elem) + + for name in elem: + attr = elem[name] + t = type(attr) + if t == Element: + self._write_element_index(attr) + elif t == _ElementArray: + for item in [item for item in attr if item]: + self._write_element_index(item) + + def _write_element_props(self): + for elem in self.elem_chain: + if elem._is_placeholder: continue + self._write(len(elem)) + for name in elem: + attr = elem[name] + self._write(name) + self._write( struct.pack("b", _get_dmx_type_id(self.encoding, self.encoding_ver, type(attr) )) ) + if attr == None: + self._write(-1) + else: + self._write(attr) + + def echo(self,encoding,encoding_ver): + check_support(encoding, encoding_ver) + + if encoding in ["binary", "binary_proto"]: + self.out = io.BytesIO() + else: + self.out = io.StringIO() + + self.encoding = encoding + self.encoding_ver = encoding_ver + + if self.encoding == 'binary_proto': + self.out.write( _encode_binary_string(header_proto2.format(encoding_ver) + "\n") ) + else: + header = header_format.format(encoding,encoding_ver,self.format,self.format_ver) + if self.encoding == 'binary': + self.out.write( _encode_binary_string(header + "\n") ) + elif self.encoding == 'keyvalues2': + self.out.write(header + "\n") + + if encoding == 'binary': + if encoding_ver >= 9: + self._write(1 if len(self.prefix_attributes) else 0) + if len(self.prefix_attributes): + self._write(len(self.prefix_attributes)) + for name,value in self.prefix_attributes.items(): + self._write(name) + self._write(value) + + self._string_dict = _StringDictionary(encoding,encoding_ver,out_datamodel=self) + self._string_dict.write_dictionary(self.out) + + # count 
elements + out_elems = set() + for elem in self.elements: + elem._users = 0 + def _count_child_elems(elem): + if elem in out_elems: return + + out_elems.add(elem) + for name in elem: + attr = elem[name] + t = type(attr) + if t == Element: + if attr not in out_elems: + _count_child_elems(attr) + attr._users += 1 + elif t == _ElementArray: + for item in [item for item in attr if item]: + if item not in out_elems: + _count_child_elems(item) + item._users += 1 + _count_child_elems(self.root) + + if self.encoding in ["binary", "binary_proto"]: + self._write(len(out_elems)) + self.elem_chain = [] + self._write_element_index(self.root) + self._write_element_props() + + for elem in self.elem_chain: del elem._index + elif self.encoding == 'keyvalues2': + self.out.write(self.root.get_kv2() + "\n\n") + for elem in out_elems: + if elem._users > 1: + self.out.write(elem.get_kv2() + "\n\n") + + self._string_dict = None + return self.out.getvalue() + + def write(self,path,encoding,encoding_ver): + with open(path,'wb') as file: + dm = self.echo(encoding,encoding_ver) + if encoding == 'keyvalues2': dm = dm.encode('utf-8') + file.write(dm) + + +class DatamodelParseError(Exception): + pass + +def parse(parse_string, element_path=None): + return load(in_file=io.StringIO(parse_string),element_path=element_path) + +def load(path = None, in_file = None, element_path = None): + if bool(path) == bool(in_file): + raise ValueError("A path string OR a file object must be provided") + if element_path != None and type(element_path) != list: + raise TypeError("element_path must be a list containing element names") + if not in_file: + in_file = open(path,'rb') + + try: + import re + + try: + header = "" + while True: + header += get_char(in_file) + if header.endswith(">"): break + + matches = re.findall(header_format_regex,header) + + if len(matches) != 1 or len(matches[0]) != 4: + matches = re.findall(header_proto2_regex,header) + if len(matches) == 1 and len(matches[0]) == 1: + encoding = 
"binary_proto" + encoding_ver = int(matches[0][0]) + format = "undefined_format" + format_ver = 0 + else: + raise Exception() + else: + encoding,encoding_ver, format,format_ver = matches[0] + encoding_ver = int(encoding_ver) + format_ver = int(format_ver) + except Exception as e: + raise IOError("Could not read DMX header") from e + + check_support(encoding,encoding_ver) + dm = DataModel(format,format_ver) + + class LineTracker(): + line = 0 + + def __next__(self): + self.line += 1 + + line_tracker = LineTracker() + + max_elem_path = len(element_path) + 1 if element_path else 0 + + if encoding == 'keyvalues2': + class AttributeReference: + def __init__(self,Owner,Name,Index=-1): + self.Owner = Owner + self.Name = Name + self.Index = Index + + def parse_line(line): + return re.findall("\"(.*?)\"",line.strip("\n\t ") ) + + def read_element(elem_type, line_tracker): + name = None + prefix = elem_type == "$prefix_element$" + if prefix: element_chain.append(dm.prefix_attributes) + + def read_value(name,type_str,kv2_value, index=-1): + if type_str == 'element': # make a record; will link everything up once all elements have been read + if not kv2_value: + return None + else: + element_users[kv2_value].append(AttributeReference(element_chain[-1], name, index)) + return dm.add_element("Missing element",id=uuid.UUID(hex=kv2_value),_is_placeholder=True) + + elif type_str == 'string': return kv2_value + elif type_str in ['int',"uint8"]: return int(kv2_value) + elif type_str == "uint64": return int(kv2_value, 0) + elif type_str == 'float': return float(kv2_value) + elif type_str == 'bool': return bool(int(kv2_value)) + elif type_str == 'time': return Time(kv2_value) + elif type_str.startswith('vector') or type_str in ['color','quaternion','angle','matrix']: + return _get_type_from_string(type_str)( [float(i) for i in kv2_value.split(" ")] ) + elif type_str == 'binary': return Binary(binascii.unhexlify(kv2_value)) + + new_elem = None + for line_raw in in_file: + 
next(line_tracker) + if line_raw.strip("\n\t, ").endswith("}"): + #print("{}- {}".format('\t' * (len(element_chain)-1),element_chain[-1].name)) + return element_chain.pop() + + line = parse_line(line_raw) + if len(line) == 0: + continue + + if line[0] == 'id': + if not prefix: + new_elem = dm.add_element(name,elem_type,uuid.UUID(hex=line[2])) + element_chain.append(new_elem) + continue + elif line[0] == 'name': + if len(line) > 2: # unnamed element? + if new_elem: new_elem.name = line[2] + else: name = line[2] + continue + + # don't read elements outside the element path + if max_elem_path and name and len(dm.elements): + if len(element_path): + skip = name.lower() != element_path[0].lower() + else: + skip = len(element_chain) < max_elem_path + if skip: + child_level = 0 + for line_raw in in_file: + next(line_tracker) + if "{" in line_raw: child_level += 1 + if "}" in line_raw: + if child_level == 0: return + else: child_level -= 1 + return + elif len(element_path): + del element_path[0] + + if new_elem == None and not prefix: + continue + + if len(line) >= 2: + if line[1] == "element_array": + arr_name = line[0] + arr = _ElementArray() + + if "[" not in line_raw: # immediate "[" means and empty array; elements must be on separate lines + for line in in_file: + next(line_tracker) + if "[" in line: continue + if "]" in line: break + line = parse_line(line) + + if len(line) == 1: + arr.append( read_element(line[0], line_tracker) ) + elif len(line) == 2: + arr.append( read_value(arr_name,"element",line[1],index=len(arr)) ) + + element_chain[-1][arr_name] = arr + continue + + elif line[1].endswith("_array"): + arr_name = line[0] + arr_type_str = line[1].split("_")[0] + arr = _get_array_type(_get_type_from_string(arr_type_str))() + + if "[" in line_raw: # one-line array + for item in line[2:]: + arr.append(read_value(arr_name,arr_type_str,item)) + element_chain[-1][arr_name] = arr + + else: # multi-line array + for line in in_file: + next(line_tracker) + if "[" in line: 
+ continue + if "]" in line: + element_chain[-1][arr_name] = arr + break + + line = parse_line(line) + if line: + arr.append(read_value(arr_name,arr_type_str,line[0])) + + elif len(line) == 2: # inline element or binary + if line[1] == "binary": + num_quotes = 0 + value = Binary() + for line in in_file: + next(line_tracker) + if "\"" in line: + num_quotes += 1 + if num_quotes == 2: break + else: + value = read_value(line[0],line[1], in_file.readline().strip()) + next(line_tracker) + else: + value = read_element(line[1], line_tracker) + element_chain[-1][line[0]] = value + elif len(line) == 3: # ordinary attribute or element ID + element_chain[-1][line[0]] = read_value(line[0],line[1],line[2]) + + raise IOError("Unexpected EOF") + + if hasattr(in_file,'mode') and 'b' in in_file.mode: in_file = io.TextIOWrapper(in_file) + in_file.seek(len(header)) + + element_chain = [] + element_users = collections.defaultdict(list) + for line in in_file: + try: + next(line_tracker) + line = parse_line(line) + if len(line) == 0: continue + + if len(element_chain) == 0 and len(line) == 1: + read_element(line[0], line_tracker) + except Exception as ex: + raise DatamodelParseError("Parsing of {} failed on line {}".format(path, line_tracker.line)) from ex + + for element in dm.elements: + if element._is_placeholder == True: continue + users = element_users[str(element.id)] + for user_info in users: + if user_info.Index == -1: + user_info.Owner[user_info.Name] = element + else: + user_info.Owner[user_info.Name][user_info.Index] = element + + elif encoding in ['binary', 'binary_proto']: + in_file.seek(2,1) # skip header's line break and null terminator + + def get_value(attr_type,from_array = False): + if attr_type == Element: + element_index = get_int(in_file) + if element_index == -1: + return None + elif element_index == -2: + return dm.add_element("Missing element",id=uuid.UUID(hex=get_str(in_file)),_is_placeholder=True) + else: + return dm.elements[element_index] + + elif attr_type 
== str: return get_str(in_file) if encoding_ver < 4 or from_array else dm._string_dict.read_string(in_file) + elif attr_type == int: return get_int(in_file) + elif attr_type == float: return get_float(in_file) + elif attr_type == bool: return get_bool(in_file) + + elif attr_type == Vector2: return Vector2(get_vec(in_file,2)) + elif attr_type == Vector3: return Vector3(get_vec(in_file,3)) + elif attr_type == Angle: return Angle(get_vec(in_file,3)) + elif attr_type == Vector4: return Vector4(get_vec(in_file,4)) + elif attr_type == Quaternion: return Quaternion(get_vec(in_file,4)) + elif attr_type == Matrix: + out = [] + for _ in range(4): out.append(get_vec(in_file,4)) + return Matrix(out) + + elif attr_type == Color: return get_color(in_file) + elif attr_type == Time: return Time.from_int(get_int(in_file)) + elif attr_type == Binary: return Binary(in_file.read(get_int(in_file))) + + else: + raise TypeError("Cannot read attributes of type {}".format(attr_type)) + + def read_element(elem, use_string_dict = True): + #print(elem.name,"@",in_file.tell()) + num_attributes = get_int(in_file) + for _ in range(num_attributes): + #start = in_file.tell() + name = dm._string_dict.read_string(in_file) if use_string_dict else get_str(in_file) + attr_type = _get_dmx_id_type(encoding,encoding_ver,get_byte(in_file)) + #print("\t",name,"@",start,attr_type) + if attr_type in _dmxtypes: + elem[name] = get_value(attr_type) + elif attr_type in _dmxtypes_array: + array_len = get_int(in_file) + arr = elem[name] = attr_type() + arr_item_type = _get_single_type(attr_type) + for _ in range(array_len): + arr.append( get_value(arr_item_type,from_array=True) ) + + # prefix attributes + if encoding_ver >= 9: + for _ in range(get_int(in_file)): + read_element(dm.prefix_attributes, use_string_dict = False) + + dm._string_dict = _StringDictionary(encoding,encoding_ver,in_file=in_file) + num_elements = get_int(in_file) + + # element headers + for _ in range(num_elements): + elemtype = 
dm._string_dict.read_string(in_file) + name = dm._string_dict.read_string(in_file) if encoding_ver >= 4 else get_str(in_file) + id = uuid.UUID(bytes_le = in_file.read(16)) # little-endian + dm.add_element(name,elemtype,id) + + # element bodies + for elem in [elem for elem in dm.elements if not elem._is_placeholder]: + read_element(elem) + + dm._string_dict = None + return dm + finally: + if in_file: in_file.close() diff --git a/io_scene_valvesource/export_smd.py b/io_scene_valvesource/export_smd.py index 64ff939..096f0a1 100644 --- a/io_scene_valvesource/export_smd.py +++ b/io_scene_valvesource/export_smd.py @@ -1,2082 +1,2082 @@ -# Copyright (c) 2014 Tom Edwards contact@steamreview.org -# -# ##### BEGIN GPL LICENSE BLOCK ##### -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. -# -# ##### END GPL LICENSE BLOCK ##### - -import bpy, bmesh, subprocess, collections, re -from bpy import ops -from bpy.app.translations import pgettext -from mathutils import Vector, Matrix -from math import * -from bpy.types import Collection -from bpy.props import CollectionProperty, StringProperty, BoolProperty - -from .utils import * -from . 
import datamodel, ordered_set, flex - -class SMD_OT_Compile(bpy.types.Operator, Logger): - bl_idname = "smd.compile_qc" - bl_label = get_id("qc_compile_title") - bl_description = get_id("qc_compile_tip") - - files : CollectionProperty(type=bpy.types.OperatorFileListElement) - directory : StringProperty(maxlen=1024, default="", subtype='FILE_PATH') - - filepath : StringProperty(name="File path", maxlen=1024, default="", subtype='FILE_PATH') - - filter_folder : BoolProperty(default=True, options={'HIDDEN'}) - filter_glob : StringProperty(default="*.qc;*.qci", options={'HIDDEN'}) - - @classmethod - def poll(cls,context): - return State.gamePath is not None and State.compiler == Compiler.STUDIOMDL - - def invoke(self,context, event): - bpy.context.window_manager.fileselect_add(self) - return {'RUNNING_MODAL'} - - def execute(self,context): - multi_files = len([file for file in self.properties.files if file.name]) > 0 - if not multi_files and not (self.properties.filepath == "*" or os.path.isfile(self.properties.filepath)): - self.report({'ERROR'},"No QC files selected for compile.") - return {'CANCELLED'} - - num = self.compileQCs([os.path.join(self.properties.directory,file.name) for file in self.properties.files] if multi_files else self.properties.filepath) - #if num > 1: - # bpy.context.window_manager.progress_begin(0,1) - self.errorReport(get_id("qc_compile_complete",True).format(num,State.engineBranchTitle)) - bpy.context.window_manager.progress_end() - return {'FINISHED'} - - @classmethod - def getQCs(cls, path : str = None) -> list: - import glob - ext = ".qc" - out = [] - internal = False - if not path: - path = bpy.path.abspath(bpy.context.scene.vs.qc_path) - internal = True - for result in glob.glob(path): - if result.endswith(ext): - out.append(result) - - if not internal and not len(out) and not path.endswith(ext): - out = cls.getQCs(path + ext) - return out - - def compileQCs(self,path=None): - scene = bpy.context.scene - print("\n") - - studiomdl_path = 
os.path.join(bpy.path.abspath(scene.vs.engine_path),"studiomdl.exe") - - if path == "*": - paths = SMD_OT_Compile.getQCs() - elif isinstance(path,str): - paths = [os.path.realpath(bpy.path.abspath(path))] - elif hasattr(path,"__getitem__"): - paths = path - else: - paths = SMD_OT_Compile.getQCs() - num_good_compiles = 0 - num_qcs = len(paths) - if num_qcs == 0: - self.error(get_id("qc_compile_err_nofiles")) - elif not os.path.exists(studiomdl_path): - self.error(get_id("qc_compile_err_compiler", True).format(studiomdl_path) ) - else: - i = 0 - for qc in paths: - bpy.context.window_manager.progress_update((i+1) / (num_qcs+1)) - # save any version of the file currently open in Blender - qc_mangled = qc.lower().replace('\\','/') - for candidate_area in bpy.context.screen.areas: - if candidate_area.type == 'TEXT_EDITOR' and candidate_area.spaces[0].text and candidate_area.spaces[0].text.filepath.lower().replace('\\','/') == qc_mangled: - oldType = bpy.context.area.type - bpy.context.area.type = 'TEXT_EDITOR' - bpy.context.area.spaces[0].text = candidate_area.spaces[0].text - ops.text.save() - bpy.context.area.type = oldType - break #what a farce! 
- - print( "Running studiomdl for \"{}\"...\n".format(os.path.basename(qc)) ) - studiomdl = subprocess.Popen([studiomdl_path, "-nop4", "-game", State.gamePath, qc]) - studiomdl.communicate() - - if studiomdl.returncode == 0: - num_good_compiles += 1 - else: - self.error(get_id("qc_compile_err_unknown", True).format(os.path.basename(qc))) - i+=1 - return num_good_compiles - -class SmdExporter(bpy.types.Operator, Logger): - bl_idname = "export_scene.smd" - bl_label = get_id("exporter_title") - bl_description = get_id("exporter_tip") - - collection : bpy.props.StringProperty(name=get_id("exporter_prop_group"),description=get_id("exporter_prop_group_tip")) - export_scene : bpy.props.BoolProperty(name=get_id("scene_export"),description=get_id("exporter_prop_scene_tip"),default=False) - - @classmethod - def poll(cls,context): - return len(context.scene.vs.export_list) - - def invoke(self, context, event): - State.update_scene() - ops.wm.call_menu(name="SMD_MT_ExportChoice") - return {'PASS_THROUGH'} - - def execute(self, context): - #bpy.context.window_manager.progress_begin(0,1) - - # Misconfiguration? 
- if State.datamodelEncoding != 0 and context.scene.vs.export_format == 'DMX': - datamodel.check_support("binary",State.datamodelEncoding) - if State.datamodelEncoding < 3 and State.datamodelFormat > 11 and not context.scene.vs.use_kv2: - self.report({'ERROR'},"DMX format \"Model {}\" requires DMX encoding \"Binary 3\" or later".format(State.datamodelFormat)) - return {'CANCELLED' } - if not context.scene.vs.export_path: - bpy.ops.wm.call_menu(name="SMD_MT_ConfigureScene") - return {'CANCELLED'} - if context.scene.vs.export_path.startswith("//") and not context.blend_data.filepath: - self.report({'ERROR'},get_id("exporter_err_relativeunsaved")) - return {'CANCELLED'} - if State.datamodelEncoding == 0 and context.scene.vs.export_format == 'DMX': - self.report({'ERROR'},get_id("exporter_err_dmxother")) - return {'CANCELLED'} - - # Don't create an undo level from edit mode - prev_mode = prev_hidden = None - if context.active_object: - if context.active_object.hide_viewport: - prev_hidden = context.active_object.name - context.active_object.hide_viewport = False - prev_mode = context.mode - if prev_mode.find("EDIT") != -1: prev_mode = 'EDIT' - elif prev_mode.find("PAINT") != -1: # FFS Blender! 
- prev_mode = prev_mode.split('_') - prev_mode.reverse() - prev_mode = "_".join(prev_mode) - ops.object.mode_set(mode='OBJECT') - - State.update_scene() - self.bake_results = [] - self.bone_ids = {} - self.materials_used = set() - - for ob in [ob for ob in bpy.context.scene.objects if ob.type == 'ARMATURE' and len(ob.vs.subdir) == 0]: - ob.vs.subdir = "anims" - - ops.ed.undo_push(message=self.bl_label) - - try: - context.tool_settings.use_keyframe_insert_auto = False - context.tool_settings.use_keyframe_insert_keyingset = False - context.preferences.edit.use_enter_edit_mode = False - State.unhook_events() - if context.scene.rigidbody_world: - context.scene.frame_set(context.scene.rigidbody_world.point_cache.frame_start) - - # lots of operators only work on visible objects - for ob in context.scene.objects: - ob.hide_viewport = False - # ensure that objects in all collections are accessible to operators - context.view_layer.layer_collection.exclude = False - - self.files_exported = self.attemptedExports = 0 - - if self.export_scene: - for id in [exportable.item for exportable in context.scene.vs.export_list]: - if type(id) == Collection: - if shouldExportGroup(id): - self.exportId(context, id) - elif id.vs.export: - self.exportId(context, id) - else: - if self.collection == "": - for exportable in getSelectedExportables(): - if type(exportable.item) != Collection: - self.exportId(context, exportable.item) - else: - collection = bpy.data.collections[self.collection] - if collection.vs.mute: self.error(get_id("exporter_err_groupmuted", True).format(collection.name)) - elif not collection.objects: self.error(get_id("exporter_err_groupempty", True).format(collection.name)) - else: self.exportId(context, collection) - - num_good_compiles = None - - if self.attemptedExports == 0: - self.report({'ERROR'},get_id("exporter_err_noexportables")) - elif context.scene.vs.qc_compile and context.scene.vs.qc_path: - # ...and compile the QC - if not SMD_OT_Compile.poll(context): - 
print("Skipping QC compile step: context incorrect\n") - else: - num_good_compiles = SMD_OT_Compile.compileQCs(self) # hack, use self as the logger - print("\n") - - if num_good_compiles != None: - self.errorReport(get_id("exporter_report_qc", True).format( - self.files_exported, - self.elapsed_time(), - num_good_compiles, - State.engineBranchTitle, - os.path.basename(State.gamePath) - )) - else: - self.errorReport(get_id("exporter_report", True).format( - self.files_exported, - self.elapsed_time() - )) - finally: - # Clean everything up - ops.ed.undo_push(message=self.bl_label) - if bpy.app.debug_value <= 1: ops.ed.undo() - - if prev_mode: - ops.object.mode_set(mode=prev_mode) - if prev_hidden: - context.scene.objects[prev_hidden].hide_viewport = True - context.scene.update_tag() - - context.window_manager.progress_end() - State.hook_events() - - self.collection = "" - self.export_scene = False - return {'FINISHED'} - - def sanitiseFilename(self,name): - new_name = name - for badchar in "/?<>\\:*|\"": - new_name = new_name.replace(badchar,"_") - if new_name != name: - self.warning(get_id("exporter_warn_sanitised_filename",True).format(name,new_name)) - return new_name - - def exportId(self,context,id): - self.attemptedExports += 1 - self.armature = self.armature_src = None - bench = BenchMarker() - - subdir = id.vs.subdir - - print( "\nBlender Source Tools: exporting {}".format(id.name) ) - - subdir = subdir.lstrip("/") # don't want //s here! 
- - path = os.path.join(bpy.path.abspath(context.scene.vs.export_path), subdir) - if not os.path.exists(path): - try: - os.makedirs(path) - except Exception as err: - self.error(get_id("exporter_err_makedirs", True).format(err)) - return - - if isinstance(id, bpy.types.Collection) and not any(ob.vs.export for ob in id.objects): - self.error(get_id("exporter_err_nogroupitems",True).format(id.name)) - return - - if isinstance(id, bpy.types.Object) and id.type == 'ARMATURE': - ad = id.animation_data - if not ad: return # otherwise we create a folder but put nothing in it - if id.data.vs.action_selection == 'FILTERED': - pass - elif ad.action: - export_name = ad.action.name - elif ad.nla_tracks: - export_name = id.name - else: - self.error(get_id("exporter_err_arm_noanims",True).format(id.name)) - else: - export_name = id.name - - # hide all metaballs that we don't want - for meta in [ob for ob in context.scene.objects if ob.type == 'META' and (not ob.vs.export or (isinstance(id, Collection) and not ob.name in id.objects))]: - for element in meta.data.elements: element.hide = True - - def find_basis_metaball(id): - basis_ns = id.name.rsplit(".") - if len(basis_ns) == 1: return id - - basis = id - for meta in [ob for ob in bpy.data.objects if ob.type == 'META']: - ns = meta.name.rsplit(".") - - if ns[0] != basis_ns[0]: - continue - if len(ns) == 1: - basis = meta - break - - try: - if int(ns[1]) < int(basis_ns[1]): - basis = meta - basis_ns = ns - except ValueError: - pass - return basis - - bake_results = [] - baked_metaballs = [] - - bench.report("setup") - - if bench.quiet: print("- Baking...") - - if type(id) == Collection: - group_vertex_maps = valvesource_vertex_maps(id) - for i, ob in enumerate([ob for ob in id.objects if ob.vs.export and ob.session_uid in State.exportableObjects]): - bpy.context.window_manager.progress_update(i / len(id.objects)) - if ob.type == 'META': - ob = find_basis_metaball(ob) - if ob in baked_metaballs: continue - else: 
baked_metaballs.append(ob) - - bake = self.bakeObj(ob) - for vertex_map_name in group_vertex_maps: - if not vertex_map_name in bake.object.data.vertex_colors: - vertex_map = bake.object.data.vertex_colors.new(vertex_map_name) - vertex_map.data.foreach_set("color",[1.0] * 4) - - if bake: - bake_results.append(bake) - bench.report("Group bake", len(bake_results)) - else: - if id.type == 'META': - bake = self.bakeObj(find_basis_metaball(id)) - bench.report("Metaball bake") - else: - bake = self.bakeObj(id) - bench.report("Standard bake") - - if bake: - bake_results.append(bake) - - if not any(bake_results): - return - - if State.exportFormat == ExportFormat.DMX and hasShapes(id): - self.flex_controller_mode = id.vs.flex_controller_mode - self.flex_controller_source = id.vs.flex_controller_source - - bpy.context.view_layer.objects.active = bake_results[0].object - bpy.ops.object.mode_set(mode='OBJECT') - mesh_bakes = [bake for bake in bake_results if bake.object.type == 'MESH'] - - skip_vca = False - if isinstance(id, Collection) and len(id.vs.vertex_animations) and len(id.objects) > 1: - if len(mesh_bakes) > len([bake for bake in bake_results if (type(bake.envelope) is str and bake.envelope == bake_results[0].envelope) or bake.envelope is None]): - self.error(get_id("exporter_err_unmergable",True).format(id.name)) - skip_vca = True - elif not id.vs.automerge: - id.vs.automerge = True - - for va in id.vs.vertex_animations: - if skip_vca: break - - if State.exportFormat == ExportFormat.DMX: - va.name = va.name.replace("_","-") - - vca = bake_results[0].vertex_animations[va.name] # only the first bake result will ever have a vertex animation defined - vca.export_sequence = va.export_sequence - vca.num_frames = va.end - va.start - two_percent = vca.num_frames * len(bake_results) / 50 - print("- Generating vertex animation \"{}\"".format(va.name)) - anim_bench = BenchMarker(1,va.name) - - for f in range(va.start,va.end): - bpy.context.scene.frame_set(f) - 
bpy.ops.object.select_all(action='DESELECT') - depsgraph = bpy.context.evaluated_depsgraph_get() - for bake in mesh_bakes: # create baked snapshots of each vertex animation frame - bake.fob = bpy.data.objects.new("{}-{}".format(va.name,f), bpy.data.meshes.new_from_object((bake.src.evaluated_get(depsgraph)))) - bake.fob.matrix_world = bake.src.matrix_world - bpy.context.scene.collection.objects.link(bake.fob) - bpy.context.view_layer.objects.active = bake.fob - bake.fob.select_set(True) - - top_parent = self.getTopParent(bake.src) - if top_parent: - bake.fob.location -= top_parent.location - - if context.scene.rigidbody_world: - # Blender 2.71 bug: https://developer.blender.org/T41388 - prev_rbw = bpy.context.scene.rigidbody_world.enabled - bpy.context.scene.rigidbody_world.enabled = False - - bpy.ops.object.transform_apply(location=True,scale=True,rotation=True) - - if context.scene.rigidbody_world: - bpy.context.scene.rigidbody_world.enabled = prev_rbw - - if bpy.context.selected_objects and State.exportFormat == ExportFormat.SMD: - bpy.context.view_layer.objects.active = bpy.context.selected_objects[0] - ops.object.join() - - vca.append(bpy.context.active_object if len(bpy.context.selected_objects) == 1 else bpy.context.selected_objects) - anim_bench.report("bake") - - if len(bpy.context.selected_objects) != 1: - for bake in mesh_bakes: - bpy.context.scene.collection.objects.unlink(bake.fob) - del bake.fob - - anim_bench.report("record") - - if two_percent and len(vca) / len(bake_results) % two_percent == 0: - print(".", debug_only=True, newline=False) - bpy.context.window_manager.progress_update(len(vca) / vca.num_frames) - - bench.report("\n" + va.name) - bpy.context.view_layer.objects.active = bake_results[0].src - - if isinstance(id, Collection) and State.exportFormat == ExportFormat.DMX and id.vs.automerge: - bone_parents = collections.defaultdict(list) - scene_obs = bpy.context.scene.collection.objects - view_obs = bpy.context.view_layer.objects - for bake 
in [bake for bake in bake_results if type(bake.envelope) is str or bake.envelope is None]: - bone_parents[bake.envelope].append(bake) - - for bp, parts in bone_parents.items(): - if len(parts) <= 1: continue - shape_names = set() - for key in [key for part in parts for key in part.shapes.keys()]: - shape_names.add(key) - - ops.object.select_all(action='DESELECT') - for part in parts: - ob = part.object.copy() - ob.data = ob.data.copy() - ob.data.uv_layers.active.name = "__dmx_uv__" - scene_obs.link(ob) - ob.select_set(True) - view_obs.active = ob - bake_results.remove(part) - - bpy.ops.object.join() - joined = self.BakeResult(bp + "_meshes" if bp else "loose_meshes") - joined.object = bpy.context.active_object - joined.object.name = joined.object.data.name = joined.name - joined.envelope = bp - - if parts[0].vertex_animations: - for src_name,src_vca in parts[0].vertex_animations.items(): - vca = joined.vertex_animations[src_name] = self.BakedVertexAnimation() - vca.bone_id = src_vca.bone_id - vca.export_sequence = src_vca.export_sequence - vca.num_frames = src_vca.num_frames - - for i,frame in enumerate(src_vca): - bpy.ops.object.select_all(action='DESELECT') - frame.reverse() - for ob in frame: - scene_obs.link(ob) - ob.select_set(True) - bpy.context.view_layer.objects.active = frame[0] - bpy.ops.object.join() - bpy.context.active_object.name = "{}-{}".format(src_name,i) - bpy.ops.object.transform_apply(location=True,scale=True,rotation=True) - vca.append(bpy.context.active_object) - scene_obs.unlink(bpy.context.active_object) - - bake_results.append(joined) - - for shape_name in shape_names: - ops.object.select_all(action='DESELECT') - - for part in parts: - mesh = part.shapes[shape_name] if shape_name in part.shapes else part.object.data - ob = bpy.data.objects.new(name="{} -> {}".format(part.name,shape_name),object_data = mesh.copy()) - scene_obs.link(ob) - ob.matrix_local = part.matrix - ob.select_set(True) - view_obs.active = ob - - bpy.ops.object.join() - 
joined.shapes[shape_name] = bpy.context.active_object.data - bpy.context.active_object.data.name = "{} -> {}".format(joined.object.name,shape_name) - - scene_obs.unlink(ob) - bpy.data.objects.remove(ob) - del ob - - view_obs.active = joined.object - bench.report("Mech merge") - - for result in bake_results: - if result.armature: - if not self.armature: - self.armature = result.armature.object - self.armature_src = result.armature.src - elif self.armature != result.armature.object: - self.warning(get_id("exporter_warn_multiarmature")) - - if self.armature_src: - if list(self.armature_src.scale).count(self.armature_src.scale[0]) != 3: - self.warning(get_id("exporter_err_arm_nonuniform",True).format(self.armature_src.name)) - if not self.armature: - self.armature = self.bakeObj(self.armature_src).object - exporting_armature = isinstance(id, bpy.types.Object) and id.type == 'ARMATURE' - self.exportable_bones = list([self.armature.pose.bones[edit_bone.name] for edit_bone in self.armature.data.bones if (exporting_armature or edit_bone.use_deform)]) - skipped_bones = len(self.armature.pose.bones) - len(self.exportable_bones) - if skipped_bones: - print("- Skipping {} non-deforming bones".format(skipped_bones)) - - write_func = self.writeDMX if State.exportFormat == ExportFormat.DMX else self.writeSMD - bench.report("Post Bake") - - if isinstance(id, bpy.types.Object) and id.type == 'ARMATURE' and id.data.vs.action_selection == 'FILTERED': - for action in actionsForFilter(id.vs.action_filter): - bake_results[0].object.animation_data.action = action - self.files_exported += write_func(id, bake_results, self.sanitiseFilename(action.name), path) - bench.report(write_func.__name__) - else: - self.files_exported += write_func(id, bake_results, self.sanitiseFilename(export_name), path) - bench.report(write_func.__name__) - - # Source doesn't handle Unicode characters in models. Detect any unicode strings and warn the user about them. 
- unicode_tested = set() - def test_for_unicode(name, id, display_type): - if id in unicode_tested: return; - unicode_tested.add(id) - - try: - name.encode('ascii') - except UnicodeEncodeError: - self.warning(get_id("exporter_warn_unicode", format_string=True).format(pgettext(display_type), name)) - - # Meanwhile, Source 2 wants only lowercase characters, digits, and underscore in model names - if State.compiler > Compiler.STUDIOMDL or State.datamodelFormat >= 22: - if re.match(r'[^a-z0-9_]', id.name): - self.warning(get_id("exporter_warn_source2names", format_string=True).format(id.name)) - - for bake in bake_results: - test_for_unicode(bake.name, bake, type(bake.src).__name__) - for shape_name, shape_id in bake.shapes.items(): - test_for_unicode(shape_name, shape_id, "Shape Key") - if hasattr(bake.object,"objects"): - for ob in bake.object.objects: - test_for_unicode(ob.name, ob, ob.type.capitalize()) - for mat in self.materials_used: - test_for_unicode(mat[0], mat[1], type(mat[1]).__name__) - - - def getWeightmap(self,bake_result): - out = [] - amod = bake_result.envelope - ob = bake_result.object - if not amod or not isinstance(amod, bpy.types.ArmatureModifier): return out - - amod_vg = ob.vertex_groups.get(amod.vertex_group) - - try: - amod_ob = next((bake.object for bake in self.bake_results if bake.src == amod.object)) - except StopIteration as e: - raise ValueError("Armature for exportable \"{}\" was not baked".format(bake_result.name)) from e - - model_mat = amod_ob.matrix_world.inverted() @ ob.matrix_world - - num_verts = len(ob.data.vertices) - for v in ob.data.vertices: - weights = [] - total_weight = 0 - if len(out) % 50 == 0: bpy.context.window_manager.progress_update(len(out) / num_verts) - - if amod.use_vertex_groups: - for v_group in v.groups: - if v_group.group < len(ob.vertex_groups): - ob_group = ob.vertex_groups[v_group.group] - group_name = ob_group.name - group_weight = v_group.weight - else: - continue # Vertex group might not exist on 
object if it's re-using a datablock - - bone = amod_ob.pose.bones.get(group_name) - if bone and bone in self.exportable_bones: - weights.append([ self.bone_ids[bone.name], group_weight ]) - total_weight += group_weight - - if amod.use_bone_envelopes and total_weight == 0: # vertex groups completely override envelopes - for pose_bone in [pb for pb in amod_ob.pose.bones if pb in self.exportable_bones]: - weight = pose_bone.bone.envelope_weight * pose_bone.evaluate_envelope( model_mat @ v.co ) - if weight: - weights.append([ self.bone_ids[pose_bone.name], weight ]) - total_weight += weight - - # normalise weights, like Blender does. Otherwise Studiomdl puts anything left over onto the root bone. - if total_weight not in [0,1]: - for link in weights: - link[1] *= 1/total_weight - - # apply armature modifier vertex group - if amod_vg and total_weight > 0: - amod_vg_weight = 0 - for v_group in v.groups: - if v_group.group == amod_vg.index: - amod_vg_weight = v_group.weight - break - if amod.invert_vertex_group: - amod_vg_weight = 1 - amod_vg_weight - for link in weights: - link[1] *= amod_vg_weight - - out.append(weights) - return out - - def GetMaterialName(self, ob, material_index): - mat_name = None - mat_id = None - if len(ob.material_slots) > material_index: - mat_id = ob.material_slots[material_index].material - if mat_id: - mat_name = mat_id.name - if mat_name: - self.materials_used.add((mat_name,mat_id)) - return mat_name, True - else: - return "no_material", ob.display_type != 'TEXTURED' # assume it's a collision mesh if it's not textured - - def getTopParent(self,id): - top_parent = id - while top_parent.parent: - top_parent = top_parent.parent - return top_parent - - def getEvaluatedPoseBones(self): - depsgraph = bpy.context.evaluated_depsgraph_get() - evaluated_armature = self.armature.evaluated_get(depsgraph) - - return [evaluated_armature.pose.bones[bone.name] for bone in self.exportable_bones] - - class BakedVertexAnimation(list): - def __init__(self): - 
self.export_sequence = False - self.bone_id = -1 - self.num_frames = 0 - - class VertexAnimationKey(): - def __init__(self,vert_index,co,norm): - self.vert_index = vert_index - self.co = co - self.norm = norm - - class BakeResult: - def __init__(self,name): - self.name = name - self.object = None - self.matrix = Matrix() - self.envelope = None - self.bone_parent_matrix = None - self.src = None - self.armature = None - self.balance_vg = None - self.shapes = collections.OrderedDict() - self.vertex_animations = collections.defaultdict(SmdExporter.BakedVertexAnimation) - - # Creates a mesh with object transformations and modifiers applied - def bakeObj(self,id, generate_uvs = True): - for bake in (bake for bake in self.bake_results if bake.src == id or bake.object == id): - return bake - - result = self.BakeResult(id.name) - result.src = id - self.bake_results.append(result) - - try: - select_only(id) - except RuntimeError: - self.warning(get_id("exporter_err_hidden", True).format(id.name)) - return - - should_triangulate = State.exportFormat == ExportFormat.SMD or id.vs.triangulate - - def triangulate(): - ops.object.mode_set(mode='EDIT') - ops.mesh.select_all(action='SELECT') - ops.mesh.quads_convert_to_tris(quad_method='FIXED') - ops.object.mode_set(mode='OBJECT') - - duplis = [] - if id.instance_type != 'NONE': - bpy.ops.object.duplicates_make_real() - id.select_set(False) - if bpy.context.selected_objects: - bpy.context.view_layer.objects.active = bpy.context.selected_objects[0] - bpy.ops.object.join() - duplis = bpy.context.active_object - duplis.parent = id - duplis = self.bakeObj(duplis, generate_uvs = False).object - if should_triangulate: triangulate() - elif id.type not in exportable_types: - return - else: - duplis = None - - if id.type != 'META': # eek, what about lib data? 
- id = id.copy() - bpy.context.scene.collection.objects.link(id) - if id.data: - id.data = id.data.copy() - - if bpy.context.active_object: - ops.object.mode_set(mode='OBJECT') - select_only(id) - - if hasShapes(id): - id.active_shape_key_index = 0 - - top_parent = self.getTopParent(id) # record this before changing hierarchies! - - def captureBoneParent(armature, boneName): - result.envelope = boneName - result.armature = self.bakeObj(armature) - select_only(id) - - # Objects with bone parents are not updated in sync with depsgraph evaluation (as of Blender 3.0.1). So capture the correct matrix before we start to mess with them. - # Furthemore, Blender's bone transforms are inconsistent with object transforms: - # - A bone's matrix value is local to the armature, NOT the bone's parent - # - Object bone parent matricies are calculated from the head of the bone, NOT the tail (even though the tail defines the bone's location in pose mode!) - # - Bones are Y up, NOT Z up like everything else in Blender, and this affects their children's transforms - # To avoid this mess, we can use the bone and object world transforms to calculate a sane local matrix - result.bone_parent_matrix = armature.pose.bones[boneName].matrix.inverted() @ armature.matrix_world.inverted() @ id.matrix_world - - cur = id - while cur: - if cur.parent_bone and cur.parent_type == 'BONE' and not result.envelope: - captureBoneParent(cur.parent, cur.parent_bone) - for con in [con for con in cur.constraints if not con.mute]: - if con.type in ['CHILD_OF','COPY_TRANSFORMS'] and con.target and con.target.type == 'ARMATURE' and con.subtarget: - if not result.envelope: - captureBoneParent(con.target, con.subtarget) - else: - self.warning(get_id("exporter_err_dupeenv_con",True).format(con.name,cur.name)) - if result.envelope: - break - cur = cur.parent - del cur - - if id.type == 'MESH': - ops.object.mode_set(mode='EDIT') - ops.mesh.reveal() - - if id.matrix_world.is_negative: - 
ops.mesh.select_all(action='SELECT') - ops.mesh.flip_normals() - - ops.mesh.select_all(action="DESELECT") - ops.object.mode_set(mode='OBJECT') - - ops.object.parent_clear(type='CLEAR_KEEP_TRANSFORM') - id.matrix_world = Matrix.Translation(top_parent.location).inverted() @ getUpAxisMat(bpy.context.scene.vs.up_axis).inverted() @ id.matrix_world - - if id.type == 'ARMATURE': - for posebone in id.pose.bones: posebone.matrix_basis.identity() - if self.armature and self.armature != id: - self.warning(get_id("exporter_warn_multiarmature")) - result.armature = result - result.object = id - return result - - if id.type == 'CURVE': - id.data.dimensions = '3D' - - for con in [con for con in id.constraints if not con.mute]: - con.mute = True - - solidify_fill_rim = None - shapes_invalid = False - for mod in id.modifiers: - if mod.type == 'ARMATURE' and mod.object: - if result.envelope and any(br for br in self.bake_results if br.envelope != mod.object): - self.warning(get_id("exporter_err_dupeenv_arm",True).format(mod.name,id.name)) - else: - result.armature = self.bakeObj(mod.object) - result.envelope = mod - select_only(id) - mod.show_viewport = False - elif mod.type == 'SOLIDIFY' and not solidify_fill_rim: - solidify_fill_rim = mod.use_rim - elif hasShapes(id) and mod.type == 'DECIMATE' and mod.decimate_type != 'UNSUBDIV': - self.error(get_id("exporter_err_shapes_decimate", True).format(id.name,mod.decimate_type)) - shapes_invalid = True - ops.object.mode_set(mode='OBJECT') - - depsgraph = bpy.context.evaluated_depsgraph_get() - - if id.type in exportable_types: - # Bake reference mesh - data = bpy.data.meshes.new_from_object(id.evaluated_get(depsgraph), preserve_all_data_layers=True, depsgraph=depsgraph) - data.name = id.name + "_baked" - - def put_in_object(id, data, quiet=False): - if bpy.context.view_layer.objects.active: - ops.object.mode_set(mode='OBJECT') - - ob = bpy.data.objects.new(name=id.name,object_data=data) - ob.matrix_world = id.matrix_world - - 
bpy.context.scene.collection.objects.link(ob) - - select_only(ob) - - exporting_smd = State.exportFormat == ExportFormat.SMD - ops.object.transform_apply(scale=True, location=exporting_smd, rotation=exporting_smd) - - if hasCurves(id): - ops.object.mode_set(mode='EDIT') - ops.mesh.select_all(action='SELECT') - if id.data.vs.faces == 'BOTH': - ops.mesh.duplicate() - if solidify_fill_rim and not quiet: - self.warning(get_id("exporter_err_solidifyinside", True).format(id.name)) - if id.data.vs.faces != 'FORWARD': - ops.mesh.flip_normals() - ops.object.mode_set(mode='OBJECT') - - return ob - - baked = put_in_object(id,data) - - if should_triangulate: triangulate() - - if duplis: - if not id.type in exportable_types: - id.select_set(False) - bpy.context.view_layer.objects.active = duplis - duplis.select_set(True) - bpy.ops.object.join() - baked = bpy.context.active_object - - result.object = baked - data = baked.data - - if not data.polygons: - self.error(get_id("exporter_err_nopolys", True).format(result.name)) - return - - result.matrix = baked.matrix_world - - if not shapes_invalid and hasShapes(id): - # calculate vert balance - if State.exportFormat == ExportFormat.DMX: - if id.data.vs.flex_stereo_mode == 'VGROUP': - if id.data.vs.flex_stereo_vg == "": - self.warning(get_id("exporter_err_splitvgroup_undefined",True).format(id.name)) - else: - result.balance_vg = baked.vertex_groups.get(id.data.vs.flex_stereo_vg) - if not result.balance_vg: - self.warning(get_id("exporter_err_splitvgroup_missing", True).format(id.data.vs.flex_stereo_vg,id.name)) - else: - axis = axes_lookup[id.data.vs.flex_stereo_mode] - balance_width = baked.dimensions[axis] * ( 1 - (id.data.vs.flex_stereo_sharpness / 100) ) - result.balance_vg = baked.vertex_groups.new(name="__dmx_balance__") - zeroes = [] - ones = [] - for vert in baked.data.vertices: - if balance_width == 0: - if vert.co[axis] > 0: ones.append(vert.index) - else: zeroes.append(vert.index) - else: - balance = min(1,max(0, 
(-vert.co[axis] / balance_width / 2) + 0.5)) - if balance == 1: ones.append(vert.index) - elif balance == 0: zeroes.append(vert.index) - else: result.balance_vg.add([vert.index], balance, 'REPLACE') - result.balance_vg.add(ones, 1, 'REPLACE') - result.balance_vg.add(zeroes, 0, 'REPLACE') - - # bake shapes - id.show_only_shape_key = True - for i, shape in enumerate(id.data.shape_keys.key_blocks): - if i == 0: continue - id.active_shape_key_index = i - depsgraph = bpy.context.evaluated_depsgraph_get() - baked_shape = bpy.data.meshes.new_from_object(id.evaluated_get(depsgraph)) - baked_shape.name = "{} -> {}".format(id.name,shape.name) - - shape_ob = put_in_object(id,baked_shape, quiet = True) - - if duplis: - select_only(shape_ob) - duplis.select_set(True) - bpy.ops.object.join() - shape_ob = bpy.context.active_object - - result.shapes[shape.name] = shape_ob.data - - if should_triangulate: - bpy.context.view_layer.objects.active = shape_ob - triangulate() - - bpy.context.scene.collection.objects.unlink(shape_ob) - bpy.data.objects.remove(shape_ob) - del shape_ob - - for mod in id.modifiers: - mod.show_viewport = False # mainly to disable physics modifiers - - bpy.context.view_layer.objects.active = baked - baked.select_set(True) - - # project a UV map - if generate_uvs and not baked.data.uv_layers: - ops.object.mode_set(mode='EDIT') - ops.mesh.select_all(action='SELECT') - if len(result.object.data.vertices) < 2000: - result.object.data.uv_layers.new() - ops.uv.smart_project() - else: - ops.uv.unwrap() - ops.object.mode_set(mode='OBJECT') - - return result - - def openSMD(self,path,name,description): - full_path = os.path.realpath(os.path.join(path, name)) - - try: - f = open(full_path, 'w',encoding='utf-8') - except Exception as err: - self.error(get_id("exporter_err_open", True).format(description, err)) - return None - - f.write("version 1\n") - print("-",full_path) - return f - - def writeSMD(self, id, bake_results, name, filepath, filetype = 'smd'): - bench = 
BenchMarker(1,"SMD") - goldsrc = bpy.context.scene.vs.smd_format == "GOLDSOURCE" - - self.smd_file = self.openSMD(filepath,name + "." + filetype,filetype.upper()) - if self.smd_file == None: return 0 - - if State.compiler > Compiler.STUDIOMDL: - self.warning(get_id("exporter_warn_source2smdsupport")) - - # BONES - self.smd_file.write("nodes\n") - curID = 0 - if not self.armature: - self.smd_file.write("0 \"root\" -1\n") - if filetype == 'smd': print("- No skeleton to export") - else: - if self.armature.data.vs.implicit_zero_bone: - self.smd_file.write("0 \"{}\" -1\n".format(implicit_bone_name)) - curID += 1 - - # Write to file - for bone in self.exportable_bones: - parent = bone.parent - while parent and not parent in self.exportable_bones: - parent = parent.parent - - line = "{} ".format(curID) - self.bone_ids[bone.name] = curID - curID += 1 - - bone_name = bone.name - line += "\"" + bone_name + "\" " - - if parent: - line += str(self.bone_ids[parent.name]) - else: - line += "-1" - - self.smd_file.write(line + "\n") - - num_bones = len(self.armature.data.bones) - if filetype == 'smd': print("- Exported",num_bones,"bones") - - max_bones = 128 - if num_bones > max_bones: - self.warning(get_id("exporter_err_bonelimit", True).format(num_bones,max_bones)) - - for vca in [vca for vca in bake_results[0].vertex_animations.items() if vca[1].export_sequence]: - curID += 1 - vca[1].bone_id = curID - self.smd_file.write("{} \"vcabone_{}\" -1\n".format(curID,vca[0])) - - self.smd_file.write("end\n") - - if filetype == 'smd': - # ANIMATION - self.smd_file.write("skeleton\n") - if not self.armature: - self.smd_file.write("time 0\n0 0 0 0 0 0 0\nend\n") - else: - # Get the working frame range - is_anim = len(bake_results) == 1 and bake_results[0].object.type == 'ARMATURE' - if is_anim: - ad = self.armature.animation_data - anim_len = animationLength(ad) + 1 # frame 0 is a frame too... 
- if anim_len == 1: - self.warning(get_id("exporter_err_noframes",True).format(self.armature_src.name)) - - if ad.action and hasattr(ad.action,'fps'): - bpy.context.scene.render.fps = ad.action.fps - bpy.context.scene.render.fps_base = 1 - else: - anim_len = 1 - - # remove any unkeyed poses, e.g. from other animations in this export operation. - for posebone in self.armature.pose.bones: posebone.matrix_basis.identity() - - # Start writing out the animation - for i in range(anim_len): - bpy.context.window_manager.progress_update(i / anim_len) - self.smd_file.write("time {}\n".format(i)) - - if self.armature.data.vs.implicit_zero_bone: - self.smd_file.write("0 0 0 0 0 0 0\n") - - if is_anim: - bpy.context.scene.frame_set(i) - - evaluated_bones = self.getEvaluatedPoseBones() - for posebone in evaluated_bones: - parent = posebone.parent - while parent and not parent in evaluated_bones: - parent = parent.parent - - # Get the bone's Matrix from the current pose - PoseMatrix = posebone.matrix - if self.armature.data.vs.legacy_rotation: - PoseMatrix @= mat_BlenderToSMD - if parent: - parentMat = parent.matrix - if self.armature.data.vs.legacy_rotation: parentMat @= mat_BlenderToSMD - PoseMatrix = parentMat.inverted() @ PoseMatrix - else: - PoseMatrix = self.armature.matrix_world @ PoseMatrix - - self.smd_file.write("{} {} {}\n".format(self.bone_ids[posebone.name], getSmdVec(PoseMatrix.to_translation()), getSmdVec(PoseMatrix.to_euler()))) - - self.smd_file.write("end\n") - - ops.object.mode_set(mode='OBJECT') - - print("- Exported {} frames{}".format(anim_len," (legacy rotation)" if self.armature.data.vs.legacy_rotation else "")) - - # POLYGONS - done_header = False - for bake in [bake for bake in bake_results if bake.object.type != 'ARMATURE']: - if not done_header: - self.smd_file.write("triangles\n") - done_header = True - face_index = 0 - ob = bake.object - data = ob.data - - uv_loop = data.uv_layers.active.data - - weights = self.getWeightmap(bake) - - ob_weight_str = 
None - if type(bake.envelope) == str and bake.envelope in self.bone_ids: - ob_weight_str = (" 1 {} 1" if not goldsrc else "{}").format(self.bone_ids[bake.envelope]) - elif not weights: - ob_weight_str = " 0" if not goldsrc else "0" - - bad_face_mats = 0 - multi_weight_verts = set() # only relevant for GoldSrc exports - p = 0 - for poly in data.polygons: - if p % 10 == 0: bpy.context.window_manager.progress_update(p / len(data.polygons)) - mat_name, mat_success = self.GetMaterialName(ob, poly.material_index) - if not mat_success: - bad_face_mats += 1 - - self.smd_file.write(mat_name + "\n") - - for loop in [data.loops[l] for l in poly.loop_indices]: - # Vertex locations, normal directions - v = data.vertices[loop.vertex_index] - pos_norm = " {} {} ".format(getSmdVec(v.co),getSmdVec(loop.normal)) - - # UVs - uv = " ".join([getSmdFloat(j) for j in uv_loop[loop.index].uv]) - - if not goldsrc: - # Weightmaps - weight_string = "" - if ob_weight_str: - weight_string = ob_weight_str - else: - valid_weights = 0 - for link in [link for link in weights[v.index] if link[1] > 0]: - weight_string += " {} {}".format(link[0], getSmdFloat(link[1])) - valid_weights += 1 - weight_string = " {}{}".format(valid_weights,weight_string) - - self.smd_file.write("0" + pos_norm + uv + weight_string + "\n") # write to file - - else: - if ob_weight_str: - weight_string = ob_weight_str - else: - goldsrc_weights = [link for link in weights[v.index] if link[1] > 0] - if len(goldsrc_weights) == 0: - weight_string = "0" - else: - if len(goldsrc_weights) > 1: - multi_weight_verts.add(v) - weight_string = str(goldsrc_weights[0][0]) - self.smd_file.write(weight_string + pos_norm + uv + "\n") # write to file - - face_index += 1 - - if goldsrc and multi_weight_verts: - self.warning(get_id("exporterr_goldsrc_multiweights", format_string=True).format(len(multi_weight_verts), bake.src.data.name)) - if bad_face_mats: - 
self.warning(get_id("exporter_err_facesnotex_ormat").format(bad_face_mats,bake.src.data.name)) - - print("- Exported",face_index,"polys") - - print("- Exported {} materials".format(len(self.materials_used))) - for mat in self.materials_used: - print(" " + mat[0]) - - if done_header: - self.smd_file.write("end\n") - elif filetype == 'vta': - self.smd_file.write("skeleton\n") - - def _writeTime(time, shape_name = None): - self.smd_file.write( "time {}{}\n".format(time, " # {}".format(shape_name) if shape_name else "")) - - shape_names = ordered_set.OrderedSet() - for bake in [bake for bake in bake_results if bake.object.type != 'ARMATURE']: - for shape_name in bake.shapes.keys(): - shape_names.add(shape_name) - - _writeTime(0) - for i, shape_name in enumerate(shape_names): - _writeTime(i+1, shape_name) - self.smd_file.write("end\n") - - self.smd_file.write("vertexanimation\n") - - total_verts = 0 - vert_id = 0 - - def _makeVertLine(i,co,norm): - return "{} {} {}\n".format(i, getSmdVec(co), getSmdVec(norm)) - - _writeTime(0) - for bake in [bake for bake in bake_results if bake.object.type != 'ARMATURE']: - bake.offset = vert_id - verts = bake.object.data.vertices - for loop in [bake.object.data.loops[l] for poly in bake.object.data.polygons for l in poly.loop_indices]: - self.smd_file.write(_makeVertLine(vert_id,verts[loop.vertex_index].co,loop.normal)) - vert_id += 1 - - for i, shape_name in enumerate(shape_names): - i += 1 - bpy.context.window_manager.progress_update(i / len(shape_names)) - _writeTime(i,shape_name) - for bake in [bake for bake in bake_results if bake.object.type != 'ARMATURE']: - shape = bake.shapes.get(shape_name) - if not shape: continue - - vert_index = bake.offset - mesh_verts = bake.object.data.vertices - shape_verts = shape.vertices - - for mesh_loop in [bake.object.data.loops[l] for poly in bake.object.data.polygons for l in poly.loop_indices]: - shape_vert = shape_verts[mesh_loop.vertex_index] - shape_loop = shape.loops[mesh_loop.index] - 
mesh_vert = mesh_verts[mesh_loop.vertex_index] - diff_vec = shape_vert.co - mesh_vert.co - if diff_vec > epsilon or shape_loop.normal - mesh_loop.normal > epsilon: - self.smd_file.write(_makeVertLine(vert_index,shape_vert.co,shape_loop.normal)) - total_verts += 1 - vert_index += 1 - - self.smd_file.write("end\n") - print("- Exported {} flex shapes ({} verts)".format(i,total_verts)) - - self.smd_file.close() - - - if bench.quiet: - print("- {} export took".format(filetype.upper()) ,bench.total(),"\n") - - written = 1 - if filetype == 'smd': - for bake in [bake for bake in bake_results if bake.shapes]: - written += self.writeSMD(id,bake_results,name,filepath,filetype='vta') - for name,vca in bake_results[0].vertex_animations.items(): - written += self.writeVCA(name,vca,filepath) - if vca.export_sequence: - written += self.writeVCASequence(name,vca,filepath) - return written - - def writeVCA(self,name,vca,filepath): - bench = BenchMarker() - self.smd_file = self.openSMD(filepath,name + ".vta","vertex animation") - if self.smd_file == None: return 0 - - self.smd_file.write( -'''nodes -0 "root" -1 -end -skeleton -''') - for i,frame in enumerate(vca): - self.smd_file.write("time {}\n0 0 0 0 0 0 0\n".format(i)) - - self.smd_file.write("end\nvertexanimation\n") - num_frames = len(vca) - two_percent = num_frames / 50 - - for frame, vca_ob in enumerate(vca): - self.smd_file.write("time {}\n".format(frame)) - - self.smd_file.writelines(["{} {} {}\n".format(loop.index, getSmdVec(vca_ob.data.vertices[loop.vertex_index].co), getSmdVec(loop.normal)) for loop in vca_ob.data.loops]) - - if two_percent and frame % two_percent == 0: - print(".", debug_only=True, newline=False) - bpy.context.window_manager.progress_update(frame / num_frames) - - removeObject(vca_ob) - vca[frame] = None - - self.smd_file.write("end\n") - print(debug_only=True) - print("Exported {} frames ({:.1f}MB)".format(num_frames, self.smd_file.tell() / 1024 / 1024)) - self.smd_file.close() - bench.report("Vertex 
animation") - print() - return 1 - - def writeVCASequence(self,name,vca,dir_path): - self.smd_file = self.openSMD(dir_path,"vcaanim_{}.smd".format(name),"SMD") - if self.smd_file == None: return 0 - - self.smd_file.write( -'''nodes -{2} -{0} "vcabone_{1}" -1 -end -skeleton -'''.format(vca.bone_id, name, - "\n".join(['''{} "{}" -1'''.format(self.bone_ids[b.name],b.name) for b in self.exportable_bones if b.parent == None]) - if self.armature_src else '0 "root" -1') - ) - - max_frame = float(len(vca)-1) - for i in range(len(vca)): - self.smd_file.write("time {}\n".format(i)) - if self.armature_src: - for root_bone in [b for b in self.exportable_bones if b.parent == None]: - mat = getUpAxisMat('Y').inverted() @ self.armature.matrix_world @ root_bone.matrix - self.smd_file.write("{} {} {}\n".format(self.bone_ids[root_bone.name], getSmdVec(mat.to_translation()), getSmdVec(mat.to_euler()))) - else: - self.smd_file.write("0 0 0 0 {} 0 0\n".format("-1.570797" if bpy.context.scene.vs.up_axis == 'Z' else "0")) - self.smd_file.write("{0} 1.0 {1} 0 0 0 0\n".format(vca.bone_id,getSmdFloat(i / max_frame))) - self.smd_file.write("end\n") - self.smd_file.close() - return 1 - - def writeDMX(self, id, bake_results, name, dir_path): - bench = BenchMarker(1,"DMX") - filepath = os.path.realpath(os.path.join(dir_path,name + ".dmx")) - print("-",filepath) - armature_name = self.armature_src.name if self.armature_src else name - materials = {} - written = 0 - - def makeTransform(name,matrix,object_name): - trfm = dm.add_element(name,"DmeTransform",id=object_name+"transform") - trfm["position"] = datamodel.Vector3(matrix.to_translation()) - trfm["orientation"] = getDatamodelQuat(matrix.to_quaternion()) - return trfm - - dm = datamodel.DataModel("model",State.datamodelFormat) - dm.allow_random_ids = False - - source2 = dm.format_ver >= 22 - - root = dm.add_element(bpy.context.scene.name,id="Scene"+bpy.context.scene.name) - DmeModel = dm.add_element(armature_name,"DmeModel",id="Object" + 
armature_name) - DmeModel_children = DmeModel["children"] = datamodel.make_array([],datamodel.Element) - - DmeModel_transforms = dm.add_element("base","DmeTransformList",id="transforms"+bpy.context.scene.name) - DmeModel["baseStates"] = datamodel.make_array([ DmeModel_transforms ],datamodel.Element) - DmeModel_transforms["transforms"] = datamodel.make_array([],datamodel.Element) - DmeModel_transforms = DmeModel_transforms["transforms"] - - if source2: - DmeAxisSystem = DmeModel["axisSystem"] = dm.add_element("axisSystem","DmeAxisSystem","AxisSys" + armature_name) - DmeAxisSystem["upAxis"] = axes_lookup_source2[bpy.context.scene.vs.up_axis] - DmeAxisSystem["forwardParity"] = 1 # ?? - DmeAxisSystem["coordSys"] = 0 # ?? - - DmeModel["transform"] = makeTransform("",Matrix(),DmeModel.name + "transform") - - keywords = getDmxKeywords(dm.format_ver) - - # skeleton - root["skeleton"] = DmeModel - want_jointlist = dm.format_ver >= 11 - want_jointtransforms = dm.format_ver in range(0,21) - if want_jointlist: - jointList = DmeModel["jointList"] = datamodel.make_array([],datamodel.Element) - if source2: - jointList.append(DmeModel) - if want_jointtransforms: - jointTransforms = DmeModel["jointTransforms"] = datamodel.make_array([],datamodel.Element) - if source2: - jointTransforms.append(DmeModel["transform"]) - bone_elements = {} - if self.armature: armature_scale = self.armature.matrix_world.to_scale() - - def writeBone(bone): - if isinstance(bone,str): - bone_name = bone - bone = None - else: - if bone and not bone in self.exportable_bones: - children = [] - for child_elems in [writeBone(child) for child in bone.children]: - if child_elems: children.extend(child_elems) - return children - bone_name = bone.name - - bone_elements[bone_name] = bone_elem = dm.add_element(bone_name,"DmeJoint",id=bone_name) - if want_jointlist: jointList.append(bone_elem) - self.bone_ids[bone_name] = len(bone_elements) - (0 if source2 else 1) # in Source 2, index 0 is the DmeModel - - if not 
bone: relMat = Matrix() - else: - cur_p = bone.parent - while cur_p and not cur_p in self.exportable_bones: cur_p = cur_p.parent - if cur_p: - relMat = cur_p.matrix.inverted() @ bone.matrix - else: - relMat = self.armature.matrix_world @ bone.matrix - - trfm = makeTransform(bone_name,relMat,"bone"+bone_name) - trfm_base = makeTransform(bone_name,relMat,"bone_base"+bone_name) - - if bone and bone.parent: - for j in range(3): - trfm["position"][j] *= armature_scale[j] - trfm_base["position"] = trfm["position"] - - if want_jointtransforms: jointTransforms.append(trfm) - bone_elem["transform"] = trfm - - DmeModel_transforms.append(trfm_base) - - if bone: - children = bone_elem["children"] = datamodel.make_array([],datamodel.Element) - for child_elems in [writeBone(child) for child in bone.children]: - if child_elems: children.extend(child_elems) - - bpy.context.window_manager.progress_update(len(bone_elements)/num_bones) - return [bone_elem] - - if self.armature: - num_bones = len(self.exportable_bones) - add_implicit_bone = not source2 - - if add_implicit_bone: - DmeModel_children.extend(writeBone(implicit_bone_name)) - for root_elems in [writeBone(bone) for bone in self.armature.pose.bones if not bone.parent and not (add_implicit_bone and bone.name == implicit_bone_name)]: - if root_elems: DmeModel_children.extend(root_elems) - - bench.report("Bones") - - for vca in bake_results[0].vertex_animations: - DmeModel_children.extend(writeBone("vcabone_{}".format(vca))) - - DmeCombinationOperator = None - for _ in [bake for bake in bake_results if bake.shapes]: - if self.flex_controller_mode == 'ADVANCED': - if not hasFlexControllerSource(self.flex_controller_source): - self.error(get_id("exporter_err_flexctrl_undefined",True).format(name) ) - return written - - text = bpy.data.texts.get(self.flex_controller_source) - msg = "- Loading flex controllers from " - element_path = [ "combinationOperator" ] - try: - if text: - print(msg + "text block \"{}\"".format(text.name)) - 
controller_dm = datamodel.parse(text.as_string(),element_path=element_path) - else: - path = os.path.realpath(bpy.path.abspath(self.flex_controller_source)) - print(msg + path) - controller_dm = datamodel.load(path=path,element_path=element_path) - - DmeCombinationOperator = controller_dm.root["combinationOperator"] - - for elem in [elem for elem in DmeCombinationOperator["targets"] if elem.type != "DmeFlexRules"]: - DmeCombinationOperator["targets"].remove(elem) - except Exception as err: - self.error(get_id("exporter_err_flexctrl_loadfail", True).format(err)) - return written - else: - DmeCombinationOperator = flex.DmxWriteFlexControllers.make_controllers(id).root["combinationOperator"] - - break - - if not DmeCombinationOperator and len(bake_results[0].vertex_animations): - DmeCombinationOperator = flex.DmxWriteFlexControllers.make_controllers(id).root["combinationOperator"] - - if DmeCombinationOperator: - root["combinationOperator"] = DmeCombinationOperator - bench.report("Flex setup") - - for bake in [bake for bake in bake_results if bake.object.type != 'ARMATURE']: - root["model"] = DmeModel - - ob = bake.object - - vertex_data = dm.add_element("bind","DmeVertexData",id=bake.name+"verts") - - DmeMesh = dm.add_element(bake.name,"DmeMesh",id=bake.name+"mesh") - DmeMesh["visible"] = True - DmeMesh["bindState"] = vertex_data - DmeMesh["currentState"] = vertex_data - DmeMesh["baseStates"] = datamodel.make_array([vertex_data],datamodel.Element) - - DmeDag = dm.add_element(bake.name,"DmeDag",id="ob"+bake.name+"dag") - if want_jointlist: jointList.append(DmeDag) - DmeDag["shape"] = DmeMesh - - bone_child = isinstance(bake.envelope, str) - if bone_child and bake.envelope in bone_elements: - bone_elements[bake.envelope]["children"].append(DmeDag) - trfm_mat = bake.bone_parent_matrix - else: - DmeModel_children.append(DmeDag) - trfm_mat = ob.matrix_world - - trfm = makeTransform(bake.name, trfm_mat, "ob"+bake.name) - - if want_jointtransforms: 
jointTransforms.append(trfm) - - DmeDag["transform"] = trfm - DmeModel_transforms.append(makeTransform(bake.name, trfm_mat, "ob_base"+bake.name)) - - jointCount = 0 - weight_link_limit = 4 if source2 else 3 - badJointCounts = 0 - culled_weight_links = 0 - cull_threshold = bpy.context.scene.vs.dmx_weightlink_threshold - have_weightmap = False - - if type(bake.envelope) is bpy.types.ArmatureModifier: - ob_weights = self.getWeightmap(bake) - - for vert_weights in ob_weights: - count = len(vert_weights) - - if weight_link_limit: - if count > weight_link_limit and cull_threshold > 0: - vert_weights.sort(key=lambda link: link[1],reverse=True) - while len(vert_weights) > weight_link_limit and vert_weights[-1][1] <= cull_threshold: - vert_weights.pop() - culled_weight_links += 1 - count = len(vert_weights) - if count > weight_link_limit: badJointCounts += 1 - - jointCount = max(jointCount,count) - if jointCount: have_weightmap = True - elif bake.envelope: - jointCount = 1 - - if badJointCounts: - self.warning(get_id("exporter_warn_weightlinks_excess",True).format(badJointCounts,bake.src.name,weight_link_limit)) - if culled_weight_links: - self.warning(get_id("exporter_warn_weightlinks_culled",True).format(culled_weight_links,cull_threshold,bake.src.name)) - - format = vertex_data["vertexFormat"] = datamodel.make_array( [ keywords['pos'], keywords['norm'] ], str) - - vertex_data["flipVCoordinates"] = True - vertex_data["jointCount"] = jointCount - - num_verts = len(ob.data.vertices) - num_loops = len(ob.data.loops) - norms = [None] * num_loops - texco = ordered_set.OrderedSet() - face_sets = collections.OrderedDict() - texcoIndices = [None] * num_loops - jointWeights = [] - jointIndices = [] - balance = [0.0] * num_verts - - Indices = [None] * num_loops - - uv_layer = ob.data.uv_layers.active.data - - bench.report("object setup") - - v=0 - for vert in ob.data.vertices: - vert.select = False - - if bake.shapes and bake.balance_vg: - try: balance[vert.index] = 
bake.balance_vg.weight(vert.index) - except: pass - - if have_weightmap: - weights = [0.0] * jointCount - indices = [0] * jointCount - i = 0 - total_weight = 0 - vert_weights = ob_weights[vert.index] - for i in range(len(vert_weights)): - indices[i] = vert_weights[i][0] - weights[i] = vert_weights[i][1] - total_weight += weights[i] - i+=1 - - if source2 and total_weight == 0: - weights[0] = 1.0 # attach to the DmeModel itself, avoiding motion. - - jointWeights.extend(weights) - jointIndices.extend(indices) - v += 1 - if v % 50 == 0: - bpy.context.window_manager.progress_update(v / num_verts) - - bench.report("verts") - - for loop in [ob.data.loops[i] for poly in ob.data.polygons for i in poly.loop_indices]: - texcoIndices[loop.index] = texco.add(datamodel.Vector2(uv_layer[loop.index].uv)) - norms[loop.index] = datamodel.Vector3(loop.normal) - Indices[loop.index] = loop.vertex_index - - bench.report("loops") - - bpy.context.view_layer.objects.active = ob - bpy.ops.object.mode_set(mode='EDIT') - bm = bmesh.from_edit_mesh(ob.data) - bm.verts.ensure_lookup_table() - bm.faces.ensure_lookup_table() - - vertex_data[keywords['pos']] = datamodel.make_array((v.co for v in bm.verts),datamodel.Vector3) - vertex_data[keywords['pos'] + "Indices"] = datamodel.make_array((l.vert.index for f in bm.faces for l in f.loops),int) - - if source2: # write out arbitrary vertex data - loops = [loop for face in bm.faces for loop in face.loops] - loop_indices = datamodel.make_array([loop.index for loop in loops], int) - layerGroups = bm.loops.layers - - class exportLayer: - name : str - - def __init__(self, layer, exportName = None): - self._layer = layer - self.name = exportName or layer.name - - def data_for(self, loop): return loop[self._layer] - - def get_bmesh_layers(layerGroup): - return [exportLayer(l) for l in layerGroup if re.match(r".*\$[0-9]+", l.name)] - - defaultUvLayer = "texcoord$0" - uv_layers_to_export = list(get_bmesh_layers(layerGroups.uv)) - if not defaultUvLayer in 
[l.name for l in uv_layers_to_export]: # select a default UV map - uv_render_layer = next((l.name for l in ob.data.uv_layers if l.active_render and not l in uv_layers_to_export), None) - if uv_render_layer: - uv_layers_to_export.append(exportLayer(layerGroups.uv[uv_render_layer], defaultUvLayer)) - print("- Exporting '{}' as {}".format(uv_render_layer, defaultUvLayer)) - else: - self.warning("'{}' does not contain a UV Map called {} and no suitable fallback map could be found. The model may be missing UV data.".format(bake.name, defaultUvLayer)) - - for layer in uv_layers_to_export: - uv_set = ordered_set.OrderedSet() - uv_indices = [] - for uv in (layer.data_for(loop).uv for loop in loops): - uv_indices.append(uv_set.add(datamodel.Vector2(uv))) - - vertex_data[layer.name] = datamodel.make_array(uv_set, datamodel.Vector2) - vertex_data[layer.name + "Indices"] = datamodel.make_array(uv_indices, int) - format.append(layer.name) - - def make_vertex_layer(layer : exportLayer, arrayType): - vertex_data[layer.name] = datamodel.make_array([layer.data_for(loop) for loop in loops], arrayType) - vertex_data[layer.name + "Indices"] = loop_indices - format.append(layer.name) - - for layer in get_bmesh_layers(layerGroups.color): - make_vertex_layer(layer, datamodel.Vector4) - for layer in get_bmesh_layers(layerGroups.float): - make_vertex_layer(layer, float) - for layer in get_bmesh_layers(layerGroups.int): - make_vertex_layer(layer, int) - for layer in get_bmesh_layers(layerGroups.string): - make_vertex_layer(layer, str) - - bench.report("Source 2 vertex data") - - else: - format.append("textureCoordinates") - vertex_data["textureCoordinates"] = datamodel.make_array(texco,datamodel.Vector2) - vertex_data["textureCoordinatesIndices"] = datamodel.make_array(texcoIndices,int) - - if have_weightmap: - vertex_data[keywords["weight"]] = datamodel.make_array(jointWeights,float) - vertex_data[keywords["weight_indices"]] = datamodel.make_array(jointIndices,int) - format.extend( [ 
keywords['weight'], keywords["weight_indices"] ] ) - - deform_layer = bm.verts.layers.deform.active - if deform_layer: - for cloth_enable in (group for group in ob.vertex_groups if re.match(r"cloth_enable\$[0-9]+", group.name)): - format.append(cloth_enable.name) - values = [v[deform_layer].get(cloth_enable.index, 0) for v in bm.verts] - valueSet = ordered_set.OrderedSet(values) - vertex_data[cloth_enable.name] = datamodel.make_array(valueSet, float) - vertex_data[cloth_enable.name + "Indices"] = datamodel.make_array((valueSet.index(values[i]) for i in Indices), int) - - if bake.shapes and bake.balance_vg: - vertex_data[keywords["balance"]] = datamodel.make_array(balance,float) - vertex_data[keywords["balance"] + "Indices"] = datamodel.make_array(Indices,int) - format.append(keywords["balance"]) - - vertex_data[keywords['norm']] = datamodel.make_array(norms,datamodel.Vector3) - vertex_data[keywords['norm'] + "Indices"] = datamodel.make_array(range(len(norms)),int) - - bench.report("insert") - - bad_face_mats = 0 - p = 0 - num_polys = len(bm.faces) - - two_percent = int(num_polys / 50) - print("Polygons: ",debug_only=True,newline=False) - - bm_face_sets = collections.defaultdict(list) - for face in bm.faces: - mat_name, mat_success = self.GetMaterialName(ob, face.material_index) - if not mat_success: - bad_face_mats += 1 - bm_face_sets[mat_name].extend((*(l.index for l in face.loops),-1)) - - p+=1 - if two_percent and p % two_percent == 0: - print(".", debug_only=True, newline=False) - bpy.context.window_manager.progress_update(p / num_polys) - - for (mat_name,indices) in bm_face_sets.items(): - material_elem = materials.get(mat_name) - if not material_elem: - materials[mat_name] = material_elem = dm.add_element(mat_name,"DmeMaterial",id=mat_name + "mat") - material_elem["mtlName"] = os.path.join(bpy.context.scene.vs.material_path, mat_name).replace('\\','/') - - face_set = dm.add_element(mat_name,"DmeFaceSet",id=bake.name+mat_name+"faces") - face_sets[mat_name] = 
face_set - - face_set["material"] = material_elem - face_set["faces"] = datamodel.make_array(indices,int) - - print(debug_only=True) - DmeMesh["faceSets"] = datamodel.make_array(list(face_sets.values()),datamodel.Element) - - if bad_face_mats: - self.warning(get_id("exporter_err_facesnotex_ormat").format(bad_face_mats, bake.name)) - bench.report("polys") - - bpy.ops.object.mode_set(mode='OBJECT') - del bm - - two_percent = int(len(bake.shapes) / 50) - print("Shapes: ",debug_only=True,newline=False) - delta_states = [] - corrective_shapes_seen = [] - if bake.shapes: - shape_names = [] - num_shapes = len(bake.shapes) - num_correctives = 0 - num_wrinkles = 0 - - for shape_name,shape in bake.shapes.items(): - wrinkle_scale = 0 - corrective = getCorrectiveShapeSeparator() in shape_name - if corrective: - # drivers always override shape name to avoid name truncation issues - corrective_targets_driver = ordered_set.OrderedSet(flex.getCorrectiveShapeKeyDrivers(bake.src.data.shape_keys.key_blocks[shape_name]) or []) - corrective_targets_name = ordered_set.OrderedSet(shape_name.split(getCorrectiveShapeSeparator())) - corrective_targets = corrective_targets_driver or corrective_targets_name - corrective_targets.source = shape_name - - if(corrective_targets in corrective_shapes_seen): - previous_shape = next(x for x in corrective_shapes_seen if x == corrective_targets) - self.warning(get_id("exporter_warn_correctiveshape_duplicate", True).format(shape_name, "+".join(corrective_targets), previous_shape.source)) - continue - else: - corrective_shapes_seen.append(corrective_targets) - - if corrective_targets_driver and corrective_targets_driver != corrective_targets_name: - generated_shape_name = getCorrectiveShapeSeparator().join(corrective_targets_driver) - print("- Renamed shape key '{}' to '{}' to match its corrective shape drivers.".format(shape_name, generated_shape_name)) - shape_name = generated_shape_name - num_correctives += 1 - else: - if self.flex_controller_mode == 
'ADVANCED': - def _FindScale(): - for control in controller_dm.root["combinationOperator"]["controls"]: - for i in range(len(control["rawControlNames"])): - if control["rawControlNames"][i] == shape_name: - scales = control.get("wrinkleScales") - return scales[i] if scales else 0 - raise ValueError() - try: - wrinkle_scale = _FindScale() - except ValueError: - self.warning(get_id("exporter_err_flexctrl_missing", True).format(shape_name)) - pass - - shape_names.append(shape_name) - DmeVertexDeltaData = dm.add_element(shape_name,"DmeVertexDeltaData",id=ob.name+shape_name) - delta_states.append(DmeVertexDeltaData) - - vertexFormat = DmeVertexDeltaData["vertexFormat"] = datamodel.make_array([ keywords['pos'], keywords['norm'] ],str) - - wrinkle = [] - wrinkleIndices = [] - - # what do these do? - #DmeVertexDeltaData["flipVCoordinates"] = False - #DmeVertexDeltaData["corrected"] = True - - shape_pos = [] - shape_posIndices = [] - shape_norms = [] - shape_normIndices = [] - cache_deltas = wrinkle_scale - if cache_deltas: - delta_lengths = [None] * len(ob.data.vertices) - max_delta = 0 - - for ob_vert in ob.data.vertices: - shape_vert = shape.vertices[ob_vert.index] - - if ob_vert.co != shape_vert.co: - delta = shape_vert.co - ob_vert.co - delta_length = delta.length - - if abs(delta_length) > 1e-5: - if cache_deltas: - delta_lengths[ob_vert.index] = delta_length - shape_pos.append(datamodel.Vector3(delta)) - shape_posIndices.append(ob_vert.index) - - if corrective: - corrective_target_shapes = [] - for corrective_shape_name in corrective_targets: - corrective_target = bake.shapes.get(corrective_shape_name) - if corrective_target: - corrective_target_shapes.append(corrective_target) - else: - self.warning(get_id("exporter_err_missing_corrective_target", format_string=True).format(shape_name, corrective_shape_name)) - continue - - # We need the absolute normals as generated by Blender - for shape_vert in shape.vertices: - shape_vert.co -= 
ob.data.vertices[shape_vert.index].co - corrective_target.vertices[shape_vert.index].co - - for ob_loop in ob.data.loops: - shape_loop = shape.loops[ob_loop.index] - norm = shape_loop.normal - - if corrective: - base = Vector(ob_loop.normal) - for corrective_target in corrective_target_shapes: - # Normals for corrective shape keys are deltas from those of the deformed mesh, not the basis shape. - base += corrective_target.loops[shape_loop.index].normal - ob_loop.normal - else: - base = ob_loop.normal - - if norm.dot(base.normalized()) < 1 - 1e-3: - shape_norms.append(datamodel.Vector3(norm - base)) - shape_normIndices.append(shape_loop.index) - - if wrinkle_scale: - delta_len = delta_lengths[ob_loop.vertex_index] - if delta_len: - max_delta = max(max_delta,delta_len) - wrinkle.append(delta_len) - wrinkleIndices.append(texcoIndices[ob_loop.index]) - - del shape_vert - - if wrinkle_scale and max_delta: - wrinkle_mod = wrinkle_scale / max_delta - if wrinkle_mod != 1: - for i in range(len(wrinkle)): - wrinkle[i] *= wrinkle_mod - - DmeVertexDeltaData[keywords['pos']] = datamodel.make_array(shape_pos,datamodel.Vector3) - DmeVertexDeltaData[keywords['pos'] + "Indices"] = datamodel.make_array(shape_posIndices,int) - DmeVertexDeltaData[keywords['norm']] = datamodel.make_array(shape_norms,datamodel.Vector3) - DmeVertexDeltaData[keywords['norm'] + "Indices"] = datamodel.make_array(shape_normIndices,int) - - if wrinkle_scale: - vertexFormat.append(keywords["wrinkle"]) - num_wrinkles += 1 - DmeVertexDeltaData[keywords["wrinkle"]] = datamodel.make_array(wrinkle,float) - DmeVertexDeltaData[keywords["wrinkle"] + "Indices"] = datamodel.make_array(wrinkleIndices,int) - - bpy.context.window_manager.progress_update(len(shape_names) / num_shapes) - if two_percent and len(shape_names) % two_percent == 0: - print(".",debug_only=True,newline=False) - - if bpy.app.debug_value <= 1: - for shape in bake.shapes.values(): - bpy.data.meshes.remove(shape) - del shape - bake.shapes.clear() - - 
print(debug_only=True) - bench.report("shapes") - print("- {} flexes ({} with wrinklemaps) + {} correctives".format(num_shapes - num_correctives,num_wrinkles,num_correctives)) - - vca_matrix = ob.matrix_world.inverted() - for vca_name,vca in bake_results[0].vertex_animations.items(): - frame_shapes = [] - - for i, vca_ob in enumerate(vca): - DmeVertexDeltaData = dm.add_element("{}-{}".format(vca_name,i),"DmeVertexDeltaData",id=ob.name+vca_name+str(i)) - delta_states.append(DmeVertexDeltaData) - frame_shapes.append(DmeVertexDeltaData) - DmeVertexDeltaData["vertexFormat"] = datamodel.make_array([ "positions", "normals" ],str) - - shape_pos = [] - shape_posIndices = [] - shape_norms = [] - shape_normIndices = [] - - for shape_loop in vca_ob.data.loops: - shape_vert = vca_ob.data.vertices[shape_loop.vertex_index] - ob_loop = ob.data.loops[shape_loop.index] - ob_vert = ob.data.vertices[ob_loop.vertex_index] - - if ob_vert.co != shape_vert.co: - delta = vca_matrix @ shape_vert.co - ob_vert.co - - if abs(delta.length) > 1e-5: - shape_pos.append(datamodel.Vector3(delta)) - shape_posIndices.append(ob_vert.index) - - norm = Vector(shape_loop.normal) - norm.rotate(vca_matrix) - if abs(1.0 - norm.dot(ob_loop.normal)) > epsilon[0]: - shape_norms.append(datamodel.Vector3(norm - ob_loop.normal)) - shape_normIndices.append(shape_loop.index) - - DmeVertexDeltaData["positions"] = datamodel.make_array(shape_pos,datamodel.Vector3) - DmeVertexDeltaData["positionsIndices"] = datamodel.make_array(shape_posIndices,int) - DmeVertexDeltaData["normals"] = datamodel.make_array(shape_norms,datamodel.Vector3) - DmeVertexDeltaData["normalsIndices"] = datamodel.make_array(shape_normIndices,int) - - removeObject(vca_ob) - vca[i] = None - - if vca.export_sequence: # generate and export a skeletal animation that drives the vertex animation - vca_arm = bpy.data.objects.new("vca_arm",bpy.data.armatures.new("vca_arm")) - bpy.context.scene.collection.objects.link(vca_arm) - 
bpy.context.view_layer.objects.active = vca_arm - - bpy.ops.object.mode_set(mode='EDIT') - vca_bone_name = "vcabone_" + vca_name - vca_bone = vca_arm.data.edit_bones.new(vca_bone_name) - vca_bone.tail.y = 1 - - bpy.context.scene.frame_set(0) - mat = getUpAxisMat('y').inverted() - # DMX animations don't handle missing root bones or meshes, so create bones to represent them - if self.armature_src: - for bone in [bone for bone in self.armature_src.data.bones if bone.parent is None]: - b = vca_arm.data.edit_bones.new(bone.name) - b.head = mat @ bone.head - b.tail = mat @ bone.tail - else: - for bake in bake_results: - bake_mat = mat @ bake.object.matrix_world - b = vca_arm.data.edit_bones.new(bake.name) - b.head = bake_mat @ b.head - b.tail = bake_mat @ Vector([0,1,0]) - - bpy.ops.object.mode_set(mode='POSE') - ops.pose.armature_apply() # refreshes the armature's internal state, required! - action = vca_arm.animation_data_create().action = bpy.data.actions.new("vcaanim_" + vca_name) - for i in range(2): - fc = action.fcurves.new('pose.bones["{}"].location'.format(vca_bone_name),index=i) - fc.keyframe_points.add(count=2) - for key in fc.keyframe_points: key.interpolation = 'LINEAR' - if i == 0: fc.keyframe_points[0].co = (0,1.0) - fc.keyframe_points[1].co = (vca.num_frames,1.0) - fc.update() - - # finally, write it out - self.exportId(bpy.context,vca_arm) - written += 1 - - if delta_states: - DmeMesh["deltaStates"] = datamodel.make_array(delta_states,datamodel.Element) - DmeMesh["deltaStateWeights"] = DmeMesh["deltaStateWeightsLagged"] = \ - datamodel.make_array([datamodel.Vector2([0.0,0.0])] * len(delta_states),datamodel.Vector2) - - targets = DmeCombinationOperator["targets"] - added = False - for elem in targets: - if elem.type == "DmeFlexRules": - if elem["deltaStates"][0].name in shape_names: # can't have the same delta name on multiple objects - elem["target"] = DmeMesh - added = True - if not added: - targets.append(DmeMesh) - - if len(bake_results) == 1 and 
bake_results[0].object.type == 'ARMATURE': # animation - ad = self.armature.animation_data - - anim_len = animationLength(ad) if ad else 0 - if anim_len == 0: - self.warning(get_id("exporter_err_noframes",True).format(self.armature_src.name)) - - if ad.action and hasattr(ad.action,'fps'): - fps = bpy.context.scene.render.fps = ad.action.fps - bpy.context.scene.render.fps_base = 1 - else: - fps = bpy.context.scene.render.fps * bpy.context.scene.render.fps_base - - DmeChannelsClip = dm.add_element(name,"DmeChannelsClip",id=name+"clip") - DmeAnimationList = dm.add_element(armature_name,"DmeAnimationList",id=armature_name+"list") - DmeAnimationList["animations"] = datamodel.make_array([DmeChannelsClip],datamodel.Element) - root["animationList"] = DmeAnimationList - - DmeTimeFrame = dm.add_element("timeframe","DmeTimeFrame",id=name+"time") - duration = anim_len / fps - if dm.format_ver >= 11: - DmeTimeFrame["duration"] = datamodel.Time(duration) - else: - DmeTimeFrame["durationTime"] = int(duration * 10000) - DmeTimeFrame["scale"] = 1.0 - DmeChannelsClip["timeFrame"] = DmeTimeFrame - DmeChannelsClip["frameRate"] = fps if source2 else int(fps) - - channels = DmeChannelsClip["channels"] = datamodel.make_array([],datamodel.Element) - bone_channels = {} - def makeChannel(bone): - bone_channels[bone.name] = [] - channel_template = [ - [ "_p", "position", "Vector3", datamodel.Vector3 ], - [ "_o", "orientation", "Quaternion", datamodel.Quaternion ] - ] - for template in channel_template: - cur = dm.add_element(bone.name + template[0],"DmeChannel",id=bone.name+template[0]) - cur["toAttribute"] = template[1] - cur["toElement"] = (bone_elements[bone.name] if bone else DmeModel)["transform"] - cur["mode"] = 1 - val_arr = dm.add_element(template[2]+" log","Dme"+template[2]+"LogLayer",cur.name+"loglayer") - cur["log"] = dm.add_element(template[2]+" log","Dme"+template[2]+"Log",cur.name+"log") - cur["log"]["layers"] = datamodel.make_array([val_arr],datamodel.Element) - 
val_arr["times"] = datamodel.make_array([],datamodel.Time if dm.format_ver > 11 else int) - val_arr["values"] = datamodel.make_array([],template[3]) - if bone: bone_channels[bone.name].append(val_arr) - channels.append(cur) - - for bone in self.exportable_bones: - makeChannel(bone) - num_frames = int(anim_len + 1) - bench.report("Animation setup") - prev_pos = {} - prev_rot = {} - skipped_pos = {} - skipped_rot = {} - - two_percent = num_frames / 50 - print("Frames: ",debug_only=True,newline=False) - for frame in range(0,num_frames): - bpy.context.window_manager.progress_update(frame/num_frames) - bpy.context.scene.frame_set(frame) - keyframe_time = datamodel.Time(frame / fps) if dm.format_ver > 11 else int(frame/fps * 10000) - evaluated_bones = self.getEvaluatedPoseBones() - for bone in evaluated_bones: - channel = bone_channels[bone.name] - - cur_p = bone.parent - while cur_p and not cur_p in evaluated_bones: cur_p = cur_p.parent - if cur_p: - relMat = cur_p.matrix.inverted() @ bone.matrix - else: - relMat = self.armature.matrix_world @ bone.matrix - - pos = relMat.to_translation() - if bone.parent: - for j in range(3): pos[j] *= armature_scale[j] - - rot = relMat.to_quaternion() - rot_vec = Vector(rot.to_euler()) - - if not prev_pos.get(bone) or pos - prev_pos[bone] > epsilon: - skip_time = skipped_pos.get(bone) - if skip_time != None: - channel[0]["times"].append(skip_time) - channel[0]["values"].append(channel[0]["values"][-1]) - del skipped_pos[bone] - - channel[0]["times"].append(keyframe_time) - channel[0]["values"].append(datamodel.Vector3(pos)) - else: - skipped_pos[bone] = keyframe_time - - - if not prev_rot.get(bone) or rot_vec - prev_rot[bone] > epsilon: - skip_time = skipped_rot.get(bone) - if skip_time != None: - channel[1]["times"].append(skip_time) - channel[1]["values"].append(channel[1]["values"][-1]) - del skipped_rot[bone] - - channel[1]["times"].append(keyframe_time) - channel[1]["values"].append(getDatamodelQuat(rot)) - else: - 
skipped_rot[bone] = keyframe_time - - prev_pos[bone] = pos - prev_rot[bone] = rot_vec - - if two_percent and frame % two_percent: - print(".",debug_only=True,newline=False) - print(debug_only=True) - - bpy.context.window_manager.progress_update(0.99) - print("- Writing DMX...") - try: - if bpy.context.scene.vs.use_kv2: - dm.write(filepath,"keyvalues2",1) - else: - dm.write(filepath,"binary",State.datamodelEncoding) - written += 1 - except (PermissionError, FileNotFoundError) as err: - self.error(get_id("exporter_err_open", True).format("DMX",err)) - - bench.report("write") - if bench.quiet: - print("- DMX export took",bench.total(),"\n") - - return written +# Copyright (c) 2014 Tom Edwards contact@steamreview.org +# +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### + +import bpy, bmesh, subprocess, collections, re +from bpy import ops +from bpy.app.translations import pgettext +from mathutils import Vector, Matrix +from math import * +from bpy.types import Collection +from bpy.props import CollectionProperty, StringProperty, BoolProperty + +from .utils import * +from . 
import datamodel, ordered_set, flex + +class SMD_OT_Compile(bpy.types.Operator, Logger): + bl_idname = "smd.compile_qc" + bl_label = get_id("qc_compile_title") + bl_description = get_id("qc_compile_tip") + + files : CollectionProperty(type=bpy.types.OperatorFileListElement) + directory : StringProperty(maxlen=1024, default="", subtype='FILE_PATH') + + filepath : StringProperty(name="File path", maxlen=1024, default="", subtype='FILE_PATH') + + filter_folder : BoolProperty(default=True, options={'HIDDEN'}) + filter_glob : StringProperty(default="*.qc;*.qci", options={'HIDDEN'}) + + @classmethod + def poll(cls,context): + return State.gamePath is not None and State.compiler == Compiler.STUDIOMDL + + def invoke(self,context, event): + bpy.context.window_manager.fileselect_add(self) + return {'RUNNING_MODAL'} + + def execute(self,context): + multi_files = len([file for file in self.properties.files if file.name]) > 0 + if not multi_files and not (self.properties.filepath == "*" or os.path.isfile(self.properties.filepath)): + self.report({'ERROR'},"No QC files selected for compile.") + return {'CANCELLED'} + + num = self.compileQCs([os.path.join(self.properties.directory,file.name) for file in self.properties.files] if multi_files else self.properties.filepath) + #if num > 1: + # bpy.context.window_manager.progress_begin(0,1) + self.errorReport(get_id("qc_compile_complete",True).format(num,State.engineBranchTitle)) + bpy.context.window_manager.progress_end() + return {'FINISHED'} + + @classmethod + def getQCs(cls, path : str = None) -> list: + import glob + ext = ".qc" + out = [] + internal = False + if not path: + path = bpy.path.abspath(bpy.context.scene.vs.qc_path) + internal = True + for result in glob.glob(path): + if result.endswith(ext): + out.append(result) + + if not internal and not len(out) and not path.endswith(ext): + out = cls.getQCs(path + ext) + return out + + def compileQCs(self,path=None): + scene = bpy.context.scene + print("\n") + + studiomdl_path = 
os.path.join(bpy.path.abspath(scene.vs.engine_path),"studiomdl.exe") + + if path == "*": + paths = SMD_OT_Compile.getQCs() + elif isinstance(path,str): + paths = [os.path.realpath(bpy.path.abspath(path))] + elif hasattr(path,"__getitem__"): + paths = path + else: + paths = SMD_OT_Compile.getQCs() + num_good_compiles = 0 + num_qcs = len(paths) + if num_qcs == 0: + self.error(get_id("qc_compile_err_nofiles")) + elif not os.path.exists(studiomdl_path): + self.error(get_id("qc_compile_err_compiler", True).format(studiomdl_path) ) + else: + i = 0 + for qc in paths: + bpy.context.window_manager.progress_update((i+1) / (num_qcs+1)) + # save any version of the file currently open in Blender + qc_mangled = qc.lower().replace('\\','/') + for candidate_area in bpy.context.screen.areas: + if candidate_area.type == 'TEXT_EDITOR' and candidate_area.spaces[0].text and candidate_area.spaces[0].text.filepath.lower().replace('\\','/') == qc_mangled: + oldType = bpy.context.area.type + bpy.context.area.type = 'TEXT_EDITOR' + bpy.context.area.spaces[0].text = candidate_area.spaces[0].text + ops.text.save() + bpy.context.area.type = oldType + break #what a farce! 
+ + print( "Running studiomdl for \"{}\"...\n".format(os.path.basename(qc)) ) + studiomdl = subprocess.Popen([studiomdl_path, "-nop4", "-game", State.gamePath, qc]) + studiomdl.communicate() + + if studiomdl.returncode == 0: + num_good_compiles += 1 + else: + self.error(get_id("qc_compile_err_unknown", True).format(os.path.basename(qc))) + i+=1 + return num_good_compiles + +class SmdExporter(bpy.types.Operator, Logger): + bl_idname = "export_scene.smd" + bl_label = get_id("exporter_title") + bl_description = get_id("exporter_tip") + + collection : bpy.props.StringProperty(name=get_id("exporter_prop_group"),description=get_id("exporter_prop_group_tip")) + export_scene : bpy.props.BoolProperty(name=get_id("scene_export"),description=get_id("exporter_prop_scene_tip"),default=False) + + @classmethod + def poll(cls,context): + return len(context.scene.vs.export_list) + + def invoke(self, context, event): + State.update_scene() + ops.wm.call_menu(name="SMD_MT_ExportChoice") + return {'PASS_THROUGH'} + + def execute(self, context): + #bpy.context.window_manager.progress_begin(0,1) + + # Misconfiguration? 
+ if State.datamodelEncoding != 0 and context.scene.vs.export_format == 'DMX': + datamodel.check_support("binary",State.datamodelEncoding) + if State.datamodelEncoding < 3 and State.datamodelFormat > 11 and not context.scene.vs.use_kv2: + self.report({'ERROR'},"DMX format \"Model {}\" requires DMX encoding \"Binary 3\" or later".format(State.datamodelFormat)) + return {'CANCELLED' } + if not context.scene.vs.export_path: + bpy.ops.wm.call_menu(name="SMD_MT_ConfigureScene") + return {'CANCELLED'} + if context.scene.vs.export_path.startswith("//") and not context.blend_data.filepath: + self.report({'ERROR'},get_id("exporter_err_relativeunsaved")) + return {'CANCELLED'} + if State.datamodelEncoding == 0 and context.scene.vs.export_format == 'DMX': + self.report({'ERROR'},get_id("exporter_err_dmxother")) + return {'CANCELLED'} + + # Don't create an undo level from edit mode + prev_mode = prev_hidden = None + if context.active_object: + if context.active_object.hide_viewport: + prev_hidden = context.active_object.name + context.active_object.hide_viewport = False + prev_mode = context.mode + if prev_mode.find("EDIT") != -1: prev_mode = 'EDIT' + elif prev_mode.find("PAINT") != -1: # FFS Blender! 
+ prev_mode = prev_mode.split('_') + prev_mode.reverse() + prev_mode = "_".join(prev_mode) + ops.object.mode_set(mode='OBJECT') + + State.update_scene() + self.bake_results = [] + self.bone_ids = {} + self.materials_used = set() + + for ob in [ob for ob in bpy.context.scene.objects if ob.type == 'ARMATURE' and len(ob.vs.subdir) == 0]: + ob.vs.subdir = "anims" + + ops.ed.undo_push(message=self.bl_label) + + try: + context.tool_settings.use_keyframe_insert_auto = False + context.tool_settings.use_keyframe_insert_keyingset = False + context.preferences.edit.use_enter_edit_mode = False + State.unhook_events() + if context.scene.rigidbody_world: + context.scene.frame_set(context.scene.rigidbody_world.point_cache.frame_start) + + # lots of operators only work on visible objects + for ob in context.scene.objects: + ob.hide_viewport = False + # ensure that objects in all collections are accessible to operators + context.view_layer.layer_collection.exclude = False + + self.files_exported = self.attemptedExports = 0 + + if self.export_scene: + for id in [exportable.item for exportable in context.scene.vs.export_list]: + if type(id) == Collection: + if shouldExportGroup(id): + self.exportId(context, id) + elif id.vs.export: + self.exportId(context, id) + else: + if self.collection == "": + for exportable in getSelectedExportables(): + if type(exportable.item) != Collection: + self.exportId(context, exportable.item) + else: + collection = bpy.data.collections[self.collection] + if collection.vs.mute: self.error(get_id("exporter_err_groupmuted", True).format(collection.name)) + elif not collection.objects: self.error(get_id("exporter_err_groupempty", True).format(collection.name)) + else: self.exportId(context, collection) + + num_good_compiles = None + + if self.attemptedExports == 0: + self.report({'ERROR'},get_id("exporter_err_noexportables")) + elif context.scene.vs.qc_compile and context.scene.vs.qc_path: + # ...and compile the QC + if not SMD_OT_Compile.poll(context): + 
print("Skipping QC compile step: context incorrect\n") + else: + num_good_compiles = SMD_OT_Compile.compileQCs(self) # hack, use self as the logger + print("\n") + + if num_good_compiles != None: + self.errorReport(get_id("exporter_report_qc", True).format( + self.files_exported, + self.elapsed_time(), + num_good_compiles, + State.engineBranchTitle, + os.path.basename(State.gamePath) + )) + else: + self.errorReport(get_id("exporter_report", True).format( + self.files_exported, + self.elapsed_time() + )) + finally: + # Clean everything up + ops.ed.undo_push(message=self.bl_label) + if bpy.app.debug_value <= 1: ops.ed.undo() + + if prev_mode: + ops.object.mode_set(mode=prev_mode) + if prev_hidden: + context.scene.objects[prev_hidden].hide_viewport = True + context.scene.update_tag() + + context.window_manager.progress_end() + State.hook_events() + + self.collection = "" + self.export_scene = False + return {'FINISHED'} + + def sanitiseFilename(self,name): + new_name = name + for badchar in "/?<>\\:*|\"": + new_name = new_name.replace(badchar,"_") + if new_name != name: + self.warning(get_id("exporter_warn_sanitised_filename",True).format(name,new_name)) + return new_name + + def exportId(self,context,id): + self.attemptedExports += 1 + self.armature = self.armature_src = None + bench = BenchMarker() + + subdir = id.vs.subdir + + print( "\nBlender Source Tools: exporting {}".format(id.name) ) + + subdir = subdir.lstrip("/") # don't want //s here! 
+ + path = os.path.join(bpy.path.abspath(context.scene.vs.export_path), subdir) + if not os.path.exists(path): + try: + os.makedirs(path) + except Exception as err: + self.error(get_id("exporter_err_makedirs", True).format(err)) + return + + if isinstance(id, bpy.types.Collection) and not any(ob.vs.export for ob in id.objects): + self.error(get_id("exporter_err_nogroupitems",True).format(id.name)) + return + + if isinstance(id, bpy.types.Object) and id.type == 'ARMATURE': + ad = id.animation_data + if not ad: return # otherwise we create a folder but put nothing in it + if id.data.vs.action_selection == 'FILTERED': + pass + elif ad.action: + export_name = ad.action.name + elif ad.nla_tracks: + export_name = id.name + else: + self.error(get_id("exporter_err_arm_noanims",True).format(id.name)) + else: + export_name = id.name + + # hide all metaballs that we don't want + for meta in [ob for ob in context.scene.objects if ob.type == 'META' and (not ob.vs.export or (isinstance(id, Collection) and not ob.name in id.objects))]: + for element in meta.data.elements: element.hide = True + + def find_basis_metaball(id): + basis_ns = id.name.rsplit(".") + if len(basis_ns) == 1: return id + + basis = id + for meta in [ob for ob in bpy.data.objects if ob.type == 'META']: + ns = meta.name.rsplit(".") + + if ns[0] != basis_ns[0]: + continue + if len(ns) == 1: + basis = meta + break + + try: + if int(ns[1]) < int(basis_ns[1]): + basis = meta + basis_ns = ns + except ValueError: + pass + return basis + + bake_results = [] + baked_metaballs = [] + + bench.report("setup") + + if bench.quiet: print("- Baking...") + + if type(id) == Collection: + group_vertex_maps = valvesource_vertex_maps(id) + for i, ob in enumerate([ob for ob in id.objects if ob.vs.export and ob.session_uid in State.exportableObjects]): + bpy.context.window_manager.progress_update(i / len(id.objects)) + if ob.type == 'META': + ob = find_basis_metaball(ob) + if ob in baked_metaballs: continue + else: 
baked_metaballs.append(ob) + + bake = self.bakeObj(ob) + for vertex_map_name in group_vertex_maps: + if not vertex_map_name in bake.object.data.vertex_colors: + vertex_map = bake.object.data.vertex_colors.new(vertex_map_name) + vertex_map.data.foreach_set("color",[1.0] * 4) + + if bake: + bake_results.append(bake) + bench.report("Group bake", len(bake_results)) + else: + if id.type == 'META': + bake = self.bakeObj(find_basis_metaball(id)) + bench.report("Metaball bake") + else: + bake = self.bakeObj(id) + bench.report("Standard bake") + + if bake: + bake_results.append(bake) + + if not any(bake_results): + return + + if State.exportFormat == ExportFormat.DMX and hasShapes(id): + self.flex_controller_mode = id.vs.flex_controller_mode + self.flex_controller_source = id.vs.flex_controller_source + + bpy.context.view_layer.objects.active = bake_results[0].object + bpy.ops.object.mode_set(mode='OBJECT') + mesh_bakes = [bake for bake in bake_results if bake.object.type == 'MESH'] + + skip_vca = False + if isinstance(id, Collection) and len(id.vs.vertex_animations) and len(id.objects) > 1: + if len(mesh_bakes) > len([bake for bake in bake_results if (type(bake.envelope) is str and bake.envelope == bake_results[0].envelope) or bake.envelope is None]): + self.error(get_id("exporter_err_unmergable",True).format(id.name)) + skip_vca = True + elif not id.vs.automerge: + id.vs.automerge = True + + for va in id.vs.vertex_animations: + if skip_vca: break + + if State.exportFormat == ExportFormat.DMX: + va.name = va.name.replace("_","-") + + vca = bake_results[0].vertex_animations[va.name] # only the first bake result will ever have a vertex animation defined + vca.export_sequence = va.export_sequence + vca.num_frames = va.end - va.start + two_percent = vca.num_frames * len(bake_results) / 50 + print("- Generating vertex animation \"{}\"".format(va.name)) + anim_bench = BenchMarker(1,va.name) + + for f in range(va.start,va.end): + bpy.context.scene.frame_set(f) + 
bpy.ops.object.select_all(action='DESELECT') + depsgraph = bpy.context.evaluated_depsgraph_get() + for bake in mesh_bakes: # create baked snapshots of each vertex animation frame + bake.fob = bpy.data.objects.new("{}-{}".format(va.name,f), bpy.data.meshes.new_from_object((bake.src.evaluated_get(depsgraph)))) + bake.fob.matrix_world = bake.src.matrix_world + bpy.context.scene.collection.objects.link(bake.fob) + bpy.context.view_layer.objects.active = bake.fob + bake.fob.select_set(True) + + top_parent = self.getTopParent(bake.src) + if top_parent: + bake.fob.location -= top_parent.location + + if context.scene.rigidbody_world: + # Blender 2.71 bug: https://developer.blender.org/T41388 + prev_rbw = bpy.context.scene.rigidbody_world.enabled + bpy.context.scene.rigidbody_world.enabled = False + + bpy.ops.object.transform_apply(location=True,scale=True,rotation=True) + + if context.scene.rigidbody_world: + bpy.context.scene.rigidbody_world.enabled = prev_rbw + + if bpy.context.selected_objects and State.exportFormat == ExportFormat.SMD: + bpy.context.view_layer.objects.active = bpy.context.selected_objects[0] + ops.object.join() + + vca.append(bpy.context.active_object if len(bpy.context.selected_objects) == 1 else bpy.context.selected_objects) + anim_bench.report("bake") + + if len(bpy.context.selected_objects) != 1: + for bake in mesh_bakes: + bpy.context.scene.collection.objects.unlink(bake.fob) + del bake.fob + + anim_bench.report("record") + + if two_percent and len(vca) / len(bake_results) % two_percent == 0: + print(".", debug_only=True, newline=False) + bpy.context.window_manager.progress_update(len(vca) / vca.num_frames) + + bench.report("\n" + va.name) + bpy.context.view_layer.objects.active = bake_results[0].src + + if isinstance(id, Collection) and State.exportFormat == ExportFormat.DMX and id.vs.automerge: + bone_parents = collections.defaultdict(list) + scene_obs = bpy.context.scene.collection.objects + view_obs = bpy.context.view_layer.objects + for bake 
in [bake for bake in bake_results if type(bake.envelope) is str or bake.envelope is None]: + bone_parents[bake.envelope].append(bake) + + for bp, parts in bone_parents.items(): + if len(parts) <= 1: continue + shape_names = set() + for key in [key for part in parts for key in part.shapes.keys()]: + shape_names.add(key) + + ops.object.select_all(action='DESELECT') + for part in parts: + ob = part.object.copy() + ob.data = ob.data.copy() + ob.data.uv_layers.active.name = "__dmx_uv__" + scene_obs.link(ob) + ob.select_set(True) + view_obs.active = ob + bake_results.remove(part) + + bpy.ops.object.join() + joined = self.BakeResult(bp + "_meshes" if bp else "loose_meshes") + joined.object = bpy.context.active_object + joined.object.name = joined.object.data.name = joined.name + joined.envelope = bp + + if parts[0].vertex_animations: + for src_name,src_vca in parts[0].vertex_animations.items(): + vca = joined.vertex_animations[src_name] = self.BakedVertexAnimation() + vca.bone_id = src_vca.bone_id + vca.export_sequence = src_vca.export_sequence + vca.num_frames = src_vca.num_frames + + for i,frame in enumerate(src_vca): + bpy.ops.object.select_all(action='DESELECT') + frame.reverse() + for ob in frame: + scene_obs.link(ob) + ob.select_set(True) + bpy.context.view_layer.objects.active = frame[0] + bpy.ops.object.join() + bpy.context.active_object.name = "{}-{}".format(src_name,i) + bpy.ops.object.transform_apply(location=True,scale=True,rotation=True) + vca.append(bpy.context.active_object) + scene_obs.unlink(bpy.context.active_object) + + bake_results.append(joined) + + for shape_name in shape_names: + ops.object.select_all(action='DESELECT') + + for part in parts: + mesh = part.shapes[shape_name] if shape_name in part.shapes else part.object.data + ob = bpy.data.objects.new(name="{} -> {}".format(part.name,shape_name),object_data = mesh.copy()) + scene_obs.link(ob) + ob.matrix_local = part.matrix + ob.select_set(True) + view_obs.active = ob + + bpy.ops.object.join() + 
joined.shapes[shape_name] = bpy.context.active_object.data + bpy.context.active_object.data.name = "{} -> {}".format(joined.object.name,shape_name) + + scene_obs.unlink(ob) + bpy.data.objects.remove(ob) + del ob + + view_obs.active = joined.object + bench.report("Mech merge") + + for result in bake_results: + if result.armature: + if not self.armature: + self.armature = result.armature.object + self.armature_src = result.armature.src + elif self.armature != result.armature.object: + self.warning(get_id("exporter_warn_multiarmature")) + + if self.armature_src: + if list(self.armature_src.scale).count(self.armature_src.scale[0]) != 3: + self.warning(get_id("exporter_err_arm_nonuniform",True).format(self.armature_src.name)) + if not self.armature: + self.armature = self.bakeObj(self.armature_src).object + exporting_armature = isinstance(id, bpy.types.Object) and id.type == 'ARMATURE' + self.exportable_bones = list([self.armature.pose.bones[edit_bone.name] for edit_bone in self.armature.data.bones if (exporting_armature or edit_bone.use_deform)]) + skipped_bones = len(self.armature.pose.bones) - len(self.exportable_bones) + if skipped_bones: + print("- Skipping {} non-deforming bones".format(skipped_bones)) + + write_func = self.writeDMX if State.exportFormat == ExportFormat.DMX else self.writeSMD + bench.report("Post Bake") + + if isinstance(id, bpy.types.Object) and id.type == 'ARMATURE' and id.data.vs.action_selection == 'FILTERED': + for action in actionsForFilter(id.vs.action_filter): + bake_results[0].object.animation_data.action = action + self.files_exported += write_func(id, bake_results, self.sanitiseFilename(action.name), path) + bench.report(write_func.__name__) + else: + self.files_exported += write_func(id, bake_results, self.sanitiseFilename(export_name), path) + bench.report(write_func.__name__) + + # Source doesn't handle Unicode characters in models. Detect any unicode strings and warn the user about them. 
+ unicode_tested = set() + def test_for_unicode(name, id, display_type): + if id in unicode_tested: return; + unicode_tested.add(id) + + try: + name.encode('ascii') + except UnicodeEncodeError: + self.warning(get_id("exporter_warn_unicode", format_string=True).format(pgettext(display_type), name)) + + # Meanwhile, Source 2 wants only lowercase characters, digits, and underscore in model names + if State.compiler > Compiler.STUDIOMDL or State.datamodelFormat >= 22: + if re.match(r'[^a-z0-9_]', id.name): + self.warning(get_id("exporter_warn_source2names", format_string=True).format(id.name)) + + for bake in bake_results: + test_for_unicode(bake.name, bake, type(bake.src).__name__) + for shape_name, shape_id in bake.shapes.items(): + test_for_unicode(shape_name, shape_id, "Shape Key") + if hasattr(bake.object,"objects"): + for ob in bake.object.objects: + test_for_unicode(ob.name, ob, ob.type.capitalize()) + for mat in self.materials_used: + test_for_unicode(mat[0], mat[1], type(mat[1]).__name__) + + + def getWeightmap(self,bake_result): + out = [] + amod = bake_result.envelope + ob = bake_result.object + if not amod or not isinstance(amod, bpy.types.ArmatureModifier): return out + + amod_vg = ob.vertex_groups.get(amod.vertex_group) + + try: + amod_ob = next((bake.object for bake in self.bake_results if bake.src == amod.object)) + except StopIteration as e: + raise ValueError("Armature for exportable \"{}\" was not baked".format(bake_result.name)) from e + + model_mat = amod_ob.matrix_world.inverted() @ ob.matrix_world + + num_verts = len(ob.data.vertices) + for v in ob.data.vertices: + weights = [] + total_weight = 0 + if len(out) % 50 == 0: bpy.context.window_manager.progress_update(len(out) / num_verts) + + if amod.use_vertex_groups: + for v_group in v.groups: + if v_group.group < len(ob.vertex_groups): + ob_group = ob.vertex_groups[v_group.group] + group_name = ob_group.name + group_weight = v_group.weight + else: + continue # Vertex group might not exist on 
object if it's re-using a datablock + + bone = amod_ob.pose.bones.get(group_name) + if bone and bone in self.exportable_bones: + weights.append([ self.bone_ids[bone.name], group_weight ]) + total_weight += group_weight + + if amod.use_bone_envelopes and total_weight == 0: # vertex groups completely override envelopes + for pose_bone in [pb for pb in amod_ob.pose.bones if pb in self.exportable_bones]: + weight = pose_bone.bone.envelope_weight * pose_bone.evaluate_envelope( model_mat @ v.co ) + if weight: + weights.append([ self.bone_ids[pose_bone.name], weight ]) + total_weight += weight + + # normalise weights, like Blender does. Otherwise Studiomdl puts anything left over onto the root bone. + if total_weight not in [0,1]: + for link in weights: + link[1] *= 1/total_weight + + # apply armature modifier vertex group + if amod_vg and total_weight > 0: + amod_vg_weight = 0 + for v_group in v.groups: + if v_group.group == amod_vg.index: + amod_vg_weight = v_group.weight + break + if amod.invert_vertex_group: + amod_vg_weight = 1 - amod_vg_weight + for link in weights: + link[1] *= amod_vg_weight + + out.append(weights) + return out + + def GetMaterialName(self, ob, material_index): + mat_name = None + mat_id = None + if len(ob.material_slots) > material_index: + mat_id = ob.material_slots[material_index].material + if mat_id: + mat_name = mat_id.name + if mat_name: + self.materials_used.add((mat_name,mat_id)) + return mat_name, True + else: + return "no_material", ob.display_type != 'TEXTURED' # assume it's a collision mesh if it's not textured + + def getTopParent(self,id): + top_parent = id + while top_parent.parent: + top_parent = top_parent.parent + return top_parent + + def getEvaluatedPoseBones(self): + depsgraph = bpy.context.evaluated_depsgraph_get() + evaluated_armature = self.armature.evaluated_get(depsgraph) + + return [evaluated_armature.pose.bones[bone.name] for bone in self.exportable_bones] + + class BakedVertexAnimation(list): + def __init__(self): + 
self.export_sequence = False + self.bone_id = -1 + self.num_frames = 0 + + class VertexAnimationKey(): + def __init__(self,vert_index,co,norm): + self.vert_index = vert_index + self.co = co + self.norm = norm + + class BakeResult: + def __init__(self,name): + self.name = name + self.object = None + self.matrix = Matrix() + self.envelope = None + self.bone_parent_matrix = None + self.src = None + self.armature = None + self.balance_vg = None + self.shapes = collections.OrderedDict() + self.vertex_animations = collections.defaultdict(SmdExporter.BakedVertexAnimation) + + # Creates a mesh with object transformations and modifiers applied + def bakeObj(self,id, generate_uvs = True): + for bake in (bake for bake in self.bake_results if bake.src == id or bake.object == id): + return bake + + result = self.BakeResult(id.name) + result.src = id + self.bake_results.append(result) + + try: + select_only(id) + except RuntimeError: + self.warning(get_id("exporter_err_hidden", True).format(id.name)) + return + + should_triangulate = State.exportFormat == ExportFormat.SMD or id.vs.triangulate + + def triangulate(): + ops.object.mode_set(mode='EDIT') + ops.mesh.select_all(action='SELECT') + ops.mesh.quads_convert_to_tris(quad_method='FIXED') + ops.object.mode_set(mode='OBJECT') + + duplis = [] + if id.instance_type != 'NONE': + bpy.ops.object.duplicates_make_real() + id.select_set(False) + if bpy.context.selected_objects: + bpy.context.view_layer.objects.active = bpy.context.selected_objects[0] + bpy.ops.object.join() + duplis = bpy.context.active_object + duplis.parent = id + duplis = self.bakeObj(duplis, generate_uvs = False).object + if should_triangulate: triangulate() + elif id.type not in exportable_types: + return + else: + duplis = None + + if id.type != 'META': # eek, what about lib data? 
+ id = id.copy() + bpy.context.scene.collection.objects.link(id) + if id.data: + id.data = id.data.copy() + + if bpy.context.active_object: + ops.object.mode_set(mode='OBJECT') + select_only(id) + + if hasShapes(id): + id.active_shape_key_index = 0 + + top_parent = self.getTopParent(id) # record this before changing hierarchies! + + def captureBoneParent(armature, boneName): + result.envelope = boneName + result.armature = self.bakeObj(armature) + select_only(id) + + # Objects with bone parents are not updated in sync with depsgraph evaluation (as of Blender 3.0.1). So capture the correct matrix before we start to mess with them. + # Furthemore, Blender's bone transforms are inconsistent with object transforms: + # - A bone's matrix value is local to the armature, NOT the bone's parent + # - Object bone parent matricies are calculated from the head of the bone, NOT the tail (even though the tail defines the bone's location in pose mode!) + # - Bones are Y up, NOT Z up like everything else in Blender, and this affects their children's transforms + # To avoid this mess, we can use the bone and object world transforms to calculate a sane local matrix + result.bone_parent_matrix = armature.pose.bones[boneName].matrix.inverted() @ armature.matrix_world.inverted() @ id.matrix_world + + cur = id + while cur: + if cur.parent_bone and cur.parent_type == 'BONE' and not result.envelope: + captureBoneParent(cur.parent, cur.parent_bone) + for con in [con for con in cur.constraints if not con.mute]: + if con.type in ['CHILD_OF','COPY_TRANSFORMS'] and con.target and con.target.type == 'ARMATURE' and con.subtarget: + if not result.envelope: + captureBoneParent(con.target, con.subtarget) + else: + self.warning(get_id("exporter_err_dupeenv_con",True).format(con.name,cur.name)) + if result.envelope: + break + cur = cur.parent + del cur + + if id.type == 'MESH': + ops.object.mode_set(mode='EDIT') + ops.mesh.reveal() + + if id.matrix_world.is_negative: + 
ops.mesh.select_all(action='SELECT') + ops.mesh.flip_normals() + + ops.mesh.select_all(action="DESELECT") + ops.object.mode_set(mode='OBJECT') + + ops.object.parent_clear(type='CLEAR_KEEP_TRANSFORM') + id.matrix_world = Matrix.Translation(top_parent.location).inverted() @ getUpAxisMat(bpy.context.scene.vs.up_axis).inverted() @ id.matrix_world + + if id.type == 'ARMATURE': + for posebone in id.pose.bones: posebone.matrix_basis.identity() + if self.armature and self.armature != id: + self.warning(get_id("exporter_warn_multiarmature")) + result.armature = result + result.object = id + return result + + if id.type == 'CURVE': + id.data.dimensions = '3D' + + for con in [con for con in id.constraints if not con.mute]: + con.mute = True + + solidify_fill_rim = None + shapes_invalid = False + for mod in id.modifiers: + if mod.type == 'ARMATURE' and mod.object: + if result.envelope and any(br for br in self.bake_results if br.envelope != mod.object): + self.warning(get_id("exporter_err_dupeenv_arm",True).format(mod.name,id.name)) + else: + result.armature = self.bakeObj(mod.object) + result.envelope = mod + select_only(id) + mod.show_viewport = False + elif mod.type == 'SOLIDIFY' and not solidify_fill_rim: + solidify_fill_rim = mod.use_rim + elif hasShapes(id) and mod.type == 'DECIMATE' and mod.decimate_type != 'UNSUBDIV': + self.error(get_id("exporter_err_shapes_decimate", True).format(id.name,mod.decimate_type)) + shapes_invalid = True + ops.object.mode_set(mode='OBJECT') + + depsgraph = bpy.context.evaluated_depsgraph_get() + + if id.type in exportable_types: + # Bake reference mesh + data = bpy.data.meshes.new_from_object(id.evaluated_get(depsgraph), preserve_all_data_layers=True, depsgraph=depsgraph) + data.name = id.name + "_baked" + + def put_in_object(id, data, quiet=False): + if bpy.context.view_layer.objects.active: + ops.object.mode_set(mode='OBJECT') + + ob = bpy.data.objects.new(name=id.name,object_data=data) + ob.matrix_world = id.matrix_world + + 
bpy.context.scene.collection.objects.link(ob) + + select_only(ob) + + exporting_smd = State.exportFormat == ExportFormat.SMD + ops.object.transform_apply(scale=True, location=exporting_smd, rotation=exporting_smd) + + if hasCurves(id): + ops.object.mode_set(mode='EDIT') + ops.mesh.select_all(action='SELECT') + if id.data.vs.faces == 'BOTH': + ops.mesh.duplicate() + if solidify_fill_rim and not quiet: + self.warning(get_id("exporter_err_solidifyinside", True).format(id.name)) + if id.data.vs.faces != 'FORWARD': + ops.mesh.flip_normals() + ops.object.mode_set(mode='OBJECT') + + return ob + + baked = put_in_object(id,data) + + if should_triangulate: triangulate() + + if duplis: + if not id.type in exportable_types: + id.select_set(False) + bpy.context.view_layer.objects.active = duplis + duplis.select_set(True) + bpy.ops.object.join() + baked = bpy.context.active_object + + result.object = baked + data = baked.data + + if not data.polygons: + self.error(get_id("exporter_err_nopolys", True).format(result.name)) + return + + result.matrix = baked.matrix_world + + if not shapes_invalid and hasShapes(id): + # calculate vert balance + if State.exportFormat == ExportFormat.DMX: + if id.data.vs.flex_stereo_mode == 'VGROUP': + if id.data.vs.flex_stereo_vg == "": + self.warning(get_id("exporter_err_splitvgroup_undefined",True).format(id.name)) + else: + result.balance_vg = baked.vertex_groups.get(id.data.vs.flex_stereo_vg) + if not result.balance_vg: + self.warning(get_id("exporter_err_splitvgroup_missing", True).format(id.data.vs.flex_stereo_vg,id.name)) + else: + axis = axes_lookup[id.data.vs.flex_stereo_mode] + balance_width = baked.dimensions[axis] * ( 1 - (id.data.vs.flex_stereo_sharpness / 100) ) + result.balance_vg = baked.vertex_groups.new(name="__dmx_balance__") + zeroes = [] + ones = [] + for vert in baked.data.vertices: + if balance_width == 0: + if vert.co[axis] > 0: ones.append(vert.index) + else: zeroes.append(vert.index) + else: + balance = min(1,max(0, 
(-vert.co[axis] / balance_width / 2) + 0.5)) + if balance == 1: ones.append(vert.index) + elif balance == 0: zeroes.append(vert.index) + else: result.balance_vg.add([vert.index], balance, 'REPLACE') + result.balance_vg.add(ones, 1, 'REPLACE') + result.balance_vg.add(zeroes, 0, 'REPLACE') + + # bake shapes + id.show_only_shape_key = True + for i, shape in enumerate(id.data.shape_keys.key_blocks): + if i == 0: continue + id.active_shape_key_index = i + depsgraph = bpy.context.evaluated_depsgraph_get() + baked_shape = bpy.data.meshes.new_from_object(id.evaluated_get(depsgraph)) + baked_shape.name = "{} -> {}".format(id.name,shape.name) + + shape_ob = put_in_object(id,baked_shape, quiet = True) + + if duplis: + select_only(shape_ob) + duplis.select_set(True) + bpy.ops.object.join() + shape_ob = bpy.context.active_object + + result.shapes[shape.name] = shape_ob.data + + if should_triangulate: + bpy.context.view_layer.objects.active = shape_ob + triangulate() + + bpy.context.scene.collection.objects.unlink(shape_ob) + bpy.data.objects.remove(shape_ob) + del shape_ob + + for mod in id.modifiers: + mod.show_viewport = False # mainly to disable physics modifiers + + bpy.context.view_layer.objects.active = baked + baked.select_set(True) + + # project a UV map + if generate_uvs and not baked.data.uv_layers: + ops.object.mode_set(mode='EDIT') + ops.mesh.select_all(action='SELECT') + if len(result.object.data.vertices) < 2000: + result.object.data.uv_layers.new() + ops.uv.smart_project() + else: + ops.uv.unwrap() + ops.object.mode_set(mode='OBJECT') + + return result + + def openSMD(self,path,name,description): + full_path = os.path.realpath(os.path.join(path, name)) + + try: + f = open(full_path, 'w',encoding='utf-8') + except Exception as err: + self.error(get_id("exporter_err_open", True).format(description, err)) + return None + + f.write("version 1\n") + print("-",full_path) + return f + + def writeSMD(self, id, bake_results, name, filepath, filetype = 'smd'): + bench = 
BenchMarker(1,"SMD") + goldsrc = bpy.context.scene.vs.smd_format == "GOLDSOURCE" + + self.smd_file = self.openSMD(filepath,name + "." + filetype,filetype.upper()) + if self.smd_file == None: return 0 + + if State.compiler > Compiler.STUDIOMDL: + self.warning(get_id("exporter_warn_source2smdsupport")) + + # BONES + self.smd_file.write("nodes\n") + curID = 0 + if not self.armature: + self.smd_file.write("0 \"root\" -1\n") + if filetype == 'smd': print("- No skeleton to export") + else: + if self.armature.data.vs.implicit_zero_bone: + self.smd_file.write("0 \"{}\" -1\n".format(implicit_bone_name)) + curID += 1 + + # Write to file + for bone in self.exportable_bones: + parent = bone.parent + while parent and not parent in self.exportable_bones: + parent = parent.parent + + line = "{} ".format(curID) + self.bone_ids[bone.name] = curID + curID += 1 + + bone_name = bone.name + line += "\"" + bone_name + "\" " + + if parent: + line += str(self.bone_ids[parent.name]) + else: + line += "-1" + + self.smd_file.write(line + "\n") + + num_bones = len(self.armature.data.bones) + if filetype == 'smd': print("- Exported",num_bones,"bones") + + max_bones = 128 + if num_bones > max_bones: + self.warning(get_id("exporter_err_bonelimit", True).format(num_bones,max_bones)) + + for vca in [vca for vca in bake_results[0].vertex_animations.items() if vca[1].export_sequence]: + curID += 1 + vca[1].bone_id = curID + self.smd_file.write("{} \"vcabone_{}\" -1\n".format(curID,vca[0])) + + self.smd_file.write("end\n") + + if filetype == 'smd': + # ANIMATION + self.smd_file.write("skeleton\n") + if not self.armature: + self.smd_file.write("time 0\n0 0 0 0 0 0 0\nend\n") + else: + # Get the working frame range + is_anim = len(bake_results) == 1 and bake_results[0].object.type == 'ARMATURE' + if is_anim: + ad = self.armature.animation_data + anim_len = animationLength(ad) + 1 # frame 0 is a frame too... 
+ if anim_len == 1: + self.warning(get_id("exporter_err_noframes",True).format(self.armature_src.name)) + + if ad.action and hasattr(ad.action,'fps'): + bpy.context.scene.render.fps = ad.action.fps + bpy.context.scene.render.fps_base = 1 + else: + anim_len = 1 + + # remove any unkeyed poses, e.g. from other animations in this export operation. + for posebone in self.armature.pose.bones: posebone.matrix_basis.identity() + + # Start writing out the animation + for i in range(anim_len): + bpy.context.window_manager.progress_update(i / anim_len) + self.smd_file.write("time {}\n".format(i)) + + if self.armature.data.vs.implicit_zero_bone: + self.smd_file.write("0 0 0 0 0 0 0\n") + + if is_anim: + bpy.context.scene.frame_set(i) + + evaluated_bones = self.getEvaluatedPoseBones() + for posebone in evaluated_bones: + parent = posebone.parent + while parent and not parent in evaluated_bones: + parent = parent.parent + + # Get the bone's Matrix from the current pose + PoseMatrix = posebone.matrix + if self.armature.data.vs.legacy_rotation: + PoseMatrix @= mat_BlenderToSMD + if parent: + parentMat = parent.matrix + if self.armature.data.vs.legacy_rotation: parentMat @= mat_BlenderToSMD + PoseMatrix = parentMat.inverted() @ PoseMatrix + else: + PoseMatrix = self.armature.matrix_world @ PoseMatrix + + self.smd_file.write("{} {} {}\n".format(self.bone_ids[posebone.name], getSmdVec(PoseMatrix.to_translation()), getSmdVec(PoseMatrix.to_euler()))) + + self.smd_file.write("end\n") + + ops.object.mode_set(mode='OBJECT') + + print("- Exported {} frames{}".format(anim_len," (legacy rotation)" if self.armature.data.vs.legacy_rotation else "")) + + # POLYGONS + done_header = False + for bake in [bake for bake in bake_results if bake.object.type != 'ARMATURE']: + if not done_header: + self.smd_file.write("triangles\n") + done_header = True + face_index = 0 + ob = bake.object + data = ob.data + + uv_loop = data.uv_layers.active.data + + weights = self.getWeightmap(bake) + + ob_weight_str = 
None + if type(bake.envelope) == str and bake.envelope in self.bone_ids: + ob_weight_str = (" 1 {} 1" if not goldsrc else "{}").format(self.bone_ids[bake.envelope]) + elif not weights: + ob_weight_str = " 0" if not goldsrc else "0" + + bad_face_mats = 0 + multi_weight_verts = set() # only relevant for GoldSrc exports + p = 0 + for poly in data.polygons: + if p % 10 == 0: bpy.context.window_manager.progress_update(p / len(data.polygons)) + mat_name, mat_success = self.GetMaterialName(ob, poly.material_index) + if not mat_success: + bad_face_mats += 1 + + self.smd_file.write(mat_name + "\n") + + for loop in [data.loops[l] for l in poly.loop_indices]: + # Vertex locations, normal directions + v = data.vertices[loop.vertex_index] + pos_norm = " {} {} ".format(getSmdVec(v.co),getSmdVec(loop.normal)) + + # UVs + uv = " ".join([getSmdFloat(j) for j in uv_loop[loop.index].uv]) + + if not goldsrc: + # Weightmaps + weight_string = "" + if ob_weight_str: + weight_string = ob_weight_str + else: + valid_weights = 0 + for link in [link for link in weights[v.index] if link[1] > 0]: + weight_string += " {} {}".format(link[0], getSmdFloat(link[1])) + valid_weights += 1 + weight_string = " {}{}".format(valid_weights,weight_string) + + self.smd_file.write("0" + pos_norm + uv + weight_string + "\n") # write to file + + else: + if ob_weight_str: + weight_string = ob_weight_str + else: + goldsrc_weights = [link for link in weights[v.index] if link[1] > 0] + if len(goldsrc_weights) == 0: + weight_string = "0" + else: + if len(goldsrc_weights) > 1: + multi_weight_verts.add(v) + weight_string = str(goldsrc_weights[0][0]) + self.smd_file.write(weight_string + pos_norm + uv + "\n") # write to file + + face_index += 1 + + if goldsrc and multi_weight_verts: + self.warning(get_id("exporterr_goldsrc_multiweights", format_string=True).format(len(multi_weight_verts), bake.src.data.name)) + if bad_face_mats: + 
self.warning(get_id("exporter_err_facesnotex_ormat").format(bad_face_mats,bake.src.data.name)) + + print("- Exported",face_index,"polys") + + print("- Exported {} materials".format(len(self.materials_used))) + for mat in self.materials_used: + print(" " + mat[0]) + + if done_header: + self.smd_file.write("end\n") + elif filetype == 'vta': + self.smd_file.write("skeleton\n") + + def _writeTime(time, shape_name = None): + self.smd_file.write( "time {}{}\n".format(time, " # {}".format(shape_name) if shape_name else "")) + + shape_names = ordered_set.OrderedSet() + for bake in [bake for bake in bake_results if bake.object.type != 'ARMATURE']: + for shape_name in bake.shapes.keys(): + shape_names.add(shape_name) + + _writeTime(0) + for i, shape_name in enumerate(shape_names): + _writeTime(i+1, shape_name) + self.smd_file.write("end\n") + + self.smd_file.write("vertexanimation\n") + + total_verts = 0 + vert_id = 0 + + def _makeVertLine(i,co,norm): + return "{} {} {}\n".format(i, getSmdVec(co), getSmdVec(norm)) + + _writeTime(0) + for bake in [bake for bake in bake_results if bake.object.type != 'ARMATURE']: + bake.offset = vert_id + verts = bake.object.data.vertices + for loop in [bake.object.data.loops[l] for poly in bake.object.data.polygons for l in poly.loop_indices]: + self.smd_file.write(_makeVertLine(vert_id,verts[loop.vertex_index].co,loop.normal)) + vert_id += 1 + + for i, shape_name in enumerate(shape_names): + i += 1 + bpy.context.window_manager.progress_update(i / len(shape_names)) + _writeTime(i,shape_name) + for bake in [bake for bake in bake_results if bake.object.type != 'ARMATURE']: + shape = bake.shapes.get(shape_name) + if not shape: continue + + vert_index = bake.offset + mesh_verts = bake.object.data.vertices + shape_verts = shape.vertices + + for mesh_loop in [bake.object.data.loops[l] for poly in bake.object.data.polygons for l in poly.loop_indices]: + shape_vert = shape_verts[mesh_loop.vertex_index] + shape_loop = shape.loops[mesh_loop.index] + 
mesh_vert = mesh_verts[mesh_loop.vertex_index] + diff_vec = shape_vert.co - mesh_vert.co + if diff_vec > epsilon or shape_loop.normal - mesh_loop.normal > epsilon: + self.smd_file.write(_makeVertLine(vert_index,shape_vert.co,shape_loop.normal)) + total_verts += 1 + vert_index += 1 + + self.smd_file.write("end\n") + print("- Exported {} flex shapes ({} verts)".format(i,total_verts)) + + self.smd_file.close() + + + if bench.quiet: + print("- {} export took".format(filetype.upper()) ,bench.total(),"\n") + + written = 1 + if filetype == 'smd': + for bake in [bake for bake in bake_results if bake.shapes]: + written += self.writeSMD(id,bake_results,name,filepath,filetype='vta') + for name,vca in bake_results[0].vertex_animations.items(): + written += self.writeVCA(name,vca,filepath) + if vca.export_sequence: + written += self.writeVCASequence(name,vca,filepath) + return written + + def writeVCA(self,name,vca,filepath): + bench = BenchMarker() + self.smd_file = self.openSMD(filepath,name + ".vta","vertex animation") + if self.smd_file == None: return 0 + + self.smd_file.write( +'''nodes +0 "root" -1 +end +skeleton +''') + for i,frame in enumerate(vca): + self.smd_file.write("time {}\n0 0 0 0 0 0 0\n".format(i)) + + self.smd_file.write("end\nvertexanimation\n") + num_frames = len(vca) + two_percent = num_frames / 50 + + for frame, vca_ob in enumerate(vca): + self.smd_file.write("time {}\n".format(frame)) + + self.smd_file.writelines(["{} {} {}\n".format(loop.index, getSmdVec(vca_ob.data.vertices[loop.vertex_index].co), getSmdVec(loop.normal)) for loop in vca_ob.data.loops]) + + if two_percent and frame % two_percent == 0: + print(".", debug_only=True, newline=False) + bpy.context.window_manager.progress_update(frame / num_frames) + + removeObject(vca_ob) + vca[frame] = None + + self.smd_file.write("end\n") + print(debug_only=True) + print("Exported {} frames ({:.1f}MB)".format(num_frames, self.smd_file.tell() / 1024 / 1024)) + self.smd_file.close() + bench.report("Vertex 
animation") + print() + return 1 + + def writeVCASequence(self,name,vca,dir_path): + self.smd_file = self.openSMD(dir_path,"vcaanim_{}.smd".format(name),"SMD") + if self.smd_file == None: return 0 + + self.smd_file.write( +'''nodes +{2} +{0} "vcabone_{1}" -1 +end +skeleton +'''.format(vca.bone_id, name, + "\n".join(['''{} "{}" -1'''.format(self.bone_ids[b.name],b.name) for b in self.exportable_bones if b.parent == None]) + if self.armature_src else '0 "root" -1') + ) + + max_frame = float(len(vca)-1) + for i in range(len(vca)): + self.smd_file.write("time {}\n".format(i)) + if self.armature_src: + for root_bone in [b for b in self.exportable_bones if b.parent == None]: + mat = getUpAxisMat('Y').inverted() @ self.armature.matrix_world @ root_bone.matrix + self.smd_file.write("{} {} {}\n".format(self.bone_ids[root_bone.name], getSmdVec(mat.to_translation()), getSmdVec(mat.to_euler()))) + else: + self.smd_file.write("0 0 0 0 {} 0 0\n".format("-1.570797" if bpy.context.scene.vs.up_axis == 'Z' else "0")) + self.smd_file.write("{0} 1.0 {1} 0 0 0 0\n".format(vca.bone_id,getSmdFloat(i / max_frame))) + self.smd_file.write("end\n") + self.smd_file.close() + return 1 + + def writeDMX(self, id, bake_results, name, dir_path): + bench = BenchMarker(1,"DMX") + filepath = os.path.realpath(os.path.join(dir_path,name + ".dmx")) + print("-",filepath) + armature_name = self.armature_src.name if self.armature_src else name + materials = {} + written = 0 + + def makeTransform(name,matrix,object_name): + trfm = dm.add_element(name,"DmeTransform",id=object_name+"transform") + trfm["position"] = datamodel.Vector3(matrix.to_translation()) + trfm["orientation"] = getDatamodelQuat(matrix.to_quaternion()) + return trfm + + dm = datamodel.DataModel("model",State.datamodelFormat) + dm.allow_random_ids = False + + source2 = dm.format_ver >= 22 + + root = dm.add_element(bpy.context.scene.name,id="Scene"+bpy.context.scene.name) + DmeModel = dm.add_element(armature_name,"DmeModel",id="Object" + 
armature_name) + DmeModel_children = DmeModel["children"] = datamodel.make_array([],datamodel.Element) + + DmeModel_transforms = dm.add_element("base","DmeTransformList",id="transforms"+bpy.context.scene.name) + DmeModel["baseStates"] = datamodel.make_array([ DmeModel_transforms ],datamodel.Element) + DmeModel_transforms["transforms"] = datamodel.make_array([],datamodel.Element) + DmeModel_transforms = DmeModel_transforms["transforms"] + + if source2: + DmeAxisSystem = DmeModel["axisSystem"] = dm.add_element("axisSystem","DmeAxisSystem","AxisSys" + armature_name) + DmeAxisSystem["upAxis"] = axes_lookup_source2[bpy.context.scene.vs.up_axis] + DmeAxisSystem["forwardParity"] = 1 # ?? + DmeAxisSystem["coordSys"] = 0 # ?? + + DmeModel["transform"] = makeTransform("",Matrix(),DmeModel.name + "transform") + + keywords = getDmxKeywords(dm.format_ver) + + # skeleton + root["skeleton"] = DmeModel + want_jointlist = dm.format_ver >= 11 + want_jointtransforms = dm.format_ver in range(0,21) + if want_jointlist: + jointList = DmeModel["jointList"] = datamodel.make_array([],datamodel.Element) + if source2: + jointList.append(DmeModel) + if want_jointtransforms: + jointTransforms = DmeModel["jointTransforms"] = datamodel.make_array([],datamodel.Element) + if source2: + jointTransforms.append(DmeModel["transform"]) + bone_elements = {} + if self.armature: armature_scale = self.armature.matrix_world.to_scale() + + def writeBone(bone): + if isinstance(bone,str): + bone_name = bone + bone = None + else: + if bone and not bone in self.exportable_bones: + children = [] + for child_elems in [writeBone(child) for child in bone.children]: + if child_elems: children.extend(child_elems) + return children + bone_name = bone.name + + bone_elements[bone_name] = bone_elem = dm.add_element(bone_name,"DmeJoint",id=bone_name) + if want_jointlist: jointList.append(bone_elem) + self.bone_ids[bone_name] = len(bone_elements) - (0 if source2 else 1) # in Source 2, index 0 is the DmeModel + + if not 
bone: relMat = Matrix() + else: + cur_p = bone.parent + while cur_p and not cur_p in self.exportable_bones: cur_p = cur_p.parent + if cur_p: + relMat = cur_p.matrix.inverted() @ bone.matrix + else: + relMat = self.armature.matrix_world @ bone.matrix + + trfm = makeTransform(bone_name,relMat,"bone"+bone_name) + trfm_base = makeTransform(bone_name,relMat,"bone_base"+bone_name) + + if bone and bone.parent: + for j in range(3): + trfm["position"][j] *= armature_scale[j] + trfm_base["position"] = trfm["position"] + + if want_jointtransforms: jointTransforms.append(trfm) + bone_elem["transform"] = trfm + + DmeModel_transforms.append(trfm_base) + + if bone: + children = bone_elem["children"] = datamodel.make_array([],datamodel.Element) + for child_elems in [writeBone(child) for child in bone.children]: + if child_elems: children.extend(child_elems) + + bpy.context.window_manager.progress_update(len(bone_elements)/num_bones) + return [bone_elem] + + if self.armature: + num_bones = len(self.exportable_bones) + add_implicit_bone = not source2 + + if add_implicit_bone: + DmeModel_children.extend(writeBone(implicit_bone_name)) + for root_elems in [writeBone(bone) for bone in self.armature.pose.bones if not bone.parent and not (add_implicit_bone and bone.name == implicit_bone_name)]: + if root_elems: DmeModel_children.extend(root_elems) + + bench.report("Bones") + + for vca in bake_results[0].vertex_animations: + DmeModel_children.extend(writeBone("vcabone_{}".format(vca))) + + DmeCombinationOperator = None + for _ in [bake for bake in bake_results if bake.shapes]: + if self.flex_controller_mode == 'ADVANCED': + if not hasFlexControllerSource(self.flex_controller_source): + self.error(get_id("exporter_err_flexctrl_undefined",True).format(name) ) + return written + + text = bpy.data.texts.get(self.flex_controller_source) + msg = "- Loading flex controllers from " + element_path = [ "combinationOperator" ] + try: + if text: + print(msg + "text block \"{}\"".format(text.name)) + 
controller_dm = datamodel.parse(text.as_string(),element_path=element_path) + else: + path = os.path.realpath(bpy.path.abspath(self.flex_controller_source)) + print(msg + path) + controller_dm = datamodel.load(path=path,element_path=element_path) + + DmeCombinationOperator = controller_dm.root["combinationOperator"] + + for elem in [elem for elem in DmeCombinationOperator["targets"] if elem.type != "DmeFlexRules"]: + DmeCombinationOperator["targets"].remove(elem) + except Exception as err: + self.error(get_id("exporter_err_flexctrl_loadfail", True).format(err)) + return written + else: + DmeCombinationOperator = flex.DmxWriteFlexControllers.make_controllers(id).root["combinationOperator"] + + break + + if not DmeCombinationOperator and len(bake_results[0].vertex_animations): + DmeCombinationOperator = flex.DmxWriteFlexControllers.make_controllers(id).root["combinationOperator"] + + if DmeCombinationOperator: + root["combinationOperator"] = DmeCombinationOperator + bench.report("Flex setup") + + for bake in [bake for bake in bake_results if bake.object.type != 'ARMATURE']: + root["model"] = DmeModel + + ob = bake.object + + vertex_data = dm.add_element("bind","DmeVertexData",id=bake.name+"verts") + + DmeMesh = dm.add_element(bake.name,"DmeMesh",id=bake.name+"mesh") + DmeMesh["visible"] = True + DmeMesh["bindState"] = vertex_data + DmeMesh["currentState"] = vertex_data + DmeMesh["baseStates"] = datamodel.make_array([vertex_data],datamodel.Element) + + DmeDag = dm.add_element(bake.name,"DmeDag",id="ob"+bake.name+"dag") + if want_jointlist: jointList.append(DmeDag) + DmeDag["shape"] = DmeMesh + + bone_child = isinstance(bake.envelope, str) + if bone_child and bake.envelope in bone_elements: + bone_elements[bake.envelope]["children"].append(DmeDag) + trfm_mat = bake.bone_parent_matrix + else: + DmeModel_children.append(DmeDag) + trfm_mat = ob.matrix_world + + trfm = makeTransform(bake.name, trfm_mat, "ob"+bake.name) + + if want_jointtransforms: 
jointTransforms.append(trfm) + + DmeDag["transform"] = trfm + DmeModel_transforms.append(makeTransform(bake.name, trfm_mat, "ob_base"+bake.name)) + + jointCount = 0 + weight_link_limit = 4 if source2 else 3 + badJointCounts = 0 + culled_weight_links = 0 + cull_threshold = bpy.context.scene.vs.dmx_weightlink_threshold + have_weightmap = False + + if type(bake.envelope) is bpy.types.ArmatureModifier: + ob_weights = self.getWeightmap(bake) + + for vert_weights in ob_weights: + count = len(vert_weights) + + if weight_link_limit: + if count > weight_link_limit and cull_threshold > 0: + vert_weights.sort(key=lambda link: link[1],reverse=True) + while len(vert_weights) > weight_link_limit and vert_weights[-1][1] <= cull_threshold: + vert_weights.pop() + culled_weight_links += 1 + count = len(vert_weights) + if count > weight_link_limit: badJointCounts += 1 + + jointCount = max(jointCount,count) + if jointCount: have_weightmap = True + elif bake.envelope: + jointCount = 1 + + if badJointCounts: + self.warning(get_id("exporter_warn_weightlinks_excess",True).format(badJointCounts,bake.src.name,weight_link_limit)) + if culled_weight_links: + self.warning(get_id("exporter_warn_weightlinks_culled",True).format(culled_weight_links,cull_threshold,bake.src.name)) + + format = vertex_data["vertexFormat"] = datamodel.make_array( [ keywords['pos'], keywords['norm'] ], str) + + vertex_data["flipVCoordinates"] = True + vertex_data["jointCount"] = jointCount + + num_verts = len(ob.data.vertices) + num_loops = len(ob.data.loops) + norms = [None] * num_loops + texco = ordered_set.OrderedSet() + face_sets = collections.OrderedDict() + texcoIndices = [None] * num_loops + jointWeights = [] + jointIndices = [] + balance = [0.0] * num_verts + + Indices = [None] * num_loops + + uv_layer = ob.data.uv_layers.active.data + + bench.report("object setup") + + v=0 + for vert in ob.data.vertices: + vert.select = False + + if bake.shapes and bake.balance_vg: + try: balance[vert.index] = 
bake.balance_vg.weight(vert.index) + except: pass + + if have_weightmap: + weights = [0.0] * jointCount + indices = [0] * jointCount + i = 0 + total_weight = 0 + vert_weights = ob_weights[vert.index] + for i in range(len(vert_weights)): + indices[i] = vert_weights[i][0] + weights[i] = vert_weights[i][1] + total_weight += weights[i] + i+=1 + + if source2 and total_weight == 0: + weights[0] = 1.0 # attach to the DmeModel itself, avoiding motion. + + jointWeights.extend(weights) + jointIndices.extend(indices) + v += 1 + if v % 50 == 0: + bpy.context.window_manager.progress_update(v / num_verts) + + bench.report("verts") + + for loop in [ob.data.loops[i] for poly in ob.data.polygons for i in poly.loop_indices]: + texcoIndices[loop.index] = texco.add(datamodel.Vector2(uv_layer[loop.index].uv)) + norms[loop.index] = datamodel.Vector3(loop.normal) + Indices[loop.index] = loop.vertex_index + + bench.report("loops") + + bpy.context.view_layer.objects.active = ob + bpy.ops.object.mode_set(mode='EDIT') + bm = bmesh.from_edit_mesh(ob.data) + bm.verts.ensure_lookup_table() + bm.faces.ensure_lookup_table() + + vertex_data[keywords['pos']] = datamodel.make_array((v.co for v in bm.verts),datamodel.Vector3) + vertex_data[keywords['pos'] + "Indices"] = datamodel.make_array((l.vert.index for f in bm.faces for l in f.loops),int) + + if source2: # write out arbitrary vertex data + loops = [loop for face in bm.faces for loop in face.loops] + loop_indices = datamodel.make_array([loop.index for loop in loops], int) + layerGroups = bm.loops.layers + + class exportLayer: + name : str + + def __init__(self, layer, exportName = None): + self._layer = layer + self.name = exportName or layer.name + + def data_for(self, loop): return loop[self._layer] + + def get_bmesh_layers(layerGroup): + return [exportLayer(l) for l in layerGroup if re.match(r".*\$[0-9]+", l.name)] + + defaultUvLayer = "texcoord$0" + uv_layers_to_export = list(get_bmesh_layers(layerGroups.uv)) + if not defaultUvLayer in 
[l.name for l in uv_layers_to_export]: # select a default UV map + uv_render_layer = next((l.name for l in ob.data.uv_layers if l.active_render and not l in uv_layers_to_export), None) + if uv_render_layer: + uv_layers_to_export.append(exportLayer(layerGroups.uv[uv_render_layer], defaultUvLayer)) + print("- Exporting '{}' as {}".format(uv_render_layer, defaultUvLayer)) + else: + self.warning("'{}' does not contain a UV Map called {} and no suitable fallback map could be found. The model may be missing UV data.".format(bake.name, defaultUvLayer)) + + for layer in uv_layers_to_export: + uv_set = ordered_set.OrderedSet() + uv_indices = [] + for uv in (layer.data_for(loop).uv for loop in loops): + uv_indices.append(uv_set.add(datamodel.Vector2(uv))) + + vertex_data[layer.name] = datamodel.make_array(uv_set, datamodel.Vector2) + vertex_data[layer.name + "Indices"] = datamodel.make_array(uv_indices, int) + format.append(layer.name) + + def make_vertex_layer(layer : exportLayer, arrayType): + vertex_data[layer.name] = datamodel.make_array([layer.data_for(loop) for loop in loops], arrayType) + vertex_data[layer.name + "Indices"] = loop_indices + format.append(layer.name) + + for layer in get_bmesh_layers(layerGroups.color): + make_vertex_layer(layer, datamodel.Vector4) + for layer in get_bmesh_layers(layerGroups.float): + make_vertex_layer(layer, float) + for layer in get_bmesh_layers(layerGroups.int): + make_vertex_layer(layer, int) + for layer in get_bmesh_layers(layerGroups.string): + make_vertex_layer(layer, str) + + bench.report("Source 2 vertex data") + + else: + format.append("textureCoordinates") + vertex_data["textureCoordinates"] = datamodel.make_array(texco,datamodel.Vector2) + vertex_data["textureCoordinatesIndices"] = datamodel.make_array(texcoIndices,int) + + if have_weightmap: + vertex_data[keywords["weight"]] = datamodel.make_array(jointWeights,float) + vertex_data[keywords["weight_indices"]] = datamodel.make_array(jointIndices,int) + format.extend( [ 
keywords['weight'], keywords["weight_indices"] ] ) + + deform_layer = bm.verts.layers.deform.active + if deform_layer: + for cloth_enable in (group for group in ob.vertex_groups if re.match(r"cloth_enable\$[0-9]+", group.name)): + format.append(cloth_enable.name) + values = [v[deform_layer].get(cloth_enable.index, 0) for v in bm.verts] + valueSet = ordered_set.OrderedSet(values) + vertex_data[cloth_enable.name] = datamodel.make_array(valueSet, float) + vertex_data[cloth_enable.name + "Indices"] = datamodel.make_array((valueSet.index(values[i]) for i in Indices), int) + + if bake.shapes and bake.balance_vg: + vertex_data[keywords["balance"]] = datamodel.make_array(balance,float) + vertex_data[keywords["balance"] + "Indices"] = datamodel.make_array(Indices,int) + format.append(keywords["balance"]) + + vertex_data[keywords['norm']] = datamodel.make_array(norms,datamodel.Vector3) + vertex_data[keywords['norm'] + "Indices"] = datamodel.make_array(range(len(norms)),int) + + bench.report("insert") + + bad_face_mats = 0 + p = 0 + num_polys = len(bm.faces) + + two_percent = int(num_polys / 50) + print("Polygons: ",debug_only=True,newline=False) + + bm_face_sets = collections.defaultdict(list) + for face in bm.faces: + mat_name, mat_success = self.GetMaterialName(ob, face.material_index) + if not mat_success: + bad_face_mats += 1 + bm_face_sets[mat_name].extend((*(l.index for l in face.loops),-1)) + + p+=1 + if two_percent and p % two_percent == 0: + print(".", debug_only=True, newline=False) + bpy.context.window_manager.progress_update(p / num_polys) + + for (mat_name,indices) in bm_face_sets.items(): + material_elem = materials.get(mat_name) + if not material_elem: + materials[mat_name] = material_elem = dm.add_element(mat_name,"DmeMaterial",id=mat_name + "mat") + material_elem["mtlName"] = os.path.join(bpy.context.scene.vs.material_path, mat_name).replace('\\','/') + + face_set = dm.add_element(mat_name,"DmeFaceSet",id=bake.name+mat_name+"faces") + face_sets[mat_name] = 
face_set + + face_set["material"] = material_elem + face_set["faces"] = datamodel.make_array(indices,int) + + print(debug_only=True) + DmeMesh["faceSets"] = datamodel.make_array(list(face_sets.values()),datamodel.Element) + + if bad_face_mats: + self.warning(get_id("exporter_err_facesnotex_ormat").format(bad_face_mats, bake.name)) + bench.report("polys") + + bpy.ops.object.mode_set(mode='OBJECT') + del bm + + two_percent = int(len(bake.shapes) / 50) + print("Shapes: ",debug_only=True,newline=False) + delta_states = [] + corrective_shapes_seen = [] + if bake.shapes: + shape_names = [] + num_shapes = len(bake.shapes) + num_correctives = 0 + num_wrinkles = 0 + + for shape_name,shape in bake.shapes.items(): + wrinkle_scale = 0 + corrective = getCorrectiveShapeSeparator() in shape_name + if corrective: + # drivers always override shape name to avoid name truncation issues + corrective_targets_driver = ordered_set.OrderedSet(flex.getCorrectiveShapeKeyDrivers(bake.src.data.shape_keys.key_blocks[shape_name]) or []) + corrective_targets_name = ordered_set.OrderedSet(shape_name.split(getCorrectiveShapeSeparator())) + corrective_targets = corrective_targets_driver or corrective_targets_name + corrective_targets.source = shape_name + + if(corrective_targets in corrective_shapes_seen): + previous_shape = next(x for x in corrective_shapes_seen if x == corrective_targets) + self.warning(get_id("exporter_warn_correctiveshape_duplicate", True).format(shape_name, "+".join(corrective_targets), previous_shape.source)) + continue + else: + corrective_shapes_seen.append(corrective_targets) + + if corrective_targets_driver and corrective_targets_driver != corrective_targets_name: + generated_shape_name = getCorrectiveShapeSeparator().join(corrective_targets_driver) + print("- Renamed shape key '{}' to '{}' to match its corrective shape drivers.".format(shape_name, generated_shape_name)) + shape_name = generated_shape_name + num_correctives += 1 + else: + if self.flex_controller_mode == 
'ADVANCED': + def _FindScale(): + for control in controller_dm.root["combinationOperator"]["controls"]: + for i in range(len(control["rawControlNames"])): + if control["rawControlNames"][i] == shape_name: + scales = control.get("wrinkleScales") + return scales[i] if scales else 0 + raise ValueError() + try: + wrinkle_scale = _FindScale() + except ValueError: + self.warning(get_id("exporter_err_flexctrl_missing", True).format(shape_name)) + pass + + shape_names.append(shape_name) + DmeVertexDeltaData = dm.add_element(shape_name,"DmeVertexDeltaData",id=ob.name+shape_name) + delta_states.append(DmeVertexDeltaData) + + vertexFormat = DmeVertexDeltaData["vertexFormat"] = datamodel.make_array([ keywords['pos'], keywords['norm'] ],str) + + wrinkle = [] + wrinkleIndices = [] + + # what do these do? + #DmeVertexDeltaData["flipVCoordinates"] = False + #DmeVertexDeltaData["corrected"] = True + + shape_pos = [] + shape_posIndices = [] + shape_norms = [] + shape_normIndices = [] + cache_deltas = wrinkle_scale + if cache_deltas: + delta_lengths = [None] * len(ob.data.vertices) + max_delta = 0 + + for ob_vert in ob.data.vertices: + shape_vert = shape.vertices[ob_vert.index] + + if ob_vert.co != shape_vert.co: + delta = shape_vert.co - ob_vert.co + delta_length = delta.length + + if abs(delta_length) > 1e-5: + if cache_deltas: + delta_lengths[ob_vert.index] = delta_length + shape_pos.append(datamodel.Vector3(delta)) + shape_posIndices.append(ob_vert.index) + + if corrective: + corrective_target_shapes = [] + for corrective_shape_name in corrective_targets: + corrective_target = bake.shapes.get(corrective_shape_name) + if corrective_target: + corrective_target_shapes.append(corrective_target) + else: + self.warning(get_id("exporter_err_missing_corrective_target", format_string=True).format(shape_name, corrective_shape_name)) + continue + + # We need the absolute normals as generated by Blender + for shape_vert in shape.vertices: + shape_vert.co -= 
ob.data.vertices[shape_vert.index].co - corrective_target.vertices[shape_vert.index].co + + for ob_loop in ob.data.loops: + shape_loop = shape.loops[ob_loop.index] + norm = shape_loop.normal + + if corrective: + base = Vector(ob_loop.normal) + for corrective_target in corrective_target_shapes: + # Normals for corrective shape keys are deltas from those of the deformed mesh, not the basis shape. + base += corrective_target.loops[shape_loop.index].normal - ob_loop.normal + else: + base = ob_loop.normal + + if norm.dot(base.normalized()) < 1 - 1e-3: + shape_norms.append(datamodel.Vector3(norm - base)) + shape_normIndices.append(shape_loop.index) + + if wrinkle_scale: + delta_len = delta_lengths[ob_loop.vertex_index] + if delta_len: + max_delta = max(max_delta,delta_len) + wrinkle.append(delta_len) + wrinkleIndices.append(texcoIndices[ob_loop.index]) + + del shape_vert + + if wrinkle_scale and max_delta: + wrinkle_mod = wrinkle_scale / max_delta + if wrinkle_mod != 1: + for i in range(len(wrinkle)): + wrinkle[i] *= wrinkle_mod + + DmeVertexDeltaData[keywords['pos']] = datamodel.make_array(shape_pos,datamodel.Vector3) + DmeVertexDeltaData[keywords['pos'] + "Indices"] = datamodel.make_array(shape_posIndices,int) + DmeVertexDeltaData[keywords['norm']] = datamodel.make_array(shape_norms,datamodel.Vector3) + DmeVertexDeltaData[keywords['norm'] + "Indices"] = datamodel.make_array(shape_normIndices,int) + + if wrinkle_scale: + vertexFormat.append(keywords["wrinkle"]) + num_wrinkles += 1 + DmeVertexDeltaData[keywords["wrinkle"]] = datamodel.make_array(wrinkle,float) + DmeVertexDeltaData[keywords["wrinkle"] + "Indices"] = datamodel.make_array(wrinkleIndices,int) + + bpy.context.window_manager.progress_update(len(shape_names) / num_shapes) + if two_percent and len(shape_names) % two_percent == 0: + print(".",debug_only=True,newline=False) + + if bpy.app.debug_value <= 1: + for shape in bake.shapes.values(): + bpy.data.meshes.remove(shape) + del shape + bake.shapes.clear() + + 
print(debug_only=True) + bench.report("shapes") + print("- {} flexes ({} with wrinklemaps) + {} correctives".format(num_shapes - num_correctives,num_wrinkles,num_correctives)) + + vca_matrix = ob.matrix_world.inverted() + for vca_name,vca in bake_results[0].vertex_animations.items(): + frame_shapes = [] + + for i, vca_ob in enumerate(vca): + DmeVertexDeltaData = dm.add_element("{}-{}".format(vca_name,i),"DmeVertexDeltaData",id=ob.name+vca_name+str(i)) + delta_states.append(DmeVertexDeltaData) + frame_shapes.append(DmeVertexDeltaData) + DmeVertexDeltaData["vertexFormat"] = datamodel.make_array([ "positions", "normals" ],str) + + shape_pos = [] + shape_posIndices = [] + shape_norms = [] + shape_normIndices = [] + + for shape_loop in vca_ob.data.loops: + shape_vert = vca_ob.data.vertices[shape_loop.vertex_index] + ob_loop = ob.data.loops[shape_loop.index] + ob_vert = ob.data.vertices[ob_loop.vertex_index] + + if ob_vert.co != shape_vert.co: + delta = vca_matrix @ shape_vert.co - ob_vert.co + + if abs(delta.length) > 1e-5: + shape_pos.append(datamodel.Vector3(delta)) + shape_posIndices.append(ob_vert.index) + + norm = Vector(shape_loop.normal) + norm.rotate(vca_matrix) + if abs(1.0 - norm.dot(ob_loop.normal)) > epsilon[0]: + shape_norms.append(datamodel.Vector3(norm - ob_loop.normal)) + shape_normIndices.append(shape_loop.index) + + DmeVertexDeltaData["positions"] = datamodel.make_array(shape_pos,datamodel.Vector3) + DmeVertexDeltaData["positionsIndices"] = datamodel.make_array(shape_posIndices,int) + DmeVertexDeltaData["normals"] = datamodel.make_array(shape_norms,datamodel.Vector3) + DmeVertexDeltaData["normalsIndices"] = datamodel.make_array(shape_normIndices,int) + + removeObject(vca_ob) + vca[i] = None + + if vca.export_sequence: # generate and export a skeletal animation that drives the vertex animation + vca_arm = bpy.data.objects.new("vca_arm",bpy.data.armatures.new("vca_arm")) + bpy.context.scene.collection.objects.link(vca_arm) + 
bpy.context.view_layer.objects.active = vca_arm + + bpy.ops.object.mode_set(mode='EDIT') + vca_bone_name = "vcabone_" + vca_name + vca_bone = vca_arm.data.edit_bones.new(vca_bone_name) + vca_bone.tail.y = 1 + + bpy.context.scene.frame_set(0) + mat = getUpAxisMat('y').inverted() + # DMX animations don't handle missing root bones or meshes, so create bones to represent them + if self.armature_src: + for bone in [bone for bone in self.armature_src.data.bones if bone.parent is None]: + b = vca_arm.data.edit_bones.new(bone.name) + b.head = mat @ bone.head + b.tail = mat @ bone.tail + else: + for bake in bake_results: + bake_mat = mat @ bake.object.matrix_world + b = vca_arm.data.edit_bones.new(bake.name) + b.head = bake_mat @ b.head + b.tail = bake_mat @ Vector([0,1,0]) + + bpy.ops.object.mode_set(mode='POSE') + ops.pose.armature_apply() # refreshes the armature's internal state, required! + action = vca_arm.animation_data_create().action = bpy.data.actions.new("vcaanim_" + vca_name) + for i in range(2): + fc = action.fcurves.new('pose.bones["{}"].location'.format(vca_bone_name),index=i) + fc.keyframe_points.add(count=2) + for key in fc.keyframe_points: key.interpolation = 'LINEAR' + if i == 0: fc.keyframe_points[0].co = (0,1.0) + fc.keyframe_points[1].co = (vca.num_frames,1.0) + fc.update() + + # finally, write it out + self.exportId(bpy.context,vca_arm) + written += 1 + + if delta_states: + DmeMesh["deltaStates"] = datamodel.make_array(delta_states,datamodel.Element) + DmeMesh["deltaStateWeights"] = DmeMesh["deltaStateWeightsLagged"] = \ + datamodel.make_array([datamodel.Vector2([0.0,0.0])] * len(delta_states),datamodel.Vector2) + + targets = DmeCombinationOperator["targets"] + added = False + for elem in targets: + if elem.type == "DmeFlexRules": + if elem["deltaStates"][0].name in shape_names: # can't have the same delta name on multiple objects + elem["target"] = DmeMesh + added = True + if not added: + targets.append(DmeMesh) + + if len(bake_results) == 1 and 
bake_results[0].object.type == 'ARMATURE': # animation + ad = self.armature.animation_data + + anim_len = animationLength(ad) if ad else 0 + if anim_len == 0: + self.warning(get_id("exporter_err_noframes",True).format(self.armature_src.name)) + + if ad.action and hasattr(ad.action,'fps'): + fps = bpy.context.scene.render.fps = ad.action.fps + bpy.context.scene.render.fps_base = 1 + else: + fps = bpy.context.scene.render.fps * bpy.context.scene.render.fps_base + + DmeChannelsClip = dm.add_element(name,"DmeChannelsClip",id=name+"clip") + DmeAnimationList = dm.add_element(armature_name,"DmeAnimationList",id=armature_name+"list") + DmeAnimationList["animations"] = datamodel.make_array([DmeChannelsClip],datamodel.Element) + root["animationList"] = DmeAnimationList + + DmeTimeFrame = dm.add_element("timeframe","DmeTimeFrame",id=name+"time") + duration = anim_len / fps + if dm.format_ver >= 11: + DmeTimeFrame["duration"] = datamodel.Time(duration) + else: + DmeTimeFrame["durationTime"] = int(duration * 10000) + DmeTimeFrame["scale"] = 1.0 + DmeChannelsClip["timeFrame"] = DmeTimeFrame + DmeChannelsClip["frameRate"] = fps if source2 else int(fps) + + channels = DmeChannelsClip["channels"] = datamodel.make_array([],datamodel.Element) + bone_channels = {} + def makeChannel(bone): + bone_channels[bone.name] = [] + channel_template = [ + [ "_p", "position", "Vector3", datamodel.Vector3 ], + [ "_o", "orientation", "Quaternion", datamodel.Quaternion ] + ] + for template in channel_template: + cur = dm.add_element(bone.name + template[0],"DmeChannel",id=bone.name+template[0]) + cur["toAttribute"] = template[1] + cur["toElement"] = (bone_elements[bone.name] if bone else DmeModel)["transform"] + cur["mode"] = 1 + val_arr = dm.add_element(template[2]+" log","Dme"+template[2]+"LogLayer",cur.name+"loglayer") + cur["log"] = dm.add_element(template[2]+" log","Dme"+template[2]+"Log",cur.name+"log") + cur["log"]["layers"] = datamodel.make_array([val_arr],datamodel.Element) + 
val_arr["times"] = datamodel.make_array([],datamodel.Time if dm.format_ver > 11 else int) + val_arr["values"] = datamodel.make_array([],template[3]) + if bone: bone_channels[bone.name].append(val_arr) + channels.append(cur) + + for bone in self.exportable_bones: + makeChannel(bone) + num_frames = int(anim_len + 1) + bench.report("Animation setup") + prev_pos = {} + prev_rot = {} + skipped_pos = {} + skipped_rot = {} + + two_percent = num_frames / 50 + print("Frames: ",debug_only=True,newline=False) + for frame in range(0,num_frames): + bpy.context.window_manager.progress_update(frame/num_frames) + bpy.context.scene.frame_set(frame) + keyframe_time = datamodel.Time(frame / fps) if dm.format_ver > 11 else int(frame/fps * 10000) + evaluated_bones = self.getEvaluatedPoseBones() + for bone in evaluated_bones: + channel = bone_channels[bone.name] + + cur_p = bone.parent + while cur_p and not cur_p in evaluated_bones: cur_p = cur_p.parent + if cur_p: + relMat = cur_p.matrix.inverted() @ bone.matrix + else: + relMat = self.armature.matrix_world @ bone.matrix + + pos = relMat.to_translation() + if bone.parent: + for j in range(3): pos[j] *= armature_scale[j] + + rot = relMat.to_quaternion() + rot_vec = Vector(rot.to_euler()) + + if not prev_pos.get(bone) or pos - prev_pos[bone] > epsilon: + skip_time = skipped_pos.get(bone) + if skip_time != None: + channel[0]["times"].append(skip_time) + channel[0]["values"].append(channel[0]["values"][-1]) + del skipped_pos[bone] + + channel[0]["times"].append(keyframe_time) + channel[0]["values"].append(datamodel.Vector3(pos)) + else: + skipped_pos[bone] = keyframe_time + + + if not prev_rot.get(bone) or rot_vec - prev_rot[bone] > epsilon: + skip_time = skipped_rot.get(bone) + if skip_time != None: + channel[1]["times"].append(skip_time) + channel[1]["values"].append(channel[1]["values"][-1]) + del skipped_rot[bone] + + channel[1]["times"].append(keyframe_time) + channel[1]["values"].append(getDatamodelQuat(rot)) + else: + 
skipped_rot[bone] = keyframe_time + + prev_pos[bone] = pos + prev_rot[bone] = rot_vec + + if two_percent and frame % two_percent: + print(".",debug_only=True,newline=False) + print(debug_only=True) + + bpy.context.window_manager.progress_update(0.99) + print("- Writing DMX...") + try: + if bpy.context.scene.vs.use_kv2: + dm.write(filepath,"keyvalues2",1) + else: + dm.write(filepath,"binary",State.datamodelEncoding) + written += 1 + except (PermissionError, FileNotFoundError) as err: + self.error(get_id("exporter_err_open", True).format("DMX",err)) + + bench.report("write") + if bench.quiet: + print("- DMX export took",bench.total(),"\n") + + return written diff --git a/io_scene_valvesource/flex.py b/io_scene_valvesource/flex.py index b0bd9dc..8bc2931 100644 --- a/io_scene_valvesource/flex.py +++ b/io_scene_valvesource/flex.py @@ -1,232 +1,232 @@ -# Copyright (c) 2014 Tom Edwards contact@steamreview.org -# -# ##### BEGIN GPL LICENSE BLOCK ##### -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. -# -# ##### END GPL LICENSE BLOCK ##### - -import bpy, re -from . 
import datamodel, utils -from .utils import get_id, getCorrectiveShapeSeparator - -class DmxWriteFlexControllers(bpy.types.Operator): - bl_idname = "export_scene.dmx_flex_controller" - bl_label = get_id("gen_block") - bl_description = get_id("gen_block_tip") - bl_options = {'UNDO','INTERNAL'} - - @classmethod - def poll(cls, context): - return utils.hasShapes(utils.get_active_exportable(context).item, valid_only=False) - - @classmethod - def make_controllers(cls,id): - dm = datamodel.DataModel("model",1) - - objects = [] - shapes = set() - - if type(id) == bpy.types.Collection: - objects.extend(list([ob for ob in id.objects if ob.data and ob.type in utils.shape_types and ob.data.shape_keys])) - else: - objects.append(id) - - name = "flex_{}".format(id.name) - root = dm.add_element(name,id=name) - DmeCombinationOperator = dm.add_element("combinationOperator","DmeCombinationOperator",id=id.name+"controllers") - root["combinationOperator"] = DmeCombinationOperator - controls = DmeCombinationOperator["controls"] = datamodel.make_array([],datamodel.Element) - - def createController(namespace,name,deltas): - DmeCombinationInputControl = dm.add_element(name,"DmeCombinationInputControl",id=namespace + name + "inputcontrol") - controls.append(DmeCombinationInputControl) - - DmeCombinationInputControl["rawControlNames"] = datamodel.make_array(deltas,str) - DmeCombinationInputControl["stereo"] = False - DmeCombinationInputControl["eyelid"] = False - - DmeCombinationInputControl["flexMax"] = 1.0 - DmeCombinationInputControl["flexMin"] = 0.0 - - DmeCombinationInputControl["wrinkleScales"] = datamodel.make_array([0.0] * len(deltas),float) - - for ob in [ob for ob in objects if ob.data.shape_keys]: - for shape in [shape for shape in ob.data.shape_keys.key_blocks[1:] if not getCorrectiveShapeSeparator() in shape.name and shape.name not in shapes]: - createController(ob.name, shape.name, [shape.name]) - shapes.add(shape.name) - - for vca in id.vs.vertex_animations: - 
createController(id.name, vca.name, ["{}-{}".format(vca.name,i) for i in range(vca.end - vca.start)]) - - controlValues = DmeCombinationOperator["controlValues"] = datamodel.make_array( [ [0.0,0.0,0.5] ] * len(controls), datamodel.Vector3) - DmeCombinationOperator["controlValuesLagged"] = datamodel.make_array( controlValues, datamodel.Vector3) - DmeCombinationOperator["usesLaggedValues"] = False - - DmeCombinationOperator["dominators"] = datamodel.make_array([],datamodel.Element) - targets = DmeCombinationOperator["targets"] = datamodel.make_array([],datamodel.Element) - - return dm - - def execute(self, context): - utils.State.update_scene() - - id = utils.get_active_exportable(context).item - dm = self.make_controllers(id) - - text = bpy.data.texts.new(dm.root.name) - text.from_string(dm.echo("keyvalues2",1)) - - if not id.vs.flex_controller_source or bpy.data.texts.get(id.vs.flex_controller_source): - id.vs.flex_controller_source = text.name - - self.report({'INFO'},get_id("gen_block_success", True).format(text.name)) - - return {'FINISHED'} - -class ActiveDependencyShapes(bpy.types.Operator): - bl_idname = "object.shape_key_activate_dependents" - bl_label = get_id("activate_dep_shapes") - bl_description = get_id("activate_dep_shapes_tip") - bl_options = {'UNDO'} - - @classmethod - def poll(cls, context): - return context.active_object and context.active_object.active_shape_key and context.active_object.active_shape_key.name.find(getCorrectiveShapeSeparator()) != -1 - - def execute(self, context): - context.active_object.show_only_shape_key = False - active_key = context.active_object.active_shape_key - subkeys = set(getCorrectiveShapeKeyDrivers(active_key) or active_key.name.split(getCorrectiveShapeSeparator())) - num_activated = 0 - for key in context.active_object.data.shape_keys.key_blocks: - if key == active_key or set(key.name.split(getCorrectiveShapeSeparator())) <= subkeys: - key.value = 1 - num_activated += 1 - else: - key.value = 0 - 
self.report({'INFO'},get_id("activate_dep_shapes_success", True).format(num_activated - 1)) - return {'FINISHED'} - -class AddCorrectiveShapeDrivers(bpy.types.Operator): - bl_idname = "object.sourcetools_generate_corrective_drivers" - bl_label = get_id("gen_drivers") - bl_description = get_id("gen_drivers_tip") - bl_options = {'UNDO'} - - @classmethod - def poll(cls, context): - return context.active_object and context.active_object.active_shape_key - - def execute(self, context): - keys = context.active_object.data.shape_keys - for key in keys.key_blocks: - subkeys = getCorrectiveShapeKeyDrivers(key) or [] - if key.name.find(getCorrectiveShapeSeparator()) != -1: - name_subkeys = [subkey for subkey in key.name.split(getCorrectiveShapeSeparator()) if subkey in keys.key_blocks] - subkeys = set([*subkeys, *name_subkeys]) - if subkeys: - sorted = list(subkeys) - sorted.sort() - self.addDrivers(key, sorted) - return {'FINISHED'} - - @classmethod - def addDrivers(cls, key, driver_names): - key.driver_remove("value") - fcurve = key.driver_add("value") - fcurve.modifiers.remove(fcurve.modifiers[0]) - fcurve.driver.type = 'MIN' - for driver_key in driver_names: - var = fcurve.driver.variables.new() - var.name = driver_key - var.targets[0].id_type = 'KEY' - var.targets[0].id = key.id_data - var.targets[0].data_path = "key_blocks[\"{}\"].value".format(driver_key) - -class RenameShapesToMatchCorrectiveDrivers(bpy.types.Operator): - bl_idname = "object.sourcetools_rename_to_corrective_drivers" - bl_label = get_id("apply_drivers") - bl_description = get_id("apply_drivers_tip") - bl_options = {'UNDO'} - - @classmethod - def poll(cls, context): - return context.active_object and context.active_object.active_shape_key - - def execute(self, context): - renamed = 0 - for key in context.active_object.data.shape_keys.key_blocks: - driver_shapes = getCorrectiveShapeKeyDrivers(key) - if driver_shapes: - generated_name = getCorrectiveShapeSeparator().join(driver_shapes) - if key.name != 
generated_name: - key.name = generated_name - renamed += 1 - - self.report({'INFO'},get_id("apply_drivers_success", True).format(renamed)) - return {'FINISHED'} - -class InsertUUID(bpy.types.Operator): - bl_idname = "text.insert_uuid" - bl_label = get_id("insert_uuid") - bl_description = get_id("insert_uuid_tip") - - @classmethod - def poll(cls,context): - return context.space_data.type == 'TEXT_EDITOR' and context.space_data.text - - def execute(self,context): - text = context.space_data.text - line = text.current_line - if 0 and len(line.body) >= 36: # 2.69 https://developer.blender.org/T38386 - sel_range = [max(0,text.current_character - 36),min(len(line.body),text.current_character + 36)] - sel_range.sort() - - m = re.search(r"[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}",line.body[sel_range[0]:sel_range[1]],re.I) - if m: - line.body = line.body[:m.start()] + str(datamodel.uuid.uuid4()) + line.body[m.end():] - return {'FINISHED'} - - text.write(str(datamodel.uuid.uuid4())) - return {'FINISHED'} - -class InvalidDriverError(LookupError): - def __init__(self, key, target_key): - LookupError(self, "Shape key '{}' has an invalid corrective driver targeting key '{}'".format(key, target_key)) - self.key = key - self.target_key = target_key - -def getCorrectiveShapeKeyDrivers(shape_key, raise_on_invalid = False): - owner = shape_key.id_data - drivers = owner.animation_data.drivers if owner.animation_data else None - if not drivers: return None - - def shapeName(path): - m = re.match(r'key_blocks\["(.*?)"\].value', path) - return m[1] if m else None - - fcurve = next((fc for fc in drivers if shapeName(fc.data_path) == shape_key.name), None) - if not fcurve or not fcurve.driver or not fcurve.driver.type == 'MIN': return None - - keys = [] - for variable in (v for v in fcurve.driver.variables if v.type == 'SINGLE_PROP' and v.id_data == owner and v.targets): - target_key = shapeName(variable.targets[0].data_path) - if target_key: - if raise_on_invalid and 
not variable.is_valid: - raise InvalidDriverError(shape_key, target_key) - keys.append(target_key) - - return keys +# Copyright (c) 2014 Tom Edwards contact@steamreview.org +# +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### + +import bpy, re +from . import datamodel, utils +from .utils import get_id, getCorrectiveShapeSeparator + +class DmxWriteFlexControllers(bpy.types.Operator): + bl_idname = "export_scene.dmx_flex_controller" + bl_label = get_id("gen_block") + bl_description = get_id("gen_block_tip") + bl_options = {'UNDO','INTERNAL'} + + @classmethod + def poll(cls, context): + return utils.hasShapes(utils.get_active_exportable(context).item, valid_only=False) + + @classmethod + def make_controllers(cls,id): + dm = datamodel.DataModel("model",1) + + objects = [] + shapes = set() + + if type(id) == bpy.types.Collection: + objects.extend(list([ob for ob in id.objects if ob.data and ob.type in utils.shape_types and ob.data.shape_keys])) + else: + objects.append(id) + + name = "flex_{}".format(id.name) + root = dm.add_element(name,id=name) + DmeCombinationOperator = dm.add_element("combinationOperator","DmeCombinationOperator",id=id.name+"controllers") + root["combinationOperator"] = DmeCombinationOperator + controls = 
DmeCombinationOperator["controls"] = datamodel.make_array([],datamodel.Element) + + def createController(namespace,name,deltas): + DmeCombinationInputControl = dm.add_element(name,"DmeCombinationInputControl",id=namespace + name + "inputcontrol") + controls.append(DmeCombinationInputControl) + + DmeCombinationInputControl["rawControlNames"] = datamodel.make_array(deltas,str) + DmeCombinationInputControl["stereo"] = False + DmeCombinationInputControl["eyelid"] = False + + DmeCombinationInputControl["flexMax"] = 1.0 + DmeCombinationInputControl["flexMin"] = 0.0 + + DmeCombinationInputControl["wrinkleScales"] = datamodel.make_array([0.0] * len(deltas),float) + + for ob in [ob for ob in objects if ob.data.shape_keys]: + for shape in [shape for shape in ob.data.shape_keys.key_blocks[1:] if not getCorrectiveShapeSeparator() in shape.name and shape.name not in shapes]: + createController(ob.name, shape.name, [shape.name]) + shapes.add(shape.name) + + for vca in id.vs.vertex_animations: + createController(id.name, vca.name, ["{}-{}".format(vca.name,i) for i in range(vca.end - vca.start)]) + + controlValues = DmeCombinationOperator["controlValues"] = datamodel.make_array( [ [0.0,0.0,0.5] ] * len(controls), datamodel.Vector3) + DmeCombinationOperator["controlValuesLagged"] = datamodel.make_array( controlValues, datamodel.Vector3) + DmeCombinationOperator["usesLaggedValues"] = False + + DmeCombinationOperator["dominators"] = datamodel.make_array([],datamodel.Element) + targets = DmeCombinationOperator["targets"] = datamodel.make_array([],datamodel.Element) + + return dm + + def execute(self, context): + utils.State.update_scene() + + id = utils.get_active_exportable(context).item + dm = self.make_controllers(id) + + text = bpy.data.texts.new(dm.root.name) + text.from_string(dm.echo("keyvalues2",1)) + + if not id.vs.flex_controller_source or bpy.data.texts.get(id.vs.flex_controller_source): + id.vs.flex_controller_source = text.name + + 
self.report({'INFO'},get_id("gen_block_success", True).format(text.name)) + + return {'FINISHED'} + +class ActiveDependencyShapes(bpy.types.Operator): + bl_idname = "object.shape_key_activate_dependents" + bl_label = get_id("activate_dep_shapes") + bl_description = get_id("activate_dep_shapes_tip") + bl_options = {'UNDO'} + + @classmethod + def poll(cls, context): + return context.active_object and context.active_object.active_shape_key and context.active_object.active_shape_key.name.find(getCorrectiveShapeSeparator()) != -1 + + def execute(self, context): + context.active_object.show_only_shape_key = False + active_key = context.active_object.active_shape_key + subkeys = set(getCorrectiveShapeKeyDrivers(active_key) or active_key.name.split(getCorrectiveShapeSeparator())) + num_activated = 0 + for key in context.active_object.data.shape_keys.key_blocks: + if key == active_key or set(key.name.split(getCorrectiveShapeSeparator())) <= subkeys: + key.value = 1 + num_activated += 1 + else: + key.value = 0 + self.report({'INFO'},get_id("activate_dep_shapes_success", True).format(num_activated - 1)) + return {'FINISHED'} + +class AddCorrectiveShapeDrivers(bpy.types.Operator): + bl_idname = "object.sourcetools_generate_corrective_drivers" + bl_label = get_id("gen_drivers") + bl_description = get_id("gen_drivers_tip") + bl_options = {'UNDO'} + + @classmethod + def poll(cls, context): + return context.active_object and context.active_object.active_shape_key + + def execute(self, context): + keys = context.active_object.data.shape_keys + for key in keys.key_blocks: + subkeys = getCorrectiveShapeKeyDrivers(key) or [] + if key.name.find(getCorrectiveShapeSeparator()) != -1: + name_subkeys = [subkey for subkey in key.name.split(getCorrectiveShapeSeparator()) if subkey in keys.key_blocks] + subkeys = set([*subkeys, *name_subkeys]) + if subkeys: + sorted = list(subkeys) + sorted.sort() + self.addDrivers(key, sorted) + return {'FINISHED'} + + @classmethod + def addDrivers(cls, key, 
driver_names): + key.driver_remove("value") + fcurve = key.driver_add("value") + fcurve.modifiers.remove(fcurve.modifiers[0]) + fcurve.driver.type = 'MIN' + for driver_key in driver_names: + var = fcurve.driver.variables.new() + var.name = driver_key + var.targets[0].id_type = 'KEY' + var.targets[0].id = key.id_data + var.targets[0].data_path = "key_blocks[\"{}\"].value".format(driver_key) + +class RenameShapesToMatchCorrectiveDrivers(bpy.types.Operator): + bl_idname = "object.sourcetools_rename_to_corrective_drivers" + bl_label = get_id("apply_drivers") + bl_description = get_id("apply_drivers_tip") + bl_options = {'UNDO'} + + @classmethod + def poll(cls, context): + return context.active_object and context.active_object.active_shape_key + + def execute(self, context): + renamed = 0 + for key in context.active_object.data.shape_keys.key_blocks: + driver_shapes = getCorrectiveShapeKeyDrivers(key) + if driver_shapes: + generated_name = getCorrectiveShapeSeparator().join(driver_shapes) + if key.name != generated_name: + key.name = generated_name + renamed += 1 + + self.report({'INFO'},get_id("apply_drivers_success", True).format(renamed)) + return {'FINISHED'} + +class InsertUUID(bpy.types.Operator): + bl_idname = "text.insert_uuid" + bl_label = get_id("insert_uuid") + bl_description = get_id("insert_uuid_tip") + + @classmethod + def poll(cls,context): + return context.space_data.type == 'TEXT_EDITOR' and context.space_data.text + + def execute(self,context): + text = context.space_data.text + line = text.current_line + if 0 and len(line.body) >= 36: # 2.69 https://developer.blender.org/T38386 + sel_range = [max(0,text.current_character - 36),min(len(line.body),text.current_character + 36)] + sel_range.sort() + + m = re.search(r"[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}",line.body[sel_range[0]:sel_range[1]],re.I) + if m: + line.body = line.body[:m.start()] + str(datamodel.uuid.uuid4()) + line.body[m.end():] + return {'FINISHED'} + + 
text.write(str(datamodel.uuid.uuid4())) + return {'FINISHED'} + +class InvalidDriverError(LookupError): + def __init__(self, key, target_key): + LookupError(self, "Shape key '{}' has an invalid corrective driver targeting key '{}'".format(key, target_key)) + self.key = key + self.target_key = target_key + +def getCorrectiveShapeKeyDrivers(shape_key, raise_on_invalid = False): + owner = shape_key.id_data + drivers = owner.animation_data.drivers if owner.animation_data else None + if not drivers: return None + + def shapeName(path): + m = re.match(r'key_blocks\["(.*?)"\].value', path) + return m[1] if m else None + + fcurve = next((fc for fc in drivers if shapeName(fc.data_path) == shape_key.name), None) + if not fcurve or not fcurve.driver or not fcurve.driver.type == 'MIN': return None + + keys = [] + for variable in (v for v in fcurve.driver.variables if v.type == 'SINGLE_PROP' and v.id_data == owner and v.targets): + target_key = shapeName(variable.targets[0].data_path) + if target_key: + if raise_on_invalid and not variable.is_valid: + raise InvalidDriverError(shape_key, target_key) + keys.append(target_key) + + return keys diff --git a/io_scene_valvesource/import_smd.py b/io_scene_valvesource/import_smd.py index 1c55452..788580d 100644 --- a/io_scene_valvesource/import_smd.py +++ b/io_scene_valvesource/import_smd.py @@ -1,1741 +1,1749 @@ -# Copyright (c) 2014 Tom Edwards contact@steamreview.org -# -# ##### BEGIN GPL LICENSE BLOCK ##### -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. -# -# ##### END GPL LICENSE BLOCK ##### - -import bpy, bmesh, random, collections -from bpy import ops -from bpy.app.translations import pgettext -from bpy.props import StringProperty, CollectionProperty, BoolProperty, EnumProperty -from mathutils import Quaternion, Euler -from .utils import * -from . import datamodel, ordered_set, flex - -class SmdImporter(bpy.types.Operator, Logger): - bl_idname = "import_scene.smd" - bl_label = get_id("importer_title") - bl_description = get_id("importer_tip") - bl_options = {'UNDO', 'PRESET'} - - qc = None - smd = None - - # Properties used by the file browser - filepath : StringProperty(name="File Path", description="File filepath used for importing the SMD/VTA/DMX/QC file", maxlen=1024, default="", options={'HIDDEN'}) - files : CollectionProperty(type=bpy.types.OperatorFileListElement, options={'HIDDEN'}) - directory : StringProperty(maxlen=1024, default="", subtype='FILE_PATH', options={'HIDDEN'}) - filter_folder : BoolProperty(name="Filter Folders", description="", default=True, options={'HIDDEN'}) - filter_glob : StringProperty(default="*.smd;*.vta;*.dmx;*.qc;*.qci", options={'HIDDEN'}) - - # Custom properties - doAnim : BoolProperty(name=get_id("importer_doanims"), default=True) - createCollections : BoolProperty(name=get_id("importer_use_collections"), description=get_id("importer_use_collections_tip"), default=True) - makeCamera : BoolProperty(name=get_id("importer_makecamera"),description=get_id("importer_makecamera_tip"),default=False) - append : EnumProperty(name=get_id("importer_bones_mode"),description=get_id("importer_bones_mode_desc"),items=( - ('VALIDATE',get_id("importer_bones_validate"),get_id("importer_bones_validate_desc")), - 
('APPEND',get_id("importer_bones_append"),get_id("importer_bones_append_desc")), - ('NEW_ARMATURE',get_id("importer_bones_newarm"),get_id("importer_bones_newarm_desc"))), - default='APPEND') - upAxis : EnumProperty(name="Up Axis",items=axes,default='Z',description=get_id("importer_up_tip")) - rotMode : EnumProperty(name=get_id("importer_rotmode"),items=( ('XYZ', "Euler", ''), ('QUATERNION', "Quaternion", "") ),default='XYZ',description=get_id("importer_rotmode_tip")) - boneMode : EnumProperty(name=get_id("importer_bonemode"),items=(('NONE','Default',''),('ARROWS','Arrows',''),('SPHERE','Sphere','')),default='SPHERE',description=get_id("importer_bonemode_tip")) - - def execute(self, context): - pre_obs = set(bpy.context.scene.objects) - pre_eem = context.preferences.edit.use_enter_edit_mode - pre_append = self.append - context.preferences.edit.use_enter_edit_mode = False - - self.existingBones = [] # bones which existed before importing began - self.num_files_imported = 0 - - for filepath in [os.path.join(self.directory,file.name) for file in self.files] if self.files else [self.filepath]: - filepath_lc = filepath.lower() - if filepath_lc.endswith('.qc') or filepath_lc.endswith('.qci'): - self.num_files_imported = self.readQC(filepath, False, self.properties.doAnim, self.properties.makeCamera, self.properties.rotMode, outer_qc=True) - bpy.context.view_layer.objects.active = self.qc.a - elif filepath_lc.endswith('.smd'): - self.num_files_imported = self.readSMD(filepath, self.properties.upAxis, self.properties.rotMode) - elif filepath_lc.endswith ('.vta'): - self.num_files_imported = self.readSMD(filepath, self.properties.upAxis, self.properties.rotMode, smd_type=FLEX) - elif filepath_lc.endswith('.dmx'): - self.num_files_imported = self.readDMX(filepath, self.properties.upAxis, self.properties.rotMode) - else: - if len(filepath_lc) == 0: - self.report({'ERROR'},get_id("importer_err_nofile")) - else: - self.report({'ERROR'},get_id("importer_err_badfile", 
True).format(os.path.basename(filepath))) - - self.append = pre_append - - self.errorReport(get_id("importer_complete", True).format(self.num_files_imported,self.elapsed_time())) - if self.num_files_imported: - ops.object.select_all(action='DESELECT') - new_obs = set(bpy.context.scene.objects).difference(pre_obs) - xy = xyz = 0 - for ob in new_obs: - ob.select_set(True) - # FIXME: assumes meshes are centered around their origins - xy = max(xy, int(max(ob.dimensions[0],ob.dimensions[1])) ) - xyz = max(xyz, max(xy,int(ob.dimensions[2]))) - bpy.context.view_layer.objects.active = self.qc.a if self.qc else self.smd.a - for area in context.screen.areas: - if area.type == 'VIEW_3D': - area.spaces.active.clip_end = max( area.spaces.active.clip_end, xyz * 2 ) - if bpy.context.area and bpy.context.area.type == 'VIEW_3D' and bpy.context.region: - ops.view3d.view_selected() - - context.preferences.edit.use_enter_edit_mode = pre_eem - self.append = pre_append - - State.update_scene(context.scene) - - return {'FINISHED'} - - def invoke(self, context, event): - self.properties.upAxis = context.scene.vs.up_axis - bpy.context.window_manager.fileselect_add(self) - return {'RUNNING_MODAL'} - - def ensureAnimationBonesValidated(self): - if self.smd.jobType == ANIM and self.append == 'APPEND' and (hasattr(self.smd,"a") or self.findArmature()): - print("- Appending bones from animations is destructive; switching Bone Append Mode to \"Validate\"") - self.append = 'VALIDATE' - - # Datablock names are limited to 63 bytes of UTF-8 - def truncate_id_name(self, name, id_type): - truncated = bytes(name,'utf8') - if len(truncated) < 64: - return name - - truncated = truncated[:63] - while truncated: - try: - truncated = truncated.decode('utf8') - break - except UnicodeDecodeError: - truncated = truncated[:-1] - self.error(get_id("importer_err_namelength",True).format(pgettext(id_type if isinstance(id_type,str) else id_type.__name__), name, truncated)) - return truncated - - # Identifies what 
type of SMD this is. Cannot tell between reference/lod/collision meshes! - def scanSMD(self): - smd = self.smd - for line in smd.file: - if line == "triangles\n": - smd.jobType = REF - print("- This is a mesh") - break - if line == "vertexanimation\n": - print("- This is a flex animation library") - smd.jobType = FLEX - break - - # Finished the file - - if smd.jobType == None: - print("- This is a skeltal animation or pose") # No triangles, no flex - must be animation - smd.jobType = ANIM - self.ensureAnimationBonesValidated() - - smd.file.seek(0,0) # rewind to start of file - - # joins up "quoted values" that would otherwise be delimited, removes comments - def parseQuoteBlockedLine(self,line,lower=True): - if len(line) == 0: - return ["\n"] - - qc = self.qc - words = [] - last_word_start = 0 - in_quote = in_whitespace = False - - # The last char of the last line in the file was missed - if line[-1] != "\n": - line += "\n" - - for i in range(len(line)): - char = line[i] - nchar = pchar = None - if i < len(line)-1: - nchar = line[i+1] - if i > 0: - pchar = line[i-1] - - # line comment - precedence over block comment - if (char == "/" and nchar == "/") or char in ['#',';']: - if i > 0: - i = i-1 # last word will be caught after the loop - break # nothing more this line - - if qc: - #block comment - if qc.in_block_comment: - if char == "/" and pchar == "*": # done backwards so we don't have to skip two chars - qc.in_block_comment = False - continue - elif char == "/" and nchar == "*": # note: nchar, not pchar - qc.in_block_comment = True - continue - - # quote block - if char == "\"" and not pchar == "\\": # quotes can be escaped - in_quote = (in_quote == False) - if not in_quote: - if char in [" ","\t"]: - cur_word = line[last_word_start:i].strip("\"") # characters between last whitespace and here - if len(cur_word) > 0: - if (lower and os.name == 'nt') or cur_word[0] == "$": - cur_word = cur_word.lower() - words.append(cur_word) - last_word_start = i+1 # we are in 
whitespace, first new char is the next one - - # catch last word and any '{'s crashing into it - needBracket = False - cur_word = line[last_word_start:i] - if cur_word.endswith("{"): - needBracket = True - - cur_word = cur_word.strip("\"{") - if len(cur_word) > 0: - words.append(cur_word) - - if needBracket: - words.append("{") - - if line.endswith("\\\\\n") and (len(words) == 0 or words[-1] != "\\\\"): - words.append("\\\\") # macro continuation beats everything - - return words - - # Bones - def readNodes(self): - smd = self.smd - boneParents = {} - - def addBone(id,name,parent): - bone = smd.a.data.edit_bones.new(self.truncate_id_name(name,bpy.types.Bone)) - bone.tail = 0,5,0 # Blender removes zero-length bones - - smd.boneIDs[int(id)] = bone.name - boneParents[bone.name] = int(parent) - - return bone - - if self.append != 'NEW_ARMATURE': - smd.a = smd.a or self.findArmature() - if smd.a: - - append = self.append == 'APPEND' and smd.jobType in [REF,ANIM] - - if append: - bpy.context.view_layer.objects.active = smd.a - smd.a.hide_set(False) - ops.object.mode_set(mode='EDIT',toggle=False) - self.existingBones.extend([b.name for b in smd.a.data.bones]) - - missing = validated = 0 - for line in smd.file: - if smdBreak(line): break - if smdContinue(line): continue - - id, name, parent = self.parseQuoteBlockedLine(line,lower=False)[:3] - id = int(id) - parent = int(parent) - - targetBone = smd.a.data.bones.get(name) # names, not IDs, are the key - - if targetBone: validated += 1 - elif append: - targetBone = addBone(id,name,parent) - else: missing += 1 - - if not smd.boneIDs.get(parent): - smd.phantomParentIDs[id] = parent - - smd.boneIDs[id] = targetBone.name if targetBone else name - - print("- Validated {} bones against armature \"{}\"{}".format(validated, smd.a.name, " (could not find {})".format(missing) if missing > 0 else "")) - - if not smd.a: - smd.a = self.createArmature(self.truncate_id_name((self.qc.jobName if self.qc else smd.jobName) + 
"_skeleton",bpy.types.Armature)) - if self.qc: self.qc.a = smd.a - smd.a.data.vs.implicit_zero_bone = False # Too easy to break compatibility, plus the skeleton is probably set up already - - ops.object.mode_set(mode='EDIT',toggle=False) - - # Read bone definitions from disc - for line in smd.file: - if smdBreak(line): break - if smdContinue(line): continue - - id,name,parent = self.parseQuoteBlockedLine(line,lower=False)[:3] - addBone(id,name,parent) - - # Apply parents now that all bones exist - for bone_name,parent_id in boneParents.items(): - if parent_id != -1: - smd.a.data.edit_bones[bone_name].parent = smd.a.data.edit_bones[ smd.boneIDs[parent_id] ] - - ops.object.mode_set(mode='OBJECT') - if boneParents: print("- Imported {} new bones".format(len(boneParents)) ) - - if len(smd.a.data.bones) > 128: - self.warning(get_id("importer_err_bonelimit_smd")) - - @classmethod - def findArmature(cls): - # Search the current scene for an existing armature - there can only be one skeleton in a Source model - if bpy.context.active_object and bpy.context.active_object.type == 'ARMATURE': - return bpy.context.active_object - - def isArmIn(list): - for ob in list: - if ob.type == 'ARMATURE': - return ob - - a = isArmIn(bpy.context.selected_objects) # armature in the selection? - if a: return a - - for ob in bpy.context.selected_objects: - if ob.type == 'MESH': - a = ob.find_armature() # armature modifying a selected object? - if a: return a - - return isArmIn(bpy.context.scene.objects) # armature in the scene at all? 
- - def createArmature(self,armature_name): - smd = self.smd - if bpy.context.active_object: - ops.object.mode_set(mode='OBJECT',toggle=False) - a = bpy.data.objects.new(armature_name,bpy.data.armatures.new(armature_name)) - a.show_in_front = True - a.data.display_type = 'STICK' - bpy.context.scene.collection.objects.link(a) - for i in bpy.context.selected_objects: i.select_set(False) #deselect all objects - a.select_set(True) - bpy.context.view_layer.objects.active = a - - if not smd.isDMX: - ops.object.mode_set(mode='OBJECT') - - return a - - def readFrames(self): - smd = self.smd - # We only care about pose data in some SMD types - if smd.jobType not in [REF, ANIM]: - if smd.jobType == FLEX: smd.shapeNames = {} - for line in smd.file: - line = line.strip() - if smdBreak(line): return - if smd.jobType == FLEX and line.startswith("time"): - for c in line: - if c in ['#',';','/']: - pos = line.index(c) - frame = line[:pos].split()[1] - if c == '/': pos += 1 - smd.shapeNames[frame] = line[pos+1:].strip() - - a = smd.a - bpy.context.view_layer.objects.active = smd.a - ops.object.mode_set(mode='POSE') - - num_frames = 0 - keyframes = collections.defaultdict(list) - phantom_keyframes = collections.defaultdict(list) # bones that aren't in the reference skeleton - - for line in smd.file: - if smdBreak(line): - break - if smdContinue(line): - continue - - values = line.split() - - if values[0] == "time": # frame number is a dummy value, all frames are equally spaced - if num_frames > 0: - if smd.jobType == REF: - self.warning(get_id("importer_err_refanim",True).format(smd.jobName)) - for line in smd.file: # skip to end of block - if smdBreak(line): - break - if smdContinue(line): - continue - num_frames += 1 - continue - - # Read SMD data - pos = Vector([float(values[1]), float(values[2]), float(values[3])]) - rot = Euler([float(values[4]), float(values[5]), float(values[6])]) - - keyframe = KeyFrame() - keyframe.frame = num_frames - 1 - keyframe.matrix = 
Matrix.Translation(pos) @ rot.to_matrix().to_4x4() - keyframe.pos = keyframe.rot = True - - # store the keyframe - values[0] = int(values[0]) - try: - bone = smd.a.pose.bones[ smd.boneIDs[values[0]] ] - if smd.jobType == REF and not bone.parent: - keyframe.matrix = getUpAxisMat(smd.upAxis) @ keyframe.matrix - keyframes[bone].append(keyframe) - except KeyError: - if smd.jobType == REF and not smd.phantomParentIDs.get(values[0]): - keyframe.matrix = getUpAxisMat(smd.upAxis) @ keyframe.matrix - phantom_keyframes[values[0]].append(keyframe) - - # All frames read, apply phantom bones - for ID, parentID in smd.phantomParentIDs.items(): - bone = smd.a.pose.bones.get( smd.boneIDs.get(ID) ) - if not bone: continue - for phantom_keyframe in phantom_keyframes[bone]: - phantom_parent = parentID - if len(keyframes[bone]) >= phantom_keyframe.frame: # is there a keyframe to modify? - while phantom_keyframes.get(phantom_parent): # parents are recursive - phantom_source_frame = phantom_keyframe.frame - while not phantom_keyframes[phantom_parent].get(phantom_keyframe.frame): # rewind to the last value - if phantom_source_frame == 0: continue # should never happen - phantom_source_frame -= 1 - # Apply the phantom bone, then recurse - keyframes[bone][phantom_keyframe.frame].matrix = phantom_keyframes[phantom_parent][phantom_source_frame] @ keyframes[bone][phantom_keyframe.frame].matrix - phantom_parent = smd.phantomParentIDs.get(phantom_parent) - - self.applyFrames(keyframes,num_frames) - - def applyFrames(self,keyframes,num_frames, fps = None): - smd = self.smd - ops.object.mode_set(mode='POSE') - - if self.append != 'VALIDATE' and smd.jobType in [REF,ANIM] and not self.appliedReferencePose: - self.appliedReferencePose = True - - for bone in smd.a.pose.bones: - bone.matrix_basis.identity() - for bone,kf in keyframes.items(): - if bone.name in self.existingBones: - continue - elif bone.parent and not keyframes.get(bone.parent): - bone.matrix = bone.parent.matrix @ kf[0].matrix - else: 
- bone.matrix = kf[0].matrix - ops.pose.armature_apply() - - bone_vis = None if self.properties.boneMode == 'NONE' else bpy.data.objects.get("smd_bone_vis") - - if self.properties.boneMode == 'SPHERE' and (not bone_vis or bone_vis.type != 'MESH'): - ops.mesh.primitive_ico_sphere_add(subdivisions=3,radius=2) - bone_vis = bpy.context.active_object - bone_vis.data.name = bone_vis.name = "smd_bone_vis" - bone_vis.use_fake_user = True - for collection in bone_vis.users_collection: - collection.objects.unlink(bone_vis) # don't want the user deleting this - bpy.context.view_layer.objects.active = smd.a - elif self.properties.boneMode == 'ARROWS' and (not bone_vis or bone_vis.type != 'EMPTY'): - bone_vis = bpy.data.objects.new("smd_bone_vis",None) - bone_vis.use_fake_user = True - bone_vis.empty_display_type = 'ARROWS' - bone_vis.empty_display_size = 5 - - # Calculate armature dimensions...Blender should be doing this! - maxs = [0,0,0] - mins = [0,0,0] - for bone in smd.a.data.bones: - for i in range(3): - maxs[i] = max(maxs[i],bone.head_local[i]) - mins[i] = min(mins[i],bone.head_local[i]) - - dimensions = [] - if self.qc: self.qc.dimensions = dimensions - for i in range(3): - dimensions.append(maxs[i] - mins[i]) - - length = max(0.001, (dimensions[0] + dimensions[1] + dimensions[2]) / 600) # very small indeed, but a custom bone is used for display - - # Apply spheres - ops.object.mode_set(mode='EDIT') - for bone in [smd.a.data.edit_bones[b.name] for b in keyframes.keys()]: - bone.tail = bone.head + (bone.tail - bone.head).normalized() * length # Resize loose bone tails based on armature size - smd.a.pose.bones[bone.name].custom_shape = bone_vis # apply bone shape - - - if smd.jobType == ANIM: - if not smd.a.animation_data: - smd.a.animation_data_create() - - action = bpy.data.actions.new(smd.jobName) - - if 'ActLib' in dir(bpy.types): - smd.a.animation_data.action_library.add() - else: - action.use_fake_user = True - - smd.a.animation_data.action = action - - if 'fps' in 
dir(action): - action.fps = fps if fps else 30 - bpy.context.scene.render.fps = 60 - bpy.context.scene.render.fps_base = 1 - - ops.object.mode_set(mode='POSE') - - # Create an animation - if 'ActLib' in dir(bpy.types): - bpy.context.scene.use_preview_range = bpy.context.scene.use_preview_range_action_lock = True - else: - bpy.context.scene.frame_start = 0 - bpy.context.scene.frame_end = num_frames - 1 - - for bone in smd.a.pose.bones: - bone.rotation_mode = smd.rotMode - - for bone,frames in list(keyframes.items()): - if not frames: - del keyframes[bone] - - if smd.isDMX == False: - # Remove every point but the first unless there is motion - still_bones = list(keyframes.keys()) - for bone in keyframes.keys(): - bone_keyframes = keyframes[bone] - for keyframe in bone_keyframes[1:]: - diff = keyframe.matrix.inverted() @ bone_keyframes[0].matrix - if diff.to_translation().length > 0.00001 or abs(diff.to_quaternion().w) > 0.0001: - still_bones.remove(bone) - break - for bone in still_bones: - keyframes[bone] = [keyframes[bone][0]] - - # Create Blender keyframes - def ApplyRecursive(bone): - keys = keyframes.get(bone) - if keys: - # Generate curves - curvesLoc = None - curvesRot = None - bone_string = "pose.bones[\"{}\"].".format(bone.name) - group = action.groups.new(name=bone.name) - for keyframe in keys: - if curvesLoc and curvesRot: break - if keyframe.pos and not curvesLoc: - curvesLoc = [] - for i in range(3): - curve = action.fcurves.new(data_path=bone_string + "location",index=i) - curve.group = group - curvesLoc.append(curve) - if keyframe.rot and not curvesRot: - curvesRot = [] - for i in range(3 if smd.rotMode == 'XYZ' else 4): - curve = action.fcurves.new(data_path=bone_string + "rotation_" + ("euler" if smd.rotMode == 'XYZ' else "quaternion"),index=i) - curve.group = group - curvesRot.append(curve) - - # Apply each imported keyframe - for keyframe in keys: - # Transform - if smd.a.data.vs.legacy_rotation: - keyframe.matrix @= mat_BlenderToSMD.inverted() - - 
if bone.parent: - if smd.a.data.vs.legacy_rotation: parentMat = bone.parent.matrix @ mat_BlenderToSMD - else: parentMat = bone.parent.matrix - bone.matrix = parentMat @ keyframe.matrix - else: - bone.matrix = getUpAxisMat(smd.upAxis) @ keyframe.matrix - - # Key location - if keyframe.pos: - for i in range(3): - curvesLoc[i].keyframe_points.add(1) - curvesLoc[i].keyframe_points[-1].co = [keyframe.frame, bone.location[i]] - - # Key rotation - if keyframe.rot: - if smd.rotMode == 'XYZ': - for i in range(3): - curvesRot[i].keyframe_points.add(1) - curvesRot[i].keyframe_points[-1].co = [keyframe.frame, bone.rotation_euler[i]] - else: - for i in range(4): - curvesRot[i].keyframe_points.add(1) - curvesRot[i].keyframe_points[-1].co = [keyframe.frame, bone.rotation_quaternion[i]] - - # Recurse - for child in bone.children: - ApplyRecursive(child) - - # Start keying - for bone in smd.a.pose.bones: - if not bone.parent: - ApplyRecursive(bone) - - for fc in action.fcurves: - fc.update() - - # clear any unkeyed poses - for bone in smd.a.pose.bones: - bone.location.zero() - if smd.rotMode == 'XYZ': bone.rotation_euler.zero() - else: bone.rotation_quaternion.identity() - scn = bpy.context.scene - - if scn.frame_current == 1: # Blender starts on 1, Source starts on 0 - scn.frame_set(0) - else: - scn.frame_set(scn.frame_current) - ops.object.mode_set(mode='OBJECT') - - print( "- Imported {} frames of animation".format(num_frames) ) - - def getMeshMaterial(self,mat_name): - smd = self.smd - if mat_name: - mat_name = self.truncate_id_name(mat_name, bpy.types.Material) - else: - mat_name = "Material" - - md = smd.m.data - mat = None - for candidate in bpy.data.materials: # Do we have this material already? 
- if candidate.name == mat_name: - mat = candidate - if mat: - if md.materials.get(mat.name): # Look for it on this mesh - for i in range(len(md.materials)): - if md.materials[i].name == mat.name: - mat_ind = i - break - else: # material exists, but not on this mesh - md.materials.append(mat) - mat_ind = len(md.materials) - 1 - else: # material does not exist - print("- New material: {}".format(mat_name)) - mat = bpy.data.materials.new(mat_name) - #new fix importing textures - mat.use_nodes = True - base_node = mat.node_tree.nodes["Principled BSDF"] - if base_node: - base_node.inputs["Roughness"].default_value = 1.0 - texture_node = mat.node_tree.nodes.new("ShaderNodeTexImage") - texture_path = os.path.join(os.path.dirname(self.filepath), mat_name) - try: - texture_node.image = bpy.data.images.load(texture_path) - except RuntimeError: - print(f"Текстура не найдена: {texture_path}") - - mat.node_tree.links.new(texture_node.outputs["Color"], base_node.inputs["Base Color"]) - md.materials.append(mat) - # Give it a random colour - randCol = [] - for i in range(3): - randCol.append(random.uniform(.4,1)) - randCol.append(1) - mat.diffuse_color = randCol - if smd.jobType == PHYS: - smd.m.display_type = 'SOLID' - mat_ind = len(md.materials) - 1 - - return mat, mat_ind - - # triangles block - def readPolys(self): - smd = self.smd - if smd.jobType not in [ REF, PHYS ]: - return - - mesh_name = smd.jobName - if smd.jobType == REF and not smd.jobName.lower().find("reference") and not smd.jobName.lower().endswith("ref"): - mesh_name += " ref" - mesh_name = self.truncate_id_name(mesh_name, bpy.types.Mesh) - - # Create a new mesh object, disable double-sided rendering, link it to the current scene - smd.m = bpy.data.objects.new(mesh_name,bpy.data.meshes.new(mesh_name)) - smd.m.parent = smd.a - smd.g.objects.link(smd.m) - if smd.jobType == REF: # can only have flex on a ref mesh - if self.qc: - self.qc.ref_mesh = smd.m # for VTA import - - # Create weightmap groups - for bone in 
smd.a.data.bones.values(): - smd.m.vertex_groups.new(name=bone.name) - - # Apply armature modifier - modifier = smd.m.modifiers.new(type="ARMATURE",name=pgettext("Armature")) - modifier.object = smd.a - - # Initialisation - md = smd.m.data - # Vertex values - norms = [] - - bm = bmesh.new() - bm.from_mesh(md) - weightLayer = bm.verts.layers.deform.new() - uvLayer = bm.loops.layers.uv.new() - - # ************************************************************************************************* - # There are two loops in this function: one for polygons which continues until the "end" keyword - # and one for the vertices on each polygon that loops three times. We're entering the poly one now. - countPolys = 0 - badWeights = 0 - vertMap = {} - - for line in smd.file: - line = line.rstrip("\n") - - if line and smdBreak(line): # normally a blank line means a break, but Milkshape can export SMDs with zero-length material names... - break - if smdContinue(line): - continue - - mat, mat_ind = self.getMeshMaterial(line if line else pgettext(get_id("importer_name_nomat", data=True))) - - # *************************************************************** - # Enter the vertex loop. This will run three times for each poly. - vertexCount = 0 - faceUVs = [] - vertKeys = [] - for line in smd.file: - if smdBreak(line): - break - if smdContinue(line): - continue - values = line.split() - - vertexCount+= 1 - co = [0,0,0] - norm = [0,0,0] - - # Read co-ordinates and normals - for i in range(1,4): # 0 is the deprecated bone weight value - co[i-1] = float(values[i]) - norm[i-1] = float(values[i+3]) - - co = tuple(co) - norms.append(norm) - - # Can't do these in the above for loop since there's only two - faceUVs.append( ( float(values[7]), float(values[8]) ) ) - - # Read weightmap data - vertWeights = [] - if len(values) > 10 and values[9] != "0": # got weight links? 
- for i in range(10, 10 + (int(values[9]) * 2), 2): # The range between the first and last weightlinks (each of which is *two* values) - try: - bone = smd.a.data.bones[ smd.boneIDs[int(values[i])] ] - vertWeights.append((smd.m.vertex_groups.find(bone.name), float(values[i+1]))) - except KeyError: - badWeights += 1 - else: # Fall back on the deprecated value at the start of the line - try: - bone = smd.a.data.bones[ smd.boneIDs[int(values[0])] ] - vertWeights.append((smd.m.vertex_groups.find(bone.name), 1.0)) - except KeyError: - badWeights += 1 - - vertKeys.append((co, tuple(vertWeights))) - - # Three verts? It's time for a new poly - if vertexCount == 3: - def createFace(use_cache = True): - bmVerts = [] - for vertKey in vertKeys: - bmv = vertMap.get(vertKey, None) if use_cache else None # if a vertex in this position with these bone weights exists, re-use it. - if bmv is None: - bmv = bm.verts.new(vertKey[0]) - for (bone,weight) in vertKey[1]: - bmv[weightLayer][bone] = weight - vertMap[vertKey] = bmv - bmVerts.append(bmv) - - face = bm.faces.new(bmVerts) - face.material_index = mat_ind - for i in range(3): - face.loops[i][uvLayer].uv = faceUVs[i] - - try: - createFace() - except ValueError: # face overlaps another, try again with all-new vertices - createFace(use_cache = False) - break - - # Back in polyland now, with three verts processed. 
- countPolys+= 1 - - bm.to_mesh(md) - vertMap = None - bm.free() - md.update() - - if countPolys: - ops.object.select_all(action="DESELECT") - smd.m.select_set(True) - bpy.context.view_layer.objects.active = smd.m - - ops.object.shade_smooth() - - for poly in smd.m.data.polygons: - poly.select = True - - smd.m.show_wire = smd.jobType == PHYS - - md.normals_split_custom_set(norms) - - if smd.upAxis == 'Y': - md.transform(rx90) - md.update() - - if badWeights: - self.warning(get_id("importer_err_badweights", True).format(badWeights,smd.jobName)) - print("- Imported {} polys".format(countPolys)) - - # vertexanimation block - def readShapes(self): - smd = self.smd - if smd.jobType is not FLEX: - return - - if not smd.m: - if self.qc: - smd.m = self.qc.ref_mesh - else: # user selection - if bpy.context.active_object.type in shape_types: - smd.m = bpy.context.active_object - else: - for obj in bpy.context.selected_objects: - if obj.type in shape_types: - smd.m = obj - - if not smd.m: - self.error(get_id("importer_err_shapetarget")) # FIXME: this could actually be supported - return - - if hasShapes(smd.m): - smd.m.active_shape_key_index = 0 - smd.m.show_only_shape_key = True # easier to view each shape, less confusion when several are active at once - - def vec_round(v): - return Vector([round(co,3) for co in v]) - co_map = {} - mesh_cos = [vert.co for vert in smd.m.data.vertices] - mesh_cos_rnd = None - - smd.vta_ref = None - vta_cos = [] - vta_ids = [] - - making_base_shape = True - bad_vta_verts = [] - num_shapes = 0 - md = smd.m.data - - for line in smd.file: - line = line.rstrip("\n") - - if smdBreak(line): - break - if smdContinue(line): - continue - - values = line.split() - - if values[0] == "time": - shape_name = smd.shapeNames.get(values[1]) - if smd.vta_ref == None: - if not hasShapes(smd.m, False): smd.m.shape_key_add(name=shape_name if shape_name else "Basis") - vd = bpy.data.meshes.new(name="VTA vertices") - vta_ref = smd.vta_ref = 
bpy.data.objects.new(name=vd.name,object_data=vd) - vta_ref.matrix_world = smd.m.matrix_world - smd.g.objects.link(vta_ref) - - vta_err_vg = vta_ref.vertex_groups.new(name=get_id("importer_name_unmatchedvta")) - elif making_base_shape: - vd.vertices.add(int(len(vta_cos)/3)) - vd.vertices.foreach_set("co",vta_cos) - num_vta_verts = len(vd.vertices) - del vta_cos - - mod = vta_ref.modifiers.new(name="VTA Shrinkwrap",type='SHRINKWRAP') - mod.target = smd.m - mod.wrap_method = 'NEAREST_VERTEX' - - vd = bpy.data.meshes.new_from_object(vta_ref.evaluated_get(bpy.context.evaluated_depsgraph_get())) - - vta_ref.modifiers.remove(mod) - del mod - - for i in range(len(vd.vertices)): - id = vta_ids[i] - co = vd.vertices[i].co - map_id = None - try: - map_id = mesh_cos.index(co) - except ValueError: - if not mesh_cos_rnd: - mesh_cos_rnd = [vec_round(co) for co in mesh_cos] - try: - map_id = mesh_cos_rnd.index(vec_round(co)) - except ValueError: - bad_vta_verts.append(i) - continue - co_map[id] = map_id - - bpy.data.meshes.remove(vd) - del vd - - if bad_vta_verts: - err_ratio = len(bad_vta_verts) / num_vta_verts - vta_err_vg.add(bad_vta_verts,1.0,'REPLACE') - message = get_id("importer_err_unmatched_mesh", True).format(len(bad_vta_verts), int(err_ratio * 100)) - if err_ratio == 1: - self.error(message) - return - else: - self.warning(message) - else: - removeObject(vta_ref) - making_base_shape = False - - if not making_base_shape: - smd.m.shape_key_add(name=shape_name if shape_name else values[1]) - num_shapes += 1 - - continue # to the first vertex of the new shape - - cur_id = int(values[0]) - vta_co = getUpAxisMat(smd.upAxis) @ Vector([ float(values[1]), float(values[2]), float(values[3]) ]) - - if making_base_shape: - vta_ids.append(cur_id) - vta_cos.extend(vta_co) - else: # write to the shapekey - try: - md.shape_keys.key_blocks[-1].data[ co_map[cur_id] ].co = vta_co - except KeyError: - pass - - print("- Imported",num_shapes,"flex shapes") - - # Parses a QC file - def 
readQC(self, filepath, newscene, doAnim, makeCamera, rotMode, outer_qc = False): - filename = os.path.basename(filepath) - filedir = os.path.dirname(filepath) - - def normalisePath(path): - if (os.path.sep == '/'): - path = path.replace('\\','/') - return os.path.normpath(path) - - if outer_qc: - print("\nQC IMPORTER: now working on",filename) - - qc = self.qc = QcInfo() - qc.startTime = time.time() - qc.jobName = filename - qc.root_filedir = filedir - qc.makeCamera = makeCamera - qc.animation_names = [] - if newscene: - bpy.context.screen.scene = bpy.data.scenes.new(filename) # BLENDER BUG: this currently doesn't update bpy.context.scene - else: - bpy.context.scene.name = filename - else: - qc = self.qc - - file = open(filepath, 'r') - in_bodygroup = in_lod = in_sequence = False - lod = 0 - for line_str in file: - line = self.parseQuoteBlockedLine(line_str) - if len(line) == 0: - continue - #print(line) - - # handle individual words (insert QC variable values, change slashes) - i = 0 - for word in line: - for var in qc.vars.keys(): - kw = "${}$".format(var) - pos = word.lower().find(kw) - if pos != -1: - word = word.replace(word[pos:pos+len(kw)], qc.vars[var]) - line[i] = word.replace("/","\\") # studiomdl is Windows-only - i += 1 - - # Skip macros - if line[0] == "$definemacro": - self.warning(get_id("importer_qc_macroskip", True).format(filename)) - while line[-1] == "\\\\": - line = self.parseQuoteBlockedLine( file.readline()) - - # register new QC variable - if line[0] == "$definevariable": - qc.vars[line[1]] = line[2].lower() - continue - - # dir changes - if line[0] == "$pushd": - if line[1][-1] != "\\": - line[1] += "\\" - qc.dir_stack.append(line[1]) - continue - if line[0] == "$popd": - try: - qc.dir_stack.pop() - except IndexError: - pass # invalid QC, but whatever - continue - - # up axis - if line[0] == "$upaxis": - qc.upAxis = bpy.context.scene.vs.up_axis = line[1].upper() - qc.upAxisMat = getUpAxisMat(line[1]) - continue - - # bones in pure animation 
QCs - if line[0] == "$definebone": - pass # TODO - - def import_file(word_index,default_ext,smd_type,append='APPEND',layer=0,in_file_recursion = False): - path = os.path.join( qc.cd(), appendExt(normalisePath(line[word_index]),default_ext) ) - - if not in_file_recursion and not os.path.exists(path): - return import_file(word_index,"dmx",smd_type,append,layer,True) - - if not path in qc.imported_smds: # FIXME: an SMD loaded once relatively and once absolutely will still pass this test - qc.imported_smds.append(path) - self.append = append if qc.a else 'NEW_ARMATURE' - - # import the file - self.num_files_imported += (self.readDMX if path.endswith("dmx") else self.readSMD)(path,qc.upAxis,rotMode,False,smd_type,target_layer=layer) - return True - - # meshes - if line[0] in ["$body","$model"]: - import_file(2,"smd",REF) - continue - if line[0] == "$lod": - in_lod = True - lod += 1 - continue - if in_lod: - if line[0] == "replacemodel": - import_file(2,"smd",REF,'VALIDATE',layer=lod) - continue - if "}" in line: - in_lod = False - continue - if line[0] == "$bodygroup": - in_bodygroup = True - continue - if in_bodygroup: - if line[0] == "studio": - import_file(1,"smd",REF) - continue - if "}" in line: - in_bodygroup = False - continue - - # skeletal animations - if in_sequence or (doAnim and line[0] in ["$sequence","$animation"]): - # there is no easy way to determine whether a SMD is being defined here or elsewhere, or even precisely where it is being defined - num_words_to_skip = 2 if not in_sequence else 0 - for i in range(len(line)): - if num_words_to_skip: - num_words_to_skip -= 1 - continue - if line[i] == "{": - in_sequence = True - continue - if line[i] == "}": - in_sequence = False - continue - if line[i] in ["hidden","autolay","realtime","snap","spline","xfade","delta","predelta"]: - continue - if line[i] in ["fadein","fadeout","addlayer","blendwidth","node"]: - num_words_to_skip = 1 - continue - if line[i] in ["activity","transision","rtransition"]: - 
num_words_to_skip = 2 - continue - if line[i] in ["blend"]: - num_words_to_skip = 3 - continue - if line[i] in ["blendlayer"]: - num_words_to_skip = 5 - continue - # there are many more keywords, but they can only appear *after* an SMD is referenced - - if not qc.a: qc.a = self.findArmature() - if not qc.a: - self.warning(get_id("qc_warn_noarmature", True).format(line_str.strip())) - continue - - if line[i].lower() not in qc.animation_names: - if not qc.a.animation_data: qc.a.animation_data_create() - last_action = qc.a.animation_data.action - import_file(i,"smd",ANIM,'VALIDATE') - if line[0] == "$animation": - qc.animation_names.append(line[1].lower()) - while i < len(line) - 1: - if line[i] == "fps" and qc.a.animation_data.action != last_action: - if 'fps' in dir(qc.a.animation_data.action): - qc.a.animation_data.action.fps = float(line[i+1]) - i += 1 - break - continue - - # flex animation - if line[0] == "flexfile": - import_file(1,"vta",FLEX,'VALIDATE') - continue - - # naming shapes - if qc.ref_mesh and line[0] in ["flex","flexpair"]: # "flex" is safe because it cannot come before "flexfile" - for i in range(1,len(line)): - if line[i] == "frame": - shape = qc.ref_mesh.data.shape_keys.key_blocks.get(line[i+1]) - if shape and shape.name.startswith("Key"): shape.name = line[1] - break - continue - - # physics mesh - if line[0] in ["$collisionmodel","$collisionjoints"]: - import_file(1,"smd",PHYS,'VALIDATE',layer=10) # FIXME: what if there are >10 LODs? 
- continue - - # origin; this is where viewmodel editors should put their camera, and is in general something to be aware of - if line[0] == "$origin": - if qc.makeCamera: - data = bpy.data.cameras.new(qc.jobName + "_origin") - name = "camera" - else: - data = None - name = "empty object" - print("QC IMPORTER: created {} at $origin\n".format(name)) - - origin = bpy.data.objects.new(qc.jobName + "_origin",data) - bpy.context.scene.collection.objects.link(origin) - - origin.rotation_euler = Vector([pi/2,0,pi]) + Vector(getUpAxisMat(qc.upAxis).inverted().to_euler()) # works, but adding seems very wrong! - ops.object.select_all(action="DESELECT") - origin.select_set(True) - ops.object.transform_apply(rotation=True) - - for i in range(3): - origin.location[i] = float(line[i+1]) - origin.matrix_world = getUpAxisMat(qc.upAxis) @ origin.matrix_world - - if qc.makeCamera: - bpy.context.scene.camera = origin - origin.data.lens_unit = 'DEGREES' - origin.data.lens = 31.401752 # value always in mm; this number == 54 degrees - # Blender's FOV isn't locked to X or Y height, so a shift is needed to get the weapon aligned properly. 
- # This is a nasty hack, and the values are only valid for the default 54 degrees angle - origin.data.shift_y = -0.27 - origin.data.shift_x = 0.36 - origin.data.passepartout_alpha = 1 - else: - origin.empty_display_type = 'PLAIN_AXES' - - qc.origin = origin - - # QC inclusion - if line[0] == "$include": - path = os.path.join(qc.root_filedir,normalisePath(line[1])) # special case: ignores dir stack - - if not path.endswith(".qc") and not path.endswith(".qci"): - if os.path.exists(appendExt(path,".qci")): - path = appendExt(path,".qci") - elif os.path.exists(appendExt(path,".qc")): - path = appendExt(path,".qc") - try: - self.readQC(path,False, doAnim, makeCamera, rotMode) - except IOError: - self.warning(get_id("importer_err_qci", True).format(path)) - - file.close() - - if qc.origin: - qc.origin.parent = qc.a - if qc.ref_mesh: - size = min(qc.ref_mesh.dimensions) / 15 - if qc.makeCamera: - qc.origin.data.display_size = size - else: - qc.origin.empty_display_size = size - - if outer_qc: - printTimeMessage(qc.startTime,filename,"import","QC") - return self.num_files_imported - - def initSMD(self, filepath,smd_type,upAxis,rotMode,target_layer): - smd = self.smd = SmdInfo() - smd.jobName = os.path.splitext(os.path.basename(filepath))[0] - smd.jobType = smd_type - smd.startTime = time.time() - smd.layer = target_layer - smd.rotMode = rotMode - self.createCollection() - if self.qc: - smd.upAxis = self.qc.upAxis - smd.a = self.qc.a - if upAxis: - smd.upAxis = upAxis - - return smd - - def createCollection(self): - if self.smd.jobType and self.smd.jobType != ANIM: - if self.createCollections: - self.smd.g = bpy.data.collections.new(self.smd.jobName) - bpy.context.scene.collection.children.link(self.smd.g) - else: - self.smd.g = bpy.context.scene.collection - - # Parses an SMD file - def readSMD(self, filepath, upAxis, rotMode, newscene = False, smd_type = None, target_layer = 0): - if filepath.endswith("dmx"): - return self.readDMX( filepath, upAxis, newscene, smd_type) - 
- smd = self.initSMD(filepath,smd_type,upAxis,rotMode,target_layer) - self.appliedReferencePose = False - - try: - smd.file = file = open(filepath, 'r') - except IOError as err: # TODO: work out why errors are swallowed if I don't do this! - self.error(get_id("importer_err_smd", True).format(smd.jobName,err)) - return 0 - - if newscene: - bpy.context.screen.scene = bpy.data.scenes.new(smd.jobName) # BLENDER BUG: this currently doesn't update bpy.context.scene - elif bpy.context.scene.name == pgettext("Scene"): - bpy.context.scene.name = smd.jobName - - print("\nSMD IMPORTER: now working on",smd.jobName) - - while True: - header = self.parseQuoteBlockedLine(file.readline()) - if header: break - - if header != ["version" ,"1"]: - self.warning (get_id("importer_err_smd_ver")) - - if smd.jobType == None: - self.scanSMD() # What are we dealing with? - self.createCollection() - - for line in file: - if line == "nodes\n": self.readNodes() - if line == "skeleton\n": self.readFrames() - if line == "triangles\n": self.readPolys() - if line == "vertexanimation\n": self.readShapes() - - file.close() - printTimeMessage(smd.startTime,smd.jobName,"import") - - return 1 - - def readDMX(self, filepath, upAxis, rotMode,newscene = False, smd_type = None, target_layer = 0): - smd = self.initSMD(filepath,smd_type,upAxis,rotMode,target_layer) - smd.isDMX = 1 - - bench = BenchMarker(1,"DMX") - - target_arm = self.findArmature() if self.append != 'NEW_ARMATURE' else None - if target_arm: - smd.a = target_arm - - ob = bone = restData = smd.atch = None - smd.layer = target_layer - if bpy.context.active_object: ops.object.mode_set(mode='OBJECT') - self.appliedReferencePose = False - - print( "\nDMX IMPORTER: now working on",os.path.basename(filepath) ) - - try: - print("- Loading DMX...") - try: - dm = datamodel.load(filepath) - except IOError as e: - self.error(e) - return 0 - bench.report("Load DMX") - - if bpy.context.scene.name.startswith("Scene"): - bpy.context.scene.name = smd.jobName 
- - keywords = getDmxKeywords(dm.format_ver) - - correctiveSeparator = '_' - if dm.format_ver >= 22 and any([elem for elem in dm.elements if elem.type == "DmeVertexDeltaData" and '__' in elem.name]): - correctiveSeparator = '__' - self._ensureSceneDmxVersion(dmx_version(9, 22, compiler=Compiler.MODELDOC)) - - if not smd_type: - smd.jobType = REF if dm.root.get("model") else ANIM - self.createCollection() - self.ensureAnimationBonesValidated() - - DmeModel = dm.root["skeleton"] - transforms = DmeModel["baseStates"][0]["transforms"] if DmeModel.get("baseStates") and len(DmeModel["baseStates"]) > 0 else None - - DmeAxisSystem = DmeModel.get("axisSystem") - if DmeAxisSystem: - for axis in axes_lookup.items(): - if axis[1] == DmeAxisSystem["upAxis"] - 1: - upAxis = smd.upAxis = axis[0] - break - - def getBlenderQuat(datamodel_quat): - return Quaternion([datamodel_quat[3], datamodel_quat[0], datamodel_quat[1], datamodel_quat[2]]) - def get_transform_matrix(elem): - out = Matrix() - if not elem: return out - trfm = elem.get("transform") - if transforms: - for e in transforms: - if e.name == elem.name: - trfm = e - if not trfm: return out - out @= Matrix.Translation(Vector(trfm["position"])) - out @= getBlenderQuat(trfm["orientation"]).to_matrix().to_4x4() - return out - def isBone(elem): - return elem.type in ["DmeDag","DmeJoint"] - def getBoneForElement(elem): - return smd.a.data.edit_bones[smd.boneIDs[elem.id]] - def enumerateBonesAndAttachments(elem : datamodel.Element): - parent = elem if isBone(elem) else None - for child in elem.get("children", []): - if child.type == "DmeDag" and child.get("shape") and child["shape"].type == "DmeAttachment": - if smd.jobType != REF: - continue - yield (child["shape"], parent) - elif isBone(child) and child.name != implicit_bone_name: - # don't import Dags which simply wrap meshes. In some DMX animations, each bone has an empty mesh attached. 
- boneShape = child.get("shape") - if not boneShape or boneShape["currentState"] == None: - yield (child, parent) - yield from enumerateBonesAndAttachments(child) - elif child.type == "DmeModel": - yield from enumerateBonesAndAttachments(child) - - # Skeleton - bone_matrices = {} - restData = {} - if target_arm: - missing_bones = [] - bpy.context.view_layer.objects.active = smd.a - smd.a.hide_set(False) - ops.object.mode_set(mode='EDIT') - - for (elem,parent) in enumerateBonesAndAttachments(DmeModel): - if elem.type == "DmeAttachment": - continue - - bone = smd.a.data.edit_bones.get(self.truncate_id_name(elem.name, bpy.types.Bone)) - if not bone: - if self.append == 'APPEND' and smd.jobType in [REF,ANIM]: - bone = smd.a.data.edit_bones.new(self.truncate_id_name(elem.name, bpy.types.Bone)) - bone.parent = getBoneForElement(parent) if parent else None - bone.tail = (0,5,0) - bone_matrices[bone.name] = get_transform_matrix(elem) - smd.boneIDs[elem.id] = bone.name - smd.boneTransformIDs[elem["transform"].id] = bone.name - else: - missing_bones.append(elem.name) - else: - scene_parent = bone.parent.name if bone.parent else "" - dmx_parent = parent.name if parent else "" - if scene_parent != dmx_parent: - self.warning(get_id('importer_bone_parent_miss',True).format(elem.name,scene_parent,dmx_parent,smd.jobName)) - - smd.boneIDs[elem.id] = bone.name - smd.boneTransformIDs[elem["transform"].id] = bone.name - - if missing_bones and smd.jobType != ANIM: # animations report missing bones seperately - self.warning(get_id("importer_err_missingbones", True).format(smd.jobName,len(missing_bones),smd.a.name)) - print("\n".join(missing_bones)) - elif any(enumerateBonesAndAttachments(DmeModel)): - self.append = 'NEW_ARMATURE' - ob = smd.a = self.createArmature(self.truncate_id_name(DmeModel.name or smd.jobName, bpy.types.Armature)) - if self.qc: self.qc.a = ob - bpy.context.view_layer.objects.active = smd.a - ops.object.mode_set(mode='EDIT') - - smd.a.matrix_world = 
getUpAxisMat(smd.upAxis) - - for (elem,parent) in enumerateBonesAndAttachments(DmeModel): - parent = getBoneForElement(parent) if parent else None - if elem.type == "DmeAttachment": - atch = smd.atch = bpy.data.objects.new(name=self.truncate_id_name(elem.name, "Attachment"), object_data=None) - smd.g.objects.link(atch) - atch.show_in_front = True - atch.empty_display_type = 'ARROWS' - - atch.parent = smd.a - if parent: - atch.parent_type = 'BONE' - atch.parent_bone = parent.name - - atch.matrix_local = get_transform_matrix(elem) - else: - bone = smd.a.data.edit_bones.new(self.truncate_id_name(elem.name,bpy.types.Bone)) - bone.parent = parent - bone.tail = (0,5,0) - bone_matrices[bone.name] = get_transform_matrix(elem) - smd.boneIDs[elem.id] = bone.name - smd.boneTransformIDs[elem["transform"].id] = bone.name - - if smd.a: - ops.object.mode_set(mode='POSE') - for bone in smd.a.pose.bones: - mat = bone_matrices.get(bone.name) - if mat: - keyframe = KeyFrame() - keyframe.matrix = mat - restData[bone] = [keyframe] - if restData: - self.applyFrames(restData,1,None) - - def parseModel(elem,matrix=Matrix(), last_bone = None): - if elem.type in ["DmeModel","DmeDag", "DmeJoint"]: - if elem.type == "DmeDag": - matrix = matrix @ get_transform_matrix(elem) - if elem.get("children") and elem["children"]: - if elem.type == "DmeJoint": - last_bone = elem - subelems = elem["children"] - elif elem.get("shape"): - subelems = [elem["shape"]] - else: - return - for subelem in subelems: - parseModel(subelem,matrix,last_bone) - elif elem.type == "DmeMesh": - DmeMesh = elem - if bpy.context.active_object: - ops.object.mode_set(mode='OBJECT') - mesh_name = self.truncate_id_name(DmeMesh.name,bpy.types.Mesh) - ob = smd.m = bpy.data.objects.new(name=mesh_name, object_data=bpy.data.meshes.new(name=mesh_name)) - smd.g.objects.link(ob) - ob.show_wire = smd.jobType == PHYS - - DmeVertexData = DmeMesh["currentState"] - have_weightmap = keywords["weight"] in DmeVertexData["vertexFormat"] - - if 
smd.a: - ob.parent = smd.a - if have_weightmap: - amod = ob.modifiers.new(name="Armature",type='ARMATURE') - amod.object = smd.a - amod.use_bone_envelopes = False - else: - ob.matrix_local = getUpAxisMat(smd.upAxis) - - print("Importing DMX mesh \"{}\"".format(DmeMesh.name)) - - bm = bmesh.new() - bm.from_mesh(ob.data) - - positions = DmeVertexData[keywords['pos']] - positionsIndices = DmeVertexData[keywords['pos'] + "Indices"] - - # Vertices - for pos in positions: - bm.verts.new( Vector(pos) ) - bm.verts.ensure_lookup_table() - - # Faces, Materials, Colours - skipfaces = set() - vertex_layer_infos = [] - - class VertexLayerInfo(): - def __init__(self, layer, indices, values): - self.layer = layer - self.indices = indices - self.values = values - - def get_loop_value(self, loop_index): - return self.values[self.indices[loop_index]] - - # Normals - normalsLayer = bm.loops.layers.float_vector.new("__bst_normal") - normalsLayerName = normalsLayer.name - vertex_layer_infos.append(VertexLayerInfo(normalsLayer, DmeVertexData[keywords['norm'] + "Indices"], DmeVertexData[keywords['norm']])) - - # Arbitrary vertex data - def warnUneditableVertexData(name): self.warning("Vertex data '{}' was imported, but cannot be edited in Blender (as of 2.82)".format(name)) - def isClothEnableMap(name): return name.startswith("cloth_enable$") - - for vertexMap in [prop for prop in DmeVertexData["vertexFormat"] if prop not in keywords.values()]: - indices = DmeVertexData.get(vertexMap + "Indices") - if not indices: - continue - values = DmeVertexData.get(vertexMap) - if not isinstance(values, list) or len(values) == 0: - continue - - if isinstance(values[0], float): - if isClothEnableMap(vertexMap): - continue # will be imported later as a weightmap - layers = bm.loops.layers.float - warnUneditableVertexData(vertexMap) - elif isinstance(values[0], int): - layers = bm.loops.layers.int - warnUneditableVertexData(vertexMap) - elif isinstance(values[0], str): - layers = bm.loops.layers.string 
- warnUneditableVertexData(vertexMap) - elif isinstance(values[0], datamodel.Vector2): - layers = bm.loops.layers.uv - elif isinstance(values[0], datamodel.Vector4) or isinstance(values[0], datamodel.Color): - layers = bm.loops.layers.color - else: - self.warning("Could not import vertex data '{}'; Blender does not support {} data layers.".format(vertexMap, type(values[0]).__name__)) - continue - - vertex_layer_infos.append(VertexLayerInfo(layers.new(vertexMap), DmeVertexData[vertexMap + "Indices"], values)) - - if vertexMap != "textureCoordinates": - self._ensureSceneDmxVersion(dmx_version(9, 22)) - - deform_group_names = ordered_set.OrderedSet() - - # Weightmap - if have_weightmap: - weighted_bone_indices = ordered_set.OrderedSet() - jointWeights = DmeVertexData[keywords["weight"]] - jointIndices = DmeVertexData[keywords["weight_indices"]] - jointRange = range(DmeVertexData["jointCount"]) - deformLayer = bm.verts.layers.deform.new() - - joint_index = 0 - for vert in bm.verts: - for i in jointRange: - weight = jointWeights[joint_index] - if weight > 0: - vg_index = weighted_bone_indices.add(jointIndices[joint_index]) - vert[deformLayer][vg_index] = weight - joint_index += 1 - - joints = DmeModel["jointList"] if dm.format_ver >= 11 else DmeModel["jointTransforms"]; - for boneName in (joints[i].name for i in weighted_bone_indices): - deform_group_names.add(boneName) - - for face_set in DmeMesh["faceSets"]: - mat_path = face_set["material"]["mtlName"] - bpy.context.scene.vs.material_path = os.path.dirname(mat_path).replace("\\","/") - mat, mat_ind = self.getMeshMaterial(os.path.basename(mat_path)) - face_loops = [] - dmx_face = 0 - for vert in face_set["faces"]: - if vert != -1: - face_loops.append(vert) - continue - - # -1 marks the end of a face definition, time to create it! 
- try: - face = bm.faces.new([bm.verts[positionsIndices[loop]] for loop in face_loops]) - face.smooth = True - face.material_index = mat_ind - - # Apply normals and Source 2 vertex data - for layer_info in vertex_layer_infos: - is_uv_layer = layer_info.layer.name in bm.loops.layers.uv - for i, loop in enumerate(face.loops): - value = layer_info.get_loop_value(face_loops[i]) - if is_uv_layer: - loop[layer_info.layer].uv = value - else: - loop[layer_info.layer] = value - - except ValueError: # Can't have an overlapping face...this will be painful later - skipfaces.add(dmx_face) - dmx_face += 1 - face_loops.clear() - - - for cloth_enable in (name for name in DmeVertexData["vertexFormat"] if isClothEnableMap(name)): - deformLayer = bm.verts.layers.deform.verify() - vg_index = deform_group_names.add(cloth_enable) - data = DmeVertexData[cloth_enable] - indices = DmeVertexData[cloth_enable + "Indices"] - i = 0 - for face in bm.faces: - for loop in face.loops: - weight = data[indices[i]] - loop.vert[deformLayer][vg_index] = weight - i += 1 - - for groupName in deform_group_names: - ob.vertex_groups.new(name=groupName) # must create vertex groups before loading bmesh data - - if last_bone and not have_weightmap: # bone parent - ob.parent_type = 'BONE' - ob.parent_bone = last_bone.name - - # Move from BMesh to Blender - bm.to_mesh(ob.data) - del bm - ob.data.update() - ob.matrix_world @= matrix - if ob.parent_bone: - ob.matrix_world = ob.parent.matrix_world @ ob.parent.data.bones[ob.parent_bone].matrix_local @ ob.matrix_world - elif ob.parent: - ob.matrix_world = ob.parent.matrix_world @ ob.matrix_world - if smd.jobType == PHYS: - ob.display_type = 'SOLID' - - # Normals - normalsLayer = ob.data.attributes[normalsLayerName] - ob.data.normals_split_custom_set([value.vector for value in normalsLayer.data]) - del normalsLayer - ob.data.attributes.remove(ob.data.attributes[normalsLayerName]) - - # Stereo balance - if keywords['balance'] in DmeVertexData["vertexFormat"]: - vg = 
ob.vertex_groups.new(name=get_id("importer_balance_group", data=True)) - balanceIndices = DmeVertexData[keywords['balance'] + "Indices"] - balance = DmeVertexData[keywords['balance']] - ones = [] - for i in balanceIndices: - val = balance[i] - if val == 0: - continue - elif val == 1: - ones.append(i) - else: - vg.add([i],val,'REPLACE') - vg.add(ones,1,'REPLACE') - - ob.data.vs.flex_stereo_mode = 'VGROUP' - ob.data.vs.flex_stereo_vg = vg.name - - # Shapes - if DmeMesh.get("deltaStates"): - for DmeVertexDeltaData in DmeMesh["deltaStates"]: - if not ob.data.shape_keys: - ob.shape_key_add(name="Basis") - ob.show_only_shape_key = True - ob.data.shape_keys.name = DmeMesh.name - shape_key = ob.shape_key_add(name=DmeVertexDeltaData.name) - - if keywords['pos'] in DmeVertexDeltaData["vertexFormat"]: - deltaPositions = DmeVertexDeltaData[keywords['pos']] - for i,posIndex in enumerate(DmeVertexDeltaData[keywords['pos'] + "Indices"]): - shape_key.data[posIndex].co += Vector(deltaPositions[i]) - - if correctiveSeparator in DmeVertexDeltaData.name: - flex.AddCorrectiveShapeDrivers.addDrivers(shape_key, DmeVertexDeltaData.name.split(correctiveSeparator)) - - if smd.jobType in [REF,PHYS]: - parseModel(DmeModel) - - if smd.jobType == ANIM: - print("Importing DMX animation \"{}\"".format(smd.jobName)) - - animation = dm.root["animationList"]["animations"][0] - - frameRate = animation.get("frameRate",30) # very, very old DMXs don't have this - timeFrame = animation["timeFrame"] - scale = timeFrame.get("scale",1.0) - duration = timeFrame.get("duration") or timeFrame.get("durationTime") - offset = timeFrame.get("offset") or timeFrame.get("offsetTime",0.0) - start = timeFrame.get("start", 0) - - if type(duration) == int: duration = datamodel.Time.from_int(duration) - if type(offset) == int: offset = datamodel.Time.from_int(offset) - - lastFrameIndex = 0 - - keyframes = collections.defaultdict(list) - unknown_bones = [] - for channel in animation["channels"]: - toElement = 
channel["toElement"] - if not toElement: continue # SFM - - bone_name = smd.boneTransformIDs.get(toElement.id) - bone = smd.a.pose.bones.get(bone_name) if bone_name else None - if not bone: - if self.append != 'NEW_ARMATURE' and toElement.name not in unknown_bones: - unknown_bones.append(toElement.name) - print("- Animation refers to unrecognised bone \"{}\"".format(toElement.name)) - continue - - is_position_channel = channel["toAttribute"] == "position" - is_rotation_channel = channel["toAttribute"] == "orientation" - if not (is_position_channel or is_rotation_channel): - continue - - frame_log = channel["log"]["layers"][0] - times = frame_log["times"] - values = frame_log["values"] - - for i in range( len(times) ): - frame_time = times[i] + start - if type(frame_time) == int: frame_time = datamodel.Time.from_int(frame_time) - frame_value = values[i] - - keyframe = KeyFrame() - keyframes[bone].append(keyframe) - - keyframe.frame = frame_time * frameRate - lastFrameIndex = max(lastFrameIndex, keyframe.frame) - - if not (bone.parent or keyframe.pos or keyframe.rot): - keyframe.matrix = getUpAxisMat(smd.upAxis).inverted() - - if is_position_channel and not keyframe.pos: - keyframe.matrix @= Matrix.Translation(frame_value) - keyframe.pos = True - elif is_rotation_channel and not keyframe.rot: - keyframe.matrix @= getBlenderQuat(frame_value).to_matrix().to_4x4() - keyframe.rot = True - - if smd.a == None: - self.warning(get_id("importer_err_noanimationbones", True).format(smd.jobName)) - else: - smd.a.hide_set(False) - bpy.context.view_layer.objects.active = smd.a - if unknown_bones: - self.warning(get_id("importer_err_missingbones", True).format(smd.jobName,len(unknown_bones),smd.a.name)) - - total_frames = ceil((duration * frameRate) if duration else lastFrameIndex) + 1 # need a frame for 0 too! 
- - # apply the keframes - self.applyFrames(keyframes,total_frames,frameRate) - - bpy.context.scene.frame_end += int(round(start * 2 * frameRate,0)) - - except datamodel.AttributeError as e: - e.args = ["Invalid DMX file: {}".format(e.args[0] if e.args else "Unknown error")] - raise - - bench.report("DMX imported in") - return 1 - - @classmethod - def _ensureSceneDmxVersion(cls, version : dmx_version): - if State.datamodelFormat < version.format: - bpy.context.scene.vs.dmx_format = version.format_enum - if State.datamodelEncoding < version.encoding: - bpy.context.scene.vs.dmx_encoding = str(version.encoding) +# Copyright (c) 2014 Tom Edwards contact@steamreview.org +# +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### + +import bpy, bmesh, random, collections +from bpy import ops +from bpy.app.translations import pgettext +from bpy.props import StringProperty, CollectionProperty, BoolProperty, EnumProperty +from mathutils import Quaternion, Euler +from .utils import * +from . 
import datamodel, ordered_set, flex + +class SmdImporter(bpy.types.Operator, Logger): + bl_idname = "import_scene.smd" + bl_label = get_id("importer_title") + bl_description = get_id("importer_tip") + bl_options = {'UNDO', 'PRESET'} + + qc = None + smd = None + + # Properties used by the file browser + filepath : StringProperty(name="File Path", description="File filepath used for importing the SMD/VTA/DMX/QC file", maxlen=1024, default="", options={'HIDDEN'}) + files : CollectionProperty(type=bpy.types.OperatorFileListElement, options={'HIDDEN'}) + directory : StringProperty(maxlen=1024, default="", subtype='FILE_PATH', options={'HIDDEN'}) + filter_folder : BoolProperty(name="Filter Folders", description="", default=True, options={'HIDDEN'}) + filter_glob : StringProperty(default="*.smd;*.vta;*.dmx;*.qc;*.qci", options={'HIDDEN'}) + + # Custom properties + doAnim : BoolProperty(name=get_id("importer_doanims"), default=False) + createCollections : BoolProperty(name=get_id("importer_use_collections"), description=get_id("importer_use_collections_tip"), default=False) + makeCamera : BoolProperty(name=get_id("importer_makecamera"),description=get_id("importer_makecamera_tip"),default=False) + append : EnumProperty(name=get_id("importer_bones_mode"),description=get_id("importer_bones_mode_desc"),items=( + ('VALIDATE',get_id("importer_bones_validate"),get_id("importer_bones_validate_desc")), + ('APPEND',get_id("importer_bones_append"),get_id("importer_bones_append_desc")), + ('NEW_ARMATURE',get_id("importer_bones_newarm"),get_id("importer_bones_newarm_desc"))), + default='APPEND') + upAxis : EnumProperty(name="Up Axis",items=axes,default='Z',description=get_id("importer_up_tip")) + rotMode : EnumProperty(name=get_id("importer_rotmode"),items=( ('XYZ', "Euler", ''), ('QUATERNION', "Quaternion", "") ),default='XYZ',description=get_id("importer_rotmode_tip")) + boneMode : 
EnumProperty(name=get_id("importer_bonemode"),items=(('NONE','Default',''),('ARROWS','Arrows',''),('SPHERE','Sphere','')),default='SPHERE',description=get_id("importer_bonemode_tip")) + + def execute(self, context): + pre_obs = set(bpy.context.scene.objects) + pre_eem = context.preferences.edit.use_enter_edit_mode + pre_append = self.append + context.preferences.edit.use_enter_edit_mode = False + + self.existingBones = [] # bones which existed before importing began + self.num_files_imported = 0 + + for filepath in [os.path.join(self.directory,file.name) for file in self.files] if self.files else [self.filepath]: + filepath_lc = filepath.lower() + if filepath_lc.endswith('.qc') or filepath_lc.endswith('.qci'): + self.num_files_imported = self.readQC(filepath, False, self.properties.doAnim, self.properties.makeCamera, self.properties.rotMode, outer_qc=True) + bpy.context.view_layer.objects.active = self.qc.a + elif filepath_lc.endswith('.smd'): + self.num_files_imported = self.readSMD(filepath, self.properties.upAxis, self.properties.rotMode) + elif filepath_lc.endswith ('.vta'): + self.num_files_imported = self.readSMD(filepath, self.properties.upAxis, self.properties.rotMode, smd_type=FLEX) + elif filepath_lc.endswith('.dmx'): + self.num_files_imported = self.readDMX(filepath, self.properties.upAxis, self.properties.rotMode) + else: + if len(filepath_lc) == 0: + self.report({'ERROR'},get_id("importer_err_nofile")) + else: + self.report({'ERROR'},get_id("importer_err_badfile", True).format(os.path.basename(filepath))) + + self.append = pre_append + + self.errorReport(get_id("importer_complete", True).format(self.num_files_imported,self.elapsed_time())) + if self.num_files_imported: + ops.object.select_all(action='DESELECT') + new_obs = set(bpy.context.scene.objects).difference(pre_obs) + xy = xyz = 0 + for ob in new_obs: + ob.select_set(True) + # FIXME: assumes meshes are centered around their origins + xy = max(xy, int(max(ob.dimensions[0],ob.dimensions[1])) ) + 
xyz = max(xyz, max(xy,int(ob.dimensions[2]))) + bpy.context.view_layer.objects.active = self.qc.a if self.qc else self.smd.a + for area in context.screen.areas: + if area.type == 'VIEW_3D': + area.spaces.active.clip_end = max( area.spaces.active.clip_end, xyz * 2 ) + if bpy.context.area and bpy.context.area.type == 'VIEW_3D' and bpy.context.region: + ops.view3d.view_selected() + + context.preferences.edit.use_enter_edit_mode = pre_eem + self.append = pre_append + + State.update_scene(context.scene) + + return {'FINISHED'} + + def invoke(self, context, event): + self.properties.upAxis = context.scene.vs.up_axis + bpy.context.window_manager.fileselect_add(self) + return {'RUNNING_MODAL'} + + def ensureAnimationBonesValidated(self): + if self.smd.jobType == ANIM and self.append == 'APPEND' and (hasattr(self.smd,"a") or self.findArmature()): + print("- Appending bones from animations is destructive; switching Bone Append Mode to \"Validate\"") + self.append = 'VALIDATE' + + # Datablock names are limited to 63 bytes of UTF-8 + def truncate_id_name(self, name, id_type): + truncated = bytes(name,'utf8') + if len(truncated) < 64: + return name + + truncated = truncated[:63] + while truncated: + try: + truncated = truncated.decode('utf8') + break + except UnicodeDecodeError: + truncated = truncated[:-1] + self.error(get_id("importer_err_namelength",True).format(pgettext(id_type if isinstance(id_type,str) else id_type.__name__), name, truncated)) + return truncated + + # Identifies what type of SMD this is. Cannot tell between reference/lod/collision meshes! 
	# joins up "quoted values" that would otherwise be delimited, removes comments
	def parseQuoteBlockedLine(self,line,lower=True):
		"""Tokenise one line of a QC/SMD file.

		Splits *line* on whitespace while keeping "quoted values" together as a
		single word, strips // # ; line comments and (inside QC files) /* */
		block comments, and handles trailing '{' brackets and '\\\\' macro
		continuations. Returns the list of words; an empty input yields ["\n"].
		"""
		if len(line) == 0:
			return ["\n"]

		qc = self.qc
		words = []
		last_word_start = 0
		# NOTE(review): in_whitespace is assigned but never read below
		in_quote = in_whitespace = False

		# The last char of the last line in the file was missed
		if line[-1] != "\n":
			line += "\n"

		# Manual index walk: we need one char of lookahead (nchar) and one of
		# lookbehind (pchar) for comment and escape detection.
		for i in range(len(line)):
			char = line[i]
			nchar = pchar = None
			if i < len(line)-1:
				nchar = line[i+1]
			if i > 0:
				pchar = line[i-1]

			# line comment - precedence over block comment
			if (char == "/" and nchar == "/") or char in ['#',';']:
				if i > 0:
					i = i-1 # last word will be caught after the loop
				break # nothing more this line

			if qc:
				#block comment
				if qc.in_block_comment:
					if char == "/" and pchar == "*": # done backwards so we don't have to skip two chars
						qc.in_block_comment = False
					continue
				elif char == "/" and nchar == "*": # note: nchar, not pchar
					qc.in_block_comment = True
					continue

			# quote block
			if char == "\"" and not pchar == "\\": # quotes can be escaped
				in_quote = (in_quote == False)
			if not in_quote:
				if char in [" ","\t"]:
					cur_word = line[last_word_start:i].strip("\"") # characters between last whitespace and here
					if len(cur_word) > 0:
						# QC commands ($...) are case-insensitive; on Windows
						# paths are too, hence the os.name check
						if (lower and os.name == 'nt') or cur_word[0] == "$":
							cur_word = cur_word.lower()
						words.append(cur_word)
					last_word_start = i+1 # we are in whitespace, first new char is the next one

		# catch last word and any '{'s crashing into it
		# (i still holds the index where the loop stopped)
		needBracket = False
		cur_word = line[last_word_start:i]
		if cur_word.endswith("{"):
			needBracket = True

		cur_word = cur_word.strip("\"{")
		if len(cur_word) > 0:
			words.append(cur_word)

		if needBracket:
			words.append("{")

		if line.endswith("\\\\\n") and (len(words) == 0 or words[-1] != "\\\\"):
			words.append("\\\\") # macro continuation beats everything

		return words
smd.a.data.vs.implicit_zero_bone = False # Too easy to break compatibility, plus the skeleton is probably set up already + + ops.object.mode_set(mode='EDIT',toggle=False) + + # Read bone definitions from disc + for line in smd.file: + if smdBreak(line): break + if smdContinue(line): continue + + id,name,parent = self.parseQuoteBlockedLine(line,lower=False)[:3] + addBone(id,name,parent) + + # Apply parents now that all bones exist + for bone_name,parent_id in boneParents.items(): + if parent_id != -1: + smd.a.data.edit_bones[bone_name].parent = smd.a.data.edit_bones[ smd.boneIDs[parent_id] ] + + ops.object.mode_set(mode='OBJECT') + if boneParents: print("- Imported {} new bones".format(len(boneParents)) ) + + if len(smd.a.data.bones) > 128: + self.warning(get_id("importer_err_bonelimit_smd")) + + @classmethod + def findArmature(cls): + # Search the current scene for an existing armature - there can only be one skeleton in a Source model + if bpy.context.active_object and bpy.context.active_object.type == 'ARMATURE': + return bpy.context.active_object + + def isArmIn(list): + for ob in list: + if ob.type == 'ARMATURE': + return ob + + a = isArmIn(bpy.context.selected_objects) # armature in the selection? + if a: return a + + for ob in bpy.context.selected_objects: + if ob.type == 'MESH': + a = ob.find_armature() # armature modifying a selected object? + if a: return a + + return isArmIn(bpy.context.scene.objects) # armature in the scene at all? 
+ + def createArmature(self,armature_name): + smd = self.smd + if bpy.context.active_object: + ops.object.mode_set(mode='OBJECT',toggle=False) + a = bpy.data.objects.new(armature_name,bpy.data.armatures.new(armature_name)) + a.show_in_front = True + a.data.display_type = 'STICK' + bpy.context.scene.collection.objects.link(a) + for i in bpy.context.selected_objects: i.select_set(False) #deselect all objects + a.select_set(True) + bpy.context.view_layer.objects.active = a + + if not smd.isDMX: + ops.object.mode_set(mode='OBJECT') + + return a + + def readFrames(self): + smd = self.smd + # We only care about pose data in some SMD types + if smd.jobType not in [REF, ANIM]: + if smd.jobType == FLEX: smd.shapeNames = {} + for line in smd.file: + line = line.strip() + if smdBreak(line): return + if smd.jobType == FLEX and line.startswith("time"): + for c in line: + if c in ['#',';','/']: + pos = line.index(c) + frame = line[:pos].split()[1] + if c == '/': pos += 1 + smd.shapeNames[frame] = line[pos+1:].strip() + + a = smd.a + bpy.context.view_layer.objects.active = smd.a + ops.object.mode_set(mode='POSE') + + num_frames = 0 + keyframes = collections.defaultdict(list) + phantom_keyframes = collections.defaultdict(list) # bones that aren't in the reference skeleton + + for line in smd.file: + if smdBreak(line): + break + if smdContinue(line): + continue + + values = line.split() + + if values[0] == "time": # frame number is a dummy value, all frames are equally spaced + if num_frames > 0: + if smd.jobType == REF: + self.warning(get_id("importer_err_refanim",True).format(smd.jobName)) + for line in smd.file: # skip to end of block + if smdBreak(line): + break + if smdContinue(line): + continue + num_frames += 1 + continue + + # Read SMD data + pos = Vector([float(values[1]), float(values[2]), float(values[3])]) + rot = Euler([float(values[4]), float(values[5]), float(values[6])]) + + keyframe = KeyFrame() + keyframe.frame = num_frames - 1 + keyframe.matrix = 
Matrix.Translation(pos) @ rot.to_matrix().to_4x4() + keyframe.pos = keyframe.rot = True + + # store the keyframe + values[0] = int(values[0]) + try: + bone = smd.a.pose.bones[ smd.boneIDs[values[0]] ] + if smd.jobType == REF and not bone.parent: + keyframe.matrix = getUpAxisMat(smd.upAxis) @ keyframe.matrix + keyframes[bone].append(keyframe) + except KeyError: + if smd.jobType == REF and not smd.phantomParentIDs.get(values[0]): + keyframe.matrix = getUpAxisMat(smd.upAxis) @ keyframe.matrix + phantom_keyframes[values[0]].append(keyframe) + + # All frames read, apply phantom bones + for ID, parentID in smd.phantomParentIDs.items(): + bone = smd.a.pose.bones.get( smd.boneIDs.get(ID) ) + if not bone: continue + for phantom_keyframe in phantom_keyframes[bone]: + phantom_parent = parentID + if len(keyframes[bone]) >= phantom_keyframe.frame: # is there a keyframe to modify? + while phantom_keyframes.get(phantom_parent): # parents are recursive + phantom_source_frame = phantom_keyframe.frame + while not phantom_keyframes[phantom_parent].get(phantom_keyframe.frame): # rewind to the last value + if phantom_source_frame == 0: continue # should never happen + phantom_source_frame -= 1 + # Apply the phantom bone, then recurse + keyframes[bone][phantom_keyframe.frame].matrix = phantom_keyframes[phantom_parent][phantom_source_frame] @ keyframes[bone][phantom_keyframe.frame].matrix + phantom_parent = smd.phantomParentIDs.get(phantom_parent) + + self.applyFrames(keyframes,num_frames) + + def applyFrames(self,keyframes,num_frames, fps = None): + smd = self.smd + ops.object.mode_set(mode='POSE') + + if self.append != 'VALIDATE' and smd.jobType in [REF,ANIM] and not self.appliedReferencePose: + self.appliedReferencePose = True + + for bone in smd.a.pose.bones: + bone.matrix_basis.identity() + for bone,kf in keyframes.items(): + if bone.name in self.existingBones: + continue + elif bone.parent and not keyframes.get(bone.parent): + bone.matrix = bone.parent.matrix @ kf[0].matrix + else: 
+ bone.matrix = kf[0].matrix + ops.pose.armature_apply() + + bone_vis = None if self.properties.boneMode == 'NONE' else bpy.data.objects.get("smd_bone_vis") + + if self.properties.boneMode == 'SPHERE' and (not bone_vis or bone_vis.type != 'MESH'): + ops.mesh.primitive_ico_sphere_add(subdivisions=3,radius=2) + bone_vis = bpy.context.active_object + bone_vis.data.name = bone_vis.name = "smd_bone_vis" + bone_vis.use_fake_user = True + for collection in bone_vis.users_collection: + collection.objects.unlink(bone_vis) # don't want the user deleting this + bpy.context.view_layer.objects.active = smd.a + elif self.properties.boneMode == 'ARROWS' and (not bone_vis or bone_vis.type != 'EMPTY'): + bone_vis = bpy.data.objects.new("smd_bone_vis",None) + bone_vis.use_fake_user = True + bone_vis.empty_display_type = 'ARROWS' + bone_vis.empty_display_size = 5 + + # Calculate armature dimensions...Blender should be doing this! + maxs = [0,0,0] + mins = [0,0,0] + for bone in smd.a.data.bones: + for i in range(3): + maxs[i] = max(maxs[i],bone.head_local[i]) + mins[i] = min(mins[i],bone.head_local[i]) + + dimensions = [] + if self.qc: self.qc.dimensions = dimensions + for i in range(3): + dimensions.append(maxs[i] - mins[i]) + + length = max(0.001, (dimensions[0] + dimensions[1] + dimensions[2]) / 600) # very small indeed, but a custom bone is used for display + + # Apply spheres + ops.object.mode_set(mode='EDIT') + for bone in [smd.a.data.edit_bones[b.name] for b in keyframes.keys()]: + bone.tail = bone.head + (bone.tail - bone.head).normalized() * length # Resize loose bone tails based on armature size + smd.a.pose.bones[bone.name].custom_shape = bone_vis # apply bone shape + + + if smd.jobType == ANIM: + if not smd.a.animation_data: + smd.a.animation_data_create() + + action = bpy.data.actions.new(smd.jobName) + + if 'ActLib' in dir(bpy.types): + smd.a.animation_data.action_library.add() + else: + action.use_fake_user = True + + smd.a.animation_data.action = action + + if 'fps' in 
dir(action): + action.fps = fps if fps else 30 + bpy.context.scene.render.fps = 60 + bpy.context.scene.render.fps_base = 1 + + ops.object.mode_set(mode='POSE') + + # Create an animation + if 'ActLib' in dir(bpy.types): + bpy.context.scene.use_preview_range = bpy.context.scene.use_preview_range_action_lock = True + else: + bpy.context.scene.frame_start = 0 + bpy.context.scene.frame_end = num_frames - 1 + + for bone in smd.a.pose.bones: + bone.rotation_mode = smd.rotMode + + for bone,frames in list(keyframes.items()): + if not frames: + del keyframes[bone] + + if smd.isDMX == False: + # Remove every point but the first unless there is motion + still_bones = list(keyframes.keys()) + for bone in keyframes.keys(): + bone_keyframes = keyframes[bone] + for keyframe in bone_keyframes[1:]: + diff = keyframe.matrix.inverted() @ bone_keyframes[0].matrix + if diff.to_translation().length > 0.00001 or abs(diff.to_quaternion().w) > 0.0001: + still_bones.remove(bone) + break + for bone in still_bones: + keyframes[bone] = [keyframes[bone][0]] + + # Create Blender keyframes + def ApplyRecursive(bone): + keys = keyframes.get(bone) + if keys: + # Generate curves + curvesLoc = None + curvesRot = None + bone_string = "pose.bones[\"{}\"].".format(bone.name) + group = action.groups.new(name=bone.name) + for keyframe in keys: + if curvesLoc and curvesRot: break + if keyframe.pos and not curvesLoc: + curvesLoc = [] + for i in range(3): + curve = action.fcurves.new(data_path=bone_string + "location",index=i) + curve.group = group + curvesLoc.append(curve) + if keyframe.rot and not curvesRot: + curvesRot = [] + for i in range(3 if smd.rotMode == 'XYZ' else 4): + curve = action.fcurves.new(data_path=bone_string + "rotation_" + ("euler" if smd.rotMode == 'XYZ' else "quaternion"),index=i) + curve.group = group + curvesRot.append(curve) + + # Apply each imported keyframe + for keyframe in keys: + # Transform + if smd.a.data.vs.legacy_rotation: + keyframe.matrix @= mat_BlenderToSMD.inverted() + + 
if bone.parent: + if smd.a.data.vs.legacy_rotation: parentMat = bone.parent.matrix @ mat_BlenderToSMD + else: parentMat = bone.parent.matrix + bone.matrix = parentMat @ keyframe.matrix + else: + bone.matrix = getUpAxisMat(smd.upAxis) @ keyframe.matrix + + # Key location + if keyframe.pos: + for i in range(3): + curvesLoc[i].keyframe_points.add(1) + curvesLoc[i].keyframe_points[-1].co = [keyframe.frame, bone.location[i]] + + # Key rotation + if keyframe.rot: + if smd.rotMode == 'XYZ': + for i in range(3): + curvesRot[i].keyframe_points.add(1) + curvesRot[i].keyframe_points[-1].co = [keyframe.frame, bone.rotation_euler[i]] + else: + for i in range(4): + curvesRot[i].keyframe_points.add(1) + curvesRot[i].keyframe_points[-1].co = [keyframe.frame, bone.rotation_quaternion[i]] + + # Recurse + for child in bone.children: + ApplyRecursive(child) + + # Start keying + for bone in smd.a.pose.bones: + if not bone.parent: + ApplyRecursive(bone) + + for fc in action.fcurves: + fc.update() + + # clear any unkeyed poses + for bone in smd.a.pose.bones: + bone.location.zero() + if smd.rotMode == 'XYZ': bone.rotation_euler.zero() + else: bone.rotation_quaternion.identity() + scn = bpy.context.scene + + if scn.frame_current == 1: # Blender starts on 1, Source starts on 0 + scn.frame_set(0) + else: + scn.frame_set(scn.frame_current) + ops.object.mode_set(mode='OBJECT') + + print( "- Imported {} frames of animation".format(num_frames) ) + + def getMeshMaterial(self,mat_name): + smd = self.smd + + + if mat_name: + mat_name = self.truncate_id_name(mat_name, bpy.types.Material) + else: + mat_name = "Material" + + md = smd.m.data + mat = None + for candidate in bpy.data.materials: # Do we have this material already? 
+ if candidate.name == mat_name: + mat = candidate + if mat: + if md.materials.get(mat.name): # Look for it on this mesh + for i in range(len(md.materials)): + if md.materials[i].name == mat.name: + mat_ind = i + break + else: # material exists, but not on this mesh + md.materials.append(mat) + mat_ind = len(md.materials) - 1 + else: # material does not exist + print("- New material: {}".format(self)) + + mat = bpy.data.materials.new(mat_name) + mat.use_nodes = True + base_node = mat.node_tree.nodes["Principled BSDF"] + if base_node: + base_node.inputs["Roughness"].default_value = 1.0 + texture_node = mat.node_tree.nodes.new("ShaderNodeTexImage") + texture_path = os.path.join(os.path.dirname(self.filepath), mat_name) + try: + texture_node.image = bpy.data.images.load(texture_path) + except RuntimeError: + print(f"Текстура не найдена: {texture_path}") + + mat.node_tree.links.new(texture_node.outputs["Color"], base_node.inputs["Base Color"]) + #America + bpy.context.scene.vs.export_path = os.path.dirname(self.filepath) + + # TEST END + md.materials.append(mat) + + # Give it a random colour + randCol = [] + for i in range(3): + randCol.append(random.uniform(.4,1)) + randCol.append(1) + mat.diffuse_color = randCol + if smd.jobType == PHYS: + smd.m.display_type = 'SOLID' + mat_ind = len(md.materials) - 1 + + + return mat, mat_ind + + # triangles block + def readPolys(self): + smd = self.smd + if smd.jobType not in [ REF, PHYS ]: + return + + mesh_name = smd.jobName + if smd.jobType == REF and not smd.jobName.lower().find("reference") and not smd.jobName.lower().endswith("ref"): + mesh_name += " ref" + mesh_name = self.truncate_id_name(mesh_name, bpy.types.Mesh) + + # Create a new mesh object, disable double-sided rendering, link it to the current scene + smd.m = bpy.data.objects.new(mesh_name,bpy.data.meshes.new(mesh_name)) + smd.m.parent = smd.a + smd.g.objects.link(smd.m) + if smd.jobType == REF: # can only have flex on a ref mesh + if self.qc: + self.qc.ref_mesh = 
smd.m # for VTA import + + # Create weightmap groups + for bone in smd.a.data.bones.values(): + smd.m.vertex_groups.new(name=bone.name) + + # Apply armature modifier + modifier = smd.m.modifiers.new(type="ARMATURE",name=pgettext("Armature")) + modifier.object = smd.a + + # Initialisation + md = smd.m.data + # Vertex values + norms = [] + + bm = bmesh.new() + bm.from_mesh(md) + weightLayer = bm.verts.layers.deform.new() + uvLayer = bm.loops.layers.uv.new() + + # ************************************************************************************************* + # There are two loops in this function: one for polygons which continues until the "end" keyword + # and one for the vertices on each polygon that loops three times. We're entering the poly one now. + countPolys = 0 + badWeights = 0 + vertMap = {} + + for line in smd.file: + line = line.rstrip("\n") + + if line and smdBreak(line): # normally a blank line means a break, but Milkshape can export SMDs with zero-length material names... + break + if smdContinue(line): + continue + + mat, mat_ind = self.getMeshMaterial(line if line else pgettext(get_id("importer_name_nomat", data=True))) + + # *************************************************************** + # Enter the vertex loop. This will run three times for each poly. + vertexCount = 0 + faceUVs = [] + vertKeys = [] + for line in smd.file: + if smdBreak(line): + break + if smdContinue(line): + continue + values = line.split() + + vertexCount+= 1 + co = [0,0,0] + norm = [0,0,0] + + # Read co-ordinates and normals + for i in range(1,4): # 0 is the deprecated bone weight value + co[i-1] = float(values[i]) + norm[i-1] = float(values[i+3]) + + co = tuple(co) + norms.append(norm) + + # Can't do these in the above for loop since there's only two + faceUVs.append( ( float(values[7]), float(values[8]) ) ) + + # Read weightmap data + vertWeights = [] + if len(values) > 10 and values[9] != "0": # got weight links? 
+ for i in range(10, 10 + (int(values[9]) * 2), 2): # The range between the first and last weightlinks (each of which is *two* values) + try: + bone = smd.a.data.bones[ smd.boneIDs[int(values[i])] ] + vertWeights.append((smd.m.vertex_groups.find(bone.name), float(values[i+1]))) + except KeyError: + badWeights += 1 + else: # Fall back on the deprecated value at the start of the line + try: + bone = smd.a.data.bones[ smd.boneIDs[int(values[0])] ] + vertWeights.append((smd.m.vertex_groups.find(bone.name), 1.0)) + except KeyError: + badWeights += 1 + + vertKeys.append((co, tuple(vertWeights))) + + # Three verts? It's time for a new poly + if vertexCount == 3: + def createFace(use_cache = True): + bmVerts = [] + for vertKey in vertKeys: + bmv = vertMap.get(vertKey, None) if use_cache else None # if a vertex in this position with these bone weights exists, re-use it. + if bmv is None: + bmv = bm.verts.new(vertKey[0]) + for (bone,weight) in vertKey[1]: + bmv[weightLayer][bone] = weight + vertMap[vertKey] = bmv + bmVerts.append(bmv) + + face = bm.faces.new(bmVerts) + face.material_index = mat_ind + for i in range(3): + face.loops[i][uvLayer].uv = faceUVs[i] + + try: + createFace() + except ValueError: # face overlaps another, try again with all-new vertices + createFace(use_cache = False) + break + + # Back in polyland now, with three verts processed. 
+ countPolys+= 1 + + bm.to_mesh(md) + vertMap = None + bm.free() + md.update() + + if countPolys: + ops.object.select_all(action="DESELECT") + smd.m.select_set(True) + bpy.context.view_layer.objects.active = smd.m + + ops.object.shade_smooth() + + for poly in smd.m.data.polygons: + poly.select = True + + smd.m.show_wire = smd.jobType == PHYS + + md.normals_split_custom_set(norms) + + if smd.upAxis == 'Y': + md.transform(rx90) + md.update() + + if badWeights: + self.warning(get_id("importer_err_badweights", True).format(badWeights,smd.jobName)) + print("- Imported {} polys".format(countPolys)) + + # vertexanimation block + def readShapes(self): + smd = self.smd + if smd.jobType is not FLEX: + return + + if not smd.m: + if self.qc: + smd.m = self.qc.ref_mesh + else: # user selection + if bpy.context.active_object.type in shape_types: + smd.m = bpy.context.active_object + else: + for obj in bpy.context.selected_objects: + if obj.type in shape_types: + smd.m = obj + + if not smd.m: + self.error(get_id("importer_err_shapetarget")) # FIXME: this could actually be supported + return + + if hasShapes(smd.m): + smd.m.active_shape_key_index = 0 + smd.m.show_only_shape_key = True # easier to view each shape, less confusion when several are active at once + + def vec_round(v): + return Vector([round(co,3) for co in v]) + co_map = {} + mesh_cos = [vert.co for vert in smd.m.data.vertices] + mesh_cos_rnd = None + + smd.vta_ref = None + vta_cos = [] + vta_ids = [] + + making_base_shape = True + bad_vta_verts = [] + num_shapes = 0 + md = smd.m.data + + for line in smd.file: + line = line.rstrip("\n") + + if smdBreak(line): + break + if smdContinue(line): + continue + + values = line.split() + + if values[0] == "time": + shape_name = smd.shapeNames.get(values[1]) + if smd.vta_ref == None: + if not hasShapes(smd.m, False): smd.m.shape_key_add(name=shape_name if shape_name else "Basis") + vd = bpy.data.meshes.new(name="VTA vertices") + vta_ref = smd.vta_ref = 
bpy.data.objects.new(name=vd.name,object_data=vd) + vta_ref.matrix_world = smd.m.matrix_world + smd.g.objects.link(vta_ref) + + vta_err_vg = vta_ref.vertex_groups.new(name=get_id("importer_name_unmatchedvta")) + elif making_base_shape: + vd.vertices.add(int(len(vta_cos)/3)) + vd.vertices.foreach_set("co",vta_cos) + num_vta_verts = len(vd.vertices) + del vta_cos + + mod = vta_ref.modifiers.new(name="VTA Shrinkwrap",type='SHRINKWRAP') + mod.target = smd.m + mod.wrap_method = 'NEAREST_VERTEX' + + vd = bpy.data.meshes.new_from_object(vta_ref.evaluated_get(bpy.context.evaluated_depsgraph_get())) + + vta_ref.modifiers.remove(mod) + del mod + + for i in range(len(vd.vertices)): + id = vta_ids[i] + co = vd.vertices[i].co + map_id = None + try: + map_id = mesh_cos.index(co) + except ValueError: + if not mesh_cos_rnd: + mesh_cos_rnd = [vec_round(co) for co in mesh_cos] + try: + map_id = mesh_cos_rnd.index(vec_round(co)) + except ValueError: + bad_vta_verts.append(i) + continue + co_map[id] = map_id + + bpy.data.meshes.remove(vd) + del vd + + if bad_vta_verts: + err_ratio = len(bad_vta_verts) / num_vta_verts + vta_err_vg.add(bad_vta_verts,1.0,'REPLACE') + message = get_id("importer_err_unmatched_mesh", True).format(len(bad_vta_verts), int(err_ratio * 100)) + if err_ratio == 1: + self.error(message) + return + else: + self.warning(message) + else: + removeObject(vta_ref) + making_base_shape = False + + if not making_base_shape: + smd.m.shape_key_add(name=shape_name if shape_name else values[1]) + num_shapes += 1 + + continue # to the first vertex of the new shape + + cur_id = int(values[0]) + vta_co = getUpAxisMat(smd.upAxis) @ Vector([ float(values[1]), float(values[2]), float(values[3]) ]) + + if making_base_shape: + vta_ids.append(cur_id) + vta_cos.extend(vta_co) + else: # write to the shapekey + try: + md.shape_keys.key_blocks[-1].data[ co_map[cur_id] ].co = vta_co + except KeyError: + pass + + print("- Imported",num_shapes,"flex shapes") + + # Parses a QC file + def 
readQC(self, filepath, newscene, doAnim, makeCamera, rotMode, outer_qc = False): + filename = os.path.basename(filepath) + filedir = os.path.dirname(filepath) + + def normalisePath(path): + if (os.path.sep == '/'): + path = path.replace('\\','/') + return os.path.normpath(path) + + if outer_qc: + print("\nQC IMPORTER: now working on",filename) + + qc = self.qc = QcInfo() + qc.startTime = time.time() + qc.jobName = filename + qc.root_filedir = filedir + qc.makeCamera = makeCamera + qc.animation_names = [] + if newscene: + bpy.context.screen.scene = bpy.data.scenes.new(filename) # BLENDER BUG: this currently doesn't update bpy.context.scene + else: + bpy.context.scene.name = filename + else: + qc = self.qc + + file = open(filepath, 'r') + in_bodygroup = in_lod = in_sequence = False + lod = 0 + for line_str in file: + line = self.parseQuoteBlockedLine(line_str) + if len(line) == 0: + continue + #print(line) + + # handle individual words (insert QC variable values, change slashes) + i = 0 + for word in line: + for var in qc.vars.keys(): + kw = "${}$".format(var) + pos = word.lower().find(kw) + if pos != -1: + word = word.replace(word[pos:pos+len(kw)], qc.vars[var]) + line[i] = word.replace("/","\\") # studiomdl is Windows-only + i += 1 + + # Skip macros + if line[0] == "$definemacro": + self.warning(get_id("importer_qc_macroskip", True).format(filename)) + while line[-1] == "\\\\": + line = self.parseQuoteBlockedLine( file.readline()) + + # register new QC variable + if line[0] == "$definevariable": + qc.vars[line[1]] = line[2].lower() + continue + + # dir changes + if line[0] == "$pushd": + if line[1][-1] != "\\": + line[1] += "\\" + qc.dir_stack.append(line[1]) + continue + if line[0] == "$popd": + try: + qc.dir_stack.pop() + except IndexError: + pass # invalid QC, but whatever + continue + + # up axis + if line[0] == "$upaxis": + qc.upAxis = bpy.context.scene.vs.up_axis = line[1].upper() + qc.upAxisMat = getUpAxisMat(line[1]) + continue + + # bones in pure animation 
QCs + if line[0] == "$definebone": + pass # TODO + + def import_file(word_index,default_ext,smd_type,append='APPEND',layer=0,in_file_recursion = False): + path = os.path.join( qc.cd(), appendExt(normalisePath(line[word_index]),default_ext) ) + + if not in_file_recursion and not os.path.exists(path): + return import_file(word_index,"dmx",smd_type,append,layer,True) + + if not path in qc.imported_smds: # FIXME: an SMD loaded once relatively and once absolutely will still pass this test + qc.imported_smds.append(path) + self.append = append if qc.a else 'NEW_ARMATURE' + + # import the file + self.num_files_imported += (self.readDMX if path.endswith("dmx") else self.readSMD)(path,qc.upAxis,rotMode,False,smd_type,target_layer=layer) + return True + + # meshes + if line[0] in ["$body","$model"]: + import_file(2,"smd",REF) + continue + if line[0] == "$lod": + in_lod = True + lod += 1 + continue + if in_lod: + if line[0] == "replacemodel": + import_file(2,"smd",REF,'VALIDATE',layer=lod) + continue + if "}" in line: + in_lod = False + continue + if line[0] == "$bodygroup": + in_bodygroup = True + continue + if in_bodygroup: + if line[0] == "studio": + import_file(1,"smd",REF) + continue + if "}" in line: + in_bodygroup = False + continue + + # skeletal animations + if in_sequence or (doAnim and line[0] in ["$sequence","$animation"]): + # there is no easy way to determine whether a SMD is being defined here or elsewhere, or even precisely where it is being defined + num_words_to_skip = 2 if not in_sequence else 0 + for i in range(len(line)): + if num_words_to_skip: + num_words_to_skip -= 1 + continue + if line[i] == "{": + in_sequence = True + continue + if line[i] == "}": + in_sequence = False + continue + if line[i] in ["hidden","autolay","realtime","snap","spline","xfade","delta","predelta"]: + continue + if line[i] in ["fadein","fadeout","addlayer","blendwidth","node"]: + num_words_to_skip = 1 + continue + if line[i] in ["activity","transision","rtransition"]: + 
num_words_to_skip = 2 + continue + if line[i] in ["blend"]: + num_words_to_skip = 3 + continue + if line[i] in ["blendlayer"]: + num_words_to_skip = 5 + continue + # there are many more keywords, but they can only appear *after* an SMD is referenced + + if not qc.a: qc.a = self.findArmature() + if not qc.a: + self.warning(get_id("qc_warn_noarmature", True).format(line_str.strip())) + continue + + if line[i].lower() not in qc.animation_names: + if not qc.a.animation_data: qc.a.animation_data_create() + last_action = qc.a.animation_data.action + import_file(i,"smd",ANIM,'VALIDATE') + if line[0] == "$animation": + qc.animation_names.append(line[1].lower()) + while i < len(line) - 1: + if line[i] == "fps" and qc.a.animation_data.action != last_action: + if 'fps' in dir(qc.a.animation_data.action): + qc.a.animation_data.action.fps = float(line[i+1]) + i += 1 + break + continue + + # flex animation + if line[0] == "flexfile": + import_file(1,"vta",FLEX,'VALIDATE') + continue + + # naming shapes + if qc.ref_mesh and line[0] in ["flex","flexpair"]: # "flex" is safe because it cannot come before "flexfile" + for i in range(1,len(line)): + if line[i] == "frame": + shape = qc.ref_mesh.data.shape_keys.key_blocks.get(line[i+1]) + if shape and shape.name.startswith("Key"): shape.name = line[1] + break + continue + + # physics mesh + if line[0] in ["$collisionmodel","$collisionjoints"]: + import_file(1,"smd",PHYS,'VALIDATE',layer=10) # FIXME: what if there are >10 LODs? 
+ continue + + # origin; this is where viewmodel editors should put their camera, and is in general something to be aware of + if line[0] == "$origin": + if qc.makeCamera: + data = bpy.data.cameras.new(qc.jobName + "_origin") + name = "camera" + else: + data = None + name = "empty object" + print("QC IMPORTER: created {} at $origin\n".format(name)) + + origin = bpy.data.objects.new(qc.jobName + "_origin",data) + bpy.context.scene.collection.objects.link(origin) + + origin.rotation_euler = Vector([pi/2,0,pi]) + Vector(getUpAxisMat(qc.upAxis).inverted().to_euler()) # works, but adding seems very wrong! + ops.object.select_all(action="DESELECT") + origin.select_set(True) + ops.object.transform_apply(rotation=True) + + for i in range(3): + origin.location[i] = float(line[i+1]) + origin.matrix_world = getUpAxisMat(qc.upAxis) @ origin.matrix_world + + if qc.makeCamera: + bpy.context.scene.camera = origin + origin.data.lens_unit = 'DEGREES' + origin.data.lens = 31.401752 # value always in mm; this number == 54 degrees + # Blender's FOV isn't locked to X or Y height, so a shift is needed to get the weapon aligned properly. 
+ # This is a nasty hack, and the values are only valid for the default 54 degrees angle + origin.data.shift_y = -0.27 + origin.data.shift_x = 0.36 + origin.data.passepartout_alpha = 1 + else: + origin.empty_display_type = 'PLAIN_AXES' + + qc.origin = origin + + # QC inclusion + if line[0] == "$include": + path = os.path.join(qc.root_filedir,normalisePath(line[1])) # special case: ignores dir stack + + if not path.endswith(".qc") and not path.endswith(".qci"): + if os.path.exists(appendExt(path,".qci")): + path = appendExt(path,".qci") + elif os.path.exists(appendExt(path,".qc")): + path = appendExt(path,".qc") + try: + self.readQC(path,False, doAnim, makeCamera, rotMode) + except IOError: + self.warning(get_id("importer_err_qci", True).format(path)) + + file.close() + + if qc.origin: + qc.origin.parent = qc.a + if qc.ref_mesh: + size = min(qc.ref_mesh.dimensions) / 15 + if qc.makeCamera: + qc.origin.data.display_size = size + else: + qc.origin.empty_display_size = size + + if outer_qc: + printTimeMessage(qc.startTime,filename,"import","QC") + return self.num_files_imported + + def initSMD(self, filepath,smd_type,upAxis,rotMode,target_layer): + smd = self.smd = SmdInfo() + smd.jobName = os.path.splitext(os.path.basename(filepath))[0] + smd.jobType = smd_type + smd.startTime = time.time() + smd.layer = target_layer + smd.rotMode = rotMode + self.createCollection() + if self.qc: + smd.upAxis = self.qc.upAxis + smd.a = self.qc.a + if upAxis: + smd.upAxis = upAxis + + return smd + + def createCollection(self): + if self.smd.jobType and self.smd.jobType != ANIM: + if self.createCollections: + self.smd.g = bpy.data.collections.new(self.smd.jobName) + bpy.context.scene.collection.children.link(self.smd.g) + else: + self.smd.g = bpy.context.scene.collection + + # Parses an SMD file + def readSMD(self, filepath, upAxis, rotMode, newscene = False, smd_type = None, target_layer = 0): + if filepath.endswith("dmx"): + return self.readDMX( filepath, upAxis, newscene, smd_type) + 
+ smd = self.initSMD(filepath,smd_type,upAxis,rotMode,target_layer) + self.appliedReferencePose = False + + try: + smd.file = file = open(filepath, 'r') + except IOError as err: # TODO: work out why errors are swallowed if I don't do this! + self.error(get_id("importer_err_smd", True).format(smd.jobName,err)) + return 0 + + if newscene: + bpy.context.screen.scene = bpy.data.scenes.new(smd.jobName) # BLENDER BUG: this currently doesn't update bpy.context.scene + elif bpy.context.scene.name == pgettext("Scene"): + bpy.context.scene.name = smd.jobName + + print("\nSMD IMPORTER: now working on",smd.jobName) + + while True: + header = self.parseQuoteBlockedLine(file.readline()) + if header: break + + if header != ["version" ,"1"]: + self.warning (get_id("importer_err_smd_ver")) + + if smd.jobType == None: + self.scanSMD() # What are we dealing with? + self.createCollection() + + for line in file: + if line == "nodes\n": self.readNodes() + if line == "skeleton\n": self.readFrames() + if line == "triangles\n": self.readPolys() + if line == "vertexanimation\n": self.readShapes() + + file.close() + printTimeMessage(smd.startTime,smd.jobName,"import") + + return 1 + + def readDMX(self, filepath, upAxis, rotMode,newscene = False, smd_type = None, target_layer = 0): + smd = self.initSMD(filepath,smd_type,upAxis,rotMode,target_layer) + smd.isDMX = 1 + + bench = BenchMarker(1,"DMX") + + target_arm = self.findArmature() if self.append != 'NEW_ARMATURE' else None + if target_arm: + smd.a = target_arm + + ob = bone = restData = smd.atch = None + smd.layer = target_layer + if bpy.context.active_object: ops.object.mode_set(mode='OBJECT') + self.appliedReferencePose = False + + print( "\nDMX IMPORTER: now working on",os.path.basename(filepath) ) + + try: + print("- Loading DMX...") + try: + dm = datamodel.load(filepath) + except IOError as e: + self.error(e) + return 0 + bench.report("Load DMX") + + if bpy.context.scene.name.startswith("Scene"): + bpy.context.scene.name = smd.jobName 
+ + keywords = getDmxKeywords(dm.format_ver) + + correctiveSeparator = '_' + if dm.format_ver >= 22 and any([elem for elem in dm.elements if elem.type == "DmeVertexDeltaData" and '__' in elem.name]): + correctiveSeparator = '__' + self._ensureSceneDmxVersion(dmx_version(9, 22, compiler=Compiler.MODELDOC)) + + if not smd_type: + smd.jobType = REF if dm.root.get("model") else ANIM + self.createCollection() + self.ensureAnimationBonesValidated() + + DmeModel = dm.root["skeleton"] + transforms = DmeModel["baseStates"][0]["transforms"] if DmeModel.get("baseStates") and len(DmeModel["baseStates"]) > 0 else None + + DmeAxisSystem = DmeModel.get("axisSystem") + if DmeAxisSystem: + for axis in axes_lookup.items(): + if axis[1] == DmeAxisSystem["upAxis"] - 1: + upAxis = smd.upAxis = axis[0] + break + + def getBlenderQuat(datamodel_quat): + return Quaternion([datamodel_quat[3], datamodel_quat[0], datamodel_quat[1], datamodel_quat[2]]) + def get_transform_matrix(elem): + out = Matrix() + if not elem: return out + trfm = elem.get("transform") + if transforms: + for e in transforms: + if e.name == elem.name: + trfm = e + if not trfm: return out + out @= Matrix.Translation(Vector(trfm["position"])) + out @= getBlenderQuat(trfm["orientation"]).to_matrix().to_4x4() + return out + def isBone(elem): + return elem.type in ["DmeDag","DmeJoint"] + def getBoneForElement(elem): + return smd.a.data.edit_bones[smd.boneIDs[elem.id]] + def enumerateBonesAndAttachments(elem : datamodel.Element): + parent = elem if isBone(elem) else None + for child in elem.get("children", []): + if child.type == "DmeDag" and child.get("shape") and child["shape"].type == "DmeAttachment": + if smd.jobType != REF: + continue + yield (child["shape"], parent) + elif isBone(child) and child.name != implicit_bone_name: + # don't import Dags which simply wrap meshes. In some DMX animations, each bone has an empty mesh attached. 
+ boneShape = child.get("shape") + if not boneShape or boneShape["currentState"] == None: + yield (child, parent) + yield from enumerateBonesAndAttachments(child) + elif child.type == "DmeModel": + yield from enumerateBonesAndAttachments(child) + + # Skeleton + bone_matrices = {} + restData = {} + if target_arm: + missing_bones = [] + bpy.context.view_layer.objects.active = smd.a + smd.a.hide_set(False) + ops.object.mode_set(mode='EDIT') + + for (elem,parent) in enumerateBonesAndAttachments(DmeModel): + if elem.type == "DmeAttachment": + continue + + bone = smd.a.data.edit_bones.get(self.truncate_id_name(elem.name, bpy.types.Bone)) + if not bone: + if self.append == 'APPEND' and smd.jobType in [REF,ANIM]: + bone = smd.a.data.edit_bones.new(self.truncate_id_name(elem.name, bpy.types.Bone)) + bone.parent = getBoneForElement(parent) if parent else None + bone.tail = (0,5,0) + bone_matrices[bone.name] = get_transform_matrix(elem) + smd.boneIDs[elem.id] = bone.name + smd.boneTransformIDs[elem["transform"].id] = bone.name + else: + missing_bones.append(elem.name) + else: + scene_parent = bone.parent.name if bone.parent else "" + dmx_parent = parent.name if parent else "" + if scene_parent != dmx_parent: + self.warning(get_id('importer_bone_parent_miss',True).format(elem.name,scene_parent,dmx_parent,smd.jobName)) + + smd.boneIDs[elem.id] = bone.name + smd.boneTransformIDs[elem["transform"].id] = bone.name + + if missing_bones and smd.jobType != ANIM: # animations report missing bones seperately + self.warning(get_id("importer_err_missingbones", True).format(smd.jobName,len(missing_bones),smd.a.name)) + print("\n".join(missing_bones)) + elif any(enumerateBonesAndAttachments(DmeModel)): + self.append = 'NEW_ARMATURE' + ob = smd.a = self.createArmature(self.truncate_id_name(DmeModel.name or smd.jobName, bpy.types.Armature)) + if self.qc: self.qc.a = ob + bpy.context.view_layer.objects.active = smd.a + ops.object.mode_set(mode='EDIT') + + smd.a.matrix_world = 
getUpAxisMat(smd.upAxis) + + for (elem,parent) in enumerateBonesAndAttachments(DmeModel): + parent = getBoneForElement(parent) if parent else None + if elem.type == "DmeAttachment": + atch = smd.atch = bpy.data.objects.new(name=self.truncate_id_name(elem.name, "Attachment"), object_data=None) + smd.g.objects.link(atch) + atch.show_in_front = True + atch.empty_display_type = 'ARROWS' + + atch.parent = smd.a + if parent: + atch.parent_type = 'BONE' + atch.parent_bone = parent.name + + atch.matrix_local = get_transform_matrix(elem) + else: + bone = smd.a.data.edit_bones.new(self.truncate_id_name(elem.name,bpy.types.Bone)) + bone.parent = parent + bone.tail = (0,5,0) + bone_matrices[bone.name] = get_transform_matrix(elem) + smd.boneIDs[elem.id] = bone.name + smd.boneTransformIDs[elem["transform"].id] = bone.name + + if smd.a: + ops.object.mode_set(mode='POSE') + for bone in smd.a.pose.bones: + mat = bone_matrices.get(bone.name) + if mat: + keyframe = KeyFrame() + keyframe.matrix = mat + restData[bone] = [keyframe] + if restData: + self.applyFrames(restData,1,None) + + def parseModel(elem,matrix=Matrix(), last_bone = None): + if elem.type in ["DmeModel","DmeDag", "DmeJoint"]: + if elem.type == "DmeDag": + matrix = matrix @ get_transform_matrix(elem) + if elem.get("children") and elem["children"]: + if elem.type == "DmeJoint": + last_bone = elem + subelems = elem["children"] + elif elem.get("shape"): + subelems = [elem["shape"]] + else: + return + for subelem in subelems: + parseModel(subelem,matrix,last_bone) + elif elem.type == "DmeMesh": + DmeMesh = elem + if bpy.context.active_object: + ops.object.mode_set(mode='OBJECT') + mesh_name = self.truncate_id_name(DmeMesh.name,bpy.types.Mesh) + ob = smd.m = bpy.data.objects.new(name=mesh_name, object_data=bpy.data.meshes.new(name=mesh_name)) + smd.g.objects.link(ob) + ob.show_wire = smd.jobType == PHYS + + DmeVertexData = DmeMesh["currentState"] + have_weightmap = keywords["weight"] in DmeVertexData["vertexFormat"] + + if 
smd.a: + ob.parent = smd.a + if have_weightmap: + amod = ob.modifiers.new(name="Armature",type='ARMATURE') + amod.object = smd.a + amod.use_bone_envelopes = False + else: + ob.matrix_local = getUpAxisMat(smd.upAxis) + + print("Importing DMX mesh \"{}\"".format(DmeMesh.name)) + + bm = bmesh.new() + bm.from_mesh(ob.data) + + positions = DmeVertexData[keywords['pos']] + positionsIndices = DmeVertexData[keywords['pos'] + "Indices"] + + # Vertices + for pos in positions: + bm.verts.new( Vector(pos) ) + bm.verts.ensure_lookup_table() + + # Faces, Materials, Colours + skipfaces = set() + vertex_layer_infos = [] + + class VertexLayerInfo(): + def __init__(self, layer, indices, values): + self.layer = layer + self.indices = indices + self.values = values + + def get_loop_value(self, loop_index): + return self.values[self.indices[loop_index]] + + # Normals + normalsLayer = bm.loops.layers.float_vector.new("__bst_normal") + normalsLayerName = normalsLayer.name + vertex_layer_infos.append(VertexLayerInfo(normalsLayer, DmeVertexData[keywords['norm'] + "Indices"], DmeVertexData[keywords['norm']])) + + # Arbitrary vertex data + def warnUneditableVertexData(name): self.warning("Vertex data '{}' was imported, but cannot be edited in Blender (as of 2.82)".format(name)) + def isClothEnableMap(name): return name.startswith("cloth_enable$") + + for vertexMap in [prop for prop in DmeVertexData["vertexFormat"] if prop not in keywords.values()]: + indices = DmeVertexData.get(vertexMap + "Indices") + if not indices: + continue + values = DmeVertexData.get(vertexMap) + if not isinstance(values, list) or len(values) == 0: + continue + + if isinstance(values[0], float): + if isClothEnableMap(vertexMap): + continue # will be imported later as a weightmap + layers = bm.loops.layers.float + warnUneditableVertexData(vertexMap) + elif isinstance(values[0], int): + layers = bm.loops.layers.int + warnUneditableVertexData(vertexMap) + elif isinstance(values[0], str): + layers = bm.loops.layers.string 
+ warnUneditableVertexData(vertexMap) + elif isinstance(values[0], datamodel.Vector2): + layers = bm.loops.layers.uv + elif isinstance(values[0], datamodel.Vector4) or isinstance(values[0], datamodel.Color): + layers = bm.loops.layers.color + else: + self.warning("Could not import vertex data '{}'; Blender does not support {} data layers.".format(vertexMap, type(values[0]).__name__)) + continue + + vertex_layer_infos.append(VertexLayerInfo(layers.new(vertexMap), DmeVertexData[vertexMap + "Indices"], values)) + + if vertexMap != "textureCoordinates": + self._ensureSceneDmxVersion(dmx_version(9, 22)) + + deform_group_names = ordered_set.OrderedSet() + + # Weightmap + if have_weightmap: + weighted_bone_indices = ordered_set.OrderedSet() + jointWeights = DmeVertexData[keywords["weight"]] + jointIndices = DmeVertexData[keywords["weight_indices"]] + jointRange = range(DmeVertexData["jointCount"]) + deformLayer = bm.verts.layers.deform.new() + + joint_index = 0 + for vert in bm.verts: + for i in jointRange: + weight = jointWeights[joint_index] + if weight > 0: + vg_index = weighted_bone_indices.add(jointIndices[joint_index]) + vert[deformLayer][vg_index] = weight + joint_index += 1 + + joints = DmeModel["jointList"] if dm.format_ver >= 11 else DmeModel["jointTransforms"]; + for boneName in (joints[i].name for i in weighted_bone_indices): + deform_group_names.add(boneName) + + for face_set in DmeMesh["faceSets"]: + mat_path = face_set["material"]["mtlName"] + bpy.context.scene.vs.material_path = os.path.dirname(mat_path).replace("\\","/") + mat, mat_ind = self.getMeshMaterial(os.path.basename(mat_path)) + face_loops = [] + dmx_face = 0 + for vert in face_set["faces"]: + if vert != -1: + face_loops.append(vert) + continue + + # -1 marks the end of a face definition, time to create it! 
+ try: + face = bm.faces.new([bm.verts[positionsIndices[loop]] for loop in face_loops]) + face.smooth = True + face.material_index = mat_ind + + # Apply normals and Source 2 vertex data + for layer_info in vertex_layer_infos: + is_uv_layer = layer_info.layer.name in bm.loops.layers.uv + for i, loop in enumerate(face.loops): + value = layer_info.get_loop_value(face_loops[i]) + if is_uv_layer: + loop[layer_info.layer].uv = value + else: + loop[layer_info.layer] = value + + except ValueError: # Can't have an overlapping face...this will be painful later + skipfaces.add(dmx_face) + dmx_face += 1 + face_loops.clear() + + + for cloth_enable in (name for name in DmeVertexData["vertexFormat"] if isClothEnableMap(name)): + deformLayer = bm.verts.layers.deform.verify() + vg_index = deform_group_names.add(cloth_enable) + data = DmeVertexData[cloth_enable] + indices = DmeVertexData[cloth_enable + "Indices"] + i = 0 + for face in bm.faces: + for loop in face.loops: + weight = data[indices[i]] + loop.vert[deformLayer][vg_index] = weight + i += 1 + + for groupName in deform_group_names: + ob.vertex_groups.new(name=groupName) # must create vertex groups before loading bmesh data + + if last_bone and not have_weightmap: # bone parent + ob.parent_type = 'BONE' + ob.parent_bone = last_bone.name + + # Move from BMesh to Blender + bm.to_mesh(ob.data) + del bm + ob.data.update() + ob.matrix_world @= matrix + if ob.parent_bone: + ob.matrix_world = ob.parent.matrix_world @ ob.parent.data.bones[ob.parent_bone].matrix_local @ ob.matrix_world + elif ob.parent: + ob.matrix_world = ob.parent.matrix_world @ ob.matrix_world + if smd.jobType == PHYS: + ob.display_type = 'SOLID' + + # Normals + normalsLayer = ob.data.attributes[normalsLayerName] + ob.data.normals_split_custom_set([value.vector for value in normalsLayer.data]) + del normalsLayer + ob.data.attributes.remove(ob.data.attributes[normalsLayerName]) + + # Stereo balance + if keywords['balance'] in DmeVertexData["vertexFormat"]: + vg = 
ob.vertex_groups.new(name=get_id("importer_balance_group", data=True)) + balanceIndices = DmeVertexData[keywords['balance'] + "Indices"] + balance = DmeVertexData[keywords['balance']] + ones = [] + for i in balanceIndices: + val = balance[i] + if val == 0: + continue + elif val == 1: + ones.append(i) + else: + vg.add([i],val,'REPLACE') + vg.add(ones,1,'REPLACE') + + ob.data.vs.flex_stereo_mode = 'VGROUP' + ob.data.vs.flex_stereo_vg = vg.name + + # Shapes + if DmeMesh.get("deltaStates"): + for DmeVertexDeltaData in DmeMesh["deltaStates"]: + if not ob.data.shape_keys: + ob.shape_key_add(name="Basis") + ob.show_only_shape_key = True + ob.data.shape_keys.name = DmeMesh.name + shape_key = ob.shape_key_add(name=DmeVertexDeltaData.name) + + if keywords['pos'] in DmeVertexDeltaData["vertexFormat"]: + deltaPositions = DmeVertexDeltaData[keywords['pos']] + for i,posIndex in enumerate(DmeVertexDeltaData[keywords['pos'] + "Indices"]): + shape_key.data[posIndex].co += Vector(deltaPositions[i]) + + if correctiveSeparator in DmeVertexDeltaData.name: + flex.AddCorrectiveShapeDrivers.addDrivers(shape_key, DmeVertexDeltaData.name.split(correctiveSeparator)) + + if smd.jobType in [REF,PHYS]: + parseModel(DmeModel) + + if smd.jobType == ANIM: + print("Importing DMX animation \"{}\"".format(smd.jobName)) + + animation = dm.root["animationList"]["animations"][0] + + frameRate = animation.get("frameRate",30) # very, very old DMXs don't have this + timeFrame = animation["timeFrame"] + scale = timeFrame.get("scale",1.0) + duration = timeFrame.get("duration") or timeFrame.get("durationTime") + offset = timeFrame.get("offset") or timeFrame.get("offsetTime",0.0) + start = timeFrame.get("start", 0) + + if type(duration) == int: duration = datamodel.Time.from_int(duration) + if type(offset) == int: offset = datamodel.Time.from_int(offset) + + lastFrameIndex = 0 + + keyframes = collections.defaultdict(list) + unknown_bones = [] + for channel in animation["channels"]: + toElement = 
channel["toElement"] + if not toElement: continue # SFM + + bone_name = smd.boneTransformIDs.get(toElement.id) + bone = smd.a.pose.bones.get(bone_name) if bone_name else None + if not bone: + if self.append != 'NEW_ARMATURE' and toElement.name not in unknown_bones: + unknown_bones.append(toElement.name) + print("- Animation refers to unrecognised bone \"{}\"".format(toElement.name)) + continue + + is_position_channel = channel["toAttribute"] == "position" + is_rotation_channel = channel["toAttribute"] == "orientation" + if not (is_position_channel or is_rotation_channel): + continue + + frame_log = channel["log"]["layers"][0] + times = frame_log["times"] + values = frame_log["values"] + + for i in range( len(times) ): + frame_time = times[i] + start + if type(frame_time) == int: frame_time = datamodel.Time.from_int(frame_time) + frame_value = values[i] + + keyframe = KeyFrame() + keyframes[bone].append(keyframe) + + keyframe.frame = frame_time * frameRate + lastFrameIndex = max(lastFrameIndex, keyframe.frame) + + if not (bone.parent or keyframe.pos or keyframe.rot): + keyframe.matrix = getUpAxisMat(smd.upAxis).inverted() + + if is_position_channel and not keyframe.pos: + keyframe.matrix @= Matrix.Translation(frame_value) + keyframe.pos = True + elif is_rotation_channel and not keyframe.rot: + keyframe.matrix @= getBlenderQuat(frame_value).to_matrix().to_4x4() + keyframe.rot = True + + if smd.a == None: + self.warning(get_id("importer_err_noanimationbones", True).format(smd.jobName)) + else: + smd.a.hide_set(False) + bpy.context.view_layer.objects.active = smd.a + if unknown_bones: + self.warning(get_id("importer_err_missingbones", True).format(smd.jobName,len(unknown_bones),smd.a.name)) + + total_frames = ceil((duration * frameRate) if duration else lastFrameIndex) + 1 # need a frame for 0 too! 
+ + # apply the keframes + self.applyFrames(keyframes,total_frames,frameRate) + + bpy.context.scene.frame_end += int(round(start * 2 * frameRate,0)) + + except datamodel.AttributeError as e: + e.args = ["Invalid DMX file: {}".format(e.args[0] if e.args else "Unknown error")] + raise + + bench.report("DMX imported in") + return 1 + + @classmethod + def _ensureSceneDmxVersion(cls, version : dmx_version): + if State.datamodelFormat < version.format: + bpy.context.scene.vs.dmx_format = version.format_enum + if State.datamodelEncoding < version.encoding: + bpy.context.scene.vs.dmx_encoding = str(version.encoding) diff --git a/io_scene_valvesource/ordered_set.py b/io_scene_valvesource/ordered_set.py index 1487600..7d61aeb 100644 --- a/io_scene_valvesource/ordered_set.py +++ b/io_scene_valvesource/ordered_set.py @@ -1,488 +1,488 @@ -""" -An OrderedSet is a custom MutableSet that remembers its order, so that every -entry has an index that can be looked up. - -Based on a recipe originally posted to ActiveState Recipes by Raymond Hettiger, -and released under the MIT license. -""" -import itertools as it -from collections import deque - -try: - # Python 3 - from collections.abc import MutableSet, Sequence -except ImportError: - # Python 2.7 - from collections import MutableSet, Sequence - -SLICE_ALL = slice(None) -__version__ = "3.1" - - -def is_iterable(obj): - """ - Are we being asked to look up a list of things, instead of a single thing? - We check for the `__iter__` attribute so that this can cover types that - don't have to be known by this module, such as NumPy arrays. - - Strings, however, should be considered as atomic values to look up, not - iterables. The same goes for tuples, since they are immutable and therefore - valid entries. - - We don't need to check for the Python 2 `unicode` type, because it doesn't - have an `__iter__` attribute anyway. 
- """ - return ( - hasattr(obj, "__iter__") - and not isinstance(obj, str) - and not isinstance(obj, tuple) - ) - - -class OrderedSet(MutableSet, Sequence): - """ - An OrderedSet is a custom MutableSet that remembers its order, so that - every entry has an index that can be looked up. - - Example: - >>> OrderedSet([1, 1, 2, 3, 2]) - OrderedSet([1, 2, 3]) - """ - - def __init__(self, iterable=None): - self.items = [] - self.map = {} - if iterable is not None: - self |= iterable - - def __len__(self): - """ - Returns the number of unique elements in the ordered set - - Example: - >>> len(OrderedSet([])) - 0 - >>> len(OrderedSet([1, 2])) - 2 - """ - return len(self.items) - - def __getitem__(self, index): - """ - Get the item at a given index. - - If `index` is a slice, you will get back that slice of items, as a - new OrderedSet. - - If `index` is a list or a similar iterable, you'll get a list of - items corresponding to those indices. This is similar to NumPy's - "fancy indexing". The result is not an OrderedSet because you may ask - for duplicate indices, and the number of elements returned should be - the number of elements asked for. - - Example: - >>> oset = OrderedSet([1, 2, 3]) - >>> oset[1] - 2 - """ - if isinstance(index, slice) and index == SLICE_ALL: - return self.copy() - elif is_iterable(index): - return [self.items[i] for i in index] - elif hasattr(index, "__index__") or isinstance(index, slice): - result = self.items[index] - if isinstance(result, list): - return self.__class__(result) - else: - return result - else: - raise TypeError("Don't know how to index an OrderedSet by %r" % index) - - def copy(self): - """ - Return a shallow copy of this object. - - Example: - >>> this = OrderedSet([1, 2, 3]) - >>> other = this.copy() - >>> this == other - True - >>> this is other - False - """ - return self.__class__(self) - - def __getstate__(self): - if len(self) == 0: - # The state can't be an empty list. 
- # We need to return a truthy value, or else __setstate__ won't be run. - # - # This could have been done more gracefully by always putting the state - # in a tuple, but this way is backwards- and forwards- compatible with - # previous versions of OrderedSet. - return (None,) - else: - return list(self) - - def __setstate__(self, state): - if state == (None,): - self.__init__([]) - else: - self.__init__(state) - - def __contains__(self, key): - """ - Test if the item is in this ordered set - - Example: - >>> 1 in OrderedSet([1, 3, 2]) - True - >>> 5 in OrderedSet([1, 3, 2]) - False - """ - return key in self.map - - def add(self, key): - """ - Add `key` as an item to this OrderedSet, then return its index. - - If `key` is already in the OrderedSet, return the index it already - had. - - Example: - >>> oset = OrderedSet() - >>> oset.append(3) - 0 - >>> print(oset) - OrderedSet([3]) - """ - if key not in self.map: - self.map[key] = len(self.items) - self.items.append(key) - return self.map[key] - - append = add - - def update(self, sequence): - """ - Update the set with the given iterable sequence, then return the index - of the last element inserted. - - Example: - >>> oset = OrderedSet([1, 2, 3]) - >>> oset.update([3, 1, 5, 1, 4]) - 4 - >>> print(oset) - OrderedSet([1, 2, 3, 5, 4]) - """ - item_index = None - try: - for item in sequence: - item_index = self.add(item) - except TypeError: - raise ValueError( - "Argument needs to be an iterable, got %s" % type(sequence) - ) - return item_index - - def index(self, key): - """ - Get the index of a given entry, raising an IndexError if it's not - present. - - `key` can be an iterable of entries that is not a string, in which case - this returns a list of indices. 
- - Example: - >>> oset = OrderedSet([1, 2, 3]) - >>> oset.index(2) - 1 - """ - if is_iterable(key): - return [self.index(subkey) for subkey in key] - return self.map[key] - - # Provide some compatibility with pd.Index - get_loc = index - get_indexer = index - - def pop(self): - """ - Remove and return the last element from the set. - - Raises KeyError if the set is empty. - - Example: - >>> oset = OrderedSet([1, 2, 3]) - >>> oset.pop() - 3 - """ - if not self.items: - raise KeyError("Set is empty") - - elem = self.items[-1] - del self.items[-1] - del self.map[elem] - return elem - - def discard(self, key): - """ - Remove an element. Do not raise an exception if absent. - - The MutableSet mixin uses this to implement the .remove() method, which - *does* raise an error when asked to remove a non-existent item. - - Example: - >>> oset = OrderedSet([1, 2, 3]) - >>> oset.discard(2) - >>> print(oset) - OrderedSet([1, 3]) - >>> oset.discard(2) - >>> print(oset) - OrderedSet([1, 3]) - """ - if key in self: - i = self.map[key] - del self.items[i] - del self.map[key] - for k, v in self.map.items(): - if v >= i: - self.map[k] = v - 1 - - def clear(self): - """ - Remove all items from this OrderedSet. - """ - del self.items[:] - self.map.clear() - - def __iter__(self): - """ - Example: - >>> list(iter(OrderedSet([1, 2, 3]))) - [1, 2, 3] - """ - return iter(self.items) - - def __reversed__(self): - """ - Example: - >>> list(reversed(OrderedSet([1, 2, 3]))) - [3, 2, 1] - """ - return reversed(self.items) - - def __repr__(self): - if not self: - return "%s()" % (self.__class__.__name__,) - return "%s(%r)" % (self.__class__.__name__, list(self)) - - def __eq__(self, other): - """ - Returns true if the containers have the same items. If `other` is a - Sequence, then order is checked, otherwise it is ignored. 
- - Example: - >>> oset = OrderedSet([1, 3, 2]) - >>> oset == [1, 3, 2] - True - >>> oset == [1, 2, 3] - False - >>> oset == [2, 3] - False - >>> oset == OrderedSet([3, 2, 1]) - False - """ - # In Python 2 deque is not a Sequence, so treat it as one for - # consistent behavior with Python 3. - if isinstance(other, (Sequence, deque)): - # Check that this OrderedSet contains the same elements, in the - # same order, as the other object. - return list(self) == list(other) - try: - other_as_set = set(other) - except TypeError: - # If `other` can't be converted into a set, it's not equal. - return False - else: - return set(self) == other_as_set - - def union(self, *sets): - """ - Combines all unique items. - Each items order is defined by its first appearance. - - Example: - >>> oset = OrderedSet.union(OrderedSet([3, 1, 4, 1, 5]), [1, 3], [2, 0]) - >>> print(oset) - OrderedSet([3, 1, 4, 5, 2, 0]) - >>> oset.union([8, 9]) - OrderedSet([3, 1, 4, 5, 2, 0, 8, 9]) - >>> oset | {10} - OrderedSet([3, 1, 4, 5, 2, 0, 10]) - """ - cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet - containers = map(list, it.chain([self], sets)) - items = it.chain.from_iterable(containers) - return cls(items) - - def __and__(self, other): - # the parent implementation of this is backwards - return self.intersection(other) - - def intersection(self, *sets): - """ - Returns elements in common between all sets. Order is defined only - by the first set. 
- - Example: - >>> oset = OrderedSet.intersection(OrderedSet([0, 1, 2, 3]), [1, 2, 3]) - >>> print(oset) - OrderedSet([1, 2, 3]) - >>> oset.intersection([2, 4, 5], [1, 2, 3, 4]) - OrderedSet([2]) - >>> oset.intersection() - OrderedSet([1, 2, 3]) - """ - cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet - if sets: - common = set.intersection(*map(set, sets)) - items = (item for item in self if item in common) - else: - items = self - return cls(items) - - def difference(self, *sets): - """ - Returns all elements that are in this set but not the others. - - Example: - >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2])) - OrderedSet([1, 3]) - >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]), OrderedSet([3])) - OrderedSet([1]) - >>> OrderedSet([1, 2, 3]) - OrderedSet([2]) - OrderedSet([1, 3]) - >>> OrderedSet([1, 2, 3]).difference() - OrderedSet([1, 2, 3]) - """ - cls = self.__class__ - if sets: - other = set.union(*map(set, sets)) - items = (item for item in self if item not in other) - else: - items = self - return cls(items) - - def issubset(self, other): - """ - Report whether another set contains this set. - - Example: - >>> OrderedSet([1, 2, 3]).issubset({1, 2}) - False - >>> OrderedSet([1, 2, 3]).issubset({1, 2, 3, 4}) - True - >>> OrderedSet([1, 2, 3]).issubset({1, 4, 3, 5}) - False - """ - if len(self) > len(other): # Fast check for obvious cases - return False - return all(item in other for item in self) - - def issuperset(self, other): - """ - Report whether this set contains another set. - - Example: - >>> OrderedSet([1, 2]).issuperset([1, 2, 3]) - False - >>> OrderedSet([1, 2, 3, 4]).issuperset({1, 2, 3}) - True - >>> OrderedSet([1, 4, 3, 5]).issuperset({1, 2, 3}) - False - """ - if len(self) < len(other): # Fast check for obvious cases - return False - return all(item in self for item in other) - - def symmetric_difference(self, other): - """ - Return the symmetric difference of two OrderedSets as a new set. 
- That is, the new set will contain all elements that are in exactly - one of the sets. - - Their order will be preserved, with elements from `self` preceding - elements from `other`. - - Example: - >>> this = OrderedSet([1, 4, 3, 5, 7]) - >>> other = OrderedSet([9, 7, 1, 3, 2]) - >>> this.symmetric_difference(other) - OrderedSet([4, 5, 9, 2]) - """ - cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet - diff1 = cls(self).difference(other) - diff2 = cls(other).difference(self) - return diff1.union(diff2) - - def _update_items(self, items): - """ - Replace the 'items' list of this OrderedSet with a new one, updating - self.map accordingly. - """ - self.items = items - self.map = {item: idx for (idx, item) in enumerate(items)} - - def difference_update(self, *sets): - """ - Update this OrderedSet to remove items from one or more other sets. - - Example: - >>> this = OrderedSet([1, 2, 3]) - >>> this.difference_update(OrderedSet([2, 4])) - >>> print(this) - OrderedSet([1, 3]) - - >>> this = OrderedSet([1, 2, 3, 4, 5]) - >>> this.difference_update(OrderedSet([2, 4]), OrderedSet([1, 4, 6])) - >>> print(this) - OrderedSet([3, 5]) - """ - items_to_remove = set() - for other in sets: - items_to_remove |= set(other) - self._update_items([item for item in self.items if item not in items_to_remove]) - - def intersection_update(self, other): - """ - Update this OrderedSet to keep only items in another set, preserving - their order in this set. - - Example: - >>> this = OrderedSet([1, 4, 3, 5, 7]) - >>> other = OrderedSet([9, 7, 1, 3, 2]) - >>> this.intersection_update(other) - >>> print(this) - OrderedSet([1, 3, 7]) - """ - other = set(other) - self._update_items([item for item in self.items if item in other]) - - def symmetric_difference_update(self, other): - """ - Update this OrderedSet to remove items from another set, then - add items from the other set that were not present in this set. 
- - Example: - >>> this = OrderedSet([1, 4, 3, 5, 7]) - >>> other = OrderedSet([9, 7, 1, 3, 2]) - >>> this.symmetric_difference_update(other) - >>> print(this) - OrderedSet([4, 5, 9, 2]) - """ - items_to_add = [item for item in other if item not in self] - items_to_remove = set(other) - self._update_items( - [item for item in self.items if item not in items_to_remove] + items_to_add - ) +""" +An OrderedSet is a custom MutableSet that remembers its order, so that every +entry has an index that can be looked up. + +Based on a recipe originally posted to ActiveState Recipes by Raymond Hettiger, +and released under the MIT license. +""" +import itertools as it +from collections import deque + +try: + # Python 3 + from collections.abc import MutableSet, Sequence +except ImportError: + # Python 2.7 + from collections import MutableSet, Sequence + +SLICE_ALL = slice(None) +__version__ = "3.1" + + +def is_iterable(obj): + """ + Are we being asked to look up a list of things, instead of a single thing? + We check for the `__iter__` attribute so that this can cover types that + don't have to be known by this module, such as NumPy arrays. + + Strings, however, should be considered as atomic values to look up, not + iterables. The same goes for tuples, since they are immutable and therefore + valid entries. + + We don't need to check for the Python 2 `unicode` type, because it doesn't + have an `__iter__` attribute anyway. + """ + return ( + hasattr(obj, "__iter__") + and not isinstance(obj, str) + and not isinstance(obj, tuple) + ) + + +class OrderedSet(MutableSet, Sequence): + """ + An OrderedSet is a custom MutableSet that remembers its order, so that + every entry has an index that can be looked up. 
+ + Example: + >>> OrderedSet([1, 1, 2, 3, 2]) + OrderedSet([1, 2, 3]) + """ + + def __init__(self, iterable=None): + self.items = [] + self.map = {} + if iterable is not None: + self |= iterable + + def __len__(self): + """ + Returns the number of unique elements in the ordered set + + Example: + >>> len(OrderedSet([])) + 0 + >>> len(OrderedSet([1, 2])) + 2 + """ + return len(self.items) + + def __getitem__(self, index): + """ + Get the item at a given index. + + If `index` is a slice, you will get back that slice of items, as a + new OrderedSet. + + If `index` is a list or a similar iterable, you'll get a list of + items corresponding to those indices. This is similar to NumPy's + "fancy indexing". The result is not an OrderedSet because you may ask + for duplicate indices, and the number of elements returned should be + the number of elements asked for. + + Example: + >>> oset = OrderedSet([1, 2, 3]) + >>> oset[1] + 2 + """ + if isinstance(index, slice) and index == SLICE_ALL: + return self.copy() + elif is_iterable(index): + return [self.items[i] for i in index] + elif hasattr(index, "__index__") or isinstance(index, slice): + result = self.items[index] + if isinstance(result, list): + return self.__class__(result) + else: + return result + else: + raise TypeError("Don't know how to index an OrderedSet by %r" % index) + + def copy(self): + """ + Return a shallow copy of this object. + + Example: + >>> this = OrderedSet([1, 2, 3]) + >>> other = this.copy() + >>> this == other + True + >>> this is other + False + """ + return self.__class__(self) + + def __getstate__(self): + if len(self) == 0: + # The state can't be an empty list. + # We need to return a truthy value, or else __setstate__ won't be run. + # + # This could have been done more gracefully by always putting the state + # in a tuple, but this way is backwards- and forwards- compatible with + # previous versions of OrderedSet. 
+ return (None,) + else: + return list(self) + + def __setstate__(self, state): + if state == (None,): + self.__init__([]) + else: + self.__init__(state) + + def __contains__(self, key): + """ + Test if the item is in this ordered set + + Example: + >>> 1 in OrderedSet([1, 3, 2]) + True + >>> 5 in OrderedSet([1, 3, 2]) + False + """ + return key in self.map + + def add(self, key): + """ + Add `key` as an item to this OrderedSet, then return its index. + + If `key` is already in the OrderedSet, return the index it already + had. + + Example: + >>> oset = OrderedSet() + >>> oset.append(3) + 0 + >>> print(oset) + OrderedSet([3]) + """ + if key not in self.map: + self.map[key] = len(self.items) + self.items.append(key) + return self.map[key] + + append = add + + def update(self, sequence): + """ + Update the set with the given iterable sequence, then return the index + of the last element inserted. + + Example: + >>> oset = OrderedSet([1, 2, 3]) + >>> oset.update([3, 1, 5, 1, 4]) + 4 + >>> print(oset) + OrderedSet([1, 2, 3, 5, 4]) + """ + item_index = None + try: + for item in sequence: + item_index = self.add(item) + except TypeError: + raise ValueError( + "Argument needs to be an iterable, got %s" % type(sequence) + ) + return item_index + + def index(self, key): + """ + Get the index of a given entry, raising an IndexError if it's not + present. + + `key` can be an iterable of entries that is not a string, in which case + this returns a list of indices. + + Example: + >>> oset = OrderedSet([1, 2, 3]) + >>> oset.index(2) + 1 + """ + if is_iterable(key): + return [self.index(subkey) for subkey in key] + return self.map[key] + + # Provide some compatibility with pd.Index + get_loc = index + get_indexer = index + + def pop(self): + """ + Remove and return the last element from the set. + + Raises KeyError if the set is empty. 
+ + Example: + >>> oset = OrderedSet([1, 2, 3]) + >>> oset.pop() + 3 + """ + if not self.items: + raise KeyError("Set is empty") + + elem = self.items[-1] + del self.items[-1] + del self.map[elem] + return elem + + def discard(self, key): + """ + Remove an element. Do not raise an exception if absent. + + The MutableSet mixin uses this to implement the .remove() method, which + *does* raise an error when asked to remove a non-existent item. + + Example: + >>> oset = OrderedSet([1, 2, 3]) + >>> oset.discard(2) + >>> print(oset) + OrderedSet([1, 3]) + >>> oset.discard(2) + >>> print(oset) + OrderedSet([1, 3]) + """ + if key in self: + i = self.map[key] + del self.items[i] + del self.map[key] + for k, v in self.map.items(): + if v >= i: + self.map[k] = v - 1 + + def clear(self): + """ + Remove all items from this OrderedSet. + """ + del self.items[:] + self.map.clear() + + def __iter__(self): + """ + Example: + >>> list(iter(OrderedSet([1, 2, 3]))) + [1, 2, 3] + """ + return iter(self.items) + + def __reversed__(self): + """ + Example: + >>> list(reversed(OrderedSet([1, 2, 3]))) + [3, 2, 1] + """ + return reversed(self.items) + + def __repr__(self): + if not self: + return "%s()" % (self.__class__.__name__,) + return "%s(%r)" % (self.__class__.__name__, list(self)) + + def __eq__(self, other): + """ + Returns true if the containers have the same items. If `other` is a + Sequence, then order is checked, otherwise it is ignored. + + Example: + >>> oset = OrderedSet([1, 3, 2]) + >>> oset == [1, 3, 2] + True + >>> oset == [1, 2, 3] + False + >>> oset == [2, 3] + False + >>> oset == OrderedSet([3, 2, 1]) + False + """ + # In Python 2 deque is not a Sequence, so treat it as one for + # consistent behavior with Python 3. + if isinstance(other, (Sequence, deque)): + # Check that this OrderedSet contains the same elements, in the + # same order, as the other object. 
+ return list(self) == list(other) + try: + other_as_set = set(other) + except TypeError: + # If `other` can't be converted into a set, it's not equal. + return False + else: + return set(self) == other_as_set + + def union(self, *sets): + """ + Combines all unique items. + Each items order is defined by its first appearance. + + Example: + >>> oset = OrderedSet.union(OrderedSet([3, 1, 4, 1, 5]), [1, 3], [2, 0]) + >>> print(oset) + OrderedSet([3, 1, 4, 5, 2, 0]) + >>> oset.union([8, 9]) + OrderedSet([3, 1, 4, 5, 2, 0, 8, 9]) + >>> oset | {10} + OrderedSet([3, 1, 4, 5, 2, 0, 10]) + """ + cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet + containers = map(list, it.chain([self], sets)) + items = it.chain.from_iterable(containers) + return cls(items) + + def __and__(self, other): + # the parent implementation of this is backwards + return self.intersection(other) + + def intersection(self, *sets): + """ + Returns elements in common between all sets. Order is defined only + by the first set. + + Example: + >>> oset = OrderedSet.intersection(OrderedSet([0, 1, 2, 3]), [1, 2, 3]) + >>> print(oset) + OrderedSet([1, 2, 3]) + >>> oset.intersection([2, 4, 5], [1, 2, 3, 4]) + OrderedSet([2]) + >>> oset.intersection() + OrderedSet([1, 2, 3]) + """ + cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet + if sets: + common = set.intersection(*map(set, sets)) + items = (item for item in self if item in common) + else: + items = self + return cls(items) + + def difference(self, *sets): + """ + Returns all elements that are in this set but not the others. 
+ + Example: + >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2])) + OrderedSet([1, 3]) + >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]), OrderedSet([3])) + OrderedSet([1]) + >>> OrderedSet([1, 2, 3]) - OrderedSet([2]) + OrderedSet([1, 3]) + >>> OrderedSet([1, 2, 3]).difference() + OrderedSet([1, 2, 3]) + """ + cls = self.__class__ + if sets: + other = set.union(*map(set, sets)) + items = (item for item in self if item not in other) + else: + items = self + return cls(items) + + def issubset(self, other): + """ + Report whether another set contains this set. + + Example: + >>> OrderedSet([1, 2, 3]).issubset({1, 2}) + False + >>> OrderedSet([1, 2, 3]).issubset({1, 2, 3, 4}) + True + >>> OrderedSet([1, 2, 3]).issubset({1, 4, 3, 5}) + False + """ + if len(self) > len(other): # Fast check for obvious cases + return False + return all(item in other for item in self) + + def issuperset(self, other): + """ + Report whether this set contains another set. + + Example: + >>> OrderedSet([1, 2]).issuperset([1, 2, 3]) + False + >>> OrderedSet([1, 2, 3, 4]).issuperset({1, 2, 3}) + True + >>> OrderedSet([1, 4, 3, 5]).issuperset({1, 2, 3}) + False + """ + if len(self) < len(other): # Fast check for obvious cases + return False + return all(item in self for item in other) + + def symmetric_difference(self, other): + """ + Return the symmetric difference of two OrderedSets as a new set. + That is, the new set will contain all elements that are in exactly + one of the sets. + + Their order will be preserved, with elements from `self` preceding + elements from `other`. 
+ + Example: + >>> this = OrderedSet([1, 4, 3, 5, 7]) + >>> other = OrderedSet([9, 7, 1, 3, 2]) + >>> this.symmetric_difference(other) + OrderedSet([4, 5, 9, 2]) + """ + cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet + diff1 = cls(self).difference(other) + diff2 = cls(other).difference(self) + return diff1.union(diff2) + + def _update_items(self, items): + """ + Replace the 'items' list of this OrderedSet with a new one, updating + self.map accordingly. + """ + self.items = items + self.map = {item: idx for (idx, item) in enumerate(items)} + + def difference_update(self, *sets): + """ + Update this OrderedSet to remove items from one or more other sets. + + Example: + >>> this = OrderedSet([1, 2, 3]) + >>> this.difference_update(OrderedSet([2, 4])) + >>> print(this) + OrderedSet([1, 3]) + + >>> this = OrderedSet([1, 2, 3, 4, 5]) + >>> this.difference_update(OrderedSet([2, 4]), OrderedSet([1, 4, 6])) + >>> print(this) + OrderedSet([3, 5]) + """ + items_to_remove = set() + for other in sets: + items_to_remove |= set(other) + self._update_items([item for item in self.items if item not in items_to_remove]) + + def intersection_update(self, other): + """ + Update this OrderedSet to keep only items in another set, preserving + their order in this set. + + Example: + >>> this = OrderedSet([1, 4, 3, 5, 7]) + >>> other = OrderedSet([9, 7, 1, 3, 2]) + >>> this.intersection_update(other) + >>> print(this) + OrderedSet([1, 3, 7]) + """ + other = set(other) + self._update_items([item for item in self.items if item in other]) + + def symmetric_difference_update(self, other): + """ + Update this OrderedSet to remove items from another set, then + add items from the other set that were not present in this set. 
+ + Example: + >>> this = OrderedSet([1, 4, 3, 5, 7]) + >>> other = OrderedSet([9, 7, 1, 3, 2]) + >>> this.symmetric_difference_update(other) + >>> print(this) + OrderedSet([4, 5, 9, 2]) + """ + items_to_add = [item for item in other if item not in self] + items_to_remove = set(other) + self._update_items( + [item for item in self.items if item not in items_to_remove] + items_to_add + ) diff --git a/io_scene_valvesource/update.py b/io_scene_valvesource/update.py index 0c2a920..e6333ad 100644 --- a/io_scene_valvesource/update.py +++ b/io_scene_valvesource/update.py @@ -1,93 +1,93 @@ -# Copyright (c) 2014 Tom Edwards contact@steamreview.org -# -# ##### BEGIN GPL LICENSE BLOCK ##### -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
-# -# ##### END GPL LICENSE BLOCK ##### - -import bpy, io -from .utils import * - -class SMD_MT_Updated(bpy.types.Menu): - bl_label = get_id("offerchangelog_title") - def draw(self,_): - self.layout.operator("wm.url_open",text=get_id("offerchangelog_offer"),icon='TEXT').url = "http://steamcommunity.com/groups/BlenderSourceTools#announcements" - -updater_supported = True -try: - import urllib.request, urllib.error, zipfile -except: - updater_supported = False - -class SmdToolsUpdate(bpy.types.Operator): - bl_idname = "script.update_smd" - bl_label = get_id("updater_title") - bl_description = get_id("updater_title_tip") - - @classmethod - def poll(cls,_): - return updater_supported - - def execute(self,_): - print("Source Tools update...") - - import sys - cur_version = sys.modules.get(__name__.split(".")[0]).bl_info['version'] - - try: - data = urllib.request.urlopen("http://steamreview.org/BlenderSourceTools/latest.php").read().decode('ASCII').split("\n") - remote_ver = data[0].strip().split(".") - remote_bpy = data[1].strip().split(".") - download_url = "http://steamreview.org/BlenderSourceTools/" + data[2].strip() - - for i in range(min( len(remote_bpy), len(bpy.app.version) )): - remote_component = int(remote_bpy[i]) - local_component = bpy.app.version[i] - if remote_component > local_component: - self.report({'ERROR'},get_id("update_err_outdated", True).format( PrintVer(remote_bpy) )) - return {'FINISHED'} - elif remote_component < local_component: - break # major version incremented - - for i in range(min( len(remote_ver), len(cur_version) )): - try: - diff = int(remote_ver[i]) - int(cur_version[i]) - except ValueError: - continue - if diff > 0: - print("Found new version {}, downloading from {}...".format(PrintVer(remote_ver), download_url)) - - zip = zipfile.ZipFile( io.BytesIO(urllib.request.urlopen(download_url).read())) - zip.extractall(path=os.path.join(os.path.dirname( os.path.abspath( __file__ ) ),"..")) - - self.report({'INFO'},get_id("update_done", 
True).format(PrintVer(remote_ver))) - bpy.ops.wm.call_menu(name="SMD_MT_Updated") - return {'FINISHED'} - elif diff < 0: - break - - self.report({'INFO'},get_id("update_alreadylatest", True).format( PrintVer(cur_version) )) - return {'FINISHED'} - - except urllib.error.URLError as err: - self.report({'ERROR'}," ".join([get_id("update_err_downloadfailed") + str(err)])) - return {'CANCELLED'} - except zipfile.BadZipfile: - self.report({'ERROR'},get_id("update_err_corruption")) - return {'CANCELLED'} - except IOError as err: - self.report({'ERROR'}," ".join([get_id("update_err_unknown"), str(err)])) - return {'CANCELLED'} +# Copyright (c) 2014 Tom Edwards contact@steamreview.org +# +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+# +# ##### END GPL LICENSE BLOCK ##### + +import bpy, io +from .utils import * + +class SMD_MT_Updated(bpy.types.Menu): + bl_label = get_id("offerchangelog_title") + def draw(self,_): + self.layout.operator("wm.url_open",text=get_id("offerchangelog_offer"),icon='TEXT').url = "http://steamcommunity.com/groups/BlenderSourceTools#announcements" + +updater_supported = True +try: + import urllib.request, urllib.error, zipfile +except: + updater_supported = False + +class SmdToolsUpdate(bpy.types.Operator): + bl_idname = "script.update_smd" + bl_label = get_id("updater_title") + bl_description = get_id("updater_title_tip") + + @classmethod + def poll(cls,_): + return updater_supported + + def execute(self,_): + print("Source Tools update...") + + import sys + cur_version = sys.modules.get(__name__.split(".")[0]).bl_info['version'] + + try: + data = urllib.request.urlopen("http://steamreview.org/BlenderSourceTools/latest.php").read().decode('ASCII').split("\n") + remote_ver = data[0].strip().split(".") + remote_bpy = data[1].strip().split(".") + download_url = "http://steamreview.org/BlenderSourceTools/" + data[2].strip() + + for i in range(min( len(remote_bpy), len(bpy.app.version) )): + remote_component = int(remote_bpy[i]) + local_component = bpy.app.version[i] + if remote_component > local_component: + self.report({'ERROR'},get_id("update_err_outdated", True).format( PrintVer(remote_bpy) )) + return {'FINISHED'} + elif remote_component < local_component: + break # major version incremented + + for i in range(min( len(remote_ver), len(cur_version) )): + try: + diff = int(remote_ver[i]) - int(cur_version[i]) + except ValueError: + continue + if diff > 0: + print("Found new version {}, downloading from {}...".format(PrintVer(remote_ver), download_url)) + + zip = zipfile.ZipFile( io.BytesIO(urllib.request.urlopen(download_url).read())) + zip.extractall(path=os.path.join(os.path.dirname( os.path.abspath( __file__ ) ),"..")) + + self.report({'INFO'},get_id("update_done", 
True).format(PrintVer(remote_ver))) + bpy.ops.wm.call_menu(name="SMD_MT_Updated") + return {'FINISHED'} + elif diff < 0: + break + + self.report({'INFO'},get_id("update_alreadylatest", True).format( PrintVer(cur_version) )) + return {'FINISHED'} + + except urllib.error.URLError as err: + self.report({'ERROR'}," ".join([get_id("update_err_downloadfailed") + str(err)])) + return {'CANCELLED'} + except zipfile.BadZipfile: + self.report({'ERROR'},get_id("update_err_corruption")) + return {'CANCELLED'} + except IOError as err: + self.report({'ERROR'}," ".join([get_id("update_err_unknown"), str(err)])) + return {'CANCELLED'} diff --git a/io_scene_valvesource/utils.py b/io_scene_valvesource/utils.py index 3116165..3995253 100644 --- a/io_scene_valvesource/utils.py +++ b/io_scene_valvesource/utils.py @@ -1,707 +1,707 @@ -# Copyright (c) 2014 Tom Edwards contact@steamreview.org -# -# ##### BEGIN GPL LICENSE BLOCK ##### -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. -# -# ##### END GPL LICENSE BLOCK ##### - -import bpy, struct, time, collections, os, subprocess, sys, builtins, itertools, dataclasses -from bpy.app.translations import pgettext -from bpy.app.handlers import depsgraph_update_post, load_post, persistent -from mathutils import Matrix, Vector -from math import * -from . 
import datamodel - -intsize = struct.calcsize("i") -floatsize = struct.calcsize("f") - -rx90 = Matrix.Rotation(radians(90),4,'X') -ry90 = Matrix.Rotation(radians(90),4,'Y') -rz90 = Matrix.Rotation(radians(90),4,'Z') -ryz90 = ry90 @ rz90 - -rx90n = Matrix.Rotation(radians(-90),4,'X') -ry90n = Matrix.Rotation(radians(-90),4,'Y') -rz90n = Matrix.Rotation(radians(-90),4,'Z') - -mat_BlenderToSMD = ry90 @ rz90 # for legacy support only - -epsilon = Vector([0.0001] * 3) - -implicit_bone_name = "blender_implicit" - -# SMD types -REF = 0x1 # $body, $model, $bodygroup->studio (if before a $body or $model), $bodygroup, $lod->replacemodel -PHYS = 0x3 # $collisionmesh, $collisionjoints -ANIM = 0x4 # $sequence, $animation -FLEX = 0x6 # $model VTA - -mesh_compatible = ('MESH', 'TEXT', 'FONT', 'SURFACE', 'META', 'CURVE') -shape_types = ('MESH' , 'SURFACE', 'CURVE') - -exportable_types = list(mesh_compatible) -exportable_types.append('ARMATURE') -exportable_types = tuple(exportable_types) - -axes = (('X','X',''),('Y','Y',''),('Z','Z','')) -axes_lookup = { 'X':0, 'Y':1, 'Z':2 } -axes_lookup_source2 = { 'X':1, 'Y':2, 'Z':3 } - -class ExportFormat: - SMD = 1 - DMX = 2 - -class Compiler: - UNKNOWN = 0 - STUDIOMDL = 1 # Source 1 - RESOURCECOMPILER = 2 # Source 2 - MODELDOC = 3 # Source 2 post-Alyx - -@dataclasses.dataclass(frozen = True) -class dmx_version: - encoding : int - format : int - title : str = dataclasses.field(default=None, hash=False, compare=False) - - compiler : int = Compiler.STUDIOMDL - - @property - def format_enum(self): return str(self.format) + ("_modeldoc" if self.compiler == Compiler.MODELDOC else "") - @property - def format_title(self): return f"Model {self.format}" + (" (ModelDoc)" if self.compiler == Compiler.MODELDOC else "") - -dmx_versions_source1 = { -'Ep1': dmx_version(0,0, "Half-Life 2: Episode One"), -'Source2007': dmx_version(2,1, "Source 2007"), -'Source2009': dmx_version(2,1, "Source 2009"), -'Garrysmod': dmx_version(2,1, "Garry's Mod"), 
-'Orangebox': dmx_version(5,18, "OrangeBox / Source MP"), -'nmrih': dmx_version(2,1, "No More Room In Hell"), -} - -dmx_versions_source1.update({version.title:version for version in [ -dmx_version(2,1, 'Team Fortress 2'), -dmx_version(0,0, 'Left 4 Dead'), # wants model 7, but it's not worth working out what that is when L4D2 in far more popular and SMD export works -dmx_version(4,15, 'Left 4 Dead 2'), -dmx_version(5,18, 'Alien Swarm'), -dmx_version(5,18, 'Portal 2'), -dmx_version(5,18, 'Source Filmmaker'), -# and now back to 2/1 for some reason... -dmx_version(2,1, 'Half-Life 2'), -dmx_version(2,1, 'Source SDK Base 2013 Singleplayer'), -dmx_version(2,1, 'Source SDK Base 2013 Multiplayer'), -]}) - -dmx_versions_source2 = { -'dota2': dmx_version(9,22, "Dota 2", Compiler.RESOURCECOMPILER), -'steamtours': dmx_version(9,22, "SteamVR", Compiler.RESOURCECOMPILER), -'hlvr': dmx_version(9,22, "Half-Life: Alyx", Compiler.MODELDOC), # format is still declared as 22, but modeldoc introduces breaking changes -'cs2': dmx_version(9,22, 'Counter-Strike 2', Compiler.MODELDOC), -} - -class _StateMeta(type): # class properties are not supported below Python 3.9, so we use a metaclass instead - def __init__(cls, *args, **kwargs): - cls._exportableObjects = set() - cls.last_export_refresh = 0 - cls._engineBranch = None - cls._gamePathValid = False - - @property - def exportableObjects(cls): return cls._exportableObjects - - @property - def engineBranch(cls) -> dmx_version: return cls._engineBranch - - @property - def datamodelEncoding(cls): return cls._engineBranch.encoding if cls._engineBranch else int(bpy.context.scene.vs.dmx_encoding) - - @property - def datamodelFormat(cls): return cls._engineBranch.format if cls._engineBranch else int(bpy.context.scene.vs.dmx_format.split("_")[0]) - - @property - def engineBranchTitle(cls): return cls._engineBranch.title if cls._engineBranch else None - - @property - def compiler(cls): return cls._engineBranch.compiler if cls._engineBranch else 
Compiler.MODELDOC if "modeldoc" in bpy.context.scene.vs.dmx_format else Compiler.UNKNOWN - - @property - def exportFormat(cls): return ExportFormat.DMX if bpy.context.scene.vs.export_format == 'DMX' and cls.datamodelEncoding != 0 else ExportFormat.SMD - - @property - def gamePath(cls): - return cls._rawGamePath if cls._gamePathValid else None - - @property - def _rawGamePath(cls): - if bpy.context.scene.vs.game_path: - return os.path.abspath(os.path.join(bpy.path.abspath(bpy.context.scene.vs.game_path),'')) - else: - return os.getenv('vproject') - -class State(metaclass=_StateMeta): - @classmethod - def update_scene(cls, scene = None): - scene = scene or bpy.context.scene - cls._exportableObjects = set([ob.session_uid for ob in scene.objects if ob.type in exportable_types and not (ob.type == 'CURVE' and ob.data.bevel_depth == 0 and ob.data.extrude == 0)]) - make_export_list(scene) - cls.last_export_refresh = time.time() - - @staticmethod - @persistent - def _onDepsgraphUpdate(scene): - if scene == bpy.context.scene and time.time() - State.last_export_refresh > 0.25: - State.update_scene(scene) - - @staticmethod - @persistent - def _onLoad(_): - State.update_scene() - State._updateEngineBranch() - State._validateGamePath() - - @classmethod - def hook_events(cls): - if not cls.update_scene in depsgraph_update_post: - depsgraph_update_post.append(cls._onDepsgraphUpdate) - load_post.append(cls._onLoad) - - @classmethod - def unhook_events(cls): - if cls.update_scene in depsgraph_update_post: - depsgraph_update_post.remove(cls._onDepsgraphUpdate) - load_post.remove(cls._onLoad) - - @staticmethod - def onEnginePathChanged(props,context): - if props == context.scene.vs: - State._updateEngineBranch() - - @classmethod - def _updateEngineBranch(cls): - try: - cls._engineBranch = getEngineBranch() - except: - cls._engineBranch = None - - @staticmethod - def onGamePathChanged(props,context): - if props == context.scene.vs: - State._validateGamePath() - - @classmethod - def 
_validateGamePath(cls): - if cls._rawGamePath: - for anchor in ["gameinfo.txt", "addoninfo.txt", "gameinfo.gi"]: - if os.path.exists(os.path.join(cls._rawGamePath,anchor)): - cls._gamePathValid = True - return - cls._gamePathValid = False - -def print(*args, newline=True, debug_only=False): - if not debug_only or bpy.app.debug_value > 0: - builtins.print(" ".join([str(a) for a in args]).encode(sys.getdefaultencoding()).decode(sys.stdout.encoding or sys.getdefaultencoding()), end= "\n" if newline else "", flush=True) - -def get_id(str_id, format_string = False, data = False): - from . import translations - out = translations.ids[str_id] - if format_string or (data and bpy.context.preferences.view.use_translate_new_dataname): - return pgettext(out) - else: - return out - -def get_active_exportable(context = None): - if not context: context = bpy.context - - if not context.scene.vs.export_list_active < len(context.scene.vs.export_list): - return None - - return context.scene.vs.export_list[context.scene.vs.export_list_active] - -class BenchMarker: - def __init__(self,indent = 0, prefix = None): - self._indent = indent * 4 - self._prefix = "{}{}".format(" " * self._indent,prefix if prefix else "") - self.quiet = bpy.app.debug_value <= 0 - self.reset() - - def reset(self): - self._last = self._start = time.time() - - def report(self,label = None, threshold = 0.0): - now = time.time() - elapsed = now - self._last - if threshold and elapsed < threshold: return - - if not self.quiet: - prefix = "{} {}:".format(self._prefix, label if label else "") - pad = max(0, 10 - len(prefix) + self._indent) - print("{}{}{:.4f}".format(prefix," " * pad, now - self._last)) - self._last = now - - def current(self): - return time.time() - self._last - def total(self): - return time.time() - self._start - -def smdBreak(line): - line = line.rstrip('\n') - return line == "end" or line == "" - -def smdContinue(line): - return line.startswith("//") - -def getDatamodelQuat(blender_quat): - 
return datamodel.Quaternion([blender_quat[1], blender_quat[2], blender_quat[3], blender_quat[0]]) - -def getEngineBranch() -> dmx_version: - if not bpy.context.scene.vs.engine_path: return None - path = os.path.abspath(bpy.path.abspath(bpy.context.scene.vs.engine_path)) - - # Source 2: search for executable name - engine_path_files = set(name[:-4] if name.endswith(".exe") else name for name in os.listdir(path)) - if "resourcecompiler" in engine_path_files: # Source 2 - for executable,dmx_version in dmx_versions_source2.items(): - if executable in engine_path_files: - return dmx_version - - # Source 1 SFM special case - if path.lower().find("sourcefilmmaker") != -1: - return dmx_versions_source1["Source Filmmaker"] # hack for weird SFM folder structure, add a space too - - # Source 1 standard: use parent dir's name - name = os.path.basename(os.path.dirname(bpy.path.abspath(path))).title().replace("Sdk","SDK") - return dmx_versions_source1.get(name) - -def getCorrectiveShapeSeparator(): return '__' if State.compiler == Compiler.MODELDOC else '_' - -vertex_maps = ["valvesource_vertex_paint", "valvesource_vertex_blend", "valvesource_vertex_blend1"] - -def getDmxKeywords(format_version): - if format_version >= 22: - return { - 'pos': "position$0", 'norm': "normal$0", 'wrinkle':"wrinkle$0", - 'balance':"balance$0", 'weight':"blendweights$0", 'weight_indices':"blendindices$0" - } - else: - return { 'pos': "positions", 'norm': "normals", 'wrinkle':"wrinkle", - 'balance':"balance", 'weight':"jointWeights", 'weight_indices':"jointIndices" } - -def count_exports(context): - num = 0 - for exportable in context.scene.vs.export_list: - item = exportable.item - if item and item.vs.export and (type(item) != bpy.types.Collection or not item.vs.mute): - num += 1 - return num - -def animationLength(ad): - if ad.action: - return int(ad.action.frame_range[1]) - else: - strips = [strip.frame_end for track in ad.nla_tracks if not track.mute for strip in track.strips] - if strips: - 
return int(max(strips)) - else: - return 0 - -def getFileExt(flex=False): - if State.datamodelEncoding != 0 and bpy.context.scene.vs.export_format == 'DMX': - return ".dmx" - else: - if flex: return ".vta" - else: return ".smd" - -def isWild(in_str): - wcards = [ "*", "?", "[", "]" ] - for char in wcards: - if in_str.find(char) != -1: return True - -# rounds to 6 decimal places, converts between "1e-5" and "0.000001", outputs str -def getSmdFloat(fval): - return "{:.6f}".format(float(fval)) -def getSmdVec(iterable): - return " ".join([getSmdFloat(val) for val in iterable]) - -def appendExt(path,ext): - if not path.lower().endswith("." + ext) and not path.lower().endswith(".dmx"): - path += "." + ext - return path - -def printTimeMessage(start_time,name,job,type="SMD"): - elapsedtime = int(time.time() - start_time) - if elapsedtime == 1: - elapsedtime = "1 second" - elif elapsedtime > 1: - elapsedtime = str(elapsedtime) + " seconds" - else: - elapsedtime = "under 1 second" - - print(type,name,"{}ed in".format(job),elapsedtime,"\n") - -def PrintVer(in_seq,sep="."): - rlist = list(in_seq[:]) - rlist.reverse() - out = "" - for val in rlist: - try: - if int(val) == 0 and not len(out): - continue - except ValueError: - continue - out = "{}{}{}".format(str(val),sep if sep else "",out) # NB last value! 
- if out.count(sep) == 1: - out += "0" # 1.0 instead of 1 - return out.rstrip(sep) - -def getUpAxisMat(axis): - if axis.upper() == 'X': - return Matrix.Rotation(pi/2,4,'Y') - if axis.upper() == 'Y': - return Matrix.Rotation(pi/2,4,'X') - if axis.upper() == 'Z': - return Matrix() - else: - raise AttributeError("getUpAxisMat got invalid axis argument '{}'".format(axis)) - -def MakeObjectIcon(object,prefix=None,suffix=None): - if not (prefix or suffix): - raise TypeError("A prefix or suffix is required") - - if object.type == 'TEXT': - type = 'FONT' - else: - type = object.type - - out = "" - if prefix: - out += prefix - out += type - if suffix: - out += suffix - return out - -def getObExportName(ob): - return ob.name - -def removeObject(obj): - d = obj.data - type = obj.type - - if type == "ARMATURE": - for child in obj.children: - if child.type == 'EMPTY': - removeObject(child) - - for collection in obj.users_collection: - collection.objects.unlink(obj) - if obj.users == 0: - if type == 'ARMATURE' and obj.animation_data: - obj.animation_data.action = None # avoid horrible Blender bug that leads to actions being deleted - - bpy.data.objects.remove(obj) - if d and d.users == 0: - if type == 'MESH': - bpy.data.meshes.remove(d) - if type == 'ARMATURE': - bpy.data.armatures.remove(d) - - return None if d else type - -def select_only(ob): - bpy.context.view_layer.objects.active = ob - bpy.ops.object.mode_set(mode='OBJECT') - if bpy.context.selected_objects: - bpy.ops.object.select_all(action='DESELECT') - ob.select_set(True) - -def hasShapes(id, valid_only = True): - def _test(id_): - return id_.type in shape_types and id_.data.shape_keys and len(id_.data.shape_keys.key_blocks) - - if type(id) == bpy.types.Collection: - for _ in [ob for ob in id.objects if ob.vs.export and (not valid_only or ob.session_uid in State.exportableObjects) and _test(ob)]: - return True - else: - return _test(id) - -def countShapes(*objects): - num_shapes = 0 - num_correctives = 0 - 
flattened_objects = [] - for ob in objects: - if type(ob) == bpy.types.Collection: - flattened_objects.extend(ob.objects) - elif hasattr(ob,'__iter__'): - flattened_objects.extend(ob) - else: - flattened_objects.append(ob) - for ob in [ob for ob in flattened_objects if ob.vs.export and hasShapes(ob)]: - for shape in ob.data.shape_keys.key_blocks[1:]: - if getCorrectiveShapeSeparator() in shape.name: num_correctives += 1 - else: num_shapes += 1 - return num_shapes, num_correctives - -def hasCurves(id): - def _test(id_): - return id_.type in ['CURVE','SURFACE','FONT'] - - if type(id) == bpy.types.Collection: - for _ in [ob for ob in id.objects if ob.vs.export and ob.session_uid in State.exportableObjects and _test(ob)]: - return True - else: - return _test(id) - -def valvesource_vertex_maps(id): - """Returns all vertex colour maps which are recognised by the Tools.""" - def test(id_): - if hasattr(id_.data,"vertex_colors"): - return set(id_.data.vertex_colors.keys()).intersection(vertex_maps) - else: - return [] - - if type(id) == bpy.types.Collection: - return set(itertools.chain(*(test(ob) for ob in id.objects))) - elif id.type == 'MESH': - return test(id) - -def actionsForFilter(filter): - import fnmatch - return list([action for action in bpy.data.actions if action.users and fnmatch.fnmatch(action.name, filter)]) -def shouldExportGroup(group): - return group.vs.export and not group.vs.mute - -def hasFlexControllerSource(source): - return bpy.data.texts.get(source) or os.path.exists(bpy.path.abspath(source)) - -def getExportablesForObject(ob): - # objects can be reallocated between yields, so capture the ID locally - ob_session_uid = ob.session_uid - seen = set() - - while len(seen) < len(bpy.context.scene.vs.export_list): - # Handle the exportables list changing between yields by re-evaluating the whole thing - for exportable in bpy.context.scene.vs.export_list: - if not exportable.item: - continue # Observed only in Blender release builds without a debugger 
attached - - if exportable.session_uid in seen: - continue - seen.add(exportable.session_uid) - - if exportable.ob_type == 'COLLECTION' and not exportable.item.vs.mute and any(collection_item.session_uid == ob_session_uid for collection_item in exportable.item.objects): - yield exportable - break - - if exportable.session_uid == ob_session_uid: - yield exportable - break - -# How to handle the selected object appearing in multiple collections? -# How to handle an armature with animation only appearing within a collection? -def getSelectedExportables(): - seen = set() - for ob in bpy.context.selected_objects: - for exportable in getExportablesForObject(ob): - if not exportable.name in seen: - seen.add(exportable.name) - yield exportable - - if len(seen) == 0 and bpy.context.active_object: - for exportable in getExportablesForObject(bpy.context.active_object): - yield exportable - -def make_export_list(scene): - scene.vs.export_list.clear() - - def makeDisplayName(item,name=None): - return os.path.join(item.vs.subdir if item.vs.subdir != "." 
else "", (name if name else item.name) + getFileExt()) - - if State.exportableObjects: - ungrouped_object_ids = State.exportableObjects.copy() - - groups_sorted = bpy.data.collections[:] - groups_sorted.sort(key=lambda g: g.name.lower()) - - scene_groups = [] - for group in groups_sorted: - valid = False - for obj in [obj for obj in group.objects if obj.session_uid in State.exportableObjects]: - if not group.vs.mute and obj.type != 'ARMATURE' and obj.session_uid in ungrouped_object_ids: - ungrouped_object_ids.remove(obj.session_uid) - valid = True - if valid: - scene_groups.append(group) - - for g in scene_groups: - i = scene.vs.export_list.add() - if g.vs.mute: - i.name = "{} {}".format(g.name,pgettext(get_id("exportables_group_mute_suffix",True))) - else: - i.name = makeDisplayName(g) - i.collection = g - i.ob_type = "COLLECTION" - i.icon = "GROUP" - - ungrouped_objects = list(ob for ob in scene.objects if ob.session_uid in ungrouped_object_ids) - ungrouped_objects.sort(key=lambda s: s.name.lower()) - for ob in ungrouped_objects: - if ob.type == 'FONT': - ob.vs.triangulate = True # preserved if the user converts to mesh - - i_name = i_type = i_icon = None - if ob.type == 'ARMATURE': - ad = ob.animation_data - if ad: - i_icon = i_type = "ACTION" - if ob.data.vs.action_selection == 'FILTERED': - i_name = get_id("exportables_arm_filter_result",True).format(ob.vs.action_filter,len(actionsForFilter(ob.vs.action_filter))) - elif ad.action: - i_name = makeDisplayName(ob,ad.action.name) - elif len(ad.nla_tracks): - i_name = makeDisplayName(ob) - i_icon = "NLA" - else: - i_name = makeDisplayName(ob) - i_icon = MakeObjectIcon(ob,prefix="OUTLINER_OB_") - i_type = "OBJECT" - if i_name: - i = scene.vs.export_list.add() - i.name = i_name - i.ob_type = i_type - i.icon = i_icon - i.obj = ob - -class Logger: - def __init__(self): - self.log_warnings = [] - self.log_errors = [] - self.startTime = time.time() - - def warning(self, *string): - message = " ".join(str(s) for s in 
string) - print(" WARNING:",message) - self.log_warnings.append(message) - - def error(self, *string): - message = " ".join(str(s) for s in string) - print(" ERROR:",message) - self.log_errors.append(message) - - def list_errors(self, menu, context): - l = menu.layout - if len(self.log_errors): - for msg in self.log_errors: - l.label(text="{}: {}".format(pgettext("Error").upper(), msg)) - l.separator() - if len(self.log_warnings): - for msg in self.log_warnings: - l.label(text="{}: {}".format(pgettext("Warning").upper(), msg)) - - def elapsed_time(self): - return round(time.time() - self.startTime, 1) - - def errorReport(self,message): - if len(self.log_errors) or len(self.log_warnings): - message += get_id("exporter_report_suffix",True).format(len(self.log_errors),len(self.log_warnings)) - if not bpy.app.background: - bpy.context.window_manager.popup_menu(self.list_errors,title=get_id("exporter_report_menu")) - - print("{} Errors and {} Warnings".format(len(self.log_errors),len(self.log_warnings))) - for msg in self.log_errors: print("Error:",msg) - for msg in self.log_warnings: print("Warning:",msg) - - self.report({'INFO'},message) - print(message) - -class SmdInfo: - isDMX = 0 # version number, or 0 for SMD - a = None # Armature object - m = None # Mesh datablock - shapes = None - g = None # Group being exported - file = None - jobName = None - jobType = None - startTime = 0 - started_in_editmode = None - in_block_comment = False - upAxis = 'Z' - rotMode = 'EULER' # for creating keyframes during import - - def __init__(self): - self.upAxis = bpy.context.scene.vs.up_axis - self.amod = {} # Armature modifiers - self.materials_used = set() # printed to the console for users' benefit - - # DMX stuff - self.attachments = [] - self.meshes = [] - self.parent_chain = [] - self.dmxShapes = collections.defaultdict(list) - self.boneTransformIDs = {} - - self.frameData = [] - self.bakeInfo = [] - - # boneIDs contains the ID-to-name mapping of *this* SMD's bones. 
- # - Key: integer ID - # - Value: bone name (storing object itself is not safe) - self.boneIDs = {} - self.boneNameToID = {} # for convenience during export - self.phantomParentIDs = {} # for bones in animation SMDs but not the ref skeleton - -class QcInfo: - startTime = 0 - ref_mesh = None # for VTA import - a = None - origin = None - upAxis = 'Z' - upAxisMat = None - numSMDs = 0 - makeCamera = False - in_block_comment = False - jobName = "" - root_filedir = "" - - def __init__(self): - self.imported_smds = [] - self.vars = {} - self.dir_stack = [] - - def cd(self): - return os.path.join(self.root_filedir,*self.dir_stack) - -class KeyFrame: - def __init__(self): - self.frame = None - self.pos = self.rot = False - self.matrix = Matrix() - -class SMD_OT_LaunchHLMV(bpy.types.Operator): - bl_idname = "smd.launch_hlmv" - bl_label = get_id("launch_hlmv") - bl_description = get_id("launch_hlmv_tip") - - @classmethod - def poll(cls,context): - return bool(context.scene.vs.engine_path) - - def execute(self,context): - args = [os.path.normpath(os.path.join(bpy.path.abspath(context.scene.vs.engine_path),"hlmv"))] - if context.scene.vs.game_path: - args.extend(["-game",os.path.normpath(bpy.path.abspath(context.scene.vs.game_path))]) - subprocess.Popen(args) - return {'FINISHED'} +# Copyright (c) 2014 Tom Edwards contact@steamreview.org +# +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### + +import bpy, struct, time, collections, os, subprocess, sys, builtins, itertools, dataclasses +from bpy.app.translations import pgettext +from bpy.app.handlers import depsgraph_update_post, load_post, persistent +from mathutils import Matrix, Vector +from math import * +from . import datamodel + +intsize = struct.calcsize("i") +floatsize = struct.calcsize("f") + +rx90 = Matrix.Rotation(radians(90),4,'X') +ry90 = Matrix.Rotation(radians(90),4,'Y') +rz90 = Matrix.Rotation(radians(90),4,'Z') +ryz90 = ry90 @ rz90 + +rx90n = Matrix.Rotation(radians(-90),4,'X') +ry90n = Matrix.Rotation(radians(-90),4,'Y') +rz90n = Matrix.Rotation(radians(-90),4,'Z') + +mat_BlenderToSMD = ry90 @ rz90 # for legacy support only + +epsilon = Vector([0.0001] * 3) + +implicit_bone_name = "blender_implicit" + +# SMD types +REF = 0x1 # $body, $model, $bodygroup->studio (if before a $body or $model), $bodygroup, $lod->replacemodel +PHYS = 0x3 # $collisionmesh, $collisionjoints +ANIM = 0x4 # $sequence, $animation +FLEX = 0x6 # $model VTA + +mesh_compatible = ('MESH', 'TEXT', 'FONT', 'SURFACE', 'META', 'CURVE') +shape_types = ('MESH' , 'SURFACE', 'CURVE') + +exportable_types = list(mesh_compatible) +exportable_types.append('ARMATURE') +exportable_types = tuple(exportable_types) + +axes = (('X','X',''),('Y','Y',''),('Z','Z','')) +axes_lookup = { 'X':0, 'Y':1, 'Z':2 } +axes_lookup_source2 = { 'X':1, 'Y':2, 'Z':3 } + +class ExportFormat: + SMD = 1 + DMX = 2 + +class Compiler: + UNKNOWN = 0 + STUDIOMDL = 1 # Source 1 + RESOURCECOMPILER = 2 # Source 2 + MODELDOC = 3 # Source 2 post-Alyx + +@dataclasses.dataclass(frozen = True) +class dmx_version: + encoding : int + format : int + title : str = dataclasses.field(default=None, hash=False, 
compare=False) + + compiler : int = Compiler.STUDIOMDL + + @property + def format_enum(self): return str(self.format) + ("_modeldoc" if self.compiler == Compiler.MODELDOC else "") + @property + def format_title(self): return f"Model {self.format}" + (" (ModelDoc)" if self.compiler == Compiler.MODELDOC else "") + +dmx_versions_source1 = { +'Ep1': dmx_version(0,0, "Half-Life 2: Episode One"), +'Source2007': dmx_version(2,1, "Source 2007"), +'Source2009': dmx_version(2,1, "Source 2009"), +'Garrysmod': dmx_version(2,1, "Garry's Mod"), +'Orangebox': dmx_version(5,18, "OrangeBox / Source MP"), +'nmrih': dmx_version(2,1, "No More Room In Hell"), +} + +dmx_versions_source1.update({version.title:version for version in [ +dmx_version(2,1, 'Team Fortress 2'), +dmx_version(0,0, 'Left 4 Dead'), # wants model 7, but it's not worth working out what that is when L4D2 in far more popular and SMD export works +dmx_version(4,15, 'Left 4 Dead 2'), +dmx_version(5,18, 'Alien Swarm'), +dmx_version(5,18, 'Portal 2'), +dmx_version(5,18, 'Source Filmmaker'), +# and now back to 2/1 for some reason... 
+dmx_version(2,1, 'Half-Life 2'), +dmx_version(2,1, 'Source SDK Base 2013 Singleplayer'), +dmx_version(2,1, 'Source SDK Base 2013 Multiplayer'), +]}) + +dmx_versions_source2 = { +'dota2': dmx_version(9,22, "Dota 2", Compiler.RESOURCECOMPILER), +'steamtours': dmx_version(9,22, "SteamVR", Compiler.RESOURCECOMPILER), +'hlvr': dmx_version(9,22, "Half-Life: Alyx", Compiler.MODELDOC), # format is still declared as 22, but modeldoc introduces breaking changes +'cs2': dmx_version(9,22, 'Counter-Strike 2', Compiler.MODELDOC), +} + +class _StateMeta(type): # class properties are not supported below Python 3.9, so we use a metaclass instead + def __init__(cls, *args, **kwargs): + cls._exportableObjects = set() + cls.last_export_refresh = 0 + cls._engineBranch = None + cls._gamePathValid = False + + @property + def exportableObjects(cls): return cls._exportableObjects + + @property + def engineBranch(cls) -> dmx_version: return cls._engineBranch + + @property + def datamodelEncoding(cls): return cls._engineBranch.encoding if cls._engineBranch else int(bpy.context.scene.vs.dmx_encoding) + + @property + def datamodelFormat(cls): return cls._engineBranch.format if cls._engineBranch else int(bpy.context.scene.vs.dmx_format.split("_")[0]) + + @property + def engineBranchTitle(cls): return cls._engineBranch.title if cls._engineBranch else None + + @property + def compiler(cls): return cls._engineBranch.compiler if cls._engineBranch else Compiler.MODELDOC if "modeldoc" in bpy.context.scene.vs.dmx_format else Compiler.UNKNOWN + + @property + def exportFormat(cls): return ExportFormat.DMX if bpy.context.scene.vs.export_format == 'DMX' and cls.datamodelEncoding != 0 else ExportFormat.SMD + + @property + def gamePath(cls): + return cls._rawGamePath if cls._gamePathValid else None + + @property + def _rawGamePath(cls): + if bpy.context.scene.vs.game_path: + return os.path.abspath(os.path.join(bpy.path.abspath(bpy.context.scene.vs.game_path),'')) + else: + return os.getenv('vproject') + 
+class State(metaclass=_StateMeta): + @classmethod + def update_scene(cls, scene = None): + scene = scene or bpy.context.scene + cls._exportableObjects = set([ob.session_uid for ob in scene.objects if ob.type in exportable_types and not (ob.type == 'CURVE' and ob.data.bevel_depth == 0 and ob.data.extrude == 0)]) + make_export_list(scene) + cls.last_export_refresh = time.time() + + @staticmethod + @persistent + def _onDepsgraphUpdate(scene): + if scene == bpy.context.scene and time.time() - State.last_export_refresh > 0.25: + State.update_scene(scene) + + @staticmethod + @persistent + def _onLoad(_): + State.update_scene() + State._updateEngineBranch() + State._validateGamePath() + + @classmethod + def hook_events(cls): + if not cls.update_scene in depsgraph_update_post: + depsgraph_update_post.append(cls._onDepsgraphUpdate) + load_post.append(cls._onLoad) + + @classmethod + def unhook_events(cls): + if cls.update_scene in depsgraph_update_post: + depsgraph_update_post.remove(cls._onDepsgraphUpdate) + load_post.remove(cls._onLoad) + + @staticmethod + def onEnginePathChanged(props,context): + if props == context.scene.vs: + State._updateEngineBranch() + + @classmethod + def _updateEngineBranch(cls): + try: + cls._engineBranch = getEngineBranch() + except: + cls._engineBranch = None + + @staticmethod + def onGamePathChanged(props,context): + if props == context.scene.vs: + State._validateGamePath() + + @classmethod + def _validateGamePath(cls): + if cls._rawGamePath: + for anchor in ["gameinfo.txt", "addoninfo.txt", "gameinfo.gi"]: + if os.path.exists(os.path.join(cls._rawGamePath,anchor)): + cls._gamePathValid = True + return + cls._gamePathValid = False + +def print(*args, newline=True, debug_only=False): + if not debug_only or bpy.app.debug_value > 0: + builtins.print(" ".join([str(a) for a in args]).encode(sys.getdefaultencoding()).decode(sys.stdout.encoding or sys.getdefaultencoding()), end= "\n" if newline else "", flush=True) + +def get_id(str_id, format_string 
= False, data = False): + from . import translations + out = translations.ids[str_id] + if format_string or (data and bpy.context.preferences.view.use_translate_new_dataname): + return pgettext(out) + else: + return out + +def get_active_exportable(context = None): + if not context: context = bpy.context + + if not context.scene.vs.export_list_active < len(context.scene.vs.export_list): + return None + + return context.scene.vs.export_list[context.scene.vs.export_list_active] + +class BenchMarker: + def __init__(self,indent = 0, prefix = None): + self._indent = indent * 4 + self._prefix = "{}{}".format(" " * self._indent,prefix if prefix else "") + self.quiet = bpy.app.debug_value <= 0 + self.reset() + + def reset(self): + self._last = self._start = time.time() + + def report(self,label = None, threshold = 0.0): + now = time.time() + elapsed = now - self._last + if threshold and elapsed < threshold: return + + if not self.quiet: + prefix = "{} {}:".format(self._prefix, label if label else "") + pad = max(0, 10 - len(prefix) + self._indent) + print("{}{}{:.4f}".format(prefix," " * pad, now - self._last)) + self._last = now + + def current(self): + return time.time() - self._last + def total(self): + return time.time() - self._start + +def smdBreak(line): + line = line.rstrip('\n') + return line == "end" or line == "" + +def smdContinue(line): + return line.startswith("//") + +def getDatamodelQuat(blender_quat): + return datamodel.Quaternion([blender_quat[1], blender_quat[2], blender_quat[3], blender_quat[0]]) + +def getEngineBranch() -> dmx_version: + if not bpy.context.scene.vs.engine_path: return None + path = os.path.abspath(bpy.path.abspath(bpy.context.scene.vs.engine_path)) + + # Source 2: search for executable name + engine_path_files = set(name[:-4] if name.endswith(".exe") else name for name in os.listdir(path)) + if "resourcecompiler" in engine_path_files: # Source 2 + for executable,dmx_version in dmx_versions_source2.items(): + if executable in 
engine_path_files: + return dmx_version + + # Source 1 SFM special case + if path.lower().find("sourcefilmmaker") != -1: + return dmx_versions_source1["Source Filmmaker"] # hack for weird SFM folder structure, add a space too + + # Source 1 standard: use parent dir's name + name = os.path.basename(os.path.dirname(bpy.path.abspath(path))).title().replace("Sdk","SDK") + return dmx_versions_source1.get(name) + +def getCorrectiveShapeSeparator(): return '__' if State.compiler == Compiler.MODELDOC else '_' + +vertex_maps = ["valvesource_vertex_paint", "valvesource_vertex_blend", "valvesource_vertex_blend1"] + +def getDmxKeywords(format_version): + if format_version >= 22: + return { + 'pos': "position$0", 'norm': "normal$0", 'wrinkle':"wrinkle$0", + 'balance':"balance$0", 'weight':"blendweights$0", 'weight_indices':"blendindices$0" + } + else: + return { 'pos': "positions", 'norm': "normals", 'wrinkle':"wrinkle", + 'balance':"balance", 'weight':"jointWeights", 'weight_indices':"jointIndices" } + +def count_exports(context): + num = 0 + for exportable in context.scene.vs.export_list: + item = exportable.item + if item and item.vs.export and (type(item) != bpy.types.Collection or not item.vs.mute): + num += 1 + return num + +def animationLength(ad): + if ad.action: + return int(ad.action.frame_range[1]) + else: + strips = [strip.frame_end for track in ad.nla_tracks if not track.mute for strip in track.strips] + if strips: + return int(max(strips)) + else: + return 0 + +def getFileExt(flex=False): + if State.datamodelEncoding != 0 and bpy.context.scene.vs.export_format == 'DMX': + return ".dmx" + else: + if flex: return ".vta" + else: return ".smd" + +def isWild(in_str): + wcards = [ "*", "?", "[", "]" ] + for char in wcards: + if in_str.find(char) != -1: return True + +# rounds to 6 decimal places, converts between "1e-5" and "0.000001", outputs str +def getSmdFloat(fval): + return "{:.6f}".format(float(fval)) +def getSmdVec(iterable): + return " ".join([getSmdFloat(val) 
for val in iterable]) + +def appendExt(path,ext): + if not path.lower().endswith("." + ext) and not path.lower().endswith(".dmx"): + path += "." + ext + return path + +def printTimeMessage(start_time,name,job,type="SMD"): + elapsedtime = int(time.time() - start_time) + if elapsedtime == 1: + elapsedtime = "1 second" + elif elapsedtime > 1: + elapsedtime = str(elapsedtime) + " seconds" + else: + elapsedtime = "under 1 second" + + print(type,name,"{}ed in".format(job),elapsedtime,"\n") + +def PrintVer(in_seq,sep="."): + rlist = list(in_seq[:]) + rlist.reverse() + out = "" + for val in rlist: + try: + if int(val) == 0 and not len(out): + continue + except ValueError: + continue + out = "{}{}{}".format(str(val),sep if sep else "",out) # NB last value! + if out.count(sep) == 1: + out += "0" # 1.0 instead of 1 + return out.rstrip(sep) + +def getUpAxisMat(axis): + if axis.upper() == 'X': + return Matrix.Rotation(pi/2,4,'Y') + if axis.upper() == 'Y': + return Matrix.Rotation(pi/2,4,'X') + if axis.upper() == 'Z': + return Matrix() + else: + raise AttributeError("getUpAxisMat got invalid axis argument '{}'".format(axis)) + +def MakeObjectIcon(object,prefix=None,suffix=None): + if not (prefix or suffix): + raise TypeError("A prefix or suffix is required") + + if object.type == 'TEXT': + type = 'FONT' + else: + type = object.type + + out = "" + if prefix: + out += prefix + out += type + if suffix: + out += suffix + return out + +def getObExportName(ob): + return ob.name + +def removeObject(obj): + d = obj.data + type = obj.type + + if type == "ARMATURE": + for child in obj.children: + if child.type == 'EMPTY': + removeObject(child) + + for collection in obj.users_collection: + collection.objects.unlink(obj) + if obj.users == 0: + if type == 'ARMATURE' and obj.animation_data: + obj.animation_data.action = None # avoid horrible Blender bug that leads to actions being deleted + + bpy.data.objects.remove(obj) + if d and d.users == 0: + if type == 'MESH': + 
bpy.data.meshes.remove(d) + if type == 'ARMATURE': + bpy.data.armatures.remove(d) + + return None if d else type + +def select_only(ob): + bpy.context.view_layer.objects.active = ob + bpy.ops.object.mode_set(mode='OBJECT') + if bpy.context.selected_objects: + bpy.ops.object.select_all(action='DESELECT') + ob.select_set(True) + +def hasShapes(id, valid_only = True): + def _test(id_): + return id_.type in shape_types and id_.data.shape_keys and len(id_.data.shape_keys.key_blocks) + + if type(id) == bpy.types.Collection: + for _ in [ob for ob in id.objects if ob.vs.export and (not valid_only or ob.session_uid in State.exportableObjects) and _test(ob)]: + return True + else: + return _test(id) + +def countShapes(*objects): + num_shapes = 0 + num_correctives = 0 + flattened_objects = [] + for ob in objects: + if type(ob) == bpy.types.Collection: + flattened_objects.extend(ob.objects) + elif hasattr(ob,'__iter__'): + flattened_objects.extend(ob) + else: + flattened_objects.append(ob) + for ob in [ob for ob in flattened_objects if ob.vs.export and hasShapes(ob)]: + for shape in ob.data.shape_keys.key_blocks[1:]: + if getCorrectiveShapeSeparator() in shape.name: num_correctives += 1 + else: num_shapes += 1 + return num_shapes, num_correctives + +def hasCurves(id): + def _test(id_): + return id_.type in ['CURVE','SURFACE','FONT'] + + if type(id) == bpy.types.Collection: + for _ in [ob for ob in id.objects if ob.vs.export and ob.session_uid in State.exportableObjects and _test(ob)]: + return True + else: + return _test(id) + +def valvesource_vertex_maps(id): + """Returns all vertex colour maps which are recognised by the Tools.""" + def test(id_): + if hasattr(id_.data,"vertex_colors"): + return set(id_.data.vertex_colors.keys()).intersection(vertex_maps) + else: + return [] + + if type(id) == bpy.types.Collection: + return set(itertools.chain(*(test(ob) for ob in id.objects))) + elif id.type == 'MESH': + return test(id) + +def actionsForFilter(filter): + import fnmatch + 
return list([action for action in bpy.data.actions if action.users and fnmatch.fnmatch(action.name, filter)]) +def shouldExportGroup(group): + return group.vs.export and not group.vs.mute + +def hasFlexControllerSource(source): + return bpy.data.texts.get(source) or os.path.exists(bpy.path.abspath(source)) + +def getExportablesForObject(ob): + # objects can be reallocated between yields, so capture the ID locally + ob_session_uid = ob.session_uid + seen = set() + + while len(seen) < len(bpy.context.scene.vs.export_list): + # Handle the exportables list changing between yields by re-evaluating the whole thing + for exportable in bpy.context.scene.vs.export_list: + if not exportable.item: + continue # Observed only in Blender release builds without a debugger attached + + if exportable.session_uid in seen: + continue + seen.add(exportable.session_uid) + + if exportable.ob_type == 'COLLECTION' and not exportable.item.vs.mute and any(collection_item.session_uid == ob_session_uid for collection_item in exportable.item.objects): + yield exportable + break + + if exportable.session_uid == ob_session_uid: + yield exportable + break + +# How to handle the selected object appearing in multiple collections? +# How to handle an armature with animation only appearing within a collection? +def getSelectedExportables(): + seen = set() + for ob in bpy.context.selected_objects: + for exportable in getExportablesForObject(ob): + if not exportable.name in seen: + seen.add(exportable.name) + yield exportable + + if len(seen) == 0 and bpy.context.active_object: + for exportable in getExportablesForObject(bpy.context.active_object): + yield exportable + +def make_export_list(scene): + scene.vs.export_list.clear() + + def makeDisplayName(item,name=None): + return os.path.join(item.vs.subdir if item.vs.subdir != "." 
else "", (name if name else item.name) + getFileExt()) + + if State.exportableObjects: + ungrouped_object_ids = State.exportableObjects.copy() + + groups_sorted = bpy.data.collections[:] + groups_sorted.sort(key=lambda g: g.name.lower()) + + scene_groups = [] + for group in groups_sorted: + valid = False + for obj in [obj for obj in group.objects if obj.session_uid in State.exportableObjects]: + if not group.vs.mute and obj.type != 'ARMATURE' and obj.session_uid in ungrouped_object_ids: + ungrouped_object_ids.remove(obj.session_uid) + valid = True + if valid: + scene_groups.append(group) + + for g in scene_groups: + i = scene.vs.export_list.add() + if g.vs.mute: + i.name = "{} {}".format(g.name,pgettext(get_id("exportables_group_mute_suffix",True))) + else: + i.name = makeDisplayName(g) + i.collection = g + i.ob_type = "COLLECTION" + i.icon = "GROUP" + + ungrouped_objects = list(ob for ob in scene.objects if ob.session_uid in ungrouped_object_ids) + ungrouped_objects.sort(key=lambda s: s.name.lower()) + for ob in ungrouped_objects: + if ob.type == 'FONT': + ob.vs.triangulate = True # preserved if the user converts to mesh + + i_name = i_type = i_icon = None + if ob.type == 'ARMATURE': + ad = ob.animation_data + if ad: + i_icon = i_type = "ACTION" + if ob.data.vs.action_selection == 'FILTERED': + i_name = get_id("exportables_arm_filter_result",True).format(ob.vs.action_filter,len(actionsForFilter(ob.vs.action_filter))) + elif ad.action: + i_name = makeDisplayName(ob,ad.action.name) + elif len(ad.nla_tracks): + i_name = makeDisplayName(ob) + i_icon = "NLA" + else: + i_name = makeDisplayName(ob) + i_icon = MakeObjectIcon(ob,prefix="OUTLINER_OB_") + i_type = "OBJECT" + if i_name: + i = scene.vs.export_list.add() + i.name = i_name + i.ob_type = i_type + i.icon = i_icon + i.obj = ob + +class Logger: + def __init__(self): + self.log_warnings = [] + self.log_errors = [] + self.startTime = time.time() + + def warning(self, *string): + message = " ".join(str(s) for s in 
string) + print(" WARNING:",message) + self.log_warnings.append(message) + + def error(self, *string): + message = " ".join(str(s) for s in string) + print(" ERROR:",message) + self.log_errors.append(message) + + def list_errors(self, menu, context): + l = menu.layout + if len(self.log_errors): + for msg in self.log_errors: + l.label(text="{}: {}".format(pgettext("Error").upper(), msg)) + l.separator() + if len(self.log_warnings): + for msg in self.log_warnings: + l.label(text="{}: {}".format(pgettext("Warning").upper(), msg)) + + def elapsed_time(self): + return round(time.time() - self.startTime, 1) + + def errorReport(self,message): + if len(self.log_errors) or len(self.log_warnings): + message += get_id("exporter_report_suffix",True).format(len(self.log_errors),len(self.log_warnings)) + if not bpy.app.background: + bpy.context.window_manager.popup_menu(self.list_errors,title=get_id("exporter_report_menu")) + + print("{} Errors and {} Warnings".format(len(self.log_errors),len(self.log_warnings))) + for msg in self.log_errors: print("Error:",msg) + for msg in self.log_warnings: print("Warning:",msg) + + self.report({'INFO'},message) + print(message) + +class SmdInfo: + isDMX = 0 # version number, or 0 for SMD + a = None # Armature object + m = None # Mesh datablock + shapes = None + g = None # Group being exported + file = None + jobName = None + jobType = None + startTime = 0 + started_in_editmode = None + in_block_comment = False + upAxis = 'Z' + rotMode = 'EULER' # for creating keyframes during import + + def __init__(self): + self.upAxis = bpy.context.scene.vs.up_axis + self.amod = {} # Armature modifiers + self.materials_used = set() # printed to the console for users' benefit + + # DMX stuff + self.attachments = [] + self.meshes = [] + self.parent_chain = [] + self.dmxShapes = collections.defaultdict(list) + self.boneTransformIDs = {} + + self.frameData = [] + self.bakeInfo = [] + + # boneIDs contains the ID-to-name mapping of *this* SMD's bones. 
+ # - Key: integer ID + # - Value: bone name (storing object itself is not safe) + self.boneIDs = {} + self.boneNameToID = {} # for convenience during export + self.phantomParentIDs = {} # for bones in animation SMDs but not the ref skeleton + +class QcInfo: + startTime = 0 + ref_mesh = None # for VTA import + a = None + origin = None + upAxis = 'Z' + upAxisMat = None + numSMDs = 0 + makeCamera = False + in_block_comment = False + jobName = "" + root_filedir = "" + + def __init__(self): + self.imported_smds = [] + self.vars = {} + self.dir_stack = [] + + def cd(self): + return os.path.join(self.root_filedir,*self.dir_stack) + +class KeyFrame: + def __init__(self): + self.frame = None + self.pos = self.rot = False + self.matrix = Matrix() + +class SMD_OT_LaunchHLMV(bpy.types.Operator): + bl_idname = "smd.launch_hlmv" + bl_label = get_id("launch_hlmv") + bl_description = get_id("launch_hlmv_tip") + + @classmethod + def poll(cls,context): + return bool(context.scene.vs.engine_path) + + def execute(self,context): + args = [os.path.normpath(os.path.join(bpy.path.abspath(context.scene.vs.engine_path),"hlmv"))] + if context.scene.vs.game_path: + args.extend(["-game",os.path.normpath(bpy.path.abspath(context.scene.vs.game_path))]) + subprocess.Popen(args) + return {'FINISHED'} From 71e1d2f519c4e46593f9d9c9d3f95d23a635a0bc Mon Sep 17 00:00:00 2001 From: Dmitriy Date: Sun, 26 Jan 2025 21:23:55 +0300 Subject: [PATCH 5/6] here is 1. auto import textures 2. auto settings for goldsrc 3. GUI Import/Export SMD buttons. 
26.01.2025 --- io_scene_valvesource/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/io_scene_valvesource/__init__.py b/io_scene_valvesource/__init__.py index 2c242fb..4483135 100644 --- a/io_scene_valvesource/__init__.py +++ b/io_scene_valvesource/__init__.py @@ -27,7 +27,7 @@ "location": "File > Import/Export, Scene properties", "wiki_url": "http://steamcommunity.com/groups/BlenderSourceTools", "tracker_url": "http://steamcommunity.com/groups/BlenderSourceTools/discussions/0/", - "description": "Importer and exporter for Valve Software's Source Engine. Supports SMD\VTA, DMX and QC." + "description": "Importer and exporter for Valve Software's Source Engine. Supports SMD\VTA, DMX and QC. New Fixes BLENDER 4.2. + BST 3 3 1 (imp button )" } import bpy, os From 0c50b0e14aebd144b7aab2b76c593a785e24d5b8 Mon Sep 17 00:00:00 2001 From: Dmitriy Date: Fri, 31 Jan 2025 00:52:00 +0300 Subject: [PATCH 6/6] bpy.utils.register_class(OpenQCFileWithStudioModel) bpy.utils.register_class(RenameCollectionBasedOnSMDFiles) bpy.utils.register_class(Create_w_model_sub_QC) bpy.utils.register_class(Create_w_model_QC) bpy.utils.register_class(Create_w_model_Sequence_Idle_1) bpy.utils.register_class(Create_w_model_Bone_1) bpy.utils.register_class(Create_SMD_Utils_Panel) --- io_scene_valvesource.zip | Bin 0 -> 93810 bytes io_scene_valvesource/__init__.py | 665 ++++++++++++++++++++++++++++--- sequence.smd | 8 + 3 files changed, 615 insertions(+), 58 deletions(-) create mode 100644 io_scene_valvesource.zip create mode 100644 sequence.smd diff --git a/io_scene_valvesource.zip b/io_scene_valvesource.zip new file mode 100644 index 0000000000000000000000000000000000000000..7319f86315802a20faafe1a82fce2ef87efac867 GIT binary patch literal 93810 zcmZ^JQ>-WotmU?C+c?{{ZQHhO+qP}nwr$(iyEB<&zFw0eY5G+pU0L!{z#vcn|3?g? 
zcs2iT@c#_zf8N4Q&&kNd)m$b?S*zY>)Hk`Mq|0TdcxVk>~hBme-l zVE_Px|67T%fwO^)ow14aKYNd1?k&fy(Zo6BlwSk5NB+f7cjp2pL)COB+64yc#^V%Y za*fOppooHS3=j-}WfPUpMxS-LO}+KoTrL2~*9xAl3Skh_QZG->PA|_+?2o&@@47pT zE=|W*U*{3oA`UsnS9z=|RyJr~Mv10%#!`#gEu~Mob20 zLlJBL-DT_u8$g;~z?wPHqDyE3Tm<55r?+f4Y^2Zyv70je%Yb+igDe;_ZRv)Qn1Duz zrU>8QoH4(BY~(_ty$ij5{JvSS3yLDd)@g)CKg$y(A3{Eb{dchQv|(3N<#vW&hY{V5 zk*)JHFuT_{uz`QQwy+T~7ux67j-jjD_yjG;=J|Kv%pCl>u<>v)mb9=zcMaR0n-gRi z%});Su)-ljwGb1U7JmP8fB25u4YwODE2if69(;XsehK5y$J3_|{AytDz{2_K&e^Vy z&#UXr-iHH(N$&q??fm)*WQx^`pR41dZ)$EdBzHgfZx2>h&Vh4~?Hz);gI{yw;UkRi zpJ-oqm*L&4oLuPqyD0<;+-^49h;x1a(bB+q&zvmS9e5uMYbVa9wLL?p@M(7syB{4|6b)FplORaFnNR@a z#@+4W2WXF9H;!fTwJ|$8sCT`IS&B`_&hAcxDg_KovI!F%|8)}$fK4R7tC<4Q@e_rh zOq7vqX6Bc5Q^ep-j!0@P2|^MD>jY=I-@};z_gvB6ez^BUP`ufaNM=o7;7i8a(cbH; zdV8l4cSc$99emF>;%{)AbSxvRmlvN0Db7-~-V;~vSWtl}A0Ps6MY3d|Vz6_bq~A;c3AdVx#`N z)HkaO2omYsOxf$-0U|8Pw+K*f4g&f8%?9XF>GE({Vx2n`=^E{W(VeoDtKPk~>-&sM z!}?djhACZ!IiGnOX0@m@6W*D&+rD%c*O~(-@DlHjqZ4~fBEszX4(=7`$yO2ysDb$l zghvIi-TFNp;J}$+f&bF+mcsK*uY=v4eqjbkB1Xu*f%F7`jDoeM!a$Qgnv-5L>YLT> zEoOnj4i6cJdn%c}3zmp0-K$D7Qe}g{LS%Tt<%6A%=_iODF|!Dbpn!stw0!#ePIetM zYcYOu9D&(3QbE;MOJe?!Gjf>l3AYUYKPb-Vkj?_8`}_5vzh($bC~vqg=~2|QMRW1j z?-mK@*hT}XXmVu%qk-EUTu7iy)|?WHZ9)8*&_OqP1%Y+W{FS-Pyt~)8EHspG6;P;Z zRVj@3)VI+m8@1EIZ>PDuBa)o> zBC?jupsAyUa*LUWMQY(F)fiucqrzTIZ8q&<_ZG=F6DaqVq2rIW`3C} z=iH^LgLU(6xDDs6TQ+JKPlJL%c)XE#;W|gIkTus-TM#;#Dx_c{V3{q|uj??Jefn~p zgj38+#R5@CD2mVNAz+Yj*VQh_5GR6z^2ike;Z^}o$Z_)trEKwm zcwez4`VguLvTwK@XT0H)4(Lobz}wQ5$J1ZP)1 z0Ia{YJFPJo{4S9w5jGa)g+>@o-LWr~P0oe$xRI;tRJC3B4L_A!_@*@ zP^kn|rAw-_M|fRNy_Kz8h|F7F|P6hz_)? 
zvw0TD&7<~IwVa9`!HEXYI|>z6P;XB(VuSF-=030O>Vsx;yl=-HqWlWwh>#d{J80kx ztRj#O#*3-VsyGN?aqw~N!4bLOs;62-S8j{gTLkt4FUO9gDYYH^m6njrZ}{+z2PFyo zmQQN{gzf)QtM77kLDuqf&fGxd1Z}!w*z?!>5cYdb$Y)ymG5=f2bPO2LoQ=;Hu#5C@5+&Nf zPR{O{u?v%y*)`12CEtTe5`7?}Ch>Gv^oCwl>(xet*3EE2qMZ_gBh$rwcl?XPAWK+F z{KbQEmJ{@>%jWSx(!%(h#rSY$Kj`mN9mL0i;Ufl8lVV+JXe`85hQkZULQ_UvaxpAH ze>T8e*`)iF%_KN6cC1-_e-C$kEEDLOL;Nhh@!G&)q#QT86>72_GSu?Ht@x3XP(C%+ zkl#6trODj@%7NTMImPzPD1ULrm=+VmJ z4lb*tL=@LssH_Zc3nj4oP(fDtw%0uGx~^W$f0H;+mE%qv@45(U2sZp1>fA9!y2HAS zx5L|oO#~gMH9mWz2TitKf*9`FgK%hVTSkNJnJrU6_z8oSh&40BB81L1V)OC$XbOVh zq(Ivg!}PIa!NKbJpV);$xMY^uSCSZ}(QjP#5?%wH&t<0mDd5|XWzNk_2-++i5jg*0q8$d2S$EHhxsfnQ0n$^x0Dp1F>?IX2Mz$q%hUtm z5;Z%!x{Me=l7Nl4ZTnX!ycunXX15CZkV7|&EL54m1?!Ags3G8uM!=IFk*NRBM!%5T~L=lzqT9vtz4r651p_6oT;_$`Dd9|C7 zh8<^+^2RAPY?X~#D zm{!L`J}EAZ7rT>EP>|{I2{{yhnJ3GVL$yj7#HI8`9k5q%wH%@RoU@9x3s!GPoy+$- zLo}zw&Ay!e86lta$|8i){#4SCf{qC_ij}Z?pnPgVu;dX(%Om$;4aPVE%2VVN1Oxuz zU`E)oz`vOdT-_dq^{CaMe|hbw{kiwNW!~Xm_iVxkdc3W?{XxW;&a~1UMl5a_tW;|l zSM>8TR4KPLP zrfpPwc>OhINu0tXlOdP|L$zL+^`=%fjZf#7~ zsE^FC8MY#*-TR_u1dr}ArnQ-G4D?FP6rz8H?*J9Yf;#Y0VOXy$6>u5_C9*U?G-mk_ zMz3=zbdhm>zJ{*M#mI;1r)QGr@bAZvNwR*vSFwz^ltyh6;}j;k@ihur3XxJI94)^w zPgdr8=s)-KR*sO6d8lK3ay(4evY|A@I5uu~{_y$9t{>n`uoQsi;s_+IM6-qgbjQtP zwU-e=-}VE^$XDS8v-X}XUE7=56qJUwUzs4?gTllnv}eV~LqnMex%;!W(; z%0Yte850}JKoExfk*#KbB4BW#d~wQLX+@~*75n()D81a6SK0|$_o7NY=b*&h84G%o z=NxKbBKKu`Jlqh}%M^M#s-cawuI?*b)^6g_C|g@k7osodZ34fK=-hkT)bS|9Lc)s# zr*0QCRSexL~XMVo7<=A4+`!L9Uwroh?48$V`uSj?megx zRFOh=-3ehn(tH_WS=_{B3mHl&Igxo7G^@hS-{IoSb}mIp%ai4L(#J&=h)&T|-@b#4+T0af zO(2;vMSJha{cs&>s%u~2aY`nF+L~t1Q2FThCn)8jed~Wd!)>d;F3^g1b;om}K;%^jU7x*}goM z`MP61gsiAS?jjFJx#w~FsYt*MQ3E06pO1xWTd+T#??3XK6?fULbYVrp+-KP`671Gu zy**g?X)mR66Ny>BP`9gLhCgu+!CA^!lSsDvP#v#j2`)v>5o8YGJ}_-rd`PvCsN9?@ z+^5erY$zR<8St4LR;}S*Nn)h5<&eOb`B3(S5|t_JR$`g3!>-OU{y;A0MH`csqm_Ia z2<(nYDF~6aa(U-0%oU0}5oQ$}F^lfrQ2bhX)QB4)NSIE|BGONcab$B`ZR-V32&$G7 zq#zhC6q!-1X^EUKth`H6m*)~THn+5lug=(F#Wq&g_2Dj%M}V7w5=#YP_9-0`;`&-g 
zcxNEHW-fXMIy&W^yJAFwLJj$=dFF3MaPWh&dr23l1$V8cm)CJC0CKX;hWR>Prcw-* zQ7x_xoXspxW)uOryedSBhYVFgE(4N2fkkEE6efojCo%IxeN~Z1=lpEt5jIInQ1a+e zQjhIVq5481M2@LaC>0O;rRIsfcWz`(Ig8f`v}HGYIeF$dtA*?-VxkODFC{2xrW!I; zUgQv*9PGIgwmz5sAhT(C@d~l-K6n$SBO@mXQI6zh)b{{!ucp{4f z%#3i?>$u|(`*zF6Rj=aHyTw|=MirH73lUxnl{Bd^6Cs~(KP-DLLJpGlzW5IuF(7h= zk;@zqZ)BhJ2HuBe6)8}X^55_XYxr}XD9~=8;2H`rK1~2@0Wzc8wNFmPE!8G@feeMj zfbn$3K~(gh>!pJ3YXZyfl5SLpxhWA^?rCujSFg~Kq!dd8z9dq;b)JY__b#0@*j_myUma`ID+iG|4P6hk{Ple-uh((j_6$;>7D52N{ z0^4*#EItpi!RSQRK`tN_Ds)lOjJLj>!<`2ArQ zb>tuPFBIm8zK>P`>4z$Yc;bVqn+bTmb67X;VT|EanB_5uOKpNV57M8;XTusSzRCvC zLzfdA_HXsxscA1yV`~8VZFxX2_f{RifF;EuQ54Lvi-SgEW)TqwZhqRVNT|gE->oqf z;$}{T2mFidx5SZH3ejHZ-yXkbINAi47UfYAi+gjAaBep<-+N;_{?Whf$TsQGW$w4C z%qO&5DFjAn+&cOcHc#9VI?SRO4Ptc$kuHf9+NDD&rS1i;%;(?R6($d}r}3R8`_3|| z_ZZ3`AT(eS=!>Tm`5RC?LD;_;?UN0RB;0p0>xC8rXEd_36+cApZ5E3qF&@w!$6l8o zO|#Bt!B_FXh?+{v?l_fwmXT9_xNJ#OKThysUVve%y!(!B z2hUEc@Xzq;BQD}TU^w=bPS^r)Jk-Yt;mVIF2pBOdH^3@blvV5zEVhx%?3+a~_6C(< zo4-|Or}Obxiy5*t(|}vxKb)-4m}InzHcOYJ=o$Q@ zKST)MeOS=^Q+5Z}Pt}k|nBQl-rD#$?)WFB`{O?GZCW(zgzC=%QgArHl4;OLr?IW z;YodoD*!cT9{OYasGN>a=Ht1mGGcrkYi|j*NZZ_Bpr4%VpXby5(E$OGOIJzJ{<80L zVY`70s!Wm>KpmK9a5Dwkir`AcWKJ93UXjB|JPMDAI(eQtUo+LIQ)tihiW&ENXEG3Ei^x`a#sHH3ajb| zL0NB4$nL0LF0;D#ypS5poafdTvGKgnzB7=Reth$&pz>d&C6A`lZ#Gk#8<6U@+|np? 
zb{CoZcI7b`3Iil?LAPI5g&&v`6A7swJJ&b9Ap?s71KMM2WXF-v3Cny}!L|k(@Irlj z&IiccgYN+Oa;l5+%D!S*B4wk+n9UMa#Fxy}Iw)Roj%v0UA?%*?f1{9<12aXuqK_hM zhNSsE_?$+P>WrLZ8tOr%xs$1$boqUEeqTp72;*Gd0ZJb!kON0ZIpgyL`|iGiACIq+ zqGKdr>FJXBDPc+w-3_c~(7?Dy@!fR+q0`blUp{nr0eurza3pY2t?Hh=@6X47 zgfzH}Nq*4bqA-0@^3l8d#G4Kh4t~FUDn(C#w-;;&K{oJctrvr}o-c~}TchKZzlnv{ z?h?Y&Ys9`(go%wHCcDArufY5k8$GxYr9J*8%bU^rQQtlG0Hr<4pUN}Uhjz~3y7TCF ze~Fj~WiCNkm1YtV-J;)gOfwf0BV?_=Be_oU6Pp9QcIy_$j#$uR=f!l}{A6&zP&)VY zo?skk(jeamjt8W;y^Y7lnF??n_rmM-pSq7P41a;=wQ;+;YnqeU8a(!!u%rmz=JD_5 z<}=xX^&TsB8DV(c7`jv|cT}-MAjgwnB?^nl-*-Q$KrgnH#bCtrT!R~|nI}h^$tXB_ z**I(+^_=cDID9q%?Vk2}b1EF|Zws;o=jNF(u{CCekx zC_~65{eM--d2RAc2M!N;Sl6KdyeueO{0zdzy#sFPTYMQCK>P#0`*Tcnv_U+hQCqoJ zp5dJ#FLC<<)M*2YviGP$lD7dtJURSbcfc@)^1;mbe%>%oIeF~zXEe*dike3tAMMd^ z=_)Em-aAK~hiHEoWj2?rcj`D<7q)X3@~Yo{1|a#gNW(DdRk-=8D}cdRW2sVGsl1g! zpIFfn%r<`0P9va0X0~q3>MQrz^RvkGWq=t_BPUhwGwT(SV7#Urz>RMa;Anjb#w0Wu6El32|xR;F9UtPFnu^t#Gd4-{s zg_%12P-$kGF*ZpXre|a3{R!yoNkL0Uxg+JP#HN0-w0@|danf~t$vg{A);J2S-<>mA z;(jKB>7ERL$7^?vt}n;RZvB!=-h1@84&l}V(<;>h=i&2jcEn}V7Xz88Bm&8b%8BDk zCu%GYq*-!1q$+FzKMxg{IKi-blRxJBvoL&kEKHT|$WZM#enj`l2U zEDX52r)6R}Xq*6i_VE&M5>&;V5G-q!fK}^~M_GYR452q9%I4kO9JV}3n zGu+ij%sP7J{SCYGyOiNk|W9FQa#`I7%$XY}{n&|NVJxixWE&g$%Xv z8T}phv9)+Z9^0w;odGO92mfq%M@cDeK!q2gB!4S;(Zy2e4dC-W3=c`sy-u>H`I- zZ+L|5OgjwFF*VCQ{-ynNsQbBt@4mZ_UrJIB!8!NFg~1ib5|ubQ-#6fsMBTRQ> z^k|{*DhVWK*Hj$A+Yq%(i$$KZ$#S{3Ii0C=!mo-?Oe_|z*9OA73Fw=+j6v?D37A20 zqqH|gs#VL2WAx;g8q!JH7rPd#*)mRhs-K$b$ukJNYe!zT-;JXj^tF+1ChzHlwE%DH z1V@f(x<^}q-{4v&L9r@lJum73EJn@&T}1%0Z`dF}Xrj6mMo)L4^LDJqZrG zU62A;aPdws%Lzh34{%q@#K{{_03rSzkMHtioz|@0KY={tyYvt*?9Z~*zH)jajh9ac zH;5m>@vLe|!5o4jKUNu>7xDIY+Q;i1iX4CsyQ5m4dCp{G@y`M8LNhZ<`Qu1)1b?cv z)iR?F%IICpD7V0krvOiE_wAQHGT!152?J!s-JI&o#G;I`EzEw1AmA+~6Rl^K6nIe>X$2Na4jn_DQ zrZ?hRXWF@h+lC5tEKSr3H=zKBLnjAXeJCcY9#b3zwW-O-R+Qa1s8F3O_!{R9A#K17WuGiQFty%CGzi?VGG~0sL}rfD_F77^ zQ?Dj;+Kr)IBZyil>8G7p@NIoJf@HPXa@NtxdwE>0UO(eSNxq~oSzlny-x|Xk*-WZA 
z?MgRqF!_vNJUbQwb0G>4@%BjYG_}y>!)p)g2<<-j^6j(GQhSh*t&By>+q}U;zxf0F z{}d_pR9?-<1bx~ySpWblHb4NP|BWJL;%;x}=&a{tWBeaSs+!x%Zc98d54ZoXK4kV_xz> z^uUt1~^dxnxZmKYFahA>bUpBBM_p_{(DA=fy&Y6l(76Av#Eq=a+wEU zR%R%w?GvSh66>M$zjyasPhsHv4L;ehynlS~8v`Tw*ZAOTYvAZZe7<1kmYG8|e#eQb zjMR%D;zO&G^i{b5UPKPO?Yy5L?BHQ#W@STs_Q>RALs0`Wuv{J8rr_6?`=0Ke&mY(O znL0kby`9~d@6kK<#?q3G9r+s=zD%>7(e36~xFU{B-p<^lj;BH+WQFi(x1ukwWwtQCFu zryiAkHoXeT-(XRhzI_sOG_f04|E%qW37+4$gTL;DnYs1lQ2 zp?-fzXwVi*W*^3bQf_`vGouu+iR{p3*)JKyrVaQK-TP9<;}^_04=APr`2d?vy}|*Z z6^T?QvD62K{|XrixAeet2x$1@4yyFOa04$1tA0ds6kx+i@w(?}-;=s|j_I5J+NoK^NXt3~hKlBRo z9`c4YL?M2^xGpB5gVLfEV!fxK*u!;`q2P;f)L|<|MJK8}B4z=y+xY-Fv=e+FoawKL z=GehVS!&=qjDL8QD%C#OhUUh~($2=rvh0=xC0^6{?sNAq)ay^`H4xH-8H`Vi(I$6uGLzjo{R5HST&LnVqZGX^92j~ zv)$mp^0eRC56OYZ(+t}aKVUQII66aj+inJ4UQS+K-R3&RH|)fuYyX{$7;PqS66L|H zCT=YZj4bS5TiaI~wCdVcnH?YY(u{eb6x0686+GAO zbTGDs4uOIBoqQFC(W-I|EhGv~(H|!7f39s4V>SoWP4)1g9a%f5xwo67zI8WGtA+Q< z2vLDgeJvh+f)Rfd3g=mH4v(F8NqFVlzNfimad|s_K72k+;tnKbh2lPB$WLKBsU)y6 z^l21M>-kF&5LHW%R_^Av3(yNdeZK8=zQ9h$e%ZL;5dbPOK`Zp*gH#%zr5$A{E|g$N zaezZ5z=8bUYHFh*U^)0L@)rSeQf2i>CWUWp{s9hG(m;^n(qE=Yy_0xwiz(w<)U>X!^xDMKe zE_}Y=4=0lliKr!{%%+16CZ}~uNkB$4Aq;9cD?x;3p0ib~uPrn&gu+L|NR7MJL&ym$qOMX> zd=3Iw4fSzJ1Cfnp^G)3VqL*b?zo?&h*3fDSvbTb)M|5)&kyv$XR0^Q{Pq{1NA2=Yg zKx)#KdY zK~i!`kWyaz^Rf?QI5^vjS>Iz&8`t&dTt-1Aq@Ehzj#J{Su6MiL6 zMzb&6Qv_)yq7{@D?M}TueNNtc;AiD4y4WieC^$<|yJ_PplXe}lt8JAMe^4uRZd=V5 zaU6F|-R~0v@ae0Zy3}JY`{lP=$qeY3GfgCdQ}VVc&DEjth(v)%Gmk_%yXRrNTS-YR zJGDnhryD}4d?q$=^ORU4evd+abCJ5x-RdDnG12tH&dbKtRTN31ghqkJ%ALR{1549!gUqn`RC!gJ zm`9Hs?xdPG=9%P(ZkZdmD+4n!^xw`tA0J0$CLvf>)>c<961)WsPr?A3>>1NimIyyG zB3>QjOA46zD~0%;(5}?DzepDnNk#OI-oJA;7G0sS_%a^P2;m+AE+I^kjrHEH-K0~9 zMOEt`AwS~V&8KoC$nwB98O^Q%2OawZLr35f2aT49ul>uZx7kj`GxINkK))t%q&fN! 
z{VZH=vxGL`Kkj0v)Gcl?VYkMKH052}#(Qj6-98!JSBxMk`Yn^V6-lRT;>IR9QdhN_ zOJT`v)y6UV*VpW~f%#(Tiy-Yf$bV|6r_io(a6QNCZQjREFh$na?R1ab(n_Cxk`8(U zmfT;NM>73V+>%{f|FL)wP>lgmOC#Ks5`xq`L4FEL$>Nfr=opnrhw+0jLu5qr$|P`} zq*q#NhMh3;KLK-tmKq4p;+fB8iFvi|zx!8?(_% zIS9@8f%!2Vp10hXIEBWN{Fj069T{AzDswfUl*YY-MFwfVEBa`Y4m6K0yq6q_8>L`zXK1I(1(ThZW&D zl^+qLye{ZQfXs~(_Ny%hf)#QeT4$cY1CkP)>EG+Nm-=$J7@oCY_P#KwEscY*(#I!>XY>|GlyS$}%z#S~DA7J7inDfaWp0qJu71{-c0qH@ zF-PNZd!q{qj-}DI$0T#2%7D#OekP-#dsD+JIy65gj%UYl3v^T3QD%|MU%2;__DKv0 z-eX+;Vj2sX+6J&NH6340Ow&0BX2#GijX0rqZ+33!G_Ij)-K<}nVn;!Xn-StvHk(D^ zmf&21P7CK%QCco{=Vz=HD^9F)!$o|U^FrbcInQp;`G93Kij=P@LnduW>ZX#n)sUzc zO;#2%r78ToUJdNh6UTYR)qqwznO0$M_|W-ERG^fhrrcVta3&^UkHu^Y>djGd-qomt zi}k;5lLQ$%Ln`)No+J#DQk)?0Qm_%yF*In#32|VCm4Zo`o!#^Oc)7Lx+wt4E2)87z zh%4=@rRSlLGp{pW!}_k#9;R~68$`;QiK>i24lv~{?K-E)rrrJ-j4zw$s@+jh33?gH zha?;Hz06NaxrHd`8gxWU8(wah!`cH@5LGqc4t&O`gMxp!^HAsAe;F#RVa$7o-Fm!2=vuajN!2d?Ue?sK}_af7xDHZKk zN>{Nq6RW30L9v5{zS4H8wOnk^)l6Q$==Ev3-IjI;@pS|vCD*3{zzT)QL`TJ*$7hBJ zC*Maxot2shBU^usr(d%UlsK~5ttM+w<70Wc%C58J3fX9?6g;U^)yW#(?4#+^wl+ac zrZh;T(3Gr|Tep4OA4^m3B>^MplGs*|l$sq>uOv>LSdK3N%X}?to^#KmY}zzjcJKr$ zGM3wRPD=L_3z6fG@(58!p^!&PAxjx%Yymp>5Dzwx$vA|F!H6|@YW{R*$j@{q#(bfO9H>GGw8BZ+Hm9d;RxGrFH z9wqDuh8IMuklSY5kU7amG;PRfRHZ)grss~gNyf{y6$*M-v6m|7WX89T|4A+Fs&p)N zl;VXJhA>vi?Zy&TBjZ!(W-&lZSWq%zqxJWuL<~R^(NUg!kX{yimfGNRdgiJ$mWCyQ zl^D6oRX89~bpf0~#{BHmt}La}B9+%+K{gXF3u1!|Pz`bG!hmCmYwIe#9A%PbOmRK$ zN!J-y7~=ZZ{88Gww_>&Ii%O}l9>DT?tMdx&!!R`%A9_`2A;pz6TCVeWm)-|E&(92K zR3|wfT!pJ*#)Sxe#GmKD(_1lA{%-;YkmRJN$kog7cZ&O9g;xo6#LE?f*MBd7Q!?r+ z1TVB(EqEGj8f;J`fScd+a~xDe*Q>M)8-CgRA=%cx9HyRj*h?ly;xM&!nKe`t1Q`kDbwuoRE1Yc}ONwe| z4{Ne7vFef%9Xx_8xyn0ct1fVX_>~ew^OgoXyVTBtkV`&+1t|h*I0;PgmWRbXo2B%s zAK+LRP1Bm@gibLrg8?yjbg6P9d+?omn-}kUfZH|s7RP>OCia$f^S8Zp?P4?HWiMju z5hb`faobQr^jJ>jybF$$cB2)pVNjYAD9MHfh0O`l3~BR!5Mh9hv4i?(=;CRXT7-oX zCtyw;i*e&#j>1N%yW#CQ*s2@;o&G{>SwVY7XuQn|pycWfC8dOF%DKH{rL@fI9!m#~ zif3WTxTEIn(kr5>o+byNi&1H_CNv;ErUZbDK@T*^caFG!@VL!Vz~tb=07lls+hsGe 
ziqw@YuMKt_9UfBsIfZqPw+|t%fJj=R0+`vO`U1dw?mV~BYC+2`#O%TNR96AMNc_JK zZm(ktU4Fn+2+!XrD^^o=KZQdURo&KnWPy_7o-R`|&N%`A8G7StNnz~6l#A4z%@m|z zFKUv~n@W~V@#S&4Fp6x_MK-}z=sPAZ5sj@`6uLOZg2MM}HcmL%efL?fBdlYzEg(Jp zJJo~^Gp)x%3EmVM5s~&!5$2zHSS`KBGJ^srnrVc=y(|*UxR=dRsiD5#)5r*kKH5iPCDZHOG&g z4YQ@nz-nhR^3%nJv_}u5$>Qv57{AT0hdRHN_vN5}|A7xis(Q6gFgZTe4r`+2N0ov$ z8JIuj$S#Z!QA9qs78&Dt0}&L`9pCDwDm(?pznT#}p;olGTw<*YD01OufSHpJA7GpQH-13O}d^x zk(%X9VZ~#HixW_u{${%@@0SWL+5Fl}bonRn&HP&+k3FHk2uL=*h-M08S$3M5UyikX zL@RokT~j@5tbSYm-Fh^qS0t9o`*)C1AO9vw1?4 zw2({kPT_OpXeud}cSs3>`_|@#;{tV96@t}bc?TkoTQ3v{cEvLcNIYrV^DN~TshVR- z5d*pSTUoGxOVrO`ey{egNytt^{fM%moribCzw@Wpmrqd!Xoa7%GH$W{f z-`O0D_91TvfK~Zfg~V$1_=$i_B;Yj!XoBW}ZmU4Kssv&6;w=|0(VAn~uoj>y_WbZE zND=-pX47)>+H&`kgnOo(fHf_euEHBM>*g5NG^4R}eG_v0*Ay@&$<%gOtUtYrhs+2) zSe+@QGcme->t2mjh?LH~XBPMPmb+2lAHLn(-;M~Jjl3MHKX}cu+G`E6QPaKE-G_f) zsigH{xCoH|US4E#C+kP_@MA}34Zt)bf4|FmidMbWPF{5siPO=`mMxk!s#RSldEN6o z@iy;nH3(u^1sp3^6XR@A8Q_B1$^uB1o}l(+%l;NOx?T|r6nTC8%)MB4Ai==5TG57; z?58at^(P_2E)BUeKfKr&Mua=v18$Y_JJROryutM{G;%@OA}e5&br$Q|;xbu4q(dk9 z;){atlG-R3Fw&?MEo*;x# zVz^R;Jk%>!iS2rRr!M&rBOfAK-$g?0#+CTYjv#NTp)k_8S7Jm-C&G)hl~8zP&#OP-A^Tl1+J zn*v{GA#3|u-tH)ILQN~lw8PTs4?EtQRZCm7cY-~Y)r&-Y*d2;j0@Foe|0))#5Ew2A)WJTz9V2InDg%*# z$P&@t*0WM^#4p3Cqax>|OeH7i`$^biMU4Pg@#R!6&KM{B=H7z=^B|C81Lz_uY6&T8 zjYnFK2ef0t(q)uBBgmszes!Dlk;SXMnKlyHxwE)0{1}Qd?hnj|U(;}^KUdx-gctnS z0`Rwzi2?VK3|jS2@ao?jV3I7!7`QMB1lFYzF!lC~-X#1FUFXmyS`TK?vTfHb+qP}n zwr$(CZQHhO+jf7m?n#gG2lAY}IeYDubf3x>BZ3)8aIC44xtn&f5>V9^B z6j|!>+&sgwcET7usm3UoR?kL>Rphf;S=c(VeWsfpQZL*KI|l9~+2fk+g67!y^JxkW zD3aVroE|z5!2kwPnNfSQprBpfPT|jK&xDT&6keo;{zDKG;Wp$iif*a0>ehlkaQVvj zl+G_e%;_%YR@RJ@7ls~JhMJ3QvY@Y|4_|&Wy3>9klxp8K_GSxFDPVpOWkX(AYuK~64S-p#(k17OWfYTY94b+~>{W%4I3CzavgoL&=Mcgn}>H~5z{qvAbXewOs z4#X@Yp;c+e3}!ZYXiVrLhJVYM8~CeVGHh*0x5imPo;(BJt!h!$pShX_>-qXrw~v#& zvI2Vr<4O;bj!9rziHE-Z1adx3Xw?ajNbu3$x@4??F782!#%)|>0t|8uNOAheggfGU zQ2!A9@S^^}W2zITljC(}d^LiUx_}QcVIfXljf+q_pzG`zD3*{yja*cuDBXaF!adH_ 
z_ae*(zO_{Ny~VWUyhfv9@~)}VO+{hx1lHZ`y_vWLxyC|&3+vvW)~N+HY4{Bttr~Vf z`sA{+K*^VVoNf&jXti4#R?v6=*cBZTrrFp@Q|CCIUO9mapx!6W!QbUaC7G~>hone| z`!zxDFmVaMx?8DgK3Z%H@u;WV z5u5TqRYytjOY}Pe23M??yJNiEkfDh53w$eQ7m9enR8b3!m3+#T!@9K{_0ZA%EGK`sE3Eu#ky z({7vWAyr7|vDiHCRN-kmoW)pBElF%QJQb-)Aq)fw-V%2BUL=P^IH#H>;`3_bQ7%#Qc>uxD)@n(N%YUVG1*b^b_6JIf^~wUlGb!<@|1nLG2~bshH4E zV?be}A4_RIQ(zmMOpvFOmdrUZwZdfCjv9$T9dC;Q{)@G zIrLyr8zP7@#~oUr4Gk6|KIsX}c7W^YXBKQl`T-*7f9d;C1b=6I|EN}BA+}JF@f)nO z^cd?*OQCwoNOp3ZY$19k5Yp5glyFWu8DC$9i42aPByw~E*fsk5U{}kMu-9kNe!gWd z#Xy+t?;(FQmN318wf##0XnNG&wfG!6CE&acWEUU^e6y!#*qNYd1GlQ89^|N32> zgC0O4P3h>|0n3;p8`u>|8>Ch3Zg)qsnYNZ*dnCWt6m6F_0s3OWd2xx?ip=pv$ zUDFabHJ5#RK4IQR4q&xa*#b|gOf#gegm)egwo9M_!BKGm!Nc+I3jFKJnRbL&5O8Rm z$Y=9_-;JfvA=egrfbT*Ot=+w~$3~;nnyT_uk`d50lz))Ad3riS+Rdf;{;ok?hQ_jR14 zwFfRdgo_A@KGhON*2Aviv)#a|+Y#rQaBsg4`?+N)lxe z?YZhy2Z#bwak`8;pLZ2odcvbnBuwD#Wz8O(zACu_!F*f8k(X@X5_Itj2v^@vFn-%lAcDYhG+Rww8rNp! zKJ72REzCxiymKg{E;L(dVw)n1(K9KMTejT5!2pjS%!ckwein}_@3>$PT}oe@goF%!oX$lzd-5uYX}7 zUoo+IW1|nJ(>REO)uj{T>E+H@RUanTb~Tu~*lS?H6xQdFg4rco<#ucRXQrO40~Ry* zZ5E)1_76`{+j;^&NhX3$^j?cc$RnL5_3|Q-S$;M(x&vsWTZDnDxfCGzcBqfX!;_WQ zZ#$+7eopV3)%WFb;3;bCj4n^dnT{kuXHWM0%2>(0`pMWI$sQdYO%bfZQ&M?xumw2` zp@XzU)frsp z?O;g^UN`pd_k00*E>XE{-lk3Nw(k)g@s)D$r_laYi`JvZ^PXYG=H{|1xwYzH*f-8q z9JVZ<`B*0r8IYmBn2BWKwo48OGl}k5sVi%<7+c{m^45cWV~}mn|F{s%O*#ULtzXh6 zc=tSx*;;pjG)5J*I)8j*d}ba#J}4OKnl7=B)$t zT~*zi@DMq(me+d>!_X&V8@B*^I#B+DdAisgXAwCclC@TK(~MyZ0})+}?!`gB&Ic3d zSHn2@Xs_FKeN9R@n3pDRI%G)nJ*(tRF)t84h@yG$;t`5(eO8Jy@bcvVcm|C6Dc`Sh z_M!*io>$$mj~Qz_Z2T1)h)D$lcH1dutY%bf^gQHlxAX;TzjmEH%cjBH%?*Ojl*V#> zL;Uu~C^x9OUs6RuK<8F2-EWT{x(Iqi&tz{K!WC$}$iIj5YZsf0maL~z1e6r&f{r5>5WW$UkTEgGtz z;LDgo!3eM<^RM#i3WJu{1oZa%XrfDUAmV-kU6UxeHCjfewau34d~b?7%l$Uh1h1&R zq-O_+l%-_yW2h=^8QEGR6Dxq8oqLT{H4{QgdfwxsNt>~x=qB$Cj{DQJw6i+Bb_#jD zqgT%E3jI6{C`b~!i9Cz0<7=Uriaa2n8BCRD2Rwz&YdMXmR({&+Lr8J}ml@pkk}PKM zvnIl;Wx~bc%t2LHY7u;zFzUwZA6FJj3=W5hBwlZEvn)4X&32y&h<;tHr<#ibWGP(R 
zRF<_Wl69`T%me*>M&>&n09(j20GA5-RQuc2y9rLIZx4jqu;~PRW(TVM(1)30BRhnwU5W7i z9iU#fnVxY=%b?%4K}dl*daX%bj4%~2ul%_k>KIDT)oAaKDfNG2rC*N=QDyLWyq8eP zmz(~!)}dZ$WZFhdH3yHtNpLCW>uA9` zgiIlWyAcIja9f>w~k;^ z`XfAkMhOFM%PzRNe^Y%x1fZD8y<-ME*FwX8$be~D5Zv{DEpK7L8fjIMogoenCF~J{ zZfyAaGWtnCmz{n?3%ST!bbRZR`p|K&vlQsv8~G5m-K^Pae`!6PftS|i8rc?H6*`S~ zcXsf!$N14nqb8SDd%2v-Vcw{?%w2Kc=VMM<=a+6>nfN}K40haG25CX@H7P31E9Dv< z)TrrohD)LTIAUAZ60#=49MR)Jeq0ULLc|f>ZqF$L+OONR;4X;6R64+te+j*EE;es` z)5djkwvcS;NW5#CMxwJTDC2;s|G7k&ouxgMnW6u;k$Jt3Yq5#n+OazagsqDPQQ){3 zm|h4*()6*W@PBGncvf+U!5W!^C0;GjsaeeNZ}6?dh1vg!&bLmoU38)Vj)BRt(n;o| zFz9Z6Bu>4{wq22#9Y-vnr0YSN>+ibud;HmqWwVC@{_{2WuvQU#Kg*cd(;8@+=ub#{ zyU%fCyas?Qn=ND0ca&f=lCQp>PoZ2UdjE*ZCtDgE=vT7tQ6VsF&l-CeYxqF*2do9a z+PiUHGAYiMgU-eNI1=M%DP@zbeL8~6$Fm&Z%j6XT<5msVj%(0MAY4}Aun(IR&&y7s z5U|5(DCW9vvs_HQ>AvZAejvH)r&?0b410ZhMSYs-L_ZWcfoO{KDYkkrKF?Qk07zl- zZo*6jhCi*^|hF$Km=gF%pBM{ z=`Z)QOM{Y{*&joI4YK93|Dtqe!@*8jae>!l zkfESFNMK>EJzvl}4KYC)aCU36om2N{yPQm&#t1p5=3)Pl;Imr>$Bs-aS{i){BBMbT z6$g70#7;8ub*#iF^cTE+>G5;L>(O;EA}`wUQYQ5XPc-@P(&0}cK=a85GR!rPDtO<` z&nA=(+&`R+IdpW4EgLhXM9brP%zIyos+L!^Ha9k>*Q_Z&fCf%U&|(Q8=brIm%TSr= z7z4SXq_>1EVQe!nT18Y)Y&F6_3*f-+jvE9GzBwiPQvzpQb6YrG8@z|wp_zCU?{?5^ z1+Xl*U5w=Xl7XXy@~$+w40lf0kt@v?bAq#2RF(fLJm7s@@!Rp_B(D{@moN!1k;s(;hfVX4HC2|7h&wz~q%A z?bTbtJ8J6=-bh|amu8LLE)qqU!7yV9l)^FGJ8zlt%zsWyBRq*j@koA~ju%tI^$^Qx zs>ixd+-RF-RyGm!RMz+7_>1G_S%Zmyl%18tlG!wjZRRU)Yw}-b#Rw}qwI3^&E9_Zj z*Dkrm98>#U#tc%&Y}EEpp8ZkyHAc&Vzsz7JhCCpGKbr><##6?rR zHtHh35FsQbyobAUw@wu)$;*Ys%negxGt?FIO(e}Trzr#s%XK(#4hCO1Ds_~|iW7zT z++7|96pP}P^%#sL7LGHsKrLw#Lu{%I-&;_~ZzK?6lb`q^RmYl%jmm{q`!x#21Nzn= zXtO_4cuI*1h7i2i8h=ZqlU-uZshsIMYK5d~wimDeRWks-am++o>ykwxmo5cZl~6h- z|2ewh3MN{T5+Kquw6XJ6scMyhD5RyYRh1RMggsn;^jPnJ60fGb_XE%_QSCj}y>uC+ zkH}QXe{d0Q{Z2t#ct(j898}6+8{@(-AvaKdqgSyKD}@!wce;@aS0fIFUGaS&&Z3TR zgP&-8Fi>=?*z*f^*!U~jfa^FhSK(wFIwrAjO#R*;@Q>*Ozq%v!-|MTQ04&E|UG&%} zB>0;7lpKz{-HEMv0P-|7e1$R<`B`KLv=j6W#i*_^>^E6XzCo~`aBj)x_}q6>z0eUvClD=D{P=pO8a9x>gU{A;)8O?5B(~f5kab(& 
z^uye>3YD7L%(?1B`&hdon6(jv9IOj2Dn@pZ+Z* z^#f(gCJ+X`VTx43%0xLJ1rb`5KD3BGXX;(bxAI0WH0JgSnjrmuKO&G!k_muXc@k+3 zft1_)wqy-;CB5pHBo=o_wUvt^4NH?(aQrA4;kkD$w9QAoX2|OolLHfWF)u3>zRde_ zj7<;~qq1M9ju9nkK}EhFWk2LhZ#S5biJ<{_ZY(;R#nF%|+KKtw-O;DDJ*QwiQ$2+d zph_$?e;2s#j*8ON9US#KdZ`w(W)6(bKO= zCCOT`Y_;XBv}joNWeq+h;beJ&g=Wggqfv*;xydHcp}x7^14+9%V1O3Y zs`nvA*_^J|rm1u0Y$h!HagLoiANW7BcOt-6SKb$htGIReeWf*+mr;-aK2xd=%A4Mn zMN&xn>1kOgl#2d!|4c)t&-U#a!B())(oz#=LY123N!Tb4>JTXUaC7PycR)9or`6rU zZ~cCY$VRbhyzaIrIR-N@4D_FRDOAgD7g%9)^jUI-|3G01CL^4trQV)Lf%eyg{cz^c zA!WK`*Tv;KMRw-soz*WFj2+38cYD0&lkw|3>2}q%M@1H_1a|E^vgm8T?r#=Uq5Vq= zs#5K+tRYfWHZ0qLQ%)_-NoT?LyIe0ReHrLMF;?IL!B)YuOqiq*NJRv=&nEk2+m1W8A?ocWTZ`2ZF1&$U940f zmsr5~j=X2SeL^R{{(`lx%a(pikotlsnS1M9#}A?#z#v;T%pDRdXS*}Db-NU>%~e_=8a9eeO#{Wc8auy)FE+|7-ftLy26z4SE_0< zwmt$3aAq8K1kHp0kSy{jpA!8kQD2ACKVS0J^MNHXCJ-Gvq^FU1YqKTz5a(?bFEuh)br#8+;O|pyG(aBtno!UZSgpyJ!}B!;r{;#p6%%`d z2EzhhbBk*GProOUrR!roB69ar6COnu#mLB_Q659(hZ|i_&&<7rpV!Xd=ssvX^_kR)tZ zL-JJOWG|=}FlK7{R&sJ7sCa}K3pRhjoEd8e)|LUeH+xwA(`e`^%k{_EdFe>9kiPk9 zG+thdIJ89)JVp$1-xK%``$RoHZQ@xFR0VgfO6AMI z(mbb)t^P8{3mo8{3jmA1*TMh(Gd(MupI>qq7^hAiLmA`f8~i;Zub3d7nu%^b2&2XX zk1N2gJkCGe8IOL>OCN8t0;H3vy`*}wgg#w4IzzK=?PaJuGr9JgL_rC_5WWM-9X%m8 z2;KCSY3HhR3*n~rRF+7IfHiyF*8iMZL_<#wsXnFD>aRf}Zfe!_ItuNSS981+%Q}*e z+xe9GdPdhK>PfoYgQM^55C>R9eod~M zDlfqgd}F#h4FdGR+}d(JHnY|zA`YHIg_Tflyp#}wfdPyX_4fkWsPON5L)62*zQ1}H z3-N{#d2t@nDS~R7OqkYJh?W#af!X{P@+(KM<3n*Z7Rc>L5tWZ$@$V5TD8LvxN8{(sO(Q z?CO@D`}?N}&>tpZW*J7ti~r^ZIEp%2G;pS(s}J9?K(aeqt{bcbN^#(JsqFpX9%;yB zGj_25qdF^aeb877^?JOMAsN5dhD0As%2=afOQ5Ba6l{xzatk+C?(}>iQ8evd-w?(? zf-`c9PRIa}`E@*;8=--MW9o5m!K9R7dpZ67mL~qn2q6h|dOpcY5FdvVDV`@_o4*hu z#8ecc&3J%N=@rVT*}}3|{M*5hI1@)hmPd<*d(@C5S>7NyZ_kkJzb4rJqbEK2^>n>0 z@Ton|$|eN|s_{x*>wn*}4nuXozIL0u(i5~+*5Hjgqnguy@QSE1AP=DYm|&$d z$B~ub=L6$DEc;94kY}$Lflyp`A+DcKS6QJcm)vOpF5^hH$fSFL7r5Y9eARf;w{Ix! 
zz1-g$D4jOiJU7JmpjjN!@a)+azhpT1$j-!=eQ$9fv zz?+quK?LsdsdZ3VjZ_=9vV=6Rkx-eo$AyGsk_%{PjRpKh+gE{ zpC4V%zGCo@rMy=mfYLzYuOJIqq)g3sO9utuG z^bOR-)GdZG$4hJxIoB?I&o&U$-uE6U$0(fKW79GEufv2{9h;v46(NjF5m*lQq5r+_ z20or+U0dne$9p60qwv>g!w&<28L?IMEet!koqXqeNh%y-**2+gQ(sL0*zrK8>?fYp z_Z&_8`Jx92PfHD1Dfy^VSeAht7CH)XXHmf0RO~4L&eq91lId`m9?TBo+(+Y{gt_De zH|Dy?C?Ie8o6Ybm$1CEQwC)s(!!CF$%7(|t@=$40eDC1}pSFH2HXu7ad@40T%Qj_LC{q#&)aUFifK|!Klh@DGTt^3th3mr`;C7P(%eOEjFw5fVZrm2PBZ5X>>w9}r2MJ+x1tlpw zVUv!wofbuXDEXz0n+NXkJzlima@=!nlL{E2Z4?%7XS?C7G_Ti1za0~|-g73nfEbhF zlq)ekF{U6A4pfP>E|urvEeWj$hK%O7RJUG`Ogs72=IKsa7{0vg8GOhrJ_UWB2ZG*^pLSX zSm02^vP~E$?hmdA5^Bf`p{O@SR-*ar9Z=BGQ|x0?LFgLxG%^L^>&UJ`_YQ_%>4Tl9 z)N{Cht$!Auzs#xFvXZCmZlNg`@xJEV98%AqgD!s{#b}WXTAdl??;@kahRg$SDZ$wz z@UOa39op$fy>kiIWIfp{e;4|F`GUL(ivwM~dZyTsu0+QG=z2TQqd2Q)2yo+7mUiwY zw9uI>UAgb*6SQ@oLkK;P!5w@#ONOT_H9Kv!pmZZ%4fE6qglbc!%YmWOowBFIG^Xg6 zw_+{F|6U(*+-n!_1W@pmZv%gpd+@FPy3b6{3}YLwsh-Df)5RoD)Pu0r@*7>@M&*z| zndu!tQi>Y)2JqSLJ(x~h=&YWMC;?Z%A7Ud>{_@gJ0{%ZZLiS}bSlIWT{dgh)Sf0(X zYcx01eBB}a;dgvq&EB|kaD5&l!H2<)Rq36AE8d@ACA7+xtG&{Q47=M7Qv+dL$5Wti zhH1Uj%gq@EPB4JV;5Q_>FuJs0eHCmy%ER!JC49m1x)`~&>)5ry?^a?r(e13U1C)jF z`Lw4hTTs;r;MpvIq=47f(imxGcw!66`15ZZg3UfNz1$yfP?ip0TH)7 z6I~?z<4PX=2dE<$3rp#ZO7Xt62`iQ_=_^)m_ezac{)TTwm3!Y+q*faVWiF)W&B- z&DvYl%-&w#>A<)`G3M%Tj_^x8Cqi*o*{Z>OrY#dzewR{V?97mW7KQ=XghP&aGYg=v znNK+y4rL>cg7^d7P4X_511na@I#CUrQ9*sQ5z6alQ(SH#v905l>wNc4IdaMf>5)t! 
zo2iPwjUw0)X%JS=WiagTyWSsf=oPq266XHXyoK}6hl|rNXP7Z_wx^R4TU)a3zKjp} zMVjg}PWUM8)h%}NtY~qys@lsn?ldY+vzn+z$VYo$PFxdy9mN6*afq+K<18GAxkIMo zMPQ@G$T0GRltw;Jlk0(ch#@dct|>w-!V6#hjS+MgEWmc|=g@GDTaDV^lwcdkg&} z4E87DqOPvD_FvU9HOU*r+OjTDQQNo;>yQ`nx^%eqe)#ffj zz)e(l#}1&2Qx~6Mr_^3-7BwTtN!8$?K)DNQr_6 zL#mII3-o5qlnTC~B-`d@YC87pk~zkPtXKMow2z5lm8l|o695rKax^FB_83CisXG|9zDwx3!Hc>!mrJMH z3;h-Gy`}+!Jsc^tyIqy}?fRXu42Iif64Q+Vmx#h=(n-|MX{(laA;xbm4F8qkSRezG z(P9It4%|E;%QpgaergvMf=NK8VaN{`Bv;mn2?oVK7!>~H9qF&@t7`bUfnGQeya8c1 zB9?2T&TgY6lGz?sxlQq|5_Q=EKTqMnb~@MTkBCqo`lv+BdJ(6COK~?F5w>F}ojGU( zKoTx%=?LkQb40^j z2y2B~efLA)OBHTZI+;*u5{MJ>gcT4Tz5JZ|%Fnr?JEI+~7l0Q|U)W)fVIw**xd}ul z9<>WaLTAAQEY&T?3aol;`wodKRq!nvVp>l|JJTE^G^2?A*mT|7hk93ET9Hh-EIMv> zamSvV>j`d><6keT&nz)tk7*-A(1cdTxUlo;DbmSW9lcm7i!kmi6kz5+?Y@yXwG5}r zc(Ad~4D$$=2tGtW;AzYjKM-TiEC#?2JDqP~?0rS;Q1*hJDeA64pdIde&2RtRD!OPZ z!24c|{Dqe7>W{`|HWfBTa7R?iYz@G6G1+6m@HQ=RO+FyURwbBqO?~4=3(S565bPKs z%5rXN$!~#sT$C8-pYmB>{C*A_;KRP%=dZCLdG2H*a?irIGS1~f786uIij z7c-%0;&AmgmOFy2&=D-=BEzVkTzvw2?e&cqTO6js83Uv)Bo@&&*>BjKB{OJADE8@4S4*%pioh!!Y8 zgpx$RJqOkCex5`g3Z6v~O=(iV5kf1=fEnKXjM&lf8+jx=KdV_0#2s*$!%@sR0Ba&6 zF&YvV&QBENZQqWQ@c`_sFxjz{4gJ{RfE_$TgZm(}x#FA-kBqTyI)C^CKjCS?!H!cYEytA^wmeO_#agkDwI76&k3jvhY4*RJd`z-2;ykZ`i+sA-Moil1KrUd?rEnPv28MlYMTPZhC>$rZw0S10(h%8EJ zQ>#!v$#$JwF?{3RGM|%V-$%e~-W3hpe!4MAG-{Qt0nRkw>5Xe_ME2GG|(57QV zqD%y7Vli%!%J%L=V0?m6t0mDyDZp@Qe4KAzt>+_)Py&R{!y>>EK-by1`(7}x9Jse{ zQR@kU5jpaJb}g|T0GRm`C{8B);^dl>6{zHO!PpsP`v>OWDaQoUp+H;-Hb{S6ocA-e z#Tz6Og8U$Q$o+WSoGqpn19MyNp+(*;+|VZlT|*48M2w380NGh-7a!De;0Zg*)<*nt zx?i&10*ZBG-BP}`y5@bZO4>f0 z|7ln~UqQcrJ{Q5p%Y543uLMm&Krq%rp$bM!SIJ2=U#6jNLdl|)HX!zsjXpTM#B}yP z(FDa7S}z6F&h(QEf`-AdvG(-=9DZ)|0p{AIef;u$iqw@H0JE`jj8x2P(e&F|C)h?n(r; zy{EhePv*+#2G~;O=PtkDp9KWQ8y3)S7Pwm=+$@VXcSI~(NGP@u>BGqQOD8+$b0_HC zqHInmUbZV#nrsDUmUnh2Ug0;24o?f-4&&GKWzNudM++wvEN>SlSvsFe$d!y*U>8zP zd2zXAP(^g3*eYqjB}AWO1qXQphtcK?`BwgdD{R(3JX4DSE`YV}i@_K;byB%#IV4#+fck8VuMYMxnkekt`nYz>%ES32?pji#mai@D%Np1X@H$^&5WL{gR9FUo 
z7aGOC2N8Vle87A8(3@yBmnJN}U_cnfpkc0xg0E_f@#KenlS%!4v5hFUKYOJufz(wa zh!i(2y^c2$jG^2?d`C?lX~iYerWYXgNM4)GX>$itPl19R?I^an0scUQO?Vwwg!j2d zI;f-NPPldPXig}jrH>84X?dFr0POUIh`}lb<8qQVV#psT5-0%DQE>mnqM>!drfW#~ z5*gMzw0eAYZ`xI51a-4D-jzGgFJmpHo9AcAfcsw6oRqVZjpB8O1@E^teBDhEV0{eE zCE0=vbyavG2(fNm>a}rkawCv^jBT8MWkCtzW9Pd%g<=_2zcp&x$IwKNQq=fxet!6eZF%?zSn}mf}yqEjHxictU~d9 zyHeW?4^q(UWGTt9H06bPY_y=Q)7S%vk-jA=@&lOs&r$OrQBz;r2toKc5 z{Ztr?`*N)I#u(#2De3Y+KT$Ue@pzg}yVIBZ1g2 z@qh>gpCLr!2b&7+gZ{FKyK8eO^rc>>fBxaHRNqh61tIEQ>Ics&>9q+O+}QX8L?`L) z_mK~eGusyE9$lC#&oMxH@T~FT%IjCLH!ZzxwQi-Zx*-^l=gf&`$;E@9^|G_Bxv1H( zUM0}b#HG3IG_d&12fc|e6sUC=eOg})+EubC0~fVu*g%CEy^Z3TX)$!o4ni?qF4b5Z zepGfu!P2d22nQATfZ9-g!1$uUZ833GBxe{|P2$%95x(lnI^NrwLTf?jp2(u()K`Ek zzkxXtqgQIS{i|BATE*rSclv>1r!i;_WOg18LD5+Xtb71QxZU~orSZx)`JiJuNHfil ztQxc~NKote2Neo}>^Rj%cwEg4MJ~v!ztIfBJ3Bi!okxfjL@74Kvr7ME897qvZ4?IB z@BG5d9lQUAH0R{8CgK1Q&|VVUHP~kw9e{QJFwq7WdtrjO9FF2BnPG^}<0Qm66`@zs zsQE3NaqxcS+|=HoHGnwcZCLSNLh%eo9bl;9|MwKY8@C6>H$z%dxdGHHt6=K;J5x&V zblGJHSSG#g@8mM) zt$#2}#)~nB4)pN>{Jj6<5dA)8`topSXW2|II=ljY+2Y#+6dK|6Ul7U2G0~**1zq#^ zJ^Xdw`2&hScvfN&Sk#ZD-VCPi-2f~DsGs32s3bt;fk;s2=n2AAPuo6Lae^2JUGuDy zh6xcsjpisiy1}9w{C>e{UJu9xBS*s};TuzJXA%iCawE zs^@t2-|RJylt^CWNwwTNUuctFD|h@o`K0@^ z0!3W2;^TjTNHsYjDH*W>o}8FVb~JfPEIHFUm=pYlsRd?-smrAtxZ%3lX|3^UO`5S< zmq>r`_##*;!9aq+I#)YIsvY|cvfGE@7toYex>jmum=u6OQ)9iJJ*CwL`L=3~Otm$- zfw>i`gn7a;Q0^z#L7EXFO7s~@lvPA=#qoQ~srb3;7l%e6L zeq_%DGAVn@-dm<1{GP9pd+!=bF1+-?hpdu$7nGTl<}UwRWiEWF9Tkj zL_b?5AJNT1BT6XuzIlc0H zJs-0tz4`mi+Lvhe+>QNE4BDQZ?dGeFE)w}@7%r*_Eyq|K+|f1t|JCBDq?2nDj6+>g zK?4A65&;0<{C`?JQ)?6V|Acre8d^$Q;%L64|3!bgCj*LvDB`B2rsJM&5b49HhUC_! 
ztS9Ztjmx80#4S}TtnK8#U%5`5F2Gyqn;sh|fE8ccpW3~=JDxM^4e2x{-aIPPaJa9& zfBR&t2ozVsZ2{4aXpz#O1)n@>+~Op9!{4jOkDhNUR__LN5BOipAKNz#bLeK*)n3~d zFI?A48y^p9JXYiz`dDYyq$4dbiePcb6C6_`!U1@{2{a?wAp)bsafjLy?%ZnGBaxsQ z&$8l)@sZ@Y)DHH!BHD~BC(;fXr_$2rCr)>fKBt&z0)w=tN5pU&8aR*CXT$jZzBn^w z+=fRa4cUdLK1QHisQ)dcr{fm@qtd7$ zl}D~>oCI1H!$&YBaw9RREwRY8Pi?Y^t*1A*&yqY26JhdA{anHIX6wMN%inT-Je)l| zma)D6AeJpD^;7BEQ&G#Bh`|=>nK6wmAy<>a?Q*7$==zf8AC^1XDhxV}f-7joan6Ti5p6jBxj$2(M{>I2<4D84(NwK;f+Cfidl# zHSvQ%L7Tj(q=R_O$}V>CEufb{_Hl(0rV>j0!?&UDSDM0{>v_T?!#lHoTbX4CT+oPj zTs1}O6Y}we^F7l|-u*yEI1DR6kzyhqNGGedxo)hn%KbPJ4rWU&f+WlhV=V1d7dfqR zSB<~CoCvt?by?x!uz5}3zu7mnQj;&c7rT?&naxV z7Oy>JT*`ee6_DkfwE2lO1v%!_t7F0-+opZzvDdh46p0^BpldYV;Dt@h=CYCz55m@U zl94?fPGBXL)aZ(=aRZXuyPs$^?{|ckrz-3Qk%Q0E7fz7A^}_Fz-}>E}Srj zock!f!46w%lV(gt!H0IiK3iTHTdA=DCKZ+nV>{yn|DUld zN4G1B>VlgJB@=f7bfWOmFExOUV_ziaKm$$s&u#$s0muDoYhes?;g0C138x-)uyypy zAl}+>8lgC59Xs`Yntj57ECM2OxDuf#0$*;RC>ES^EItz~LWhrKG}qDdUdj&s<9^@X znDtf0EB7X!IFVmO3PHt7G4#9hZ*=G;6~TPN9f^5qg9V=@*>qO(06Q@eQCW@t~4Ht zs3+GQ)i|#mFK~i3n@1}mnwb8;s*U2tK|bl71d;OAQ$pB=;Qdy%RZ zpRC9F_oqAiWNkuVn391On|DWkd(T*J#V(RUh|&O$rScDstZ6-gKu>g5r-8>SDO!_+ zZStlmd6YC=G=ZFW>~G^;K@bm*9Z$O)_MSIhDEAyq8tid4mq<+7A&yv773h1f%jS*C zmmVisO<3g=Um-+}{kDV$8Z9Jdo7E$WuiAK3Q49qlrURwk()k*N4{Ugr)-Ngyc9x&! 
z!4$SH(`iQc3RUrq{)zO1YUe&^F$$?2k`Qo{gO%et-=_7Qe%7IO4aV$rn=?X%^P0jG z!qvim=(03h1}FZJs}&rE+}eE#ot$yB8is4h(12v6ZVU3G22zz-H~&iJ%o5`Sf_eGH zKo3k!+*+`mke|M!v*5QqR^^AFxaR7Vweg=d(-GPd$v-{=I93bK@{aJf5qFnI3;G*A zXs9YlBUow7;|o!JTyFCprDi-c_2&?qR_3WcKY zY=uL6p>SKv^%NB~j2{ZGb<($$l5DEx3YkL+HN?6Q3Zz~M6LrgQFGMZbrwUu0J~{@_ zd-^qWO4#2JF`;(TT~oWuHw--P+#brM9r0KS=b1y(94{R3f9oR%t3=HAEO#B!OE#3{ zDwt|Xf*xXp&TwDOc(hA{Oht)hh66TH2!i+#WCz-`^}CkF?!E4kM+>;_Fqhm!XAkf8 ze#%8`i_b2ySuMRTUNwX_(S*pUoim$NQ2=&|nU4C@qrCEOwz2|enBC>VsZ6CUpiSNE zkI-DbuDxxlNf>khG^$gSSU7KH0hxsSD{xVK`~A;DSQh6raWW|qvY?S^IIycdi~Qcu z%Ir0dZ+^pK5{*72t>EMOtWv340>(klf2hPQQNO)#{lovgkGf*t3`hx$VnL5ZP4U_` z&L*%6VY{nGIC~w6L(>yG5MC$KAi5?i@sQp>jh?$`Gy2pcOvJV7j3|1F)+59=)uF7W zwv$v789<1R9LJY8wvcIEr(`|-f@P3!F=zZ-8eEzsk*6q8B%jp+mh%VmJuBg6@4I1n z?4EQWaNA=>9V zSbAa;Lp5^6vc<6st*67hA`&LH_wNRFu#JM}6R1R>iikviCIf-(36qB_j1=F8Z`SbS zi<7|Rt)NYE^I;s7g;&TFF%t2#^%h^#8f)JS7tJcCdb16NA1lGZRp&AJ>&hra7E_B5p$U18a6`@!#Z}TS=#}cKGMcmK+lQ0Cvq~4@zbHW#fX3Oiz zZtT;5Gd*=Gye5^B!k9}lZB=H&N983B(>M>7hQQ@2ss8fHTp@+$zqRn3%kW8a$U&l2 zsbq4Cx9yT!3SQ2m-cud|F@TVJ4{Mno?HZ3~*wFJR89Q%-xPaB<=8l{e4jJ8J3) z8uloP@SI(yfqAGjX1T$<*yto%aB4DliGp`9wflr*IH_4U2q6+fRmBrC zM$`lmnX~rYelj`J3*4-P6tC1e#(njng6+008NcG7e)WJ`R)-mnqW`jku3()YLjLmX-PUMFpG9l4?D+Ch|D1B)y!>A85z~2VbH$K;0&$V7d+8QwHxtpij5gr#l3|E~h!L(F5@^o$(qJeq{lihE4OqTvA zQR@EyWk8z0EICVfw2E0crynn`ZrDp6@W}Dm^=9gOE*p3*4`NQ^9he_m<0YT5=_3uf zKq|(P3U(0!W+(B&;5WuS=#SVPk7D@EUJwd7k;K|$VZ<6vg2YEGT;o6!D1KzVlVoG9 zDi&lboiQ&U%*${Mi&{DfF7w{=eKzH66Z84TZxRlGW^enGSHqhLYhS%*Z`-3$`)cz3 zH-Ju-A@uS)E^v9PweJCHSX$%+$sY>&HW7=!e0AmdX;1o4+w{&Vn#$viryb#Yw`Q(iN=i2d>p*zn%h^6MXbR z0^pc^D@!gk84T$&1s|lb=q1J~?f&n{nI9SwZirTWtf!M<9(b zO4xM$cziUEz{0Fe0{mgJ{qL%kKj3dWUd>e3cs1)$BL`~6mtA`}u{+^v?fE=KwT6Xp zZNMZ9{rE<-J_81?Ry;@q%V41#*@YIjaz3I|y=eB++KE6#+@%vNc&zZweg2?Pczkr{ zI<^Nsfwfq@*X>U(huxn2YA~r|r|Ua$OcY*DY(a8o8G0^nz^bj}am{1vk_Q{}?D(iQ z^=;poavv})cw&3Ah9Q;?ly2d_qhu;*~@wxrit*t zHJ=9!ZJ3~<7JI?M=XN+v@qFM9WDCf@fawzf68>O?QyUq={sy#lhJ!(`GwBbn>bEpc zZ4rf=^^T1L;XW1 
z5c4}UenAYZ4_8K#H*tNaT-OtO3?6|C))SD2eJ5vP3J^Q0g>S)sH>;rG_}(IDO`;7q z^%4KYUgSolt4)zuQFy;ST8Ber$XmS{lZY1LnjhBw!B6DdSp z^T?nC7`U$$!D`|t(L44NHA&X9-`S~ThkqbV+cpPHn1H4A1`; zRL$? z%Hhf3UraE~QWWmi^r>{mu#NSkaMN80Az6!193K^zxxJL%*-3t5P`0b>2-i-){oPx_ z8bPu>0RiK+<8tZtm~lNCUiTnq>5UE19q|PiRoefyf7Knnm3`T+U=X#kS;TM1{!ewe zQP`cqaNO(GKYz(SqDo zN``AES&}b3QP2=Xty8e3p}}R-fR&9CaP}626b?k<-0uZ$I0K`^E$pPkz%1ATyreOQ z>>-R=C)UqDXQn}7Zj>mhE!5I5*^kUPGqMo^WjUpCOcQ=Wx{&$mL&&M120Xm;;KeyQ zD#=j5O;)5J$2mdm1rdw1dVVhG8~Ppo2%DX!?#yenw4S8 zX;x81=P8fsz?ZsMXvRmvat=YeyTml=DokSmwXkwo@6kQsWoD}f2tW+e)G^|?(qN!u zk$}MF(l!ifw8n05rNGG+u;yIDgr#C4fP)oFGN%w0nQ3d0q!nhbp5tqRDuqMR?>W{E zAgB0fC(DRKhUY^75yQsFql{4;_P@M~c$#y7Z0Kp8K^stRmAhj^L1jeH*qX(P|GA)pFMw0V3@^i0x!Wh zXZ2h}e$UUrT!Gc&a1*(lvOe6-P&bTx8fak5zVO2-#3BtD5V>J~)6u|-`%W40BI_)n zo;I|9#Gss>yW+_*oXPD|8$de^v`qMVy=)#$txKC!Ww^i86ag zaq=j#H-W3@nn}N;>TU)yWA8)$5o=4k=?}bEP^AdAF-OT$-DCo5lVN<& z)RI&YPD0iPY0e$DMYsHcMk_;n5XOU8IwEH+4Fjb_*pqjL4ay9$kD{53;!c)S;ci6< z)m?0@Hc4jw!oVeiOKco%nVP6>(qi>N??TiK@?Kf~mB#kM56LN2fMQ5Z?Y!!BUY`%& zQNaXrcSV{2BWe|D^q4Fg(e2?FwLvubY6@9(i}H%HFh*k@@t5Y;8fE$gXXAuf+X=iCRSRQNn;gOS&3xxNa#~d` z(~RxH^TBXCj=SW_rfkS1y~MW3q%`|S2g+3r%UZeJ9;G!DJ+ziN%9*t&7{Z@4OZGdA zX4=oi#%0kPxvjF7i;YNbyE6MG2TRLk`xYR3S8b`bYw3=y-CAl@g=>QhO?w4>;x)9U=arKKz&Py-ep-lj%fZu7r%k z98WTh2y*=O(h)2CaJyyCZ^)swl|Mtv{*N$hX<^z9lI^mYkv9#*W9-=b_PC3Z9SWP% zXj2}kT#M7q0<(&7aOz;L+c)DLd6ule6LB6`UYum)j4a9= z*mgfes=In+X^#K3fwYqs9bsD>2538wTyJ|eE38#hilJl<$t`68mCRN%xkKVAZD%$! 
z%Hv+#`>@v(-7^mS@zyxcHjtfoYs1{G!}YHwz0p;BAS-bHpatYIiZuj#4cCvGu6d@J z5XFW#iB!kTbu?U)0!kBIzz1!`WWddQ94X_-+stkA2kt^3DU8F-MXWogQ*fmLh6OBW zm^z5WQ-s_bsj#2esm6v+^^5*hfBZ_-Fmnnp;;ZnEe*+ze2+W|Ji@YZXde2H!?Bu@J;cZYucfZ?BxdS@1}7Sg!A_ zr=b&}!qtDEGbCoLc(mX)#CPn|$>&d}pX=&iGE24=Jb`?I_a2h$8is4_I5PmXk!J>K zR+{xk=whEf{TaYae+Hl7pL8 zctN$1->I)ju`4JHrcR1gC!5lpg&I7>0a1Jkl4mgJX;%~B2S41N05mLff@{z z=F2rTbJO^v14-9{ z3W098#v64>$Wr{4vCEIC>sk>{{nw?lE_F*_Eo<8)K-)-1ipxj`j1+esw2^vvw{xV7 zx~SZZtj9K$MQ32KN)TZ`Sn+t-2&Xx36;5@(M6nK};OV`j%ij2vC=E*2o?lm$3(xuR z-7NVrC0veE4!zoS1B#4wLXJC}cdeFjgSGV8u&?0POC7t~TNMjk6#$6pfvKvm`h}6& zoooAHxF)}v1E-^SB}g~)!ziYD4@{_uS5zbzUy;=xc{8y-jJlHMFA7*?oOkZFbJ#6h6eHb`w(u@jjHOQEOQWDM<}TQRPuK}tP=-Rm z?0RC0EA9#VSo(0#jxWHEbH(g+>ZJjuO@dY-5w;(cG;}Hne#T&-cIOJkVeNBQ%BP-O5?2a~bX0q}+G6fOK0rKGM>oEy%3665HYm zbc^jjfX<(~q(irkr3D??)ttf+SG<|dyhv4aFZzR?eK8z#dn3K9n_X8|*AytqCGD)z z)^D$bYX`DIRDJ9yGn~Ve^)6P=3=E?xZAP)vK?VB7{Pl_NttH~`N-f!w=I3dZxF~GP zmLwtcn7Xo>33`=XBo{Lvk{o(3hSV`}AVm_#2Bqq1?qYS$<(I5OT2ARQjl_&1&SWruf5?#y^pcB@DoA4=<@wvwwx0wVb!cp&X_@<|DjD(#4-IGGsl>EgE!gx5v!TWY1U#E04Sx&l$XY z7xkU(y~5nT-kRm^rbl*XDsDYMpR#LLQcr})=#XFT9-0EBZ=5{RJM>ZbuB_j!H$@bQ zT2xS`(-Wez%JT?r0S_5V!~Viu?e827l-$7w71!^kQ(LYGR{d11 ztj_jC+m>6sf+@*LQ{YMqFIZo?krK~we1zyC8xP^p@N~mFmi1pR0CP_|=@tANuOW&% zVjXf~(Nyt_D*wO;GLZ}$F}O=-%(4D>S@_NFsQ(6{j{MUfykj9wrDK^%luR<>h)0NT z5?(rqyUZr7oNxU8YSMmZ4=*kXb2{s_|5!R}A?_*Ttl0XxZ`xO#)Hi7$F+Dxf-kF)@ z5b4WI(8~*$rjr7Q$4iYW8pLoaPRawtQuMo3L;%UmuhNd=$dwP_(SRqtclk^AQ^CinkJBDk+6zoHj^o`!FDDiWHQnAU@*x_pc`OSn!KJI=1KDoY*8P zzC`ltuQISG*{Bpw6)=!9w%>GygIs(>?mi14@kNcec#(D#U4I6W1v7_Uq*5=7y4`x- z4emfHjturA6$cFkyxJMVp?3h)=?5xUdW)-8Z~~o8bjQCwlXVeIc`B? 
z-!GqqBtb5Vth83!tInD#?+caWN<;rK_FxkfzI+x2T$N9ckE%n(J?SilkQwpeG`sdD zcTfrlVJ;Z~id{`7{;+R~eB)pXv2TY9j?0Tt3X)C_y|RFy?8{j8A04|BVlC<5k7WJj z{@cHeC8D@%Cs}51&S7Z@wv592tnn`wZ+pjiDEXnv7CW)%qg3Das**Q9;EP2vDw|h- z{z&daes16BjYh*_odU!$b!N60D?Xuvg2{!cc`S6(d<#hsmf%pQD&-6NJzLj!a?|Y( zFS`RVqrd$*x&z6O-?@`H`->tWwWoekYT>NZ2c@!)Nns=Eik>8td@Ly)D{SicD3=F5 zZHisr1Ux{*L5K(iEjzyXm*W)v^Njt`q3e7MgrORSnRY&=5hl+!n)$NYw~ZZbotm zq}GEkK!Mr|9TG!2hzZ!d2ZjWE0uy3?GI1xmf-4@7Yv7d)iZw{aXrTEPZI5?;u^z}6 zJ0C*S_5(vED9vIlvc#NclB!Ql=f#C?1Rl-n8l;^Q&AmB}; z#0=G)mtPL+Nuezjfcw=}NlOVW>tO~pmP8DZfG2u*(E#5Ufb0P*xd^TiGvFY35OmCO zVxD@50p zuwg)utD9?(tDm!LFj|q21J)Fuk&|177$*h*P)h>@6aWAK2mnQXK3Xc%n1Jd<003k4 z001HY003!kUvpz+Ze?F~VQh9~b8mHWV`VRCZE$aLbYF9AWG--dygTV~+eVYWsVct1 ze9%`#B7tDZc2X&NwFxCrHa8Nj=*Y1$i!}s>A|en21E3_vl_%RD`$+o=+ud^j;3au? zYg36s%=Gm1^xZw<|N4*r+-2-ori(1Pn-;7cxa{@*!CQ7MXRH@K@+{0*Ad`Xz#ZP%5 z_$(6-k$CiFcGuk5g@4&D{=<%X@B0Jx{^E?C^^bdls~$T#`*{4bn$?i(dK%>{&E#Fi zXAFK$G9g$lCk5;y-m!(8vw$Zo6JeAWSu~y(f<*=6N%%@;gkdJbXtKb;c@m0@6;r_q zk7xQr(1?(&eM3M_y&tZQY&8H${;{`1_MJlc|73@?3W?n=x zdB;Qq^BH>(Sq|UqHKEWOX{;WT8Ef+biO-l!aiI$oFIdcra;?9K1?5RY7A1svDpNSr zloxo+$0&~3Sg?66CiB=M901LJ>tBEPcyrCVgFo1B-OJ1F;QEhu09{Na%!&u0a7D8; zjsP_rE#pbC0O|?FdGGT01FY#D_0Rg(f8Zfb``3fs)fGGac*(l#qI-GWKfXEZUb2gu z%Zra!J)f~FfrKeeZe}!_0JfO~UWTIJQJmYu{{aHdfxtLqQ~n@8(1C~^fIJR5mM&f@ z!&oMFbW~PUxcQDn6PC!rV~<$`ic`p~(p9YNCxP#=?+yTrC-*T(;HrQHfaElqzy_zW zl$pnlWM04$cHU+CuMZCP_YU6dA9(EMs_WORpf@<#Yy-_5kcP|(Hcl5F8_z^O^%!g$ z%9#h}k7E%aVR>_BlF1oDF}*;hl|i4Usb9dlJf?%>1}eQ1MIoMw3MjB9wexVYD7m|U zA(0h8!Li-496ORSF28!od}h8h))_CR^CF5Z)?enR=LvF@_2#k2G@xID`o@f!0T2QQ zV-_(Q5LmJZM>(>65{oBfRS@$$XIHbZPw*mZ<9UijDsulL5X1|V%bC2p6Pep-?li{n zC<+rk6YP*VYLih8N+bMy7CH(b=3^1VJTMA?+Zs${qarF|;kXJa6nT(EWXbDcQv<2Y zJqM^St((Eg$Ck%h7ni-O-gWD5fPjBL2QWF1321Hu=&&E~xyv+bBax$dTYof(V!_6l ze9V!V4g5@j>#VaR!8V@u&^bl$3jlCDRv}$a*wwTHTBbS(;GxZ~&QD(bdfk0>a{l|P zUycbJ@XYvAERw^6{nu|1>4eYY;?RK!ed5C}tq=W^lir~9x2s9aS!az#wNYY+tD-~N z&cOBcVlfj*u?cY)fr|j}#p>ZVl9^<#_cVmiBK@Q{R2#KNKAP3};X9=jVX_h&HjF^yqjF-f6~C 
zt4_!{*-fI^+B2LXt#dtp7l?<*S(gvVdVplBXFj;&odAjzw0T?-HVIf;AA6Sby~;WKEH%>(EDxF zy*%$;-&}5{I1vzvXO6pl-e6X!jW7(=D35vCeTwq6@^EfahLetFCVvvS6=>_-RrkJLXVm1S@W}Tn|QnxdEA84*DMlI9bL}fN&g_ zmD;D)ZzWgnTGEVcd^ynwU8fr4_;CR8Yn><}S{_mj8gqI1@wY4Z>Sh?7%<$F4haQT% zb(KQ4ENHlL05kop71R%bK1@uQcoG35$L(Ci6OSqp$Z#m&H_}XuuY1Vg7c=b{W|Kdz?nWlo0?X24aHgbQ!tF;*KX456E~#A$X+uiKOnU zjWFWd901bWTbcX#+y7HW331i8LDoY+AGO3~W*x2}DB56%Id=KU1RRLn+5tp7B-z-k zWfWruWWM}ZK0+!-B544s3X~5#%`vHOxBSmR%Vjd#JRP-^3T$Hl%|P@De>rX&M4pm? zrzXpQ>8o(LI$p+`)j91NATrNU(w_s$0o2(VRdvLI8FJ5s$c#?3tcWykByPXig3AwBGsBZE#SjIfm zc|9qp54H_uT-$hpG0z1Wy`|W7Rc5I-vEHwAg;gg8u^_utbgb+i$XObTVrAJZzmnNp{vdzHTW#LW1$4zyY3d zqWyYX2OEXpX#s1W7Jn%>+fZIsj?q_Y+*(RtNrBZ+8#W7pWeN71^4tBPS06YS zx-N6g`oAoKzdVBp5!Khc9%}x7y+5;&;&Y(F1byVQ(C3zAa9WN`E7_k3k@z^Am&Syj zR6e2=wSMhGj``-KW$b`v^OOg|P~iu0l#Z0j;>p++5Rb(84SUUunpF=<^zkHI8C)f2 z1uPSHz`{~Hk!f#2#XF@FB5Z3r`^r2*Co|2R=M40!fV>j?7iIsX+wGdB>+Lg9%(H}j zfr@$1zxvQS!S035zoO(p-Y=zJ9`zS>7Pj77RuU~`Qx7;Z;xL7as*HM!l2AU5W<23{ zpiyMq+JwW9Hg;=7Ufv7_{lWXu`Nxy)S%p&2*?A@~j8j{TlB!>I>7(&EAf)Z;0F-8ESI!>m5>86r zD959Q7&r9h)D0GSk(jj-G+%+BBtQjaqlI+Yzp(G$(rP1NT+`fH#%F)ucD{cz#M#Fwrl)pX;V80& z`muhoABq6@*VZH{$T1l7y$a$_fk+E>lSBx0La;rIZ`*L*cJ>a2^>El;8;0>%QIXL@*W9ZSN9;k+^T&W)MM}%`Ua*a4jzE*9}n4VK)5l`;GAACw2ErOcWvdiA78n#Aut>|Z*h;mj$ zTt)u?E=xRF3oJKTgAM!ArLh8zamKTS;@$M~b_a@8VM zl&m2nzZY@A;|eXn%2ei%B|`N$kjyd-ltL=H_tI73aZYT@flhWw7qLRGEfSIqNbghL~BSe+3@$%qaI-|!P$RUs$=sdoW( zL9T)!3KmRxX70aez*vX95qRW~B}REJLURIuxCP(C{sFpdm6z6it47KeK!m6=&QWpo_}RjzEP^+MSYtuWlQ8-8*$31$~H`ibbE zg*m+@03iwpNvX1%nrXJn-}>aXGlf{qY^gR@qa5aV&ZojLD%A+)8My(Y-b;R=6AgX5 z=ug{7H+o4#1;vQSavNTi_LaT@Plp}VQW?vgT(_hLu1D z#LVR=$R0A5U|O+AJQu#JXnkpcU^AYR(LsEaEFxn{gb0s-r}v_!$j?hg`%?p0HjSoWD54pUraH#8bVhvgsOlg}zzFf(qTA@SntSRndaG(^3`dP#Ng6 z3?A^0Xc=O{CWDVJ45*4$*GfL2vtgC=}F7t9a{L_irXO0RA{^^-lWNEw7Mw zkPsi%)~w#Zz*m7ve>+yr6Z57qo~WWvB;cXOq5j023_Z~yGZ(F)$pW|4IfmOum_?{; zs>3Lys7)X1kZ2&PG1&lyru`oL% zD^{lr4yMSo(0x$HL1hGB4<1DYbo%#VLAc5@mpD5WhJJg<4m6C2SM;e)$g+#DRO{tA 
zU^YTic+1_9z>4A08EP&@YhpU(5O`+FbjiX)v|0sf5mqp1fRa$9W3>aT;@81YJC02P#+8!sDVV0GzH}qqcT2q@0OrrMr!`#6^D67qbx*U*K$krAUp$>H#Q zY>?z8Xq4B3I9QgGa=dXzyQds08KVC(sH=3CyQGo1wPd>`19ksM!GN#$QS*m)m`2YP z_L9PFxD{zjgrE1WJ{Y2rNSH$n7;&QlHHy>&ev&dX=$S(Qqr$OF9x>%4ki5^2|yy_f7T5HWi~K&l~!(hgZx)tYn~St*TmTBzYFZ46gxFagh7@ zR6dTPWCRhS)@#vT0AfZ8skgdx08w6D_m6*WHL(C&VQ%?4)6xdc{l3lJX0^w-Ree}g zy$)9=XS<=k?<3!?K^2kf?#^FYJMfh(CCX3Ee}AEL>Nlg@Y77SP(~Qrasb6>5Z$f#j z05e@a#=n$-O?rCwPLWy8gt~CS1V&R@t)!4@Q%zB8en@+*xh}x#h7JwpQ=W17hgwQ5{ED|9CUA8|N}O`4cM`N70cp?WXXKCRb!WqY-l z0?XV`34TR9wFBFfYD_ZB8Fgo;1CT=>R5~i!?(M1~qMVINrO2k>$%GIf6z&S$fQ0{6({X!f9;iqCT%W6q9!O4BZ$u!<;b>u#MDfT^0v z7o=0bRJ@H_x(lwBJ66rta+&Sw#=d0Om5~$GCs!-X%8g>Ei=wez9IGO!u%xu5I!Lu- zK}Uz4ueoFG`B!>#+PV-P(+A<|~KaV+VB@kYN@*0nj=9@UP3isMUspyX+?hWc!62Daf{q zzu!K+UEWM_$$n;aRwp_fAETItohA>_{zutH%5RV_9&4Qey&R^M-c{Hm`uwo|*Vn?r4% zl4<{Ur&#gI73Qu#Qd1SfrJuqK#A94l#|oWkJ1a-gKQWEK{)urID+jCz|lcggUhvQ>(6K`$)#}QIVA8F)hqaojJC`Hm6Y2x`!lQ&Fni1lvTRS)>R+X0CFm zPQb86R2`e!sXe~1F9}G+s&0Ngjra<}ijnP_zo?tw1rM8&a#|#7G3?nuQ1)+-h^m%) zIFe|#S)!I?db|F7!_w}aWz+3C3WpW5eA!mZQnjkN(Q7Q}OH=1b=KdsJ=<31J;g!+E zwPi*XlB&{m;1JK!T))oPREWGR*IaXxWFHT1zv3y~s5xLH!b_7bUP$v^edBcPHr2NB zaFb9$&AP0tPw`J1VeED`%Re%ZX>kF6JKcVnvT#EkPi5?}>@d_b7 zoa)i}QS|$LwHlC96;_RfPR2*n#w>Y5W`%pP`5!advn;Q^s#8d(&len4?EUUr+@gom zY$$FK@)Q$Mxinnm$cT^0)VxxJqI)Ty&OK~ztpVQ^Q%+gO zb1xFu93Wzk!6V)4?;%=8H}H;$K8XV7nDBYuFqWT2pYM@DD=b_LvY{-10t&aO2*Hv> zV|Kjf`6yk81z9N;8s&zVMwOdf0ETB79l9-7Q(`h=trgrkWPX|RZYcQkZCUIbno)P% zhkNcrub)F9F*R#_((OfeBf%&HdfEWf0;e|tM?qd9xRFhwB6k}A<+$&ffN783VV{vW zA`P5JU7^G$q)`|2)or-#M!RWnBs^IEZn|%6o=3pl0k_ihX~g#dUGF4g4>F$^CG0h1=y^e1GqgX0qwNU_taws(rpeF>rHfBr*j4zX9{=$S=(tZV~OE zH=c_+P{8h-16_@X3dsVvrPN3#5LUk4oOsh$Z-w@1XguCt4BUBD|0FngFaCK^ED1;I zQHHYO&dQ}iPFgyX`^#>PrS^DgY&-s_r;gz}s!~_5^z7A)HNuseAT=uhvw1PscRf`0CiLXp=8WK0>^`Fp%|M0dMTF}1`a-S$5aH}IryqE)0fNb2~+r{rv(QVIv;&~Hg%+fLQocJk9`Ds^|Y}Hs=yjl#qhl$V=&y#$~4*1^cey& zou|jDd|}9>JIq^j*dUI%%Pka9;C3X`*pjRgu2W@=`Qd3X7HKJXeOxTV*ECTj1@?hN 
z;GJ}AlQryHD-C<}^*lxW#iCfvEotu4BcCx8w8PbDS!e!;?%=x1LJv0F0v%ZUC@P%3 zSw&e}+cp`Xx~78Yxui;YfiQEg8)4nvq6Ax`b?!=z~}-d@ertdl6wNbO$V2C#CQ)?R}DI zp4-%&-iP7OjJgqj@<;jDG#Ot;>VR(%yUEhriReNETfv(ekF&il4eh!}#&1W80f)#6 zsf@k?d%qkBm3UR0JbUbsPp#(lb5_5Z*r0z=_&!UPZCaZu6t->5%Wd^-;ZVEdDz)4t zNb4=t-}h$1;b{l=Cy{PMdeuTN4mKwP*qKH^>udE9o#rL+xP#tih2gGXxFY}fV74e` ztSZQ+>aclGI;AaC!2#W}&s>4EBX*lv+HWq^3U=XV*r+dfpkZT1Z;2!l&fHr)s_1N3o z?sj`i4eq0gv8}NE#krF&EkoyB;6&~U#}MFWX?gh!sLPTmU*q7QXtyi$@TLWMg!-hV zYgY7HAdZ|(y!Kqdme*ECi*&SS2FR?N8J&joD)Cue+K`5MBCi6q_oIVzCT*3N+{fgF z%KoQth}Dm32G2LI*|}@W`YVPbm+oybhn@p%Vm5Jo6R&Tcm{$fQPmAJ;!=A#qP@pmM zLbaP*qy6TrU|a16gwF%G?*Ysh&PY(t^p)efd4KIVjL?-+$r{DHXadsj*r`5wUHiS& zpjYdZTA!05EvYkWL+|`dzE5v#!Gp;2B_Vr9kJ9nX)aRf0TD#D-C}~S_IR5#Xk+VeJ zMuJBQ0k}w0`c(@9IbP*3f%C0kc!JM6@BaPWZ}0x=_ka2Q zUw{9%-~ajD-=cT_{q8sX^S5{Z`R+g7{f1v=-u=sPfk(x( zSagPQRG*x$q3e_H4EfH(nOQ&MB--9;RDfN>KMw@CitA?dx^-$qj-!)T6qtfOId(UmrWi%C@x0vG47R;vnj3)SLV;X(r{ywh&AY0 zei6e)^a?9?S=7C3WG?!%wwqN4nElLgeZsH2Y^kM+FOmma4=c_9^x1% zWDV|-fK@_3^&U}{xm+fiQX`a7)&*w2X2ZOG^-y#a6~x}8NOdtw=Bxv0;j}$RRysAT zE|`bTv4ckI6m{F4A6w}El2KsyJNn!5T>63=qXiY?IG0 zEH9DJ0;hY~kA6sZVYZG4ff~>sRjARtxakwh#)Scb$g*BAoh6eWXnyP)^!zNDi`&S0 z%YJqnVLSYV2D`{Ro-sZ?soj`wdQJgiaI1_R5uHtw{EExY(jU?mU&vB#W-8v5tA9&v(p<+El2ehE~G=9w_Mc67h+w)1n6fm&99Tyb#Dmn~35uC*hI&3cT$$c&4 z4Q;tHl2LbD^xWVqON)n|8{ne^7GV=&3`*-3nVjK%6(hrXiOLKpH)ch;{Vc3gQA>wfC4qxmBH+PO6oel$gZ*GlTmG@{;r1>v7bj=>Q`CJrB z#Wx8htY4_l*>=h_g6dsx!S-8S^%uUzL49|llGATf}cET_#ea zXb+h%HF>$emt{6zrPsQL&T>@F3NtIdNu!D@K%^7@`YOT5A|SSqAq!nJxC#XpxXBg@ zLPZg!-1J1ONXrqiYa_MwL}Vy1E1t~Y?x^Xq>LGdL;)`5;Zj17TW|;BIOxWB?2HIvtNFLq+$x=dV5M+RYygGjyNx35^RncKKi) z!vlJ5<5ZA-aZEnXbfUtBKsl5OrP6a`=+_%9@*uk+;pBRmz|G#H>3Ac(4D~tl!Z}lu z$(5lNWRA?3^lq#SO&Fohb-+&H2AE1(eM*CJZTkb2DzY~J7XvDMXgmdOiWisaxP#ak zzTfnKr`ZxLI~Rum6DM1)vi2oUQ|};R9J+N6V-!+^dSIXj_r$hvU&A#}!?WMHo(RRM zs>V0~@*J~(> z6lR9%(Ba=17oAwkOId6pe``^bu6}RZnUhORc_V*j#l%B`s>7?(d~&%kn7mme^G?iZ zLQnp5{N(%ZemH*kVz=j->N8Q-3GzOm?uY}PMHF)N;qlSQtI_MD<5x#N?n=R1*4>Eo 
z50Qn|LI+rVFtpl$L9lAJuC$Ngw(}35b6VxR2H2;nba3 z7wl85

dxgrHJ9= zHoO@8+gtt`Z==W`ct}=HYM+ns^;=$0h-KkvL;0Xll^#TVsw~wI&pewnN1$N>zTQk( zxya~4A_PT4kC$n~o6s^p0J{x*%n;g`(EQj7q@p0rJhiLQjD6xUo4AP~ry@>(z=pEe zv8R!2Zti~=iTx~hxxKYUAvsSip1AS@Z=0w&K!F(tdkl=*vfv zC;mGq+t>aZQKH&B_G) zoEvODdG_MDVFr#5C~U3a>9G7FIJGDuJht9XFJpea96!m=)fm^vw3feRSzz-Oat! zopH3gkchnb=&YE$Rp*%uPM%EL3_ttJ3RXlu;pUHfzQ_mP9&j2pk?)r1$*9+(@-0b= zLSA8q7a|pMi)_aTSowsq`64pLWMxEq&SPY__YxxZ%SYxoSY%{g#AGx^b`D&FmD2<) zg#Z!cyhIz7aE>IjbCwjdkj$^DC*{=*OpibQ>#rYsVagYJ1aa6iKLH6})m+_Z2?Q`I z${=VnXRXkBx~VDI3EWf=!;wNq-SmdBLUn5X8QRPTG#Q8`P-&y{De}B<3XF2Onq zKh$||w9nXq3%4xHW~9zn<>f?s(T{_nm+K@z0Y!e`*A_6-1st-+SIQYk!5T9WVVVyzCNhtT9AW4^O9n-2H9$kPaghlrp`L; zblEy}UJSFcn$O+5*!u8LtMGB{=n^pLQkO1rU=rul;V&Pp+bF1|;0t>%jFVj*lSF4@rHnePFE!V01;ACnEOE+OM94q+l2x@F%J-@+vdT9`cuAeA8!udnU zw`Re>sbuD~B@Ie?1aNG3N~~NmEhje#tZCv4zC-#~1}RhNUB2x|s?F*yp_F7FgF`XL z)yEVG-=-$>SHgab_(`4k_~@HWh}pETw9IlmF472|TY|qL!yZ-8+F`?Tx-y(nqz%s4 zg`$%^I(53Npi}hH`L$EwM;zp>sqL!6ww#{AR^k2}3aZWc1)GP?Ohn_L2X4^sIX>HG z>nLUQaQJ3FduZu_ZaG<4OR$(n`!`%=S_&)8Im6i~sXHzjM=ps|ZW2nKGD!`~Faz6h zbD5%0ln~{}kO2=85|eq(c?TGE{lwp#a~@AJ|DOscG)dW{My!pzarMO zL9=_AO(v-kR6EIPS(xo+ei>>urTZVVJaeecJGd;$@%-jInc(jPDQV4LGduiqc(7rA zXdpv>_+cb!^gNl+J~uzl;3ImIO^`9U1OFkf;id^5Y}hN^Xl6@{j%*7kR3L`RPrRVl z_O!oYPc(vC+PeQfZ82)rH~3jggSs_wsXK$iF=Z?ji*#{8G$r0Te+-Yblr=e}TdVD#eY{75P z=~QGJ#f5E^<;Nh~FMPvXlNIuFJ})^RYy03Je|d;-P2H(RAZu?U2wTuzOyZ8QTA0E7 zWZ+o@W*+C_Tcg)b%rNiw_EsI=D1muWY*&tS+^fbgyctur$*ILKI0t9^M!IrY;R zop)HjJ5MqY2E&;1TWr%ZIfohZIAM!QY;1~pgXAM;@WDwv_*=}}8&APt=HdJ9)|%d8 zcyNmrut^P0CIj^e(%T9eG5sQaUXWVkyYy|Rci??>vZ^+-Zf56~Z_8|41`cK+r(+Y` zGou=NZ~j8TE-W2xV&h;3S7%-ad!CgLW#7-vJ+l0V!Cv(A`Q!4BEkGBG>>|tmQPcDb z7X*_Yf-yp1$YN1yn9(^?bf`>3QxAM*Bzs>_*sCdJqM`h)l;F;QaRWe#^QN`_G57DO zR{~wKt^rK9um9!FSpn=Rt&z=78q|r`$OK$NmLyoTO4HtDvR0uWt>P*v6-6l@sN|V4 zlewNjz6|JRFPeks(6|Nu+-CLBJeC;cWI&teQR2##jw^vWytEnf?9=^tFP>*ZWJQ#> zffmNfojoro60BaSsV-To;ch|>e#3SV?Mc&F$q`R)7DayXk%03)W~~Z*qhYJxlr`*> zK-D_h!$1|$YU3>3cyWJPNxl@V*xt0@EBY#GD8Ie6e$h8fwO+7rYa!IMm<(+#N2@%8 
z3T^lC(YMD(M<=^2<-jQp`VaO7UwpP7?Wk|X)CgC!oo6$l3X<%`Y}d~o7@6o|ky?{$ z8+B#3jJ|pPI?CvZg;l=T{Xas^`YF03tJfI0 zdwc!;FIr2Gy=4FX=M4p*hf3$>Y$;Y)+VjgK3O!xII?d6eUp#&I|kC z#ajhJ$_-V@^6`|6E*ohreVwG8HG0R&Jzw0QbA*czE-&Ura5mI_oZ#}>Oqe|bX3chbHTAqz+r50m z=QrSOUFc8# zr?zA5ZR0q8rUCzl^QC~u3<0E!V!C^A?}nFa>S zOx#}_1_?-KDy9&;*m1zy>KNw%`k>{dDJmHVRY|~*ws!GWexX`ec^?Tu&CV6js@BIx zQHb#5a0AZ~w86E~pG~0$5}@q^R(T(>PeRy{+~@nDw&vWlQ0M{~S@e*WCbH>C>~fxS zW4>kiB;T(XckB!D?667*t_>^tek@k9_aKM(li& zl)e&DNWgsUzEMdeOdx!ud`xKHE>ir#l=#ySN*|M7J!T!P1cCZH`1eJ*d0TBj)#d8s zdRQ1vxml(4{#hc?Snx{ky~ueJdx)MQmtLePDU3Ayzn*h+We{nqW_wPGB5bfl%mr+p zXw;k7JKHac=$>3g0*(H*hI#I$O5H+YHW~n+fMuy6@bkJvg^mM09*=2_%$4T~mI%l? z>J2HJ;I%cyP4P5tYnT!dO3Pjbyj8%2&K-nd4v1vDP2^CfNq3;#4M_bxX#GImvq-;l zzIy*Xe;8h^m`4Js6l^YQ$Ek3cFF*lMZZX#0!R$bA4O&6A$>U=bFU$4aaN-Eo3RM>P z170syWB7>)BEjdt{nk|Z`T44%09)tqN(wuao=gsAGkp5M+AuJEP4B{Pc3_rb=B{t< zeBq3_@xCG!;)eTv#_VMHufm~Sfa3qgE%N5{jDL(@ygYyM{QTvajqJ%v0HGF)N_Z84 z5PLK^pn#xsaJZGk5(Xq~Y}j+DpfbY#u@Gj=z4r=qL8Xu5j{kAf_X4%tr_%34c?L%Y zRgEGnzto@PaCR=X5Z5P%ToaSr*UP6c4yj55G1W=!0dF8_XqY;~k}bg$2<*jOv#nOy zX#~F1qFmWj(1K?iqk+69ZGHLPU0;`V{r2`IN@NJFq1J?~JOec}zuvkz?3#H(5-`wvUnNB?Zutx17XyD~o zJ>FWZT?W{}$2N8_oNtqPrCIwA%_h0TS#eu)6f}$$k80POg(FpB!qc6WdJtiQx|0Tu zYB1$o%TqmkNON2AynLM(@Z-PmaF$PlE#;s*tv64IY^LBf$;@Ysi3x-~F0Q5?aJ9Z& zVOkfYCy$*NvfY-KZ;&!$AK8iFf84qBs|qzD>)}}KX>SFo_nSwq`R;x;ncBmO1p!ll z*+Du(MU-0;B+`83KTr76G-&EamD^1hsXc6F`j&6u-J){155rC*hjgHU4?ZI`}u3hVR$%8`nnc-3yU3{e}JG(U14bq|I-QG`x z3;^5rDS6>?65u&1=ww@WFs}p6>FGFZA#V?hYrNO)M>d6S2Gl6h=uiz*&WP^DTnJS) zlqNXR6YX0^-|ess4H7UsjAY7Tw8NNxFoW97h5Y`+X1%s1{@xWd`~)oQqe$<*{EroC zMz_`-6lW(Z$xRQVBD2ABY$+8$on1CZHKq#Ns4Kq zsjVQtgXHjNTkB1L-^6ige+y8HBA0?wL zPx$Y>5)ki`rAw3L*jdoLq=6V~=;twW8>*6}0BA^RSq1uAXs0;c8 zbf~QJWoLqNGw$48i|f>-ovydEdGw>=kwk=W^;E%*9xCfLy`96KO?KLXuSZ$n+Z*di z0swDszO+x%(PvBu8`!Jd941~MX=SsdT*j+f=xBJ}JdVLF6=L-WH2ymre(wv5pt0XW z%V3{;D~v=;)i~qIAdpU0y_`(@3Yk=ASh*-=N}7e$bC1QIQB?;6|BgU+bRJ`@t>D^_ zRAuJluAi#M&m^a>IU2kNKqvfM`V_@Y1;zg;q15%7cQebdd&4v-U|T&;#%dFhPNKA_ 
zKsgEkw6dMpGKeaSB)3|>tClr{H444qf^$;O6D+q!WE+HnH3i<`7dO&KQR-*4EVnyx zG_oN0pHyy@L6ly_VF{Zp#(|Z-Fu-sfb*&FvorCE#Zt!e<1!KoWuP*!)#rG47kK$ zjBO+kTLnPdn*@I+FkWj+P%gh*f@eBS2u98(qdpx6@W0s%Uf5?de#|E?dJM6GIC1>{ z=*lcF&-iaxw9vKSUtzIE?3C<078iWQuIn30aVHqT2Xy>5SXfIe4wz+opZG?wTXdp- zu(>b>9M-ujYY!UX&)(tcQU<-*Z0`66He2SS1QfQBw4d;#bgq|ZQsOnT5Q@%S4Yi_e zy{q!nyM;hrr_FrforC;qjk{mhn;>zCiPV%*9S;`QZBY0giigM? z+(lvqJ$AmHjn{{vHr$rca287-lwizJ`cy31l1^{edDd#3Tf041)v|MVi?tnGXV;7M zr|uX6YVFZ6<>LRueft+jVtvQ^U~>EJ10eq$H{!0kn1XX3954yx9&s@9Gju#$*ZS1= zE+TDTDb><6v>X;4eDbgw_o4viDQNU*n*ylQ^HV{Ck+Kyk+?~7_Xv@Gn` zhyZj8>-43%B5|6@X>TXg4m8aswNp5a?Q6(1XvZ_Vvz_1*xBnTV*PXK4V8o2D24>XB z*``D%a=)a7L04fo{()hSz}f_mPN#Qi^oSbz&6%uDGdq0~(~*u7*Nm%wW7&+`^RE@D z(-j(L;iu<|hJ}Kbz1`6fDXOCus^xPF z|COC~7$r)RZhmQmK3}&^d~!|xCRXx)aeD)e(%0oztYr zB$qrt`PBX1E};v(XQh)NYi6LEbUI||X5?cT{`Om5^n$L=X)5Z+J= zO2D&SX%&q9mN< zTM%k7FWi0Dp?sXUsqmV3&X_@ljuyMLgk{mpZr`cZxvxZIlAoB@JwlAoze49jpSXrT zX_YyPBA|!eMK@kQrzW2w{trba#W?^wfCiIz44fnj;o90L_6+uD?;C67Wd1Bl%7{si z%e5rZx&a*$&ck@T1z#GG;crUU#qE+rcm?b=97v$#)cF~ZFP0hyN9l4|l^dg3F*MEI z0Fj(35yc<--0{+v&e6%ec&_`lxV}>yEpzK2)E0Bk-&I{|aJDt4JFc8T<&;j$#K$#k zfzJc{jD!F=0e;`7mD4@cVX%hDVZaebapl;`pXqr(N(dY@Xp7*}P`eJ^U(Qd4+iU0X5 zdx6j<)1xyktUqYSAfD@^>ehUX9kv@eh0JP#L zD7Oa(^QD3Q&aGgByJWT^xBr7z|M~glFFzjas7;xrUNd?0JI;qF!wKOF54~4o3`p^x zIk6Moc7O_QOSWNn9RLO(82UJ3c2?)9{3lTwFsav-CoiZL^(78x_7zX-TR>?{+9MCl zK9uQ1=awGdXpW-xod*{Q&bIt9`6dV@eGh{woV%|Wl{}QscLNGeY#_wztWeq$=>XH6 ze4&x8LflD~w{w&7h)d2-+e07B=R1?bh_F!9f!B3g8T<$QSwTp~ZZ zlrc7NH(4!XZz1Z9ZUCn?entA-SEg13A!H0BBS6Gl+So zzI!4BT9g-GV%$53D^^f+v3iMKp$+?m;3WvII|LLi<4>Wr1v<&~voM3k2`m-(@k#-8 zjGuEzJjG+Zy4@g%c2z0KQD5Fs2yWgQ-0b?g!NhP;UHYB`Lq!OrI8>bQsV#v!e0Kcv zFj%I=HL1P@BYanK8{i}}L-Trl=!>idV3TO05Eh~3A<^GGaHxVFND!V8@n^C2Q?M43 zGoV2>bP5Oea#R9BwKbx`xm~|q*T1h@4khGaRj$|75`7{4hTyM;U2JQ%cZDvgbdu{w z**EHHCj}>&c{RIu`XX@cPz$4kfgvuMf*ET9sn7(|TLh}vADIS;DL?*|GRWQK{?YcK zhYOQO2;uS@YcRi_GzOnVGzlTdpp1jLI_y#~;0A6-w`)fa>(}c=Q(Ym}F&T=WD%ePR zlHoTqQK!v#EyGoPECMpQW_jmF4?toH?gzm7LxyWGOd$ijr`^Tqq<>g=B-~}|wZ+B# 
z351sP9)`#wDTKqIt%wYIc^GdXQ4RVQo9aZ76!DoFVU046GeOz$7EK?Gz{uVAGKnP{OQ z*bcBblxg9&0Z%HGE5)H#s&Tn_jgpbOV^O?Rkoqz5QufE6%4Vq^L=wdS1n>tt`~5;| zL{Z4{5mDDoxgeX^hY#ZDEdXAPha^yQ5Y^~gG?k=;$QD(~iER8l1T8W*E12 z)Ac`4O9KQH000080J5k#Rlp(5jE4>Y00KP#03!eZ0BLVub7N(0WnXq-Y<6XHZ*_8G zWiM}XWMy(?WM6Y-bS`jt&0Fhl+c*;c9w7e%k)o*GQ;jCs-rg4H9(wz@D{fzCd$%6~ z!=WWQ=0*=MNyRm~{oik9NJ$hWIZE0tHYieCB8S77-#p2joSYmVz0BE9DiKN~Z-ik| zGtS~tn<8UBl!mWTfsf6G8>U1ivXxMpNuybTlOm=I`%>HpwLd-*xlwzz;Xo#5GEc-i z`#OfNE5XvDxE2X3x3lA;A=DvyDhco&PUVXtV+w@MW%Bz-pfvY+^2!pBE%zJK-gx67B`f6wMjSkU`sJCH7p6}2CjgDrAHFvdA}}rzTffvEWm{lrc^nb z*l(7I#z}~xh@DiFPNvNW+#iSvFo!7NAO$;E8#Q7SiO-FjS^&o>*V^-ClRn$%LOOJR z_i}%h7WqJSCl`p*zJ13t*v<21*DqeY@HcXC8Zr2H7O`{qz3{{n48jqMskcM>5ltIZ zkV~^%PP9nZQH{+xxdm@&d_9A=nFjZ8u?Svdd<&1?e`p@dbxjMUhq^+N9>r=h`+Z&y zZF822+%e{JQmY7mUK7GJAv!Nn4d_$=(V$ia9zO%AgJN2^HNm{If$cbhK^l#~r){ZR34@Otl_8V{1@te1DO$70z+M5yR}eHE z0B61!Uai?BJ-xJWr|=N5eNnQVOj8EyvK5c7Eo$j4%9>b=$n`a}afN-a^F3;A7D43iMv^J+>|umXW%qZVKz-A8AiwYw#d@ zD-)n{VjVM(EmKpWm$w{3bi|u~m zaU@dNPCz<%_b6O6R&ya2qzH-DPjeOl(JB_vOB;>w6NV!Mg<(iX|ZzyfRGzs$^v>4h@ zXaaloP@S-c_GQaSkq$@rZ8Z7~Wc9_4rnnRlg6Qg3$Y)0an`Vp$Hs(%jPO~~#Ym=PY z!R^;?RVfB0NZmK#I}Q>bkV3zf@oU%TYpip6!l7K(;bMwBKMh3mz$TeE8&i!(=(|vp zBHNlh;cpgB_(jw?qpI{|RB1MQo7YBY9tb4hOxK*TUE!#!G7o8XdBU8Xd>!SLD$b)t zyW>Fd^bD2m7VOEcW_5yhu{g|7do+;_c#4bMaByKYPuF5UXyI=KgloBOk`L+fFsbjJ zh@Ilpz{10vmknCyvyrn$m@4cTiB(CjFCU&HBgFDDN!TR}xMZ#nK+#avBK;yILb>JD zs?z9zgpDX+9+fI6sP|Q->l1P`@UeB{(p1A6S{t6+JC7JhA7*^J6?rl_zddK(LUnB> zI4KzGE3M_u!~6sFf?1lUn}GM~@dXTBuz5Gf?hK|E)hPyeOSq%{;cx+ePKvUisZ)pXi;sde4ZE373#h%r?TiES96j7UCP203$k zzBrGtP<=+fzX(TvaZfz2)3d97C}AXLmDi#ThID8W2+;WP zsmF!u;?;RoZ`SocjHfp3>MnEgQeBm#DGbNjwXb;YRY+HW`G*>_Fl^pTYFM?2b&=cb zz=tS37~#a~q7uj3E}q3D^*AEh8?^#+_6mp425n2=k!~7c_yh4(!>kqrtygn>5pTP= zlIwEWBD5;ausHj5`b7oeygl zvD!oy;GpXds%=b8-gp~0nfg0Gk+kd0Cw`@#i1hY%2m_?kMF~Fjm6u2zL#8~!E7V%7 zls04Rn}T+2H^+156d{jBdGJ-o^s!d)8myUo2WT!e8N4J9D6_4D1gd$aXwk`(Wx{NV zB(nD35PXb1?8hDh1d<&F%le=~ImUt2{E2u6-T}#@_&y(UDzTnT?;rjCK+;zlm{5;Q6n)tBv7`uUMxlnA 
z)(X!+v`9v9eTeId%t+49>CNdaoYzDTaKBYOoO>?4KTw}M1YBZrHfgtb;8V^ssO|k8 zfj8%&Iy}qk8U&n*-|FI=dQrC@oIu4~#Y|V<<58A9F0D={Z6!nnCsDN}XG82wy9%N5 z9*5A0qevVtJ84f9u@P{U7wRJmkG=}N89it9m@zE^e^LN;*6j>*X5D>?znKcAcW6gE zfW*2v!lhdXO*Y)91CJr0(l=D|Yli(baG^dKI=!|5ZzVwMvQRedt1dBk&a2(RCY?rX z(yT^oOhZTffPIZH@XqSJCUVtMYOq!va73<`5fY<8?v$0=dBjQ6G(Om6L)p_eYaWM+ zK^n2-dTHUOMR(FIHQoc?Wa{QC?D>Z3ZrCAY9~>!1b!o&vh&&CqVYd+=Vz{fQg|}UQ zAFYN&4tbb!7LT#tIOr_CF0vK)1x<0z9j;oR;F&LvH!cR&_7AH_Jk?t5sfD$cN*n5r zaD@=Muldg`=Ya`fYr(b|7SqUD{~{vOe7d-A8dQ^gMke}n$lo}QF8&s=uNHTp7qPw> z^`?Jk?@yn8xC^<{r-NBroN7X`A5sF7wpeIZm=eUi2OhO1R?HbBDibs+MDZ-%!1D>! zsEddZt~{2iD+JZR8753k(c6&iMn>4cq%S%{**F~Z9*~FFa)lnQt{pg)Ak}e`e>~7x z)z#VtQv|wWRXc%(ht}&c23190|AqpT-dTYy53|F=5HvNUF=Ic4=2YaV*Se*POu`u# zV)4FK)!Ky*v#0enlghH}=a1{#4WYix$Hkci_@Y_3?)LSQKO0B*ZVY}#OOk8SbD$!J zhB@mn{ur%|LPls?%(E}Dl%SuPuHChqtijv#U^uC$;oOn-n8@`S_SgtvoFyRKYipoX zfv&x|10%#Lp6FqGtRw!|#$B&!XC9vcdhSpb-*<_q?ZywUIeJ0%e>1YF)0X6zujwj4tf%HiCA_bGu{RPL8b&i~;VRM?)IDKPX}rOJ2aJ2dzzNzh)rkMTK%hA6ldRBiFDN**kE1vm41fOv zz~s(t4#DCbkBy&lVjrQX&slWX!I#RFfJA;J>>;1J!A=nM7V1@~TY8CtxWqxRz)PGB ztzeujr0;VrEJRtvzC3g}%%AiXjE&fVeiPj!>s4?VU04t=ZQ=G+K=(7In#kKJn^V{# z`xO0B^TMqXk@?rupqs&0YssQp9w8)8|88G3EB}AlM@s`lvp=4O>Zs zEQ@@+3}x?IwBV$xtLi+C0igJIC;Oq^JdJINRrtS$&YZo+SI?PD-jN5Gzz4ghDH%>) zW>DA~;gW<8{fY9DSRI!y*Y`r&n{Q6BykiJ5Q)O?|)xkr5mTQ${1l=9A(ou>RgGD1! 
z7?H2ug;7^Iehj-&j|2JZ{daL5@YA`eqL-#vI!)S^qc<86X7woZzFavZ@OyvL(t49} z>)GN(Z?^34ALPuzDvQBRh+AS*oEwJqwGdnCcsOLs+)ss~Dz>x?-FPv`DWbA*35_c0S=^fSd>T-6eO(I^cnZ~!;Yrdyob|k+&2x4@n&9njBmL{1*F5Yby<5VBr5yO9KQH00008 z0MTnbS`Ya=mF+PA0NJnr03-ka0BLVub7N(0WnXq-Y<6XHZ*_8GWiNDcVQzD5VRUJ4 zZgVbhd8}Mfa}-CCem+O|Kh$v#ND+fD-g~&vhP{Dh%sRr@U}4AY9v2g>>5|k-PxrXH zN1!8!(9D*Q)@rYLzt_C- zSPyUE8m^ep_hx8i$I;JYpl!gA_mDOeO zDt@{TG}?{tp`yDEeI&q>U+CZaC~mTxb&};#SP|?}hitT| zt!U12=XmJXg9YAzwf8@4eQad#%6pS}lUg`@P0rd(Blu z%hrWn>xW)zv)B4R_$gphM@x8?y@A*@(Z{YC0c@afLzADz9kS~MWeB;Lg<553LolWT z3f_+IXDkTpYy@6Any!wvu*^@_{2&^{{?;a;{*_+yj$B;p8hv;{U&kbw!6BN%kopIG zk6>32c0i=0LGo>B;7M>@4#B0lPg)){!{7R+m>9sqn(IVt%7Oq_;Q;l1-L=^iXEvX9 zz$G(~kMD1weKAxY;XnCLyd@NgmiH0)Xftk0(8sS=5T6Le9@2y2Z?r-3-Y&ZG1BEfD zeK6vEMcb0DbP6I?Iq;*h%V(_0yt&4==raai51PPr5OaatLqG$qX$3a6Bg8=$Ng5)k ztgLA%PhbV0YV<3rURd`rpr)C`poL0b@=SM88 z*U)SM$EW7mB;lta{^wvX_du3a#TQVhRgk*{e=a8H2%reIS8+LF0W0{v6iX0m0OYpP zh#IfaT7QhY2!bhUg;Cvh{HpEtz0=Z3LDoHQkxPP;5;X%5YJS~oecx+71P|5S^PmV& zZ_p&h;%@74JQKcC8HgQ5R*(}Mh2u{LRuzI|BsGh10f$4RsQ3XAQYM+tLC>iU+VvNp zf;ALiB?X4KQ-a?$Izl?%Mw3U!ToX_)VEnw=n011AAQ)nokiuP{Xu37rjI14qmn2=M z90-0MtTYtV3)dCRIw|LZ;0XVCM7x9{`s529c3lK0fwLKe`UImS+LH=unJ9@s$xsgz z6*o{1&3yWX%j=+43?`SHD`s++Xi(FjbS1*P3>AD6I>#HjvJjZp=~qC|7On#ofOtrpOlt4} z>Lpnc{NbU%bD=OsTE!HgB?E&nsD5Cezg1v>IX=z=71NhwYLr8?JZT^$MFTkEtEJqA zsYgWyUI*xH@9)C#^a!LGiEnVLK0h)dJ`OQf_Ek2X$DLm=fIo=0wXkIJCc9p(&X+NmXTl}q zt4@Vv#eHzW=oyaE#Xf;s(h_koSw68xQ2#Z;-5;f@`Y}6MGR!9@y%YiA=~vAsn4jP3 zwHAA=n@H1CaL}W{P)oUD>|S01Qk1EgvJg%DuGd_Hmk;vZqwvrSnDgOXbKO-+5H=}P zE*R)7)KTTZ42PeU*{mEld(H1rA^7wAUgNt*_ZFVE?(`aWpL8xix@^$cBRC1eNx5%CaD+29nIE?Q_o_ytMYy1Dq=j)lxavbIibWr6cz8x z#)aY8S1K0-si*#mwJ)Wk2ZXASrw_2c>e7g=~>v%Bq|+ zMSTrE5oH3$^NPv@MBx^2?%~A(J++=A?APJX8T!uZ5O4o9jNFIR3jai;Tw`lioje@P*~YT zNdR)f0t?7Sup7PB|K`+O+i4dFhq6_%u-L1FyV`*~94Y1xYrfq%Y*m zB0}Jj7cwbH zJcMjQBdP-05X5{-xF#%VRjvb#g6xP+2@ov@zAL7hel(l05L5 znQ&kT@Oi$m>%^#)8x?Zn-LnNcOzi<~o~6d>*M4S+)u{1p4pdjyw461WwlDwuxFQZvGuv06$~hRlK~5wOCgXNa 
zRcUaU69K1!AfrL#>XZngKAlP$WNhEti`Az&KE@7J2{%I(V zv8g)t6TqDCij#TyWO~Gd9HB6!%=CBkX|m!s5Mk6AFAWfdFSBZxEDrV|Y>6~rg-%)8 zs@6QlM&esqp%;W2>xXB7nKjGfE&_&CHN(v$e#9tO6a;&**ZO0x`L!N;#Fz=cTCIDa zih;EXR8$4zZARlW0^kXIScd#v9WVfCue;f!)`g`rFP-fzT*@5k{A26-nFD8akxuM^ zfhYE)rJ)h{-rR)SyXwtQ^AJZcS_(Q$zH(p@=ws!IU-u&T83c&nbD1Fi`18AufB2fZ zhQ6f)`|3Y7zc}-fv4n~ASvE;U{&AL$NbzcrHVdOzAUxd=j+Z_-AZkBs@1`~hG7c|K zjz4pKay%nem3wt?>B!9pBRr5D_kA~HJDjrXLw9l5F1@>WD`&Y|Go{HEQ(6xSiz%(o zY2;O1!ffs2xA1}i*uzTD#bJ$CSOVHLvTMb(oUwTIbnHtN-ha#SjgM7ghDjR8KvDVxsmJC#`7bxg`rjH zRJ;!=JMZeT39ab&iO^l~z8;%EK(ApK%RtUHRUx}b=%ia8n<(DfoCBDgQe^8Y?er&u zv4wo)C`^LUn`ciMP+1DD+drR}3`noi&E1&i?ixNPQUlGdrJo)edz{~NxmYYSm7=l& z-$%`Y(wpCr_P^J)|7iWMPj6nzfcvDN|D^ucL=G^%%Ogd-LKAoO`c1#?GM+2H%=hXM z-+>9*GHiBHF zaa+eCn%T@q#4Bj9V1=VYz!C?k(ohcm2>iCIr{{PuSm8%Mx=fbXm9)s*Qp}JcbqEIn zwK&aekT1ia@!Yr8YxaO6JpOX+@n>y1O1(&?wF*K!xwY}rcb5!Mp5>L3#KFZRrsdy| z)-Ehx72ZXQP(Mw~wU?bR4^=X-Z;zJm{?yvqDWcH}3^}hGrvWmf9TM8qSyV0rHgAzI z*9G9=pF>gbpqxuXLb9mQp?GMv!1f4NoF;V%k&z@FA5w>4hVP#}Af}n$SGQCdP*4n`Wbg3e*}>x8=LWSbQU|%~+~i>6kWT zaU2ENujD8xAvC`$BamaikEsGPHjRvhRlCH-<{}Gz0+TcC^-oer?E97v=08F2%_~gg zlpBVX7Y4S{^(&UXk7`~m-+qq?6BBP`@12E92zsBr~?18BHv^_KX!Et^bJnjxdAj_aOn2wO|BNnYde`v4nq0ck(uu zpsUnSe@>7{EQXI286lO6O+~yz{1i8iR_FQY`Fx3mC!JaijMIc5S_Degh&QrVi(r~@ zk|}8pJ0ds4nsscOXQI!fe6yHPaA;N$dp6Z?6)hpkOo5ZkGrLZ$gHs%z5>w)R7o)kH z+R1|g?cf21=X-U}0e8y)Mw-Ma=M<{)_Q}yKtXM8Yr6W9?#WOiSkZEnMm_^FUk5IM4)M z-@pUOX~#p5o!1NwfjetESg)| z+Okum%}yAWeqxU(aEMJbbe%(ECR~t4W81c^j&0kv?R0G0wr%r`)v;~coNrdMnxAl2 zRky01bC5#gE=KR0>8BsgnrQ#nL-m%P)_+cy#hP+m&u)y0w(hnwOy<5@4=;DgFHdOR z5TgsuKUG+1;Zatj8oLcLh-&?Mtp=i6EOy6D;G9Ra-y!6BiAnd`ITE`e;yNVopcPbE zDxTreaFq%Pr0}VpIVXkaS)VonFHz}-F(uR9;7bRjVO@zz>Ya4Jf;qxHAl$_)E+MEJ zZv{rcn2y5E(^N(xi_op5qdHOpXX$S|CpQiR95xDQR%Z7R<5>stcsNp7u>KAHec}6W zfr}X9@|PycA<`Mt9mt2oTDYiEG)-WnRnqDeZF(MQ_0>pTvQ095YNP5}k{Or!1VtO< zZu9X?Jgw#9H4=W`(%Bguq`s{OO#ci{W)eGOPk5sv**(E*IEsf02(dD+oW|2tj0rY~ zfI#FBVzAxPcp+}|*;+TUj-&G-jyS|+*@OCP{8{1N&|n0Ht9n`+xa5)-N(PC_e4t2r 
z+~C!O7^m1=^B$A{V**;Klf^MMcLOnP7O#LJWYmCAukY!MP z=h?$+@2H3q!jg_9|M%5md91)Ij=?L=u4&w!qo7OU>RZcd$D$2gIiznc>i+9qgZS;C+_YTP0YQTHgi1w?e@N+UmT5et|VSW$->w_>&?|^lT!kJcr zNiFb2)UMw45hhH8pH4{@9RmiX{%i@YUej%Wo-%3Lim}I@(OXYG^P!&k1%YJSd#6w8 zszgMGOm^7TU?JkG$N5#uOutfz8Y~38v&8aFL7Qe}__`%S%9SgC;1c#AosdBFJG4rQ6h|oUra`M9bb!e~hIg$?)7bnHSXB8n>x>MVO z^b~Cj7HQv*CJ2sd>8)N;m6t>zof`67MRI=8UvAU1AiE6+fw zRq|YNi3dnR&rLZ)nrtSCDt1SP&nLyE!NH!f6& zPgXzEE!SzUK^4wg;){%N7sGAotoO|+mzyLuchQBXdx!0bbtvR9>a%(0Ek}m6fjf=8hrcMQk&|mob|d zk(bJ~xe%)GWvY`e)NnE^yf zpK<(DI?WOb*lw5icYrp?)X#$$-eSWIM>tH}RIldQ(kc%1W=u`Z;_a;9AT#=bGcL2j zhXX7(1LHn(FsRyvq=axP>~o&3-VT!3AEACCSL`)#WlqK2ij}Ub=4Cw=Gf^N-+%Eee zw14U#`!GY5mpNwY_gYl3kR~qwj{4PJ^kdH41b2cYxIPBsIo3l7Z)uD*(>K`E)dsnL z17HToo2LU6>r!s_FAQChZZyk#iL`ccYvj*D{-H)#CaXT#3s0|alSNP{7a6>Lvg1H) zBuw1_1;UakG0D&kfps1r)=wB6GtHy8=7a`{i+WMw-7s`7>;X&}I8 zEb_?hb;Zf0CaY>X(&d;DW!8%}R9@#}EUV?ziFs+%!9ukhX5deYN!c9jh*-fztpV%fzL*Q;k~^B3Po+~1AG|iMc~Jpa^_n-`OCgwS za!*rEB`jxgg#EY5kQiku5 zu>2_%!9TK5{l12$CfdhfF<7 zg|8RmO}Q+W$Jc*BIQh0|-Nq9fN4P+&#|S60ao1muMw(L!94<$cOba7|#N9c|k3-)W zV^l)&Ae$8HbCrW2as%%px)uYJKX+GEGaB&V(3SX>ewPBIU%jEDald(r`3i1N2 z+=e)}186|U+H$$NqrYOOx-5&E=$_5Q?Mhx)rKP|5bIO!&96p?hbs|b0!Gx#t2|JS1 ztR#Q%SJ5`j_>FE+Euu^DXzWF{Jb&;NtOA(K)%pbmG+Ca#bU(xD+Il-z;Z&xm(TaMF zbx{Zo1EpjMyd-GJ+G-;VCsBq#jsjaC^cEGRFyBI`^Ll8|$so0hoZfAy(SG~9wj<^Y ze;ze3?rNMMX^dpM9Ze75#r1CmdpL_?7^Q>&GCx9x~xk&ahf3FVMo;>S?i>qXWKX zQ>RInZ9A5&nA(0*p<{W(RB+0n zlU>{vQYh`XV9kmO+*g13L00i5)UKUMgAGS}=oolcLy{{cp!JRm(dt*19{kUZP+9p& zQs6DkiGmruee9V5&^Pb~2{K?9S`mp4V4Ah0gOwpE=$&rV3R@l|&Eoq0e9Bt|0nnZ* zEx}&>p>zcYvq&vm?YjfP$MIs7TWn8Kz5Se8tg4v!~!(WPnB z5CI$mxu0xa~xau+xj4Q zNoXyed1HI`e@LHn=Y>f1^g_6ia&T1!Z&}@)5Pf5^cp@S0;;+~6qGZ_Kms}7c% zzmyzu3Xt-KQ~}X|j&~cHR2Ocj7|h}ilJQV$5>X<{6lD^5kGcXmI77{_>n_DtW^ms# zlynrl$(3;L`G+|`F}zOS#INMRAtyh5w-~Eq(J`3CljAkHR{zOx(Wm)u%%$>&e0(bS zDP7%_Uk)Q;6g48T?LW!zIb9}=6lBs{k;ooL$~VY(2B;hwQOrf3HKm|v(cu89>`j23 zkrWveU#`Sk%_zfL;(a3`HR~e0zdV-I?E)0qNb%Wo0}?;T7B(0nb0ku_e75?*TnWtR 
zVzMy05TYd4~$sa?RU*o0=1&A_TPdoS`*I4^4q6SVDyT9s;*q-^_ zen?JQ3pMTCNE|Tt-&E0@-~HC;f$paYs;Gi}1er81=bp+p zBM&N8EKj8<{ucblY_~__Kq8$7q(EI2;|z2kcMvfy=#@qgky;fKN5-@qNSh2a0!Z*( zors79x0mU%v4B>GnuY_Vh<)^Kg$?zg9r`unMwRct9`GVfRNg%k{Ny?x@BHm~`##r$ z&4gV&H8B5-fqxIFkOD5X$C(KlHI#K$T{oSUy+JpoSrFy)?I94q>G^-PdkF7Y6HE^Q1Gy&>1P*FjMm%RTPyZ%X{G= zkdaaXlO-Zb{18TyLpbOw+6=;}=<@PcfiB)(d)$8S+x@-fJ|FH^dc)IyYaCX9d+?#o zz)<4z^Yg1qLpRt}jPn(ATE*M*u(Ul|WL74xsT7@B&niUAgH^3$ZqY>?SLml5*!tvV zwn@u;&*tlsDDfR%o56ZWWO*ivF|m^G2=N~eNlqAM9{X$^!nBuAw1;GlWaqgtDj{gT z$a)~{Q~&T1TmoduJ$^R&zZ3RHmXf(}12AOOI?9$P48c>2>pL6eG%xaf?3|B{Ocg}as(Nz&fs zPOJ>VD-8o1%AC(RtpRyA){UFjiZ@Mh@UiH$s=T4s**%?D4L+#;&C2Fd5N}bi39eBB zNcb7S#&Plqy+GgUI(D@TNjtvEC9aVmlHb>X(OMjy^X*jP(~7-k%G?_&je~cIh(xX& zy~7G;vZldp=D5ImbOEwz*VW`3W{E;DvgILKU6E#Cg(S{YeUvoNPjh&OB5~m_W#ivV zpJ7K(OAK$WU{8EbgDf-4U>^0e!_Px>uDr3Xkofv>>11e!Y1i{k@>O1Vfqmq6bUPGR zTJ%yju8c)WSm#~ETO{Lr-oLqJeWWo3Ouh|dKBB-C`{->$Za&2~otwy8mZr{aW-t@V zTfgu=7(k_`+EzCJs_&&t0>w_hX zjg`?>6LaMI<;FJ@Bh6P$3|f6q zg`ho<3Ye=5^O3Eu##9Sv60vNz86RT=th2H>lrcwdjN^ac>Jcjv?WjX|lLZz|7E=k0 z)jjwJ5`fR?m%ATx*Z4QTV9sz}xWvr7qU&X?Muhq1pHc2THKWplkRiUyB-p4K$2c{L zc|+AV^PNf$9k0fix))G0TH1?WydZn-dK4;b!gHh_(P+3lxeE%!WGg2*p%6$@*+}gS zI-m~M*Gn+$siTx-Me6pzSk8*$Jr;v2Pwj?-sz`*y1-sCdwqCP5kh>&>Msft#H?SB! 
z(;V!GSaF_^`p2~gb$a9K$n!FWlI!bWJYrI#4ixa_h-af3XAo#?n>BtS*UVj{s-yVa ztGwFPAK6Q_$CbtZvrDuT&?D{;RA{{QTc;F1dnJH@vfa#nVsA9^b$#9cv!Nv|g8k%f z-UkyO!TGUpMXnlI?zlN$1Ez4kVmm@=V^R2cPT(Wk*3l*=qqDAU<`|f${#$GmS;e8` z)IcqmB-^}*wH)F<;Mv`|w(lLaZH#$m-JZSDdp)R-=ks2`?9qMQ-T}P36470|v3{{G z{@By)8-7oMD%2l~+uV&%-Yg|r`BJ`MIw(E&TTB%y+^j)ua}_REbtrbzTDY1Y@K)lR z5#i_U`H}KbU`ssX*(c3}b5atA+7edNzHi{coyT~<@MFJl+DHz$EtDjjV%*ZgX1pU4 zKPEnk)*zxv=>iZAW$#S7~r>sZ$x zIMnCoqKtDKZ3sa7#C!ay4Er`gUo366oPZ`1R5c5-`T9MO{-1x($!$tu^u$GOuOX;) zqh43fDp$wQT8jqg>XUa-?A`|?$1&i%Xy7%l)Z!+3#F4XJ4O9NtVi=Fh5AWQvK?@qT zZV-O7rYKDN?{tBjcjMdH!0yKaztNFW^)Wex?WrC>;o}X1c;ODQK|reGyFJ#?drm20 z&%;cORUNa&`e->%z%L(Q(Ve{mXY2ew5A%}%Mh$oQwFPk)5RImB5*;TQ$>Me^;@LIt z_dkcrgpU}{)x-B6Pl)*y&!WorBzGM#T5Z=9lgm^$SR^8bxfn2}AVp)!BA($Alx`5| z7gz&2LLWKB$$@z%7iNSR1N31z80%oW#!XwJD%EDF`(Z2qv-7!b&=7`7s*)sB6l49B zjkCdp-zam=TVv(f(;|gIN$%QhNa!TsqF^@HL@<(uc*3{&RB+Lq;(e>ElG(>E9}}h8 zyw=TT<5$Xg?z-17v&}VSH0+T(Z5cbR|WcgA$k~6~0 z3k0K-Ui2KgHsU6yD#wORjr5QA11aRV&0Czq4Nz^gl^VME!(@pjDhk%COk&6A%BnDZ zo{if3k`z&{k(t$}xGVud;7{C5X&n zF!Vd=TW6Q6?CV2JRGLJB{3_z)I=8v_m%H&b3rwAnw0-|BhC3pWR}}Uv+o<@_m9u}vSHJ)Puf8vT zo}AYAdwM>9hHLOIP!<#;gfy43JO0Mj$X)1^RIG?Fv5(Xh#$W%_cm9^!q9a$A2X`*G z@j+KM`6B*x@VhYZdn)%J*uB04C?*JSa}nYjD&Uy-9+dSPYG3d9&MGtH?pR$t(}`30 z^2DM;8bCw-Hlb6hC?b)9aE~idAD3?&S%I!ha+!?WGuHvFTjmSR0~S8xZ+A>WD*l1k zIHD&NNs2WDyKA#DZd+IZvmj~=NB5Kij`4h4krgC$K;E+vf}V|5qG=y;u>zyYcf`^Y zFj@;QO828EFzz<)Py`eK)c_f3bW%z2V@tZRdfR5Za!bw1I$1RotL;q!`uj9+!PDDVI9hz}YGMpA%&C*q}i|i9kEP z%7!yQq2WkM5I;g}pvcr_36nYRu}f{+apl|j&2f184@5wHuMR0uC%)mrsigPdf%iTR z7Xqn^?WDQTjlD?z?YjGkjV2s9SxaxkU*u~ZC@!a$iN(?stzCsV9?n`h{fte>9jfSg zypI5k%a-oNd*hf?TMxzd?$`mrPlOv`F+)&s<|V}Oa3Xbd!=^5rgZ<``5G>fu;Rl|# zB8{&Phz50bL&0Ou$Ow%#A44{{hcGIgMPYfl{Bbo5)K_B}V**fTmuyj3CsqvW8zQs0 z4i1!eP`K*A;EG2YU1;v5B`j)!@nU<4f2$Ws^|u`Nf8KTA3JqIORVYd9*@mb%1DeTj z>xB?JoC%)q#CFC-Q!#P7oqMl%ImtOM5o{4Um%Zo_|G0QmEUnb|(#iS^Vmdb_HTw7n z5HTA7nKkJH%++{HCv?M^$-jJ9ZlzVtpO?~6d?0@sce8KEw5RUX5i3MtvqhfTRs_=5 
z?cgIYFsvBvBzcmekep2u?MrC$Nj($~Te^1ZH=;DMLbeDQtUN`UEam=9Kjo<=3#`FJ zC>)~VNOZ1PRNxR`5p#YXOLFn!LKAvWI_6F_lI)w!3{FviE>bBYyr<~XtRSY^YkEeZ zsZiywP6n5H6#A<=Tn;WkDw1w?vjl;Gr!CmO?}gN*Yh(e0@FIzU3aJI-v{&{1VGcdM zLta^g+bONXc<+8a%tIn_2>8r_9(g`_Gz#qay{(*n80(W4!~w3h=h3{qT!jnnGfgLu z-Rxy7d0~T0=GvQs*g|@R`VG1K>}n@eqs`l)x!5yw5c^!?YKw!Ti3kK(izZxDDLJHF z`UaQbopqSPxbjW^!>4oehb3y8gf7IaY>QP>RO^DXM^aa@e8BK30~Uc(I6a|>Zs+91 zhCMyisX{fVALl{;{F8HMrlMH3L{Y^R=eUFi8eUeE%CwwkdP`%8^(2T?6^FD5>?8fr zcQ+B(@u87T?GE-ME}M+^7=t%PhUk)^Sahm}t@T**m;p+{gH?yl_&0#ctH2i#Odo-- zI*^(GUq!b&Zw|URa3BxtugQ&0@8r>7T`BufGU}_}gq*7iv*RA)Kgf5uR2HHr2QFLJ z)8pYo#<i?&4uxZsh*2hCELX=T3<@A*zwT1EEJ@czh5=sdE`FQTSFx>!MM2*YacM~R%h6^odib(6^2+`Rn++w}+T6HH~ zoetDbzLW-p`hPQg-C55@@OkSVLYwn`0BG0re!M#6Q}2*3aMFDp6A%Mg^p;KxSjMP` zf^1xXKv6tTk8zC&3k-6;Q-Kx2C`8Wie(b!%+AxzQBVRNU-^E|;gN4iDP|oyI1vD$e ztRG=nt~*PcR03HWSV5OE#MkG93k{H-A4^ltmSlYWv2G7FpPQJjR&nj+ioDlN8P9uOZD zfe#;{Jgsq5AE(c@>&=S^Th&|{?&X%e_*e!0v9h6`C_nVx2?ha37KT~{rlf1-XAktR zDD>fZbB|`8y_?mAkIO6QuFF4bk*n*ebmy+k7~Z_QeX;tLqs-zMa{a{UiTab#U#-SC zFU5X3Pqg30p}jOW?A*^SE7-xmF^`oT##J~KLxg%|tw(n+O6t#e`YF*>4PJ4BGNbp# z+%o+N;vXgY!MOVVc2`h5MySZU=}w)j)-q4wZ4S*V$LmSgn?Jz+7Z>?o?7;|QA3rPv z5D?0L=mUcPFD~NdXlmqY_Fqt>R(;d?Kpe?8jOM3M#!rx}#z*R*Nxv~pF*olT6RU~?^h3mf5pK3M4J6OmUP>bB?9Uz9fkGb1i z4~RpAoagY<{$mGNnT+YoFwu!j66U>KNhKI^==gh15}R&IzKZP?#kONpJ>cc297Y#yI#GPw1SC*828( zuuEAHLlTj^DX`=$RFg$rrdcCt;$=2zFIIq{&D&PC51zh40bfVpzw7x5zKpG$qSj0q zADLULxRHYp){xO9G-UJ0c?+cU-1&EjC#(GDkKTz`xuVT%IPODc*KkrxfSNbkd9K0h zCTT|(fgb!AKBA$louL=`D<%#}Wdc==nIycCZhca}P|zQUagmHHQ5HcMrk5S3TFX4s>HASaF;{7*;(O#V%x+-IL8>ISPD%*A?f|$yEtXYHor|xzWfPov36I>Hrisn_6SzZ(F&xN5^a;c8UDil&&Gc~+S#meU4qkjDk73r7AQKm7pNu;WA09H^e80c$_DW$y$X|9}(ZOX!n zXT2R2W{?cZ2hZs7F&*9;Go*l4phd)be5$@V2a1`+Qf#IpF!7d~WvuzjfIF)cN_~Y7 zC=S{MaS)aq(8D?opjX3k2B## z?z|;ul>mD?QyOwr}#8Ixteu495JlM%jy;^-PbcnOPdQ2F=+*z!M~@O-jf` z2Bv3%z#gckaxxW>A31L!PHhBbhTtYc@;s^>?uQRA6(W?tl!p?kOOyQLZY`uK7W-y= z-TvjyQmu&WfEv*&ExWg$q*Q%|foge<27L2#D2YK|Z;QUSnOxq%;p}EUkbcg%OJgtM 
zbf3TM4oyap1l9P2zLL-Txlg*Js0TW;N%>g#JwUd#w`=c{on#l1tn^%yoNmy=-OgbS zpWk5v>GRa8`KD?fW^*=Kwk2GLrH$ydv0fRIw1nQ`({rz+cbt)uHxt=$iGiTClzS*3NT9{&N*y+hKBh>xI6X*~d2~IFhk^)?1L7>tw;#3+vif?01=&~9uP=25J~5bbCEMm^?rQBC?hj*S`9b!; z)qTRtWXdn-|CNzxH3lt|p{v^L|LVt(_J72c|F?{|TG{?@+^~hm#${7HDZiBFI~+QP zCP*KO!iv({JWc;nXuT^UNxme9QyU$IER2K%NsuXkK z)00h?_h>!Vrp&@8U61zBbY81g{i5#}#Y7T(u#NyMST?!S&A*3ulG7iSLk{A8x8y5p zn@9Ao{NAD8#_j!bfXU1GQv^Q#4Sf8VNSn87(`z%HT7qp86u3tAIH+qHvk%oG{a>FZnLf6kpopFyqJ=2F9n9DRQL z{ALB-lT5lbg z3y>YHc*%-LG_`PY?B;L0%zv6mB*wU$eHMxN8XY$#Bo*p74NMf1(PW`w^j~=CUby1O z+0N=v{EQ^wmOyNu$Bqy0M?ORDas5pO?z6M*ag+xBAZI>_162L{N$MT4#YmEX1r~wX z+dyv;W}LihUx=LCZ9p+#LPK|WaR11Ylb|^`nY9fd?=ede?tLo3V{!HLb@gP*>xzx* zVk#^B;3U+{rKm$eG!<~_hZX63Zy+=Xf)xYOcIUuJ^>-A2YxoZmSLW8B3P2TEZ&^vl zQSy!odrBwo9KLN%j3uAPTd7Aq3Gh7Oz6xguc)xc8!@)WY01`zQ;*xblB9}U8*cLqs z7!v2HS-f=T+1zf=tpm)C@z~G6hK+s3e-5lrSdvaSv&-G$Ls&DKGbs=-3`Uqsq1|IY zkPj5?2`I@L>9I20y%YS%Ml(%{;DwEtc_oS7LCM@(Br<|w>SRCCX_45iDyVRTKSxBU z(`00E^d5N4-aequfxl@CH>1$)RRRT;!PZL}f!vff{0$e2B&--ii^VAts*$4q_E9=A zQeD7$H(d5W_ymcTW&xE3?NQ&={RU14*_k0lQO$|IQ<#O2mc5ydH-8&x6Rw7@7?>_b zatT@6n7-NZSQB(Q$Rp>12F!jDfz7Yo^4Oi`E`GFv53GO72==3#QCm?*7JCna zqe2hDuM1kO^__L(%9ndj=r_09nO!Pz!n!zQ4R1w%lV40`O}_K;SrebphBcqQQfuF@ z-vI&fc<34KeOjReRQo8n?~kum(@`&6Ba$!%F&VMHeWm0x7s!vE=;Oa@MqA`Em>Dhol^P;^7zI zSzE_FIJD$kAKdMIY-SWu&DDIPewEZmhgX-N2^7-ssNnX;QR8f=+0r*E??s5*2YxbZ zcb^sKNq%G{SrcSnT!`4L24(B?r_5{_%)|evUv{@`&?tXa>;GDDKI>QSrbp(D=vozn z@kE4k{J__oTU93{VHkM!zS4o8Ms0YW^{fZGmA=aZ^N-%s$ws4CPw?cGWKq=cw{a24 z^2N-1k*H|X2k(MRf+l7lS$n_#H%8fySFDe0<=p4*$+96JtUC9QaY9LB0l1%dU*B=O zHFf>#$;K;^gbt>-iMZHn_-F)(jfAp6#@L$M82Bd;E@S(^U`DsO!`FhF+m~_|E|+O^nDNOYsCoG zfDQuEkIXU|L}#MIhy2=pIwgV3jh(9WBu$v4BfcSXur5*LZY=;h<7nd0sj&q-{^XjU z4Ad)eWE{iSiCi-KJ9s@!!Vis%6_&#bhrxmXD>-fGc!0RM=H*Z&8B6^nPlxnj6*4%` z0mEs)7H31j{JMV@uu@h?T6onO{nFLCj#IX+zN=ca?KH4g`_GSUmmlWT5YLxgRs{I5 zIXOeePEt>M!TxXVtbj&>H$p~#=?u)M#@?;~=&$~ee^AK1e!v>;pj8Vw*0^X;zNw$j zNcqGz%p0QoYd?@a!YzfzRczbaeC2RVx>ko}&Jm{(DcqF>^bvG8^XGJNgkv8cg^~sZ 
z-AnLnm(+ysv-oGGUzqHzv5kw}!b*sUKEgfmKcZ*jUX0oHvg{S){L?G^%~L-hq`lGh z1YKB);%@=`$vHp|ITN8-yP#d5fBifyGhhG6k-qtVV#k5f_?_$5g!0aM*h1Kw$&>=ZZH$+*7U8{ zi^iQe(xx!z^~0tK!bUQ}1`JXBMXlzUcRA!{bSt7rjRjZgYrQ>?>7gp;sh}!bGWOwH zdb7njT~T&TrZDv|KY(x8dF2p!y)wqi(w$^%l4TF+i?wE8V9k5M(XD1AB9B}MSPUvV z2xh{^0`|$2X>diB4Ysru@()G|)uzd~aJ^D>>@*cenob7C?4W~$BEv=ox1gxs$Wa8$ z4lyR&CC3wGYP2!%KT@X7MQ^IVCNC?#h^5Ly_LZ7pu8tGSD-f_k@L<-UcRfsDJxk3N zn!>#=&9q-$zP`~4c3Z+6hQrTQJ}0IhJf@u)V^DdUcJRP0F5t&zJ_yJrg)CrVM13$O zecA8#aFC87^ka2juT1*Xby?8;!&NZromqRwJLS$1x>7LrW;Q|)2L|(uK(M{-6zS$>HlWqVd?gc!@h?8 zyffCZ@V!LBACrSO(lC;N{DR*>JrOFsdJoBUia85Pa4_4YglA&s_0c~ z3YWK)=6G}vpawc>80^ni&ygy~+HY(nF+;R&0~fLqN7+B%Sj~s|YJKKOt1}&KoiB%O zSEFwG@2U0jE+Z}kyKA+$n&6{2TBehNGDcerptl84mn~)SPv~KokqDzzBcih70kZKl zy3$f-j1QPeQGZ_YW7jb!mO@b=i*qzWfJ-SR(5^KZ1eE58Tmy#rpvKf`wNsCerdI@m z0~uPa=rWUPJ5~Guq>Ovin!`YHGk9Qi%_Kxy@}EgZF_|elQWfl!b$RQS4K8N~gd2&9 zJs`OJ&B}d*GyJ(NEo3{Z+r2JHJy}vZIVjJDdc!SXCem&6t=r>UkSk8!0+LgSihuYn zmwQuAi-!?y6-2mz=%)&i=dA%8XK{C`UD*f5+f0~J&eukSdJY}Z!WDi?krTih z(k82_(gjYEuj&aaUD^?+u}#NaATbY~Dfc9PG0**>*D;LAxIe;emje)+wAvDV$(q*i z%S{qm=7uH(XmL#dXZ$DhSl2%x$ODUMb9BuVd&$wLM5Y!# zVo19j|8O*Y`@746>Nb-b6=xK<;*K)1;-HNoda{_)$n4!?#b8Z8B(lc$La+{)S{W(O z%FFGj-N4J}kCRsJW>QMqcJ#k4{IU2fe;C^9PFWq$gtLq`%B_PkcEt$e_gnd+cxboO2Cl3641O-%}w6FvGPD8t(Xi~{Ee+GDoZc&QUNDQ#2BE<7AZ}@ z-1=B+15{s}UGWqTaHq=7DH6&bv7)L1C(Qp!4UxKugEu5N;D$qU4wYd#_>l?P7w|d= zGrLt`vJw9^5Hsp>7lqNxEv9rr(mo*}Dv(%H=kTL92Vy?$u$<0QDe0%Ao0 z>Tns<(JZik7A(*s1{=3pE(_Gy%}qhAyouJ5(gVsn)!V?gD<_u>GG%=>zL#ekdt%uki!t5x?ocM1Bms`N#|fjLok zl@icU&MU2ZXpH^Qh{wFL@E#4Sj420VszD6srTeBRoJMfWc~dkFU`6lf45mier9r5~ zsFTO@A0#6Ojov|6OUZ_imYjH!_}%ALB=5%+z$G~GcFfUHY1lPN<|x_T`m1ic3X0sU zqN~YBK`W^pU+!uRqPCX80Y@}Loqw6kdJ{j!j~Duzg|4>|HYtKrGuFmAmdlt-(L~V& zp0+(6rRS~rD;S_rVKH= zP1zxog-0N{Z9(HFnS4s!F9n!aWU?wkfD%QR+L&5W3F)E`qHYK{EwC_qNbL5yheHQG zk=mY5V$DV`ef8dc6o5&V!|VMAqNPgd;`P7|wPI#^U{ODcyw4jp!gnsh+p7t=)6*$v zpXsbxA63DJPCoJy%$BFD_w7LY8wa(&Kbw+P3pUgA4RXhyg(m#ib>`BhZ{KPP9RE3j 
z4%!t7Kc5&4yOl-)Fu$)o(*MiJfh2=T^B}2qE~oJai-=|s2-DOuuR$1@+xRI`V7xIo zm29#v`D$hN8sw2LP#=otT8$+}ZJ?hR*UhI3DCxs(xUM@7{SPLNxpI*i;s(8-xhDk{ zwVxKQC8#@1jPW45jYZ88F_cgs+y#!o0)Q2P=E|Mmc~i%G)0!*KW6ARy;s>X(hPH!z zGg;e3!-&YA#KQ%i9@|IWf#7ig33nxI39(_d@4XFvwZ zcNlXNf{jJ~0Yr7N%l+F#bHW2qnmA@GwM9nH|>@jdYwv7<=#sw>^pRg=* z|1A(|Emp!k?axQzmpg3tyxXvjuMFS5g_!Au1X7oyEFU#m+$+UBZJl5rvxMd{i*=UfN(=x^18mfeq_tCS06vrO^SkXv41GpgLrVZF`j z>WU@P%JsuSRvu+o zoU?_YdRfmq&Dw$g4YKwa(%gZ4N$~1|qefwkxCOpi7{hr4$$9xraW!lmZwhFV(MR9> zbK};V8jND4cjhgckrs#BfMc$okp52ku#T832OnY-X77U7Uk%+4Hom!9$goFx}_ih2D^8i2G zIyW2o=#-Qp#DBz2I-0LbKia$G(VNAMi5;%y-qK(}y%c3g5j|^@R?AOHnB3$K7!;nb zX@mSoPzkVeD`x_GPta2#GNiY_C?;JhotW5?u8|py7vxTcqm7E1YKLN{ZAI;wwU-qU zQFxmPl5+#3YSlB^e18D@+|fH>+}=45S-mtd+}g)S0a(YGLE9AZxRE2Px?AB$Ab>tFesi)q#9w>%>t4IQQKkyY`uDYCIjhN+1ssV zLwRnDM2!Xg&#TJ?GYXmM6j1*{;o~07CI(p;poN%T?cuw4Cs*Htv=#CDEcVvVAN8vP z+NbA`^G*!qOsVnP5Xfeta{{xDZa>cKmSY8B>wzI7Tc?A#t6iEViBuXR{?p36!UG1s z`7kZt>VM=WrZoNBj2)}W3d{-&Js3CZNTlqwN0vxnTKpipC1(LA>U~V7 zOVub?*1Ea1&m>Yd&|>NS(K8FFx`k{U?3Wri!&EGrCwFuM&6kU&^&)3F=#wQPBQb}~ z&=89Iuc;izAUIJM)vXtwl&puqPM;YtbdzfRGe6?=-boUM4k&#i;h_d*}+d5n)5VMgh~>o|FR;JHbMH(~e+B;g6vF4D{6 z8Q-8TXxLHZO6|LxKi6mZQzsWPIGG!n+64!wYt$dA-X4H5Q4_AwTThrMM1x)n;-;uE z+ps}z#xSJz+9*!FGN6@r%e_d{4WZGv2h`YRoz*{%|8pVGNDB-NWSMT$3fiDv0dn1(E1#d6Wk3+L)MZQb~XqFs5pLTq+ZM^p{I{HG^urQ&RI zNJdS`ENZtTA&+S!+TOs5!>7zctqbT{)wivRO>N{dX-@G$+qWJfM-3|xtlPBGon+?D99XQ)GFoqX%kTffDt^>jN%^f z--z_ok>56F*Ip~q!Bf>qf5&Y|M`^`dbm}T+NUM3|v-uX^2%0HF$Q(o5^#r!6k6njT z8xK1SNrlz)l;@l2tE=goD<3*~q0OxLO&;e@?uvJ>63xs3VZf zQN6LmnS$(UfI2`;Nxzgfw8=NzKY<=ExPPkIlGR|thz$HNOvc@vs2!2@F?l1Cj zZSwCaHcTDOw06FP4O6ZnWSB4F-#YbwmU~K$Ak%gQ8l{6`HO0DLoW*1Q!M+?4sn>Y z*KlxY8(ISK{Dr(Zq7K`bQv}*)dMSo)|40EV zL1##HiBxQIl?Su+z?Gp49YEI!%8*7IS2pVx{xT265d8^_ ziTaDu9ryFikVEx)6eoNOc~*@B9UBO3FbM4c?Bmc=DH~P#i^g7SQ)H%v-|KuuGnV*0 zQi|EI%PzV^o{GopS`Rp<1+smKgH7kjztQIn+%55E#LG*ICfobWL$ks<+TiJODSaw6 z6^9M*(-u0%{16menHRI(5_48wdO#Gruezp_Z?r6$f5`G#%8{eJ>LiwxDG#h 
zH%2#W=6t~5a|ldE^6wQ7ghg7g?4&G|G(?pShR?+KXw37USTC^-vi;uiUU9%P;0U}m zuzb+AD&r9kc{$!=_}Vx>u`+b31vhc5OFquB%7LWBKy~f1{wuL+4q@e@UT0^@IPWoI ziTJd0G3^a@CC+diKriI&jxmI=k5<<-3%0nUsr?-%pK%nTxXXeHj%p4~MW*di%+JENo0GxsFMGPS`pcB-^Bxk*@I22*gw{+NO2nx7IS^?i z;Z>EVaZM9H*as@)@ZeqvtxuKd?+U$@QpqyWmT)j>Tq0ZOmf1j`vJlN}Se1QO%#AcS z5pD<--SaOI#$ho*3);IlI;jkm$#}8l=4&YnGg2_$avUe!->SBds0dbPWHqiqx`hk~ zH>@~&&~*;;Xa!D}rE>Nw!JfF#c-p<;_qp2B7P6AoLM1Em? z_(WFHR4Nl7F%k>>`(AJ23V}@-C-w!E#%h%X{FgcRR*6b(=b-|~yZ=;Li{i&v|FlXo zL_sN&R%8y(;zI_cu}GW;BFce^MrUfrOcpNoA)uF4+Ke&=JG8nN7BzB=l4+b)ad2l~ zyRcmq-lsb!F8mOYuODy-O8M+;njx?30-gN!JB}?|o_x-4jFcXX#yo9!-ifM`LjK-H zswE z(oJV{J3td4bBx3)I=jQ(Zo5^oFW~kml(9Fo>_j^Br*Scg_?d90T$2bi10z2Tpv}c7eVrhQQ8I7V zU2|5qo1m>8Kre(%qXG#kXKzY1kCxn zdK+VAX0~HyW@e7rF*9tl9Wyht6Enxm%*@Qp%pBwMotiiI+&gpTJ*lLwma6nsFD-Sc zs{j5QA1FOXye|A0+{W()vY#^U?&-rKCp}9S5bmccN51Bdo7GNe9_Vkzm+Xp#-U0LK z$B2H1*Ek2WHpZ`i%0a({Y@tQ?TL0qU)Q}= z2OYEMRt}dh+a0`3(-o}1C_AawJIIsMtDuwg<3%w@O@vit0A+}?ai$J{;R^_+nW1*M z&IE=#vf4A((-|17ekxQhH8cJ88maNrouLPsA6;)E6|Bj(D)Al)uvN0$mr~Z?PwMCu zsg~rDZGD*~jkBb6!al`({W9IfeVk_ZexL#+tt z+MnjcUb4M=1DI-Vd$6P~wSXi3$x!DL{3#Om%VsyL`3y(SOXaN>Ztgn`yZ*DgqXilX zGfZ()jbu5loXBYdH=ER>A`i-p{67J4d~gAS7^*3*uwPdDoF`7M6m^W564AaVSq~AC;NSP! 
z#qZ}8=Ud93+hdg8i7OkvrAd9Al^ApuNM8%gGygT2LMXAQw@R_au1RU`uQV|vjX${4 zrf7XyVJF$7V-myfl|s$S)pAY%;vAM>`MuC8QQ1twM7{j2J!sCVzJE&vgAGj0Us0^u zlCVsKn{7To)lURN2OmfEORw~(matS8?8X~sXwxn#D(EoL=7%uUjIXHVLU*|jg5KK% zg4kdfBVx>zBi6z7>P3+uJcof>+VIZKAH_239Hh`SiQX#ehihxw3lm+r{W zA0io*4*sUfRh>WTIQimCc%vP8?iNrDe4G$gW2ZTL9ystQJHOXlZ(;SM(Jwp<4#s}x zhH9RE{1Y_Jz&3eVUfu!mQQdge-*Ea8McUxw(1n1*bDyV{VMB;_=}D9+ExEv73&wuc zo(a6e_B?w|ukAVw77Xc}9N36W3m)~fT^2f8%`g~B!)NZTU*OAPyyF|ah_<&o(pody z#x^5=n6yMavQ@VjXbmzqYfpnPd!ID zU;zSzGq8!ne^~G!oZ7L&xw#${2ZCiUmy5p)TwDB_kG8GVt~af9nd^At7Ys2Y;N8d6 z*{XQ~8-$!A`Uz~>#%iKO3%Ng-fKOFfwZ&syX;cbr7k??^AeEBn3g+bWEPnJGLvDq;$YXqq@VE)2J@n$;p{?u7ET4^0{__mLpLB;`v%( z!5{D@Z@)F1LlDavgs?T(dpSDpt{k-D{-WlwnXgktn~QOYFw(cF-#uzrz1K)u*VEQj z(#e}lqAN^neF)V?Z#cJeIKgO&AJ$yGP4}%wa79PYi6UNFka#k-P8QMU1?b(r>PMWv zxJ=#-mJKs_o3)}dDYX!d(RP+HKLKlnckzV~1~&tQl59flQSmUViGGXwZ2j4;e}>uw z=N@E{@_TZdbm4sAp~zl5gbr{?<`GKxOj=E z%Y$ciWzl}<(JNF%|BaYcC>%*rQ0q4RMF3=yTQr>uPLZ_xd7wqm>bmRhT!<_=HsmGs z2NvNl!_fi<{2_@;kuuV9$;E?pid7jK9uaHWc_YUvThcj!x2QS)+Vm1~CwM2$H2@Dx zr(nbQ(qfVAL8f8dvetEbl7dF>BzCt+MQK5eFne;T5B3ypAl4ET-O$vg{?x+@Z(4B> zB!OtFxnY$D*^)4C)?NO!`~u{aZ3bqjGGM$moTfxt1onW}kX_Qb>5PJ_%sc6wMp3=SY1X^A#> zYFGZPpLt}tg$Z&HTLTJ5h3?`Ci`RWIFjw*o!{8VTB&uJdp_ppr0;#N;B;WX(v1S5^ z6(2!z(hFkU3nCgk=LIhDm^fKWg*fYlwXVmbZ2<{XM3{Cp3~SC*jhiAeHwV70U8r@^;ifR6Lq!nKjw(gB7MVpS6wE1E>cGg|QcM~{Boa|~m zNxYx^UqJE~yVK~0j=+Y3sDA~BMBYWBg{8(3_F=1ZQBqO!Pdn*CK@uPZ1{1T;yZ#W;hB(Mq7E zc|up6B8iYd82oiG|BU;#X^LO>UIWhXg8KL=@OD;CkDfwpOh{^EgCRSIeRiDrO|_Q= zX-Eq#WzU;#nTo->jmCBOXb|jX)JK=x_@=iS`n5=p($>_zh zyaRpT3LdsB2qB+2UGMxF334PHT*7iFEVrgXU@vKe7uT~692e@l8d3jgh|MuNj~PK7 zm2@yYVZrf%65K>2h``~lAX2-slaoO?WOCD1tEcb&INKndC<@vLkcbnqlI<7qk+3GY zoM0bd+w!+_o{REFwzu|XdA(Huo-Hz6-Ql*O>FgMLw&QK@?CgN;*cM#9Dzg2MGwLj6 zqr)A;&sKBEys;u(@So{1L4qU(KBGkK$(TanXEksYC-Y59Yz<*Yft@N&+0_P?F_YlC z_7|b~x!C}HVmxt6Yzb$hM64u@za55a?^gPDoS6-ILT7A(hR z{76K@HaYSY|9UvN7dhiios@@9EYAyj;{?FETtoj{za!d)+XYk-bj54l7nvQLyQ>!G$P&DE5Qk6g%Nr<)GpGv%g7;{hL)x4XD!^BQnxuF 
z$W$2Psq+Os70vUStQ#yT$O^*fs69p4C?n6$JS%)EN~A(b?E^v?WZ$Sx%-?F@?!k>g zzCec>-k0RQl{5%HY2WHp`*Qnn@#KOJodv$*_9L%KmmAJ_l7qlv$@oi7Fe-r0+j07~ z@xm@@*c6=Zec@Dkx=W&ks)B%n^2NK^5sH5@g$^66=mdxTI)n`($xD8uCQYqk&I7X1 zlVjrHHEPfrJA?+wPMzX{C?JgFCK9_&)m!~6nP>umsyaqsC<++@wvM&jE!;si7D)<* z(alu2ndmT2P6@3`T)=PpdpWo^GAp-2X&M%f4H^Y?*aEa6{&b%;t@$Psss+9t*g79I zH1Eih0VmdwREvXo?)fe)imn|kbnpH9UT9YoIB&ZkV|grYMr;I-Q?|U&≪ zwb8ut42-`+ify|kekRdV$LgFFr!K*dW>GM0Qio5cj?N$nTgOkzbkf9ECxi1L%A4IL zTbF_B!i=ZEqawtNA8~?r&`|r>CW_VSSo5HYF=-M^+Vo|hxaF&Lv~|~xaawFtH>1jk zZ;%@wzs=;jdU6peRirw~jl$TT{F)rh1t8hpeKkOd(E`+7A( zdE4pXcJk$sntbIkXy+mKa2zSDjmLN&MD=`vKo7Q3wv+&-j@M<*Xt>}sm}osM$mD@U zo#EsCkmu8M4my#xn?64@QOJb<@iD5L({At)^kQc8z6eot{ zMf$L4>Hc9}2vx!mxI5L`Jxx+w=$}6bXl1N7iORhwtaq@BdQOSvD>?5x7as0|j+nI; z@GoynekLUBDf?L5YN6YYwa_+)b<1Uh^)i;?DTcoQ0%;~Hor}H5Wixk?1-uQGvFZyC12mp~ak2nFK0O8c zlQ!fDyeEAIc`~&4+N0J`jU;swA(*b7VwXKmu{jsL6HGt+xnio3YOoh!`)b7y7tH|U z-SgTdT`zTa_vXQ}u&GfM&7F*P_xZJcg#uKUOmoOf$9oqd(M;qvFk?&vk-Dkuazi8$b9Fv5> ztMYTBFq!Y1bW9w5-B^QQKpp)Aqq+>Bw2i z46b6h6E*n7t*3somyKJ%Rg-!p6n@>_8{Ydhe0ofYw3j8@X`G;rwBwV>pXr^;40XSl zkdisO7PoM?orWm`xyuL>xbH^Jg|8lYOsbhQ z&Zy*+C#tDado4{iMr*@{aahi`S|3o0eYbZ?dzElZigb6shYrL2CtB_U+mv6}M3zuq zz+iCIC##F3(UG5<%eLWumz`eRH5lYBB5ci-79DLrZMmAOut*is*KSVkXT_MAbQ+2; zKMgES5*D>mH)@rWT!bxp5P@92N3uU?CV#M-AoFN+cBFvs+NSUCEctilH<-wGn20VF ztk>v7(5$6BZO?(itoX}pGf#mZAYciEQ&toV%+Ei0k%y+`>}lL+I!+2ra0*^$%Qzix zkvR;y71P^d9v+5XulJO3X44H)Q$)y-2@Nz0w8;KNexdFdi~+GRP%9Oy@%6Y6MR9 zeOx9AG87_`sv@n-Dv-UWBrM+EWihe7-DOcsGENlfG3vBQU`bJWAL<=lMa zhyF>QdzW*^!GCh%BLp_8$t-%SDDPVMr1ST9-l8>mL3;B3^8Cf(%2}Ljo6;vZF;~Rt z{A|I?6vwz9RPa*JKA@E6oY$f49_vK0+I}^`(vd&&?X*nh4Vla%>>Z_b#wt!_>5fo; zkB4!_3MO74&rs^O{^cRrLITzIz#huV{-qs_ZuLZD2Ajnj`umP;9KMT$m|-ilU~=1q zz2P-pK5k3A^V5*Q*7|^%fGoCYg9NWVcY{DD#4)udmFM)`(KEAm4&>e7DH*Aw?De>)4%v3P?>rvJb$vXg zP9|(_tJ3bH`wiK`l;*uT{tk-cC8HMMmc*d~k>PT&Nik419yro-er<4NQ*(M}jF zQaL0{kfv5J(-S>^tH~+~C)gm&8O{30H*?g`>(_%l> z<&`>a@>1c?m;OgwW(`MrO-Gzt*1EL1pHJOz6byO`0kb?Uck@Nah;fUerHcIQD0q1O 
zXc5?U7ntPpaQtz6b87{IH5Nw#-=4`EfeEc}a~EfCj2WPzl$-TuTh|Fw0d`T1BQ?GP zeJZ2I>X%z3b}aZI5)tuxdVqIaz26n?3S>KmR}(u288Rqh+UxZt(Ewuhv_x1shgTx$ z)!wn}0hZ8F49k45C&0COByHaDxJI>mGjUte6d|E-?nphy$Z)H~ug1{K6k1@w8FhlT;>lWuG)N+TNvjI3o5Ll9kWi<-W^xN*8*w zBHTQNTi>OfiTY$w+~l!)E%iiY^)zhvW)LoZ9U=`$7Z*YzYDD4d4dJ1McX!UpUT68} z6wxRV(eN>|NqX-GpK^$K_X<*crNL^=o~zZ>=xwGm+H>YxY^@q+(ZL$ooUAPGsExMn zl#3QrQzNPLg$P|}w5z^vuwW8{R0|E=ki*XDxJ|l8R3&^E2^nYstT46ZOHGNYGa>aoTNQY`;8LglN8SY2?VG&- zofPIXUJ2aOBJ$v#` zH(2-g1mSzBoacN+FXiG5l^lO{pAGW2r+TW?m1C8Rtt{n)D=DjWdwC+3LOrTe-{tG% z1YaHSFEa~7IzdmY;E(YS)K=`En3^*vNkj_>SV3FId1aM9LgW>OL$U3!i)w*XhDHzI8($jgZZoufWXSY43vrfdTzhgLrN& zY5&TCJipU<@syeC&Mt))!bPw4jY(*|sZn^F4~xdk(Dk6;^k6)i*e+zPV?_PZt<+%>7ffx3O2eXZ;R3z>;rnp=c!%(cU29WnQ-24{%nv-$$}b0FYZnfiNJwP zh=BRl^KhN1#QUBfm%Gbnv3lpxgxG6=sg%y1kZ|b$P@kuObPH7bfK-s#vW*(?ds6y- z85hjKfqn@_?{HSIB_EK({;VyG7=%BE*D3)>lZ(Sg0wvI^94lN+!q@{FM{HlK6KKg} zl~LB{w+&=<{#;m{$u_7au&K-ZY7q_7Bs~k=@P#vPmv6`2gnv$=E}1R!X_fS=vb*EC z1mB--XWme9#BCqW$TD|BnPn-&c>FOLWJ=yxDrDaza?Sn-2-`8gi+(}xh+W)~tjWga zHE`Uu_0FCXDBR49LAYPF=-}&DjrZs0Uo;{CKA?oh8O-qq11AOS0zHP9FJcG_wM8&+ zI=_X0L4X?7K)q=EiI@b6n*g^pwcL4IH8J`oJDadgl8TmAP*@vG*hN`%zN6cEkCuOh zcTKC8P53_17Y#fcCFu3=9S7GL6CCoRV*Gw+{(KKV0^Tn&%cEwkrYI9;Jl@s*XK)B>Z9 z@fJ6K*3fujwuX|GPr+JX;`AdxGwBbvCo}Cw3*6{WiWymN=LQoqm!iqGF{5t-VXsSw zcU0T6D{p)rgvD?NdOf5)&F0rD++k@Z<-*{DUO`le1lK7#5(7v0zok`CuFZ|fIam>2mx@E#t1nW&Dc3sGdNn+2{x;3avn*$2cR|l0yrPD?9<`Xonek&I z(w;qX$s-2G`!Z%o=*+vWKA}!WV)q??->Hv0v(CwM2K3d>)K}$@3a0|=>FW=BaCn;= zK$MWA^UyPM*K9htf<z}UGDbe(>3>H?N2xeE6)AVrOPGlz7T$BV((V4w6&hq%m)-C zfI~lOasA#-nIvd-$=R5}4?*$h>BFnhDq?fZxpsK((pExFK&W-tS#$)j&%?C2?w6z& z>j!y)GXoe@H>tx}H(GX^84a%KQt3k#@^r`{!hOGqX`_=zn3^7ab-~36&AQ0xQ@a^Y zWc(T!k%{6j1&OS>xF zp7)fUUqj1V_WZ_9S3A_hFLbCl?bA4K!kx3c6gVR{1IRmbTRI51=s3&Wa0Rn_Im%*4 zBhv1>G)1AGR_hi)x^~%H9@XL~O0a0s(xO^lzFJgQ}c~ zu7wDahU9r!eRRKjvaQrUu-J*ewYTtVLV?JR{5GQMrAE*?6;cJuBOwi!Q$$?tHf_I% z4URsjW!dA!bYz$cF(6#duS3T=HD;#cMQWeFh;LH5%IKa}DagVK`VDWAvr+&miDU{y zHSSu`GAj}1sYs23`lTzJ?83Ikc8pbJXvJfGy)UP&4V 
zWQtn^+te_Ne)1>Pb*HC8448h?Tcwd`OlAR15qk5b-7P;(c*upS09GC>e^0;Ag@wZ^ z19H(231SL3GO#*r2B})|4`9Cn(*wZhb_OwJyQdI{bq=SLG=x4)Mz*$P`RsG+n)$=I zFEw7f%kLHg|S zigf8noLWQW4N*I8_>OPoJDbcFwx>GmnbAyHbSdIlGa#A(3;}6)G<`JLl z8GD%u#H@Ohd52zPDnds0(;gbJk86k*sv>s4|5gGiUNd>AtmY7+G3o4oVPBXd6l#nG zu+AmhfchC5zk+NiikwH+S|^^#gi8~42B0@79%(T`GB@TGP=&m|ip(@l2 z$^U~d+OxfP6Xx8MFE1d`G_s(I+F5d7qg3_u9PlVtAO7^*Du^78e8An=i{*cHcia0w zh!_4??5@%x9B=?X2P1IA|GRT4BD#q&Ut_5DA$8;*OMj^46U@P!kD?e*g_S zqj+W#*nECDTJ0AH3L0jd{)$nL#1+WW-=Ioi16akcWG0YkJ0_wKF|?|QaKwMB2A`xQdEHYWeA;+hd8R3a>WWEeu2BofVM>$`!%`b>(cLC zl1XnH6~op#wA$?2yaao#;$<^Y?4&!{F=FqmtHsK`++VKmh}T8M%Y;r zxct*VZp#+MMAUu@|B__umpzUqo=er(mWQC>y>9zXvi%$FMO-G2`LN1t0x3F>|FciR z`(6?x=hxD$ZI@EYuNmDlk5|RDp$S+6?SP6-+PVQ0Hsc))1n5FUFaUpTsWkIqbx``i zEC>hi!Q>x?4g)L%C$5(4)_(k8=2XMufk4|rA%JjTj-^GZ{z(E-uNWf!{Ozmv&}6I3iLNrmJ^2oXk|T9P)Gnj?}L~OpGH3fYh3)-P8^@ zzfj(T>7+fvmoFlrU+2QJHWAdB~=Co-=m+UU`0@X zf{hj@ghlw}@&XuIlh0X-tr&x+a!%rD~*-U;kBT@mI%fXSp1~Il^%@rx7tnA>A#Ea5(Kskq- zM%+q7tE1UrRN612GE05_0qP&uK5~Wy0H|JUQ!wJ#GmQ7 zY!Ps7CPR34ZtMxZXjbwhU@Hlk zq?X@G|H=9jyz#fpZ>dz8IYTffG_U}`3Ml}9_b*Hvr@vvETAR528x8R3bN~hVztieM ziM1r)YR1H{004bm008%2Xfr;4LzDO>^=~lvRC7Kxo&Oys=FP7~J)7!#A_f2$-2?tD zoQkP`1GBLCn{Z<6-20&aKf?JB!?G^o@yL)*vocUV0D$yg8tA_9H%y>|v5A9;v7V!e z)4w$oprK;{J-zf_@9<>E_xeD-!{LI<^*oZ#Q!smYjb#gGU zb+k5cvH;rt!}I2^*}4oFGso%h&+z_F)c@|y_E)MsA;|yyf&7{Yl{sjl+%IpsLzc=r{t1JJCix&MG++Vboe Import/Export, Scene properties", "wiki_url": "http://steamcommunity.com/groups/BlenderSourceTools", "tracker_url": "http://steamcommunity.com/groups/BlenderSourceTools/discussions/0/", - "description": "Importer and exporter for Valve Software's Source Engine. Supports SMD\VTA, DMX and QC. New Fixes BLENDER 4.2. + BST 3 3 1 (imp button )" + "description": "Importer and exporter for Valve Software's Source Engine. Supports SMD\VTA, DMX and QC. New Fixes BLENDER 4.2. 
+ BST 3 3 1 (w_model)" } -import bpy, os +import bpy, os, re from bpy.props import StringProperty, BoolProperty, EnumProperty, IntProperty, CollectionProperty, FloatProperty, PointerProperty # Python doesn't reload package sub-modules at the same time as __init__.py! @@ -46,6 +46,8 @@ if func.__module__.startswith(__name__): collection.remove(func) + + from . import datamodel, import_smd, export_smd, flex, GUI, update from .utils import * @@ -179,8 +181,9 @@ class ValveSource_CurveProps(ShapeTypeProps,CurveTypeProps,PropertyGroup): class ValveSource_TextProps(CurveTypeProps,PropertyGroup): pass -#### Import and Export button s -class SMD_PT_ImportExport(bpy.types.Panel): + +#### создаём кнопку:: Start +class Create_SMD_Utils_Panel(bpy.types.Panel): bl_label = "SMD Import/Export" bl_space_type = 'VIEW_3D' bl_region_type = 'UI' @@ -190,8 +193,520 @@ def draw(self, context): layout = self.layout layout.operator("import_scene.smd", text="Import SMD") layout.operator("export_scene.smd", text="Export SMD") -#### Import and Export button f + + layout.separator() + layout.operator("object.create_one_bone_and_assign_obj", text="Create W_model") + layout.operator("export.create_idle_smd", text="Create IDLE SMD 0") + layout.operator("export.create_qc_file", text="Create QC File") + + layout.separator() + layout.label(text="SUB_Model Setup:") + layout.operator("export.submodels_qc", text="Create QC File SUB") + layout.operator("rename.collection_based_on_smd", text="Rename Collections") + layout.operator("open.qc_file_with_studiomodel", text="Compile") + + + + # работать могут, пока неактивны + #layout.operator("checkbox.method", text="chebox method") + #layout.prop(context.scene, "checkbox_typemode", text="chebox typemode") + + + +#### Iсоздаём кнопку:: Finish + +#### Создаём кастомный экспорт OBJ:: Finish +class EXPORT_OT_ObjCustom(bpy.types.Operator): + bl_idname = "export_scene.obj_custom" + bl_label = "Export OBJ (Custom)" + bl_description = "Export the scene to OBJ with 
custom settings" + + # Добавляем свойство для хранения пути экспорта + filepath: bpy.props.StringProperty(subtype="FILE_PATH") + + def execute(self, context): + # Устанавливаем параметры экспорта + bpy.ops.export_scene.obj( + filepath=self.filepath, # Используем путь, выбранный пользователем + use_selection=True, # Экспортировать только выбранные объекты + global_scale=1.0, # Масштаб + axis_forward='-Z', # Ось вперед + axis_up='Y' # Ось вверх + ) + return {'FINISHED'} + + def invoke(self, context, event): + # Открываем окно файлового браузера для выбора пути экспорта + context.window_manager.fileselect_add(self) + return {'RUNNING_MODAL'} +#### Создаём кастомный экспорт OBJ:: Finish +#### W_model +class Create_w_model_Bone_1(bpy.types.Operator): + bl_idname = "object.create_one_bone_and_assign_obj" + bl_label = "Create W_model" + bl_description = "Create a single-bone armature and assign the active mesh to it" + + def execute(self, context): + # Получаем активный объект + active_obj = context.active_object + + # Проверяем, что активный объект — это меш + if not active_obj or active_obj.type != 'MESH': + self.report({'ERROR'}, "Please select a mesh object") + return {'CANCELLED'} + + # Создаем новую арматуру + bpy.ops.object.armature_add(enter_editmode=False, align='WORLD', location=active_obj.location) + armature_obj = context.active_object + armature_obj.name = "Armature_W_model" + + # Переходим в режим редактирования арматуры + bpy.ops.object.mode_set(mode='EDIT') + + # Удаляем все кости, кроме одной оставляем 2 кости blender_implict и Bone_W_model + #bones = armature_obj.data.edit_bones + #for bone in bones: + # if bone.name != "Bone": + # bones.remove(bone) + # Удаляем все кости, кроме одной blender_implict + bones = armature_obj.data.edit_bones + for bone in bones: + if bone.name == "Bone": + bones.remove(bone) + + # Переименовываем оставшуюся кость + #bone = bones[0] + #bone.name = "Bone_W_model" + + # Выходим из режима редактирования + 
bpy.ops.object.mode_set(mode='OBJECT') + + # Привязываем меш к арматуре + modifier = active_obj.modifiers.new(name="Armature", type='ARMATURE') + modifier.object = armature_obj + + # Устанавливаем арматуру как родителя меша + bpy.ops.object.select_all(action='DESELECT') + active_obj.select_set(True) + armature_obj.select_set(True) + context.view_layer.objects.active = armature_obj + bpy.ops.object.parent_set(type='OBJECT', keep_transform=True) + + self.report({'INFO'}, "Armature created and mesh assigned successfully") + return {'FINISHED'} + +class Create_w_model_Sequence_Idle_1(bpy.types.Operator): + bl_idname = "export.create_idle_smd" + bl_label = "Write Collection Idle SMD" + bl_description = "Create a collection_idle.smd file with the specified content" + + # Свойство для хранения пути сохранения + filepath: bpy.props.StringProperty(subtype="FILE_PATH") + + def execute(self, context): + # Содержимое файла collection_idle.smd + content = """version 1 +nodes +0 "blender_implicit" -1 +end +skeleton +time 0 +0 0.000000 0.000000 0.000000 0.000000 -0.000000 0.000000 +end +""" + + # Записываем содержимое в файл + try: + with open(self.filepath, 'w') as file: + file.write(content) + self.report({'INFO'}, f"File saved: {self.filepath}") + + # Обновляем путь экспорта в настройках сцены + export_dir = os.path.dirname(self.filepath) + bpy.context.scene.vs.export_path = export_dir + self.report({'INFO'}, f"Export path updated to: {export_dir}") + except Exception as e: + self.report({'ERROR'}, f"Failed to save file: {str(e)}") + return {'CANCELLED'} + + return {'FINISHED'} + + def invoke(self, context, event): + # Получаем путь экспорта из настроек сцены + export_path = bpy.context.scene.vs.export_path + + # Если путь экспорта указан, используем его + if export_path and os.path.isdir(export_path): + self.filepath = os.path.join(export_path, "Collection_sequence_idle") + return self.execute(context) + else: + # Если путь не указан, открываем окно файлового браузера + 
self.filepath = "Collection_sequence_idle" # Имя файла по умолчанию + context.window_manager.fileselect_add(self) + return {'RUNNING_MODAL'} + + +class Create_w_model_QC(bpy.types.Operator): + bl_idname = "export.create_qc_file" + bl_label = "Create and Write QC file" + bl_description = "Create a collection.qc file with the specified content" + + # Свойство для хранения пути сохранения + filepath: bpy.props.StringProperty(subtype="FILE_PATH") + + def execute(self, context): + # Содержимое файла collection_idle.smd + content = """/* +this QC generated by DeathDemonSaxofonovich in Blender +*/ + +$modelname "w_model_new.mdl" +$cd ".\" +$cdtexture ".\" +$scale 1.0 +$cliptotextures + +$bbox 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 +$cbox 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 + +$eyeposition 0.000000 0.000000 0.000000 + +$bodygroup body +{ +studio "Collection" +} + +$sequence Idle "Collection_sequence_idle" loop fps 0 ACT_IDLE 1 +""" + + # Записываем содержимое в файл + try: + with open(self.filepath, 'w') as file: + file.write(content) + self.report({'INFO'}, f"File saved: {self.filepath}") + + # Обновляем путь экспорта в настройках сцены + export_dir = os.path.dirname(self.filepath) + bpy.context.scene.vs.export_path = export_dir + self.report({'INFO'}, f"Export path updated to: {export_dir}") + except Exception as e: + self.report({'ERROR'}, f"Failed to save file: {str(e)}") + return {'CANCELLED'} + + return {'FINISHED'} + + def invoke(self, context, event): + # Получаем путь экспорта из настроек сцены + export_path = bpy.context.scene.vs.export_path + + # Если путь экспорта указан, используем его + if export_path and os.path.isdir(export_path): + self.filepath = os.path.join(export_path, "Collection.qc") + return self.execute(context) + else: + # Если путь не указан, открываем окно файлового браузера + self.filepath = "Collection.qc" # Имя файла по умолчанию + context.window_manager.fileselect_add(self) + return {'RUNNING_MODAL'} + + + + 
+bpy.types.Scene.checkbox_typemode = BoolProperty( + name="Use Automatic", + description="Enable automatic processing", + default=False +) + + + +### выбрать режим экспорта. +class Checkbox_Type(bpy.types.Operator): + bl_idname = "checkbox.method" + bl_label = "Choose export type" + + # Свойство для хранения пути сохранения + filepath: bpy.props.StringProperty(subtype="FILE_PATH") + + def execute(self, context): + if context.scene.checkbox_typemode: + self.report({'INFO'}, "use_automatic_submodel mode enabled") + # Ваш код для автоматической обработки + else: + self.report({'INFO'}, "use_automatic_submodel mode disabled") + # Вызвать "export_scene.smd" с параметрами + return {'FINISHED'} + + def invoke(self, context, event): + # Открываем окно файлового браузера для выбора пути + # context.window_manager.fileselect_add(self) + return {'RUNNING_MODAL'} + + + +################### Создать QC файл для контейнера субмоделей + +class Create_w_model_sub_QC(bpy.types.Operator): + bl_idname = "export.submodels_qc" + bl_label = "Pack submodelslist into QC" + bl_description = "Pack submodelslist into QC" + + # Свойство для хранения пути сохранения + filepath: bpy.props.StringProperty(subtype="FILE_PATH") + + def execute(self, context): + # Получаем путь экспорта из настроек сцены + export_path = bpy.context.scene.vs.export_path + + # Если путь экспорта не указан, используем текущую директорию + if not export_path or not os.path.isdir(export_path): + export_path = os.path.dirname(bpy.data.filepath) if bpy.data.filepath else os.getcwd() + + # Получаем список всех файлов .smd + smd_files = [f for f in os.listdir(export_path) if f.endswith(".smd")] + + # Разделяем файлы на две группы: + # 1. Файлы для $bodygroup (без "idle" и "seq" в имени) + bodygroup_files = [ + f for f in smd_files + if "idle" not in f.lower() and "seq" not in f.lower() + ] + + # 2. 
Файлы для $sequence (с "idle" или "seq" в имени) + sequence_files = [ + f for f in smd_files + if "idle" in f.lower() or "seq" in f.lower() + ] + + # Если файлов не найдено, выводим сообщение об ошибке + if not smd_files: + self.report({'ERROR'}, "No .smd files found") + return {'CANCELLED'} + + # Генерируем содержимое QC-файла + qc_content = '/*\n' + qc_content += 'this QC generated by DeathDemonSaxofonovich in Blender\n' + qc_content += '*/\n\n' + qc_content += '$modelname "w_model_news.mdl"\n' + qc_content += '$cd "."\n' + qc_content += '$cdtexture "."\n' + qc_content += '$scale 1.0\n' + qc_content += '$cliptotextures\n\n' + qc_content += '$bbox 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000\n' + qc_content += '$cbox 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000\n\n' + qc_content += '$eyeposition 0.000000 0.000000 0.000000\n\n' + + # Добавляем блок $bodygroup + qc_content += '$bodygroup "w_models"\n{\n' + qc_content += ' blank\n' + for smd_file in bodygroup_files: + file_name = os.path.splitext(smd_file)[0] # Убираем расширение .smd + qc_content += f' studio "{file_name}"\n' + qc_content += '}\n\n' + + # Добавляем блок $sequence для файлов с "idle" или "seq" + for smd_file in sequence_files: + file_name = os.path.splitext(smd_file)[0] # Убираем расширение .smd + qc_content += f'$sequence Idle "{file_name}" loop fps 0 ACT_IDLE 1\n' + + # Записываем содержимое в файл + try: + with open(self.filepath, 'w') as file: + file.write(qc_content) + self.report({'INFO'}, f"QC file saved: {self.filepath}") + + # Обновляем путь экспорта в настройках сцены + bpy.context.scene.vs.export_path = os.path.dirname(self.filepath) + self.report({'INFO'}, f"Export path updated to: {bpy.context.scene.vs.export_path}") + except Exception as e: + self.report({'ERROR'}, f"Failed to save QC file: {str(e)}") + return {'CANCELLED'} + + return {'FINISHED'} + + def invoke(self, context, event): + # Получаем путь экспорта из настроек сцены + export_path = 
bpy.context.scene.vs.export_path + + # Если путь экспорта указан, используем его + if export_path and os.path.isdir(export_path): + self.filepath = os.path.join(export_path, "w_model_new_s.qc") + return self.execute(context) + else: + # Если путь не указан, открываем окно файлового браузера + self.filepath = "w_model_new_s.qc" # Имя файла по умолчанию + context.window_manager.fileselect_add(self) + return {'RUNNING_MODAL'} + + +import os +import re +import bpy + + +class RenameCollectionBasedOnSMDFiles(bpy.types.Operator): + bl_idname = "rename.collection_based_on_smd" + bl_label = "Rename Collection Based on SMD Files" + bl_description = "Rename active collection if an SMD file with the same name exists" + + def execute(self, context): + # Получаем путь экспорта из настроек сцены + export_path = bpy.context.scene.vs.export_path + + # Если путь экспорта не указан, используем текущую директорию + if not export_path or not os.path.isdir(export_path): + export_path = os.path.dirname(bpy.data.filepath) if bpy.data.filepath else os.getcwd() + + print(f"Export path: {export_path}") + + # Получаем список всех файлов .smd + smd_files = [f for f in os.listdir(export_path) if f.endswith(".smd")] + print(f"All SMD files: {smd_files}") + + # Фильтруем файлы, исключая те, которые содержат "idle" или "seq" + filtered_files = [ + f for f in smd_files + if "idle" not in f.lower() and "seq" not in f.lower() + ] + print(f"Filtered SMD files: {filtered_files}") + + # Получаем активную коллекцию + active_collection = bpy.context.view_layer.active_layer_collection.collection + + if not active_collection: + self.report({'ERROR'}, "No active collection found") + return {'CANCELLED'} + + # Имя активной коллекции + collection_name = active_collection.name + print(f"Active collection name: {collection_name}") + + # Регулярное выражение для поиска последнего числового суффикса + match = re.search(r"_(\d+)$", collection_name) + + if match: + # Извлекаем числовой суффикс + collnum = 
int(match.group(1)) + # Удаляем числовой суффикс из имени коллекции + base_name = re.sub(r"_(\d+)$", "", collection_name) + print(f"Base name extracted: {base_name}, Current suffix: {collnum}") + else: + # Если числовой суффикс не найден, используем текущее имя как базовое + base_name = collection_name + collnum = 0 # Начинаем с 0, чтобы следующий индекс был 1 + print(f"No suffix found, using base name: {base_name}") + + # Функция для проверки существования файла с указанным именем + def is_file_exists(name): + exists = any(f.lower() == f"{name.lower()}.smd" for f in filtered_files) + print(f"Checking if file '{name}.smd' exists: {exists}") + return exists + + # Если имя коллекции (без изменений) еще не занято, оставляем его + if not is_file_exists(collection_name): + print("Current collection name is unique, no renaming needed.") + self.report({'INFO'}, "Collection name is already unique") + return {'FINISHED'} + + # Находим следующий доступный индекс + new_collnum = collnum + 1 + print(f"Starting search for available suffix, starting with: {new_collnum}") + + while is_file_exists(f"{base_name}_{new_collnum}"): # Пока файл с таким именем уже существует + print(f"Name '{base_name}_{new_collnum}' is taken, trying next index...") + new_collnum += 1 # Увеличиваем индекс + + # Формируем новое имя коллекции + new_collection_name = f"{base_name}_{new_collnum}" + print(f"New collection name found: {new_collection_name}") + + # Переименовываем коллекцию, если новое имя отличается от текущего + if new_collection_name != collection_name: + active_collection.name = new_collection_name + self.report({'INFO'}, f"Renamed collection to: {new_collection_name}") + else: + self.report({'INFO'}, "Collection name is already unique") + + return {'FINISHED'} + + +import os +import subprocess +import bpy +import sys + +class OpenQCFileWithStudioModel(bpy.types.Operator): + bl_idname = "open.qc_file_with_studiomodel" + bl_label = "Open QC File with StudioModel" + bl_description = "Find a 
QC file in the working directory and open it with a program containing 'studiomdl' in its name" + + def execute(self, context): + # Получаем путь к рабочей папке + working_directory = bpy.context.scene.vs.export_path + if not working_directory or not os.path.isdir(working_directory): + working_directory = os.path.dirname(bpy.data.filepath) if bpy.data.filepath else os.getcwd() + print(f"Working directory: {working_directory}") + + # Ищем QC файл в рабочей папке + qc_files = [f for f in os.listdir(working_directory) if f.endswith(".qc")] + if not qc_files: + self.report({'ERROR'}, "No QC files found in the working directory") + return {'CANCELLED'} + + # Берем первый найденный QC файл (можно доработать для выбора, если их несколько) + qc_file = qc_files[0] + qc_file_path = os.path.join(working_directory, qc_file) + print(f"Found QC file: {qc_file_path}") + + # Ищем программу с "studiomdl" в названии + the_compiler_files = [f for f in os.listdir(working_directory) if f.endswith(".exe") and "studiomdl" in f.lower()] + if not the_compiler_files: + self.report({'ERROR'}, "No studiomdl executable found in the working directory") + return {'CANCELLED'} + + the_compiler_file = the_compiler_files[0] + the_compiler_file_path = os.path.join(working_directory, the_compiler_file) + print(f"Found STUDIOMODEL Custom file: {the_compiler_file_path}") + + # Запуск компиляции с использованием вашего кода + try: + # Кодируем путь к исполняемому файлу + edir = the_compiler_file_path.encode(sys.getfilesystemencoding()) + + # Функция для получения директории файла + def filedir(some_array): + return os.path.dirname(some_array) + + # Запуск процесса + cmd_process = subprocess.Popen( + [edir, qc_file_path], + cwd=filedir(qc_file_path), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + shell=True + ) + + # Чтение вывода процесса + while True: + output = cmd_process.stdout.readline() + if output == b'' and cmd_process.poll() is not None: + break + if output: + 
print(output.strip().decode()) # Вывод в консоль Blender + self.report({'INFO'}, output.strip().decode()) # Вывод в UI Blender + + # Проверка завершения процесса + if cmd_process.returncode != 0: + self.report({'ERROR'}, f"Compilation failed with return code {cmd_process.returncode}") + return {'CANCELLED'} + + self.report({'INFO'}, f"Successfully compiled {qc_file} with {os.path.basename(the_compiler_file_path)}") + except Exception as e: + self.report({'ERROR'}, f"Failed to compile QC file: {str(e)}") + return {'CANCELLED'} + + return {'FINISHED'} _classes = ( ValveSource_Exportable, ValveSource_SceneProps, @@ -234,60 +749,94 @@ def draw(self, context): import_smd.SmdImporter) def register(): - for cls in _classes: - bpy.utils.register_class(cls) - - from . import translations - bpy.app.translations.register(__name__,translations.translations) - #### Import and Export button s - bpy.utils.register_class(SMD_PT_ImportExport) - #### Import and Export button f - bpy.types.TOPBAR_MT_file_import.append(menu_func_import) - bpy.types.TOPBAR_MT_file_export.append(menu_func_export) - bpy.types.MESH_MT_shape_key_context_menu.append(menu_func_shapekeys) - bpy.types.TEXT_MT_edit.append(menu_func_textedit) - - try: bpy.ops.wm.addon_disable('EXEC_SCREEN',module="io_smd_tools") - except: pass - - def make_pointer(prop_type): - return PointerProperty(name=get_id("settings_prop"),type=prop_type) - - bpy.types.Scene.vs = make_pointer(ValveSource_SceneProps) - bpy.types.Object.vs = make_pointer(ValveSource_ObjectProps) - bpy.types.Armature.vs = make_pointer(ValveSource_ArmatureProps) - bpy.types.Collection.vs = make_pointer(ValveSource_CollectionProps) - bpy.types.Mesh.vs = make_pointer(ValveSource_MeshProps) - bpy.types.SurfaceCurve.vs = make_pointer(ValveSource_SurfaceProps) - bpy.types.Curve.vs = make_pointer(ValveSource_CurveProps) - bpy.types.Text.vs = make_pointer(ValveSource_TextProps) - - State.hook_events() + for cls in _classes: + bpy.utils.register_class(cls) + + from . 
import translations + bpy.app.translations.register(__name__, translations.translations) + ## Compiler + bpy.utils.register_class(OpenQCFileWithStudioModel) + ## Register Renamer Unic + bpy.utils.register_class(RenameCollectionBasedOnSMDFiles) + ## Register Save SMD mode + bpy.utils.register_class(Checkbox_Type) + ## Exported list + bpy.utils.register_class(Create_w_model_sub_QC) + ## QC Creating + bpy.utils.register_class(Create_w_model_QC) + ### Idle Sequence + bpy.utils.register_class(Create_w_model_Sequence_Idle_1) + ###### W_model + bpy.utils.register_class(Create_w_model_Bone_1) + ## New Custom Export Obj:: Start + bpy.utils.register_class(EXPORT_OT_ObjCustom) + #### Import and Export button :: Start + bpy.utils.register_class(Create_SMD_Utils_Panel) + #### Import and Export button f :: Finish + # + bpy.types.TOPBAR_MT_file_import.append(menu_func_import) + bpy.types.TOPBAR_MT_file_export.append(menu_func_export) + bpy.types.MESH_MT_shape_key_context_menu.append(menu_func_shapekeys) + bpy.types.TEXT_MT_edit.append(menu_func_textedit) + + try: bpy.ops.wm.addon_disable('EXEC_SCREEN',module="io_smd_tools") + except: pass + + def make_pointer(prop_type): + return PointerProperty(name=get_id("settings_prop"),type=prop_type) + + bpy.types.Scene.vs = make_pointer(ValveSource_SceneProps) + bpy.types.Object.vs = make_pointer(ValveSource_ObjectProps) + bpy.types.Armature.vs = make_pointer(ValveSource_ArmatureProps) + bpy.types.Collection.vs = make_pointer(ValveSource_CollectionProps) + bpy.types.Mesh.vs = make_pointer(ValveSource_MeshProps) + bpy.types.SurfaceCurve.vs = make_pointer(ValveSource_SurfaceProps) + bpy.types.Curve.vs = make_pointer(ValveSource_CurveProps) + bpy.types.Text.vs = make_pointer(ValveSource_TextProps) + + State.hook_events() def unregister(): - State.unhook_events() - - bpy.types.TOPBAR_MT_file_import.remove(menu_func_import) - bpy.types.TOPBAR_MT_file_export.remove(menu_func_export) - 
bpy.types.MESH_MT_shape_key_context_menu.remove(menu_func_shapekeys) - bpy.types.TEXT_MT_edit.remove(menu_func_textedit) - #### Import and Export button s - bpy.utils.unregister_class(SMD_PT_ImportExport) - #### Import and Export button f - bpy.app.translations.unregister(__name__) - - for cls in reversed(_classes): - bpy.utils.unregister_class(cls) - - del bpy.types.Scene.vs - del bpy.types.Object.vs - del bpy.types.Armature.vs - del bpy.types.Collection.vs - del bpy.types.Mesh.vs - del bpy.types.SurfaceCurve.vs - del bpy.types.Curve.vs - del bpy.types.Text.vs + State.unhook_events() + + bpy.types.TOPBAR_MT_file_import.remove(menu_func_import) + bpy.types.TOPBAR_MT_file_export.remove(menu_func_export) + bpy.types.MESH_MT_shape_key_context_menu.remove(menu_func_shapekeys) + bpy.types.TEXT_MT_edit.remove(menu_func_textedit) + ## Compier + ## Compiler + bpy.utils.unregister_class(OpenQCFileWithStudioModel) + ## Unregister Unic Name + bpy.utils.unregister_class(RenameCollectionBasedOnSMDFiles) + ## Register Save SMD mode + bpy.utils.unregister_class(Checkbox_Type) + ## Exported + bpy.utils.unregister_class(Create_w_model_sub_QC) + ### QC + bpy.utils.unregister_class(Create_w_model_QC) + ### Idle Sequence + bpy.utils.unregister_class(Create_w_model_Sequence_Idle_1) + ## W_model + bpy.utils.unregister_class(Create_w_model_Bone_1) + #### Import and :: Start + bpy.utils.unregister_class(Create_SMD_Utils_Panel) + #### Import and Export button :: Finish + ## OBJ ExpORT :: Start + bpy.utils.unregister_class(EXPORT_OT_ObjCustom) + ## Export :: Finish + bpy.app.translations.unregister(__name__) + + for cls in reversed(_classes): + bpy.utils.unregister_class(cls) + + del bpy.types.Scene.vs + del bpy.types.Object.vs + del bpy.types.Armature.vs + del bpy.types.Collection.vs + del bpy.types.Mesh.vs + del bpy.types.SurfaceCurve.vs + del bpy.types.Curve.vs + del bpy.types.Text.vs if __name__ == "__main__": - register() - \ No newline at end of file + register() diff --git 
a/sequence.smd b/sequence.smd new file mode 100644 index 0000000..6d094ba --- /dev/null +++ b/sequence.smd @@ -0,0 +1,8 @@ +version 1 +nodes +0 "blender_implicit" -1 +end +skeleton +time 0 +0 0.000000 0.000000 0.000000 0.000000 -0.000000 0.000000 +end