diff --git a/release/scripts/mgear/core/utils.py b/release/scripts/mgear/core/utils.py index f1f047d9..b2a7503d 100644 --- a/release/scripts/mgear/core/utils.py +++ b/release/scripts/mgear/core/utils.py @@ -384,5 +384,5 @@ def get_maya_path(): :rtype: str """ maya_path = os.environ['MAYA_LOCATION'] - maya_path = os.path.join(maya_path,"bin") + maya_path = os.path.normpath(os.path.join(maya_path,"bin")) return maya_path diff --git a/release/scripts/mgear/shifter/game_tools_fbx/fbx_batch.py b/release/scripts/mgear/shifter/game_tools_fbx/fbx_batch.py index d50a32a7..21d84d13 100644 --- a/release/scripts/mgear/shifter/game_tools_fbx/fbx_batch.py +++ b/release/scripts/mgear/shifter/game_tools_fbx/fbx_batch.py @@ -21,6 +21,9 @@ - Partition Skeleton + Geometry. - Exports each partition as an FBX. +Note +---- +- Print logs are being used by the partition subprocess thread to detect progress. """ import os import traceback @@ -28,55 +31,49 @@ import maya.cmds as cmds import maya.api.OpenMaya as om +import pymel.core as pm from mgear.core import pyFBX as pfbx - +import mgear.shifter.game_tools_disconnect as gtDisc def perform_fbx_condition( - remove_namespace, - scene_clean, - master_fbx_path, - root_joint, - root_geos, - skinning=True, - blendshapes=True, - partitions=True, - export_data=None, - cull_joints=False): + remove_namespace, + scene_clean, + master_ma_path, + root_joint, + root_geos, + skinning=True, + blendshapes=True, + partitions=True, + export_data=None): """ Performs the FBX file conditioning and partition exports. This is called by a MayaBatch process. - - [ ] Setup logging to a text file, so the stream can be monitored. 
- [ ] Update FBX export settings - """ print("--------------------------") print(" PERFORM FBX CONDITIONING") - print(f" remove namespace:{remove_namespace}") - print(f" clean scene:{scene_clean}") - print(f" fbx path : {master_fbx_path}") + print(" remove namespace:{}".format(remove_namespace)) + print(" clean scene:{}".format(scene_clean)) + print(" .ma path : {}".format(master_ma_path)) print("--------------------------") - log_file = "logs.txt" - - # Import fbx into scene - _import_fbx(master_fbx_path) + # Load the master .ma file and force the scene to load it + cmds.file(master_ma_path, open=True, force=True) # formats the output location from the master fbx path. - output_dir = os.path.dirname(master_fbx_path) - fbx_file = os.path.basename(master_fbx_path) - conditioned_file = fbx_file.split(".")[0] + "_conditioned.ma" - - print(f" Output location: {output_dir}") - print(f" FBX file: {fbx_file}") - print(f" Conditioned file: {conditioned_file}") + output_dir = os.path.dirname(master_ma_path) + fbx_file = export_data.get("file_name", "") + if not fbx_file.endswith(".fbx"): + fbx_file = "{}.fbx".format(fbx_file) + + print(" Output location: {}".format(output_dir)) + print(" FBX file: {}".format(fbx_file)) # Removes all namespaces from any DG or DAG object. if remove_namespace: print("Removing Namespace..") - _clean_namespaces() + export_data = _clean_namespaces(export_data) # updates root joint name if namespace is found root_joint = root_joint.split(":")[-1] @@ -85,14 +82,44 @@ def perform_fbx_condition( if scene_clean: print("Cleaning Scene..") - # Move the root_joint and root_geos to the scene root - _parent_to_root(root_joint) - for r_geo in root_geos: - _parent_to_root(r_geo) - # Remove all redundant DAG Nodes. 
- _cleanup_stale_dag_hierarchies([root_joint] + root_geos) - + _partitions = export_data.get("partitions", dict()) + + # Performs the same code that "Delete Rig + Keep Joints" does + gtDisc.disconnect_joints() + for rig_root in gtDisc.get_rig_root_from_set(): + rig_name = rig_root.name() + jnt_org = rig_root.jnt_vis.listConnections(type="transform")[0] + joints = jnt_org.getChildren() + if joints: + pm.parent(joints, world=True) + + # Updates all the geometry root paths, as they may have changed when geo + # root was moved to world, depending on the structure of the rig. + for geo_index in reversed(range(len(root_geos))): + geo_root = root_geos[geo_index] + geo_long_names = cmds.ls(geo_root, long=True) + if len(geo_long_names) is not 1: + print("Too many {} found".format(geo_root)) + return False + geo_long_name = geo_long_names[0] + output = pm.parent(geo_root, world=True) + root_geos[geo_index] = output[0].name() + + # The geo roots are moved to be under the 'World', in doing so we need to + # update each geometry object stored in a partition. + for partition_name, data in _partitions.items(): + geo_list = data.get("skeletal_meshes", None) + filtered_array = [entry.replace(geo_long_name, "|"+geo_root) for entry in geo_list] + data["skeletal_meshes"] = filtered_array + _partitions[partition_name] = data + + export_data["partitions"] = _partitions + + pm.delete(rig_root.rigGroups.listConnections(type="objectSet")) + pm.delete(pm.ls(type="mgear_matrixConstraint")) + pm.delete(rig_root) + if not skinning: print("Removing Skinning..") # Remove skinning from geometry @@ -103,35 +130,44 @@ def perform_fbx_condition( print("Removing Blendshapes..") _delete_blendshapes() - # Exports the conditioned FBX, over the existing master fbx. - # The master FBX is now in the correct data state. 
- print("Exporting FBX...") - print(" Path: {}".format(master_fbx_path)) - cmds.select( clear=True ) - cmds.select([root_joint] + root_geos) - pfbx.FBXExport(f=master_fbx_path, s=True) + # Save out conditioned file, as this will be used by other partition processes + # Conditioned file, is the file that stores the rig which has already had data + # update for the export process. + print("Save Conditioned Scene...") + print(" Path: {}".format(master_ma_path)) + cmds.file(save=True, force=True, type="mayaAscii") + + status = False + + if not partitions: + # Exports the conditioned FBX + master_fbx_path = os.path.join(output_dir, fbx_file) + print("Exporting FBX...") + print(" Path: {}".format(master_fbx_path)) + cmds.select(clear=True) + cmds.select([root_joint] + root_geos) + pfbx.FBXExport(f=master_fbx_path, s=True) + status = True if partitions and export_data is not None: print("[Partitions]") print(" Preparing scene for Partition creation..") - # Save out conditioned file, as this will be used by other partition processes - # Conditioned file, is the file that stores the rig which has already had data - # update for the export process. 
- cmds.file(rename=conditioned_file) - cmds.file(save=True, force=True, type="mayaAscii") + status = _export_skeletal_mesh_partitions([root_joint], export_data, master_ma_path) - _export_skeletal_mesh_partitions([root_joint], export_data, conditioned_file, cull_joints) + # Delete temporary conditioned .ma file + print("[Clean up]") + cmds.file(new=True, force=True) + if os.path.exists(master_ma_path): + print(" [Removing File] {}".format(master_ma_path)) + os.remove(master_ma_path) + else: + print(" Cleaned up conditioned file...") + print(" Deleted - {}".format(master_ma_path)) - # Delete temporary conditioned .ma file - cmds.file( new=True, force=True) - if os.path.exists(conditioned_file): - os.remove(conditioned_file) - else: - print(" Cleaned up conditioned file...") - print(" Deleted - {}".format(conditioned_file)) + return status -def _export_skeletal_mesh_partitions(jnt_roots, export_data, scene_path, cull_joints): +def _export_skeletal_mesh_partitions(jnt_roots, export_data, scene_path): """ Exports the individual partition hierarchies that have been specified. @@ -139,7 +175,6 @@ def _export_skeletal_mesh_partitions(jnt_roots, export_data, scene_path, cull_jo alterations performed to it. """ - print(" Correlating Mesh to joints...") file_path = export_data.get("file_path", "") @@ -150,6 +185,8 @@ def _export_skeletal_mesh_partitions(jnt_roots, export_data, scene_path, cull_jo cmds.warning(" Partitions not defined!") return False + cull_joints = export_data.get("cull_joints", False) + # Collects all partition data, so it can be more easily accessed in the next stage # where mesh and skeleton data is deleted and exported. 
@@ -203,9 +240,11 @@ def _export_skeletal_mesh_partitions(jnt_roots, export_data, scene_path, cull_jo if root_jnt not in short_hierarchy: parent_hierarchy = _get_joint_list(root_jnt, short_hierarchy[0]) short_hierarchy = parent_hierarchy + short_hierarchy + partitions_data[partition_name]["hierarchy"] = short_hierarchy print(" Modifying Hierarchy...") + # - Loop over each Partition # - Load the master .ma file # - Perform removal of geometry, that is not relevent to the partition @@ -216,13 +255,12 @@ def _export_skeletal_mesh_partitions(jnt_roots, export_data, scene_path, cull_jo print(" Partition {} contains no data.".format(partition_name)) continue - print(" {}".format(partition_name)) - print(" {}".format(partition_data)) - partition_meshes = partitions.get(partition_name).get("skeletal_meshes") partition_joints = partition_data.get("hierarchy", []) + + print("Open Conditioned Scene: {}".format(scene_path)) # Loads the conditioned scene file, to perform partition actions on. - cmds.file( scene_path, open=True, force=True, save=False) + cmds.file(scene_path, open=True, force=True, save=False) # Deletes meshes that are not included in the partition. 
all_meshes = _get_all_mesh_dag_objects() @@ -232,6 +270,7 @@ def _export_skeletal_mesh_partitions(jnt_roots, export_data, scene_path, cull_jo # Delete joints that are not included in the partition if cull_joints: + print(" Culling Joints...") all_joints = _get_all_joint_dag_objects() for jnt in reversed(all_joints): if not jnt in partition_joints: @@ -241,7 +280,7 @@ def _export_skeletal_mesh_partitions(jnt_roots, export_data, scene_path, cull_jo partition_file_name = file_name + "_" + partition_name + ".fbx" export_path = os.path.join(file_path, partition_file_name) - print(export_path) + print("Exporting FBX: {}".format(export_path)) try: preset_path = export_data.get("preset_path", None) up_axis = export_data.get("up_axis", None) @@ -255,16 +294,16 @@ def _export_skeletal_mesh_partitions(jnt_roots, export_data, scene_path, cull_jo pfbx.FBXLoadExportPresetFile(f=preset_path) fbx_version_str = None if up_axis is not None: - pfbx.FBXExportUpAxis(up_axis) + pfbx.FBXExportUpAxis(up_axis.lower()) if fbx_version is not None: fbx_version_str = "{}00".format( fbx_version.split("/")[0].replace(" ", "") - ) + ) pfbx.FBXExportFileVersion(v=fbx_version_str) if file_type == "ascii": pfbx.FBXExportInAscii(v=True) - cmds.select( clear=True ) + cmds.select(clear=True) cmds.select(partition_joints + partition_meshes) pfbx.FBXExport(f=export_path, s=True) except Exception: @@ -274,6 +313,7 @@ def _export_skeletal_mesh_partitions(jnt_roots, export_data, scene_path, cull_jo traceback.format_exc() ) ) + return False return True @@ -282,7 +322,7 @@ def _delete_blendshapes(): Deletes all blendshape objects in the scene. 
""" blendshape_mobjs = _find_dg_nodes_by_type(om.MFn.kBlendShape) - + dg_mod = om.MDGModifier() for mobj in blendshape_mobjs: print(" - {}".format(om.MFnDependencyNode(mobj).name())) @@ -312,7 +352,8 @@ def _find_geometry_dag_objects(parent_object_name): child_dag_path = child_dag_node.getPath() # Check if the child is a geometry node - if (child_dag_path.hasFn(om.MFn.kMesh) or child_dag_path.hasFn(om.MFn.kNurbsSurface)) and child_dag_path.hasFn(om.MFn.kTransform): + if (child_dag_path.hasFn(om.MFn.kMesh) or child_dag_path.hasFn( + om.MFn.kNurbsSurface)) and child_dag_path.hasFn(om.MFn.kTransform): geometry_objects.append(child_dag_path.fullPathName()) # Recursive call to find geometry objects under the child @@ -321,7 +362,7 @@ def _find_geometry_dag_objects(parent_object_name): return geometry_objects except Exception as e: - print(f"Error: {e}") + print("Error: {}".format(e)) return [] @@ -360,18 +401,19 @@ def _find_dg_nodes_by_type(node_type): return dagpose_nodes + def _cleanup_stale_dag_hierarchies(ignore_objects): """ Deletes any dag objects that are not geo or skeleton roots, under the scene root. 
""" IGNORED_OBJECTS = ['|persp', '|top', '|front', '|side'] obj_names = _get_dag_objects_under_scene_root() - + for i_o in IGNORED_OBJECTS: obj_names.remove(i_o) - + for i_o in ignore_objects: - pipped_io = "|"+i_o + pipped_io = "|" + i_o try: obj_names.remove(pipped_io) except: @@ -385,7 +427,7 @@ def _cleanup_stale_dag_hierarchies(ignore_objects): temp_sel.add(name) if temp_sel.length() != 1: - continue + continue dag_path = temp_sel.getDagPath(0) dag_node = om.MFnDagNode(dag_path) @@ -403,7 +445,7 @@ def _parent_to_root(name): temp_sel.add(name) if temp_sel.length() != 1: - return + return dag_path = temp_sel.getDagPath(0) dag_node = om.MFnDagNode(dag_path) @@ -413,7 +455,7 @@ def _parent_to_root(name): if parent_name == "world": return - cmds.parent( name, world=True ) + cmds.parent(name, world=True) temp_sel.clear() print(" Moved {} to scene root.".format(name)) @@ -441,13 +483,17 @@ def _get_dag_objects_under_scene_root(): return dag_objects -def _clean_namespaces(): +def _clean_namespaces(export_data): """ Gets all available namespaces in scene. Checks each for objects that have it assigned. Removes the namespace from the object. """ namespaces = _get_scene_namespaces() + + # Sort namespaces by longest nested first + namespaces = sorted(namespaces, key=_count_namespaces, reverse=True) + for namespace in namespaces: print(" - {}".format(namespace)) child_namespaces = om.MNamespace.getNamespaces(namespace, True) @@ -461,6 +507,42 @@ def _clean_namespaces(): for m_obj in m_objs: _remove_namespace(m_obj) + filtered_export_data = _clean_export_namespaces(export_data) + return filtered_export_data + +def _clean_export_namespaces(export_data): + """ + Looks at all the joints and mesh data in the export data and removes + any namespaces that exists. 
+ """ + + for key in export_data.keys(): + + # ignore filepath, as it contains ':', which will break the path + if key == "file_path" or key == "color": + continue + + value = export_data[key] + if isinstance(value, list): + for i in range(len(value)): + value[i] = _trim_namespace_from_name(value[i]) + elif isinstance(value, dict): + value = _clean_export_namespaces(value) + elif isinstance(value, str): + value = _trim_namespace_from_name(value) + + export_data[key] = value + + return export_data + +def _count_namespaces(name): + # Custom function to count the number of ":" in a name + return name.count(':') + +def _trim_namespace_from_name(name): + if name.find(":") >= 0: + return name.split(":")[-1] + return name def _remove_namespace(mobj): """ @@ -476,17 +558,19 @@ def _get_scene_namespaces(): Gets all namespaces in the scene. """ IGNORED_NAMESPACES = [":UI", ":shared", ":root"] - spaces = om.MNamespace.getNamespaces() + spaces = om.MNamespace.getNamespaces(recurse=True) for ignored in IGNORED_NAMESPACES: if ignored in spaces: spaces.remove(ignored) - return spaces + + return spaces def _import_fbx(file_path): try: # Import FBX file - name = cmds.file(file_path, i=True, type="FBX", ignoreVersion=True, ra=True, mergeNamespacesOnClash=False, namespace=":") + name = cmds.file(file_path, i=True, type="FBX", ignoreVersion=True, ra=True, mergeNamespacesOnClash=False, + namespace=":") print("FBX file imported successfully.") return name @@ -622,4 +706,4 @@ def _get_all_joint_dag_objects(): dag_iter.next() - return mesh_objects \ No newline at end of file + return mesh_objects diff --git a/release/scripts/mgear/shifter/game_tools_fbx/fbx_export_node.py b/release/scripts/mgear/shifter/game_tools_fbx/fbx_export_node.py index 7271d5df..441f954a 100644 --- a/release/scripts/mgear/shifter/game_tools_fbx/fbx_export_node.py +++ b/release/scripts/mgear/shifter/game_tools_fbx/fbx_export_node.py @@ -32,6 +32,7 @@ class FbxExportNode(object): "ue_enabled": False, "ue_file_path": 
"", "ue_active_skeleton":"", + "cull_joints":False } ANIM_CLIP_DATA = { "title": "Untitled", diff --git a/release/scripts/mgear/shifter/game_tools_fbx/fbx_exporter.py b/release/scripts/mgear/shifter/game_tools_fbx/fbx_exporter.py index 7eb3644d..9436bdc9 100644 --- a/release/scripts/mgear/shifter/game_tools_fbx/fbx_exporter.py +++ b/release/scripts/mgear/shifter/game_tools_fbx/fbx_exporter.py @@ -16,11 +16,13 @@ string, widgets, ) +import mgear.shifter.game_tools_disconnect as gtDisc from mgear.shifter.game_tools_fbx import ( anim_clip_widgets, fbx_export_node, partitions_outliner, utils, + partition_thread ) from mgear.uegear import commands as uegear @@ -82,6 +84,7 @@ def create_layout(self): "ue_enabled": self.ue_import_cbx, "ue_file_path": self.ue_file_path_lineedit, "ue_active_skeleton": self.ue_skeleton_listwgt, + "cull_joints": self.culljoints_checkbox, } def create_menu_bar(self): @@ -343,6 +346,12 @@ def create_skeletal_mesh_tab(self): self.export_tab.addTab(skeletal_mesh_tab, "Skeletal Mesh") skeletal_mesh_layout = QtWidgets.QVBoxLayout(skeletal_mesh_tab) + # progress bar + self.progress_bar = QtWidgets.QProgressBar(self) + self.progress_bar.setAlignment(QtCore.Qt.AlignCenter) + self.default_progress_bar() + self.progress_bar.setHidden(True) + # deformers options deformers_label = QtWidgets.QLabel("Deformers") skeletal_mesh_layout.addWidget(deformers_label) @@ -356,9 +365,12 @@ def create_skeletal_mesh_tab(self): self.blendshapes_checkbox.setChecked(True) self.partitions_checkbox = QtWidgets.QCheckBox("Partitions") self.partitions_checkbox.setChecked(True) + self.culljoints_checkbox = QtWidgets.QCheckBox("Cull Joints") + self.culljoints_checkbox.setChecked(False) deformers_layout.addWidget(self.skinning_checkbox) deformers_layout.addWidget(self.blendshapes_checkbox) deformers_layout.addWidget(self.partitions_checkbox) + deformers_layout.addWidget(self.culljoints_checkbox) # partitions layout self.partitions_label = QtWidgets.QLabel("Partitions") @@ -392,6 
+404,10 @@ def create_skeletal_mesh_tab(self): "QPushButton {background:rgb(70, 100, 150);}" ) skeletal_mesh_layout.addWidget(self.skmesh_export_btn) + skeletal_mesh_layout.addWidget(self.progress_bar) + + def update_progress_bar(self, value): + self.progress_bar.setValue(int(value)) def create_animation_tab(self): # export animation @@ -414,7 +430,6 @@ def create_connections(self): # menu connections self.file_export_preset_action.triggered.connect(self.export_fbx_presets) self.file_import_preset_action.triggered.connect(self.import_fbx_presets) -# self.set_fbx_sdk_path_action.triggered.connect(self.set_fbx_sdk_path) # source element connections self.geo_set_btn.clicked.connect( @@ -513,6 +528,7 @@ def create_connections(self): self.blendshapes_checkbox.toggled.connect( self.partition_blendshape_toggled ) + self.culljoints_checkbox.toggled.connect(self.cull_joints_toggled) def get_root_joint(self): root_joint = self.joint_root_lineedit.text().split(",") @@ -620,6 +636,11 @@ def set_use_partitions(self, flag): self.skmesh_add_btn.setEnabled(flag) self.skmesh_rem_btn.setEnabled(flag) + def cull_joints_toggled(self, flag): + cull_joint_active = self.culljoints_checkbox.isChecked() + export_node = self._get_or_create_export_node() + export_node.save_root_data("cull_joints", cull_joint_active) + def partition_skinning_toggled(self): """Updates the Maya FBX Node, when checkbox it changed""" skinning_active = self.skinning_checkbox.isChecked() @@ -720,38 +741,85 @@ def export_skeletal_mesh(self): use_partitions = self.partitions_checkbox.isChecked() if use_partitions: - # Master partition data is retrieved from UI - # TODO: Should we store master data within FbxExporterNode too? - partitions = {} - master_partition = self.partitions_outliner.get_master_partition() - partitions.update(master_partition) - partitions.update(export_node.get_partitions()) - print("\t>>> Partitions:") - - # Loops over partitions, and removes any disabled partitions, from being exported. 
- keys = list(partitions.keys()) - for partition_name in reversed(keys): - partition_data = partitions[partition_name] - enabled = partition_data.get("enabled", True) - skeletal_meshes = partition_data.get("skeletal_meshes", []) - - if not (enabled and skeletal_meshes): - partitions.pop(partition_name) - print("\t\t[!Partition Disabled!] - {}: {}".format(partition_name, skeletal_meshes)) - continue - - print("\t\t{}: {}".format(partition_name, skeletal_meshes)) - export_config["partitions"] = partitions + export_config["partitions"] = self._prefilter_partitions() preset_file_path = self._get_preset_file_path() print("\t>>> Preset File Path: {}".format(preset_file_path)) - result = utils.export_skeletal_mesh(export_config) - if not result: - cmds.warning( - "Something went wrong while exporting Skeletal Mesh/es" - ) - return False + self.default_progress_bar() + self.progress_bar.setHidden(False) + + # Creates a Thread to perform the maya batch in. + self.partition_thread = partition_thread.PartitionThread(export_config) + # Settup Thread Signals + self.partition_thread.completed.connect(self._import_into_unreal) + self.partition_thread.progress_signal.connect(self.update_progress_bar) + # Show the process is starting + self.update_progress_bar(5) + self.partition_thread.init_data() + self.partition_thread.start() + + return True + + def update_progress_bar(self, value: int): + """ + Updates the progress bar in the GUI. + """ + value = int(value) + self.progress_bar.setValue(value) + + # if value == 100: + # self.progress_bar.setHidden(True) + + def error_progress_bar(self): + """ + Sets the progress bar to be red, errored + """ + self.progress_bar.setStyleSheet(""" + QProgressBar { + text-align: center; + } + QProgressBar::chunk { + background-color: #9c1e1e; + } + """) + self.progress_bar.update() + + def default_progress_bar(self): + """ + Sets the progress bar to its default green colour. 
+ """ + self.progress_bar.setStyleSheet(""" + QProgressBar { + text-align: center; + } + QProgressBar::chunk { + background-color: #4CAF50; + } + """) + self.progress_bar.update() + + + def _import_into_unreal(self, export_config, success): + """ + Event triggered when the Thread has completed successfully. + + Impports the asset into unreal, and checks if it should import using + an existing skeleton. + + Recieves the export configuration from the Thread that was completed + """ + if not success: + print("ERROR: Export Failed") + self.error_progress_bar() + return + + use_partitions = export_config.get("use_partitions", True) + partitions = export_config.get("partitions", dict()) + file_name = export_config.get("file_name", "") + file_path = export_config.get("file_path", "") + + master_fbx_path = os.path.join(file_path, file_name) + ".fbx" # Unreal Import, if enabled. if self.ue_import_cbx.isChecked(): @@ -759,14 +827,12 @@ def export_skeletal_mesh(self): # share the base skeleton in Unreal. skeleton_path = None if len(self.ue_skeleton_listwgt.selectedItems()) > 0: - skeleton_path = self.ue_skeleton_listwgt.selectedItems()[ - 0 - ].text() + skeleton_path = self.ue_skeleton_listwgt.selectedItems()[0].text() unreal_folder = self.ue_file_path_lineedit.text() if use_partitions: for p in partitions.keys(): - result_partition = result.replace( + result_partition = master_fbx_path.replace( ".fbx", "_{}.fbx".format(p) ) partition_file_name = file_name + "_{}".format(p) @@ -778,13 +844,52 @@ def export_skeletal_mesh(self): ) else: uegear.export_skeletal_mesh_to_unreal( - fbx_path=result, + fbx_path=master_fbx_path, unreal_package_path=unreal_folder, name=file_name, skeleton_path=skeleton_path, - ) + ) - return True + # Complete + self.update_progress_bar(100) + + def _prefilter_partitions(self): + """ + Helper function that retrieves all partitions, and filters out the ones + that are deactivated. 
+ """ + export_node = self._get_or_create_export_node() + # Master partition data is retrieved from UI + partitions = {} + master_partition = self.partitions_outliner.get_master_partition() + partitions.update(master_partition) + partitions.update(export_node.get_partitions()) + print("\t>>> Partitions:") + + # Loops over partitions, and removes any disabled partitions, from being exported. + keys = list(partitions.keys()) + for partition_name in reversed(keys): + partition_data = partitions[partition_name] + enabled = partition_data.get("enabled", True) + skeletal_meshes = partition_data.get("skeletal_meshes", []) + + if not (enabled and skeletal_meshes): + partitions.pop(partition_name) + print("\t\t[!Partition Disabled!] - {}: {}".format(partition_name, skeletal_meshes)) + continue + + print("\t\t{}: {}".format(partition_name, skeletal_meshes)) + return partitions + + def set_fbx_directory(self): + path = self.file_path_lineedit.text() + export_node = self._get_or_create_export_node() + export_node.save_root_data("file_path", path) + + def set_fbx_file(self): + path = self.file_name_lineedit.text() + export_node = self._get_or_create_export_node() + export_node.save_root_data("file_name", path) def set_fbx_directory(self): path = self.file_path_lineedit.text() @@ -844,14 +949,53 @@ def export_animation_clips(self): # exporting. 
original_selection = cmds.ls(selection=True) + # Save a temporary scene, perform all formatting to that scene, + # then load the original scene after exporting fbx's + file_path = export_config.get("file_path", "") + if file_path == "": + print("Error no file path specified") + return False + os.makedirs(file_path, exist_ok=True) + + # Stash temporary file path + # Save new temporary scene + # Alter data in temporary scene + _scene_path = utils.get_scene_path() + _temporary_ma_path = os.path.join(file_path, "temporary_anim_export.ma") + cmds.file(rename=_temporary_ma_path) + cmds.file(save=True, type="mayaAscii", force=True) + + if self.get_remove_namespace(): + print("Removing all namespaces..") + utils.clean_namespaces(export_config) + print(" [Namespace] Complete") + + # if self.get_scene_clean(): + # print("Cleaning Scene..") + # # Performs the same code that "Delete Rig + Keep Joints" does + # gtDisc.disconnect_joints() + # for rig_root in gtDisc.get_rig_root_from_set(): + # jnt_org = rig_root.jnt_vis.listConnections(type="transform")[0] + # joints = jnt_org.getChildren() + # if joints: + # pm.parent(joints, world=True) + # pm.delete(rig_root.rigGroups.listConnections(type="objectSet")) + # pm.delete(pm.ls(type="mgear_matrixConstraint")) + # pm.delete(rig_root) + # print(" [Clean Scene] Complete") + + # Parent skeleton root directly to world + root_joint = export_config.get("joint_root", "") + cmds.parent(root_joint, world=True) + # Store the fbx locations that were successfully exported. export_fbx_paths = [] # Exports each clip for clip_data in anim_clip_data: + # skip disabled clips. if not clip_data["enabled"]: - # skip disabled clips. 
continue result = utils.export_animation_clip(export_config, clip_data) @@ -864,6 +1008,14 @@ def export_animation_clips(self): else: export_fbx_paths.append(result) + # Load temporary scene after all exportation + # Set temporary scene file path to stashed scene file path + # Clean up temporary .ma file + cmds.file(_temporary_ma_path, open=True, force=True, save=False) + cmds.file(rename=_scene_path) + cmds.file(modified=False) + os.remove(_temporary_ma_path) + if original_selection: pm.select(original_selection) @@ -933,6 +1085,7 @@ def _get_current_tool_data(self): "ue_enabled": self.ue_import_cbx.isChecked(), "ue_file_path": self.ue_file_path_lineedit.text(), "ue_active_skeleton": "", + "cull_joints": self.culljoints_checkbox.isChecked(), } # converting qt list widget data to text diff --git a/release/scripts/mgear/shifter/game_tools_fbx/partition_thread.py b/release/scripts/mgear/shifter/game_tools_fbx/partition_thread.py new file mode 100644 index 00000000..1934f609 --- /dev/null +++ b/release/scripts/mgear/shifter/game_tools_fbx/partition_thread.py @@ -0,0 +1,250 @@ +import os +import subprocess +from typing import Callable +import tempfile +import shlex +import datetime + +from mgear.vendor.Qt.QtCore import QThread, Signal +from mgear.core import ( + pyFBX as pfbx, + string, + utils as coreUtils, +) + +import maya.cmds as cmds + + +class PartitionThread(QThread): + """ Thread that handles the creation of fbx partitions""" + + completed = Signal(object, bool) + progress_signal = Signal(float) + + def __init__(self, export_config): + """ + Initializes the thread. + """ + super().__init__() + + # self.log_message = log_message_function + + self.export_config = export_config + + # Makes sure the Thread removes itself + self.finished.connect(self.deleteLater) + + def run(self): + """ + Main function that gets called when the thread starts. 
+ """ + # Show the Thread is starting + self.progress_signal.emit(20) + + success = self.export_skeletal_mesh() + self.onComplete(success) + + def onComplete(self, success): + """ + Cleans up the thread when the thread is finished. + """ + self.completed.emit(self.export_config, success) + + def export_skeletal_mesh(self): + """ + Triggers the batch process + """ + export_data = self.export_config + + geo_roots = export_data.get("geo_roots", "") + joint_roots = [export_data.get("joint_root", "")] + file_path = export_data.get("file_path", "") + file_name = export_data.get("file_name", "") + remove_namespaces = export_data.get("remove_namespace", True) + scene_clean = export_data.get("scene_clean", True) + skinning = export_data.get("skinning", True) + blendshapes = export_data.get("blendshapes", True) + use_partitions = export_data.get("use_partitions", True) + + if not file_name.endswith(".fbx"): + file_name = "{}.ma".format(file_name) + else: + file_name = "{}.ma".format(os.path.splitext(file_name)[0]) + + export_path = string.normalize_path(os.path.join(file_path, file_name)) + + log_path = os.path.normpath(os.path.join(file_path, "logs_{}.txt".format(datetime.datetime.now().strftime("%m%d%Y%H%M")))) + print("\t>>> Export Path: {}".format(export_path)) + + path_is_valid = os.path.exists(export_path) + + # "master" .ma file does not exist, exit early. + if not path_is_valid: + return False + + # Creates a MEL temporary job file.. 
+ script_content = """ +python "from mgear.shifter.game_tools_fbx import fbx_batch"; +python "master_path='{master_path}'"; +python "root_joint='{joint_root}'"; +python "root_geos={geo_roots}"; +python "export_data={e_data}"; +python "fbx_batch.perform_fbx_condition({ns}, {sc}, master_path, root_joint, root_geos, {sk}, {bs}, {ps}, export_data)"; +""".format( + ns=remove_namespaces, + sc=scene_clean, + master_path=export_path, + geo_roots=geo_roots, + joint_root= joint_roots[0], + sk=skinning, + bs=blendshapes, + ps=use_partitions, + e_data=export_data) + + script_file = tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.mel') + script_file.write(script_content) + script_file_path = script_file.name + script_file.close() + + mayabatch_dir = coreUtils.get_maya_path() + mayabatch_path = None + mayabatch_args = None + mayabatch_shell = False + + # Depending on the os we would need to change from maya, to maya batch + # windows uses mayabatch + if str(coreUtils.get_os()) == "win64" or str(coreUtils.get_os()) == "nt": + option = "mayabatch" + else: + option = "maya" + + if option == "maya": + mayabatch_command = 'maya' + mayabatch_path = os.path.join(mayabatch_dir, mayabatch_command) + mayabatch_args = [shlex.quote(mayabatch_path)] + mayabatch_args.append("-batch") + mayabatch_shell = False + mayabatch_args.append("-script") + mayabatch_args.append(shlex.quote(script_file_path)) + mayabatch_args.append("-log") + mayabatch_args.append(shlex.quote(log_path)) + + print("-------------------------------------------") + print("[Launching] MayaBatch") + print(" {}".format(mayabatch_args)) + print(" {}".format(" ".join(mayabatch_args))) + print("-------------------------------------------") + + else: + mayabatch_command = "maya" + mayabatch_path = os.path.join(mayabatch_dir, mayabatch_command) + mayabatch_args = ['"'+mayabatch_path+'"'] + mayabatch_args.append("-batch") + mayabatch_shell = True + mayabatch_args.append("-script") + 
mayabatch_args.append('"'+script_file_path+'"') + mayabatch_args.append("-log") + mayabatch_args.append('"'+log_path+'"') + + mayabatch_args = "{}".format(" ".join(mayabatch_args)) + + print("-------------------------------------------") + print("[Launching] MayaBatch") + print(" {}".format(mayabatch_args)) + print("-------------------------------------------") + + self.progress_signal.emit(50) + + try: + with subprocess.Popen(mayabatch_args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + shell=mayabatch_shell, + universal_newlines=True, + bufsize=1 + ) as process: + + # Process each line (sentence) from the subprocess output + # Looks for specific sentences in the logs and uses those as progress milestones. + for line in process.stdout: + line = line.strip() + print(line) + if line.find("Conditioned file:") >= 0: + self.progress_signal.emit(60) + if line.find("Removing Namespace..") >= 0: + self.progress_signal.emit(65) + if line.find("Cleaning Scene..") >= 0: + self.progress_signal.emit(75) + if line.find("[Partitions]") >= 0: + self.progress_signal.emit(80) + + # Capture the output and errors + stdout, stderr = process.communicate() + + # Check the return code + #returncode = process.returncode + returncode = process.wait() + print("Return Code: {}".format(returncode)) + + # Check the result + if returncode == 0: + print("Mayabatch process completed successfully.") + print("-------------------------------------------") + # print("Output:", stdout) + #print("-------------------------------------------") + else: + print("Mayabatch process failed.") + print("Error:", stderr) + print("-------------------------------------------") + return False + except FileNotFoundError as error: + print("Error:", error) + return False + + # Clean up Mel batch file + if os.path.exists(script_file_path): + print("[Removing File] {}".format(script_file_path)) + os.remove(script_file_path) + + # If all goes well return the export path location, else None + return 
True
+
+    def init_data(self):
+        """
+        Initialises the Master .ma files that will be needed by the Thread and Maya batcher.
+
+        This process exports the geometry roots and skeleton as an .ma, which will
+        then be passed to the Thread for Maya batching.
+
+        Note: This process cannot be run in the thread as Maya commands are not thread safe;
+        this causes Maya to become partially unresponsive.
+        """
+        # Save the initial Master scene (.ma) from the current Maya scene
+        file_path = self.export_config.get("file_path", "")
+        file_name = self.export_config.get("file_name", "")
+
+        if not file_name.endswith(".fbx"):
+            file_name = "{}.ma".format(file_name)
+        else:
+            file_name = "{}.ma".format(os.path.splitext(file_name)[0])
+
+        # The location where the temporary maya file will be saved to
+        master_path = string.normalize_path(os.path.join(file_path, file_name))
+
+        # Get the current scene path
+        current_scene_path = cmds.file(query=True, sceneName=True)
+
+        # Save the current scene to the new location
+        cmds.file(rename=master_path)
+        cmds.file(save=True, type="mayaAscii")
+
+        # Revert the scene name to the original path
+        cmds.file(rename=current_scene_path)
+
+        # Set progress to 15% - at this point the master .ma has been exported
+        self.progress_signal.emit(15)
+
+        print("Temporary Master file: {}".format(master_path))
+
+        return
diff --git a/release/scripts/mgear/shifter/game_tools_fbx/sdk_utils.py b/release/scripts/mgear/shifter/game_tools_fbx/sdk_utils.py
index dea47a84..66c796d1 100644
--- a/release/scripts/mgear/shifter/game_tools_fbx/sdk_utils.py
+++ b/release/scripts/mgear/shifter/game_tools_fbx/sdk_utils.py
@@ -188,7 +188,7 @@ def get_meshes(self):
 def get_joints(self):
     return self.get_type_nodes("LimbNode")
 
-    def cast_property(fbx_property):
+    def _cast_property(fbx_property):
 
     if not pfbx.FBX_SDK:
         return None
 
@@ -348,11 +348,11 @@ def export_skeletal_mesh(
     blendshapes=None,
 ):
 
-    if not pfbx.FBX_SDK:
-        cmds.warning(
-            "Export Skeletal Mesh functionality is only available if 
Python FBX SDK is available!" - ) - return None + # if not pfbx.FBX_SDK: + # cmds.warning( + # "Export Skeletal Mesh functionality is only available if Python FBX SDK is available!" + # ) + # return None # TODO: Check how we can retrieve the long name using FBX SDK short_mesh_names = [ diff --git a/release/scripts/mgear/shifter/game_tools_fbx/utils.py b/release/scripts/mgear/shifter/game_tools_fbx/utils.py index 5ed1503e..0f84e996 100644 --- a/release/scripts/mgear/shifter/game_tools_fbx/utils.py +++ b/release/scripts/mgear/shifter/game_tools_fbx/utils.py @@ -20,7 +20,7 @@ utils as coreUtils, animLayers, ) -from mgear.shifter.game_tools_fbx import sdk_utils +# from mgear.shifter.game_tools_fbx import sdk_utils NO_EXPORT_TAG = "no_export" WORLD_CONTROL_NAME = "world_ctl" @@ -93,136 +93,6 @@ def get_item(self, item): self.item = item.text() -def export_skeletal_mesh(export_data): - geo_roots = export_data.get("geo_roots", "") - joint_roots = [export_data.get("joint_root", "")] - file_path = export_data.get("file_path", "") - file_name = export_data.get("file_name", "") - preset_path = export_data.get("preset_path", None) - up_axis = export_data.get("up_axis", None) - file_type = export_data.get("file_type", "binary").lower() - fbx_version = export_data.get("fbx_version", None) - remove_namespaces = export_data.get("remove_namespace", True) - scene_clean = export_data.get("scene_clean", True) - deformations = export_data.get("deformations", True) - skinning = export_data.get("skinning", True) - blendshapes = export_data.get("blendshapes", True) - use_partitions = export_data.get("use_partitions", True) - - if not file_name.endswith(".fbx"): - file_name = "{}.fbx".format(file_name) - export_path = string.normalize_path(os.path.join(file_path, file_name)) - print("\t>>> Export Path: {}".format(export_path)) - - # export settings config - pfbx.FBXResetExport() - - # set configuration - if preset_path is not None: - # load FBX export preset file - 
pfbx.FBXLoadExportPresetFile(f=preset_path) - pfbx.FBXExportSkins(v=skinning) - pfbx.FBXExportShapes(v=blendshapes) - fbx_version_str = None - if up_axis is not None: - pfbx.FBXExportUpAxis(up_axis) - if fbx_version is not None: - fbx_version_str = "{}00".format( - fbx_version.split("/")[0].replace(" ", "") - ) - pfbx.FBXExportFileVersion(v=fbx_version_str) - if file_type == "ascii": - pfbx.FBXExportInAscii(v=True) - - # select elements and export all the data - pm.select(geo_roots + joint_roots) - - # Exports the data from the scene that has teh tool open into a "master_fbx" file. - pfbx.FBXExport(f=export_path, s=True) - - # Instead of altering the Maya scene file, we will alter the "master" fbx data. - # The master fbx file is the file that has just been exported. - - path_is_valid = os.path.exists(export_path) - - if not path_is_valid: - # "master" fbx file does not exist, exit early. - return False - - # Create a temporary job python file - script_content = """ -python "from mgear.shifter.game_tools_fbx import fbx_batch"; -python "master_path='{master_path}'"; -python "root_joint='{joint_root}'"; -python "root_geos={geo_roots}"; -python "export_data={e_data}"; -python "fbx_batch.perform_fbx_condition({ns}, {sc}, master_path, root_joint, root_geos, {sk}, {bs}, {ps}, export_data)"; -""".format( - ns=remove_namespaces, - sc=scene_clean, - master_path=export_path, - geo_roots=geo_roots, - joint_root= joint_roots[0], - sk=skinning, - bs=blendshapes, - ps=use_partitions, - e_data=export_data) - - script_file = tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.mel') - script_file.write(script_content) - script_file_path = script_file.name - script_file.close() - - mayabatch_dir = coreUtils.get_maya_path() - - # Depending on the os we would need to change from maya, to maya batch - # windows uses mayabatch - if str(coreUtils.get_os()) == "win64" or str(coreUtils.get_os()) == "nt": - option = "mayabatch" - else: - option = "maya" - - if option == "maya": - 
mayabatch_command = 'maya' - else: - mayabatch_command = "mayabatch" - - mayabatch_path = os.path.join(mayabatch_dir, mayabatch_command) - mayabatch_args = [mayabatch_path] - - if option == "maya": - mayabatch_args.append("-batch") - - mayabatch_args.append("-script") - mayabatch_args.append(script_file_path) - - print("[Launching] MayaBatch") - print(" {}".format(mayabatch_args)) - print(" {}".format(" ".join(mayabatch_args))) - -# Use Popen for more control - with subprocess.Popen(mayabatch_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, shell=False) as process: - # Capture the output and errors - stdout, stderr = process.communicate() - - # Check the return code - returncode = process.returncode - - # Check the result - if returncode == 0: - print("Mayabatch process completed successfully.") - print("-------------------------------------------") - print("Output:", stdout) - print("-------------------------------------------") - else: - print("Mayabatch process failed.") - print("Error:", stderr) - return False - - # If all goes well return the export path location, else None - return True - - def export_animation_clip(config_data, clip_data): """ Exports a singular animation clip. 
@@ -254,8 +124,6 @@ def export_animation_clip(config_data, clip_data): ) file_type = config_data.get("file_type", "binary").lower() fbx_version = config_data.get("fbx_version", None) - remove_namespaces = config_data.get("remove_namespace") - scene_clean = config_data.get("scene_clean", True) # Validate timeline range if start_frame > end_frame: @@ -288,9 +156,16 @@ def export_animation_clip(config_data, clip_data): original_anim_layer_weights = animLayers.get_layer_weights() try: + # default mute status to on + animlayer_mute = True + # set anim layer to enable if animLayers.animation_layer_exists(anim_layer): animLayers.set_layer_weight(anim_layer, toggle_other_off=True) + + # Store anim layer mute status + animlayer_mute = cmds.animLayer(anim_layer, query=True, mute=True) + cmds.animLayer(anim_layer, edit=True, mute=False) # disable viewport mel.eval("paneLayout -e -manage false $gMainPane") @@ -298,14 +173,12 @@ def export_animation_clip(config_data, clip_data): pfbx.FBXResetExport() # set configuration + fbx_version_str = None if preset_path is not None: # load FBX export preset file pfbx.FBXLoadExportPresetFile(f=preset_path) - pfbx.FBXExportSkins(v=False) - pfbx.FBXExportShapes(v=False) - fbx_version_str = None if up_axis is not None: - pfbx.FBXExportUpAxis(up_axis) + pfbx.FBXExportUpAxis(up_axis.lower()) if fbx_version is not None: fbx_version_str = "{}00".format( fbx_version.split("/")[0].replace(" ", "") @@ -356,34 +229,14 @@ def export_animation_clip(config_data, clip_data): pfbx.FBXExportSplitAnimationIntoTakes(c=True) pfbx.FBXExportGenerateLog(v=False) pfbx.FBXExport(f=path, s=True) - - fbx_modified = False - fbx_file = sdk_utils.FbxSdkGameToolsWrapper(path) - fbx_file.parent_to_world(root_joint, remove_top_parent=True) - if remove_namespaces: - fbx_file.remove_namespaces() - fbx_modified = True - if scene_clean: - fbx_file.clean_scene( - no_export_tag=NO_EXPORT_TAG, - world_control_name=WORLD_CONTROL_NAME, - ) - fbx_modified = True - if fbx_modified: 
- fbx_file.save( - mode=file_type, - file_version=fbx_version_str, - close=True, - preset_path=preset_path, - skins=True, - ) - except Exception as exc: raise exc finally: # setup again original anim layer weights if anim_layer and original_anim_layer_weights: animLayers.set_layer_weights(original_anim_layer_weights) + # Sets the animation layer back to default + cmds.animLayer(anim_layer, edit=True, mute=animlayer_mute) if temp_skin_cluster and cmds.objExists(temp_skin_cluster): cmds.delete(temp_skin_cluster) @@ -583,6 +436,113 @@ def get_end_joint(start_joint): return end_joint +# ------- namespaces ------ + + +def _count_namespaces(name): + # Custom function to count the number of ":" in a name + return name.count(':') + + +def clean_namespaces(export_data): + """ + Gets all available namespaces in scene. + Checks each for objects that have it assigned. + Removes the namespace from the object. + """ + namespaces = get_scene_namespaces() + + # Sort namespaces by longest nested first + namespaces = sorted(namespaces, key=_count_namespaces, reverse=True) + + for namespace in namespaces: + print(" - {}".format(namespace)) + child_namespaces = om.MNamespace.getNamespaces(namespace, True) + + for chld_ns in child_namespaces: + m_objs = om.MNamespace.getNamespaceObjects(chld_ns) + for m_obj in m_objs: + remove_namespace(m_obj) + + m_objs = om.MNamespace.getNamespaceObjects(namespace) + for m_obj in m_objs: + remove_namespace(m_obj) + + filtered_export_data = clean_export_namespaces(export_data) + return filtered_export_data + + +def clean_export_namespaces(export_data): + """ + Looks at all the joints and mesh data in the export data and removes + any namespaces that exists. 
+    """
+
+    for key in export_data.keys():
+
+        # ignore filepath, as it contains ':', which will break the path
+        if key == "file_path" or key == "color":
+            continue
+
+        value = export_data[key]
+
+        print(key, value)
+
+        if isinstance(value, list):
+            for i in range(len(value)):
+                value[i] = trim_namespace_from_name(value[i])
+        elif isinstance(value, dict):
+            value = clean_export_namespaces(value)
+        elif isinstance(value, str):
+            value = trim_namespace_from_name(value)
+
+        export_data[key] = value
+
+    return export_data
+
+
+def count_namespaces(name):
+    # Custom function to count the number of ":" in a name
+    return name.count(':')
+
+
+def trim_namespace_from_name(name):
+    if name.find(":") >= 0:
+        return name.split(":")[-1]
+    return name
+
+
+def remove_namespace(mobj):
+    """
+    Removes the namespace that is currently assigned to the asset
+    """
+    dg = om.MFnDependencyNode(mobj)
+    name = dg.name()
+    dg.setName(name[len(dg.namespace):])
+
+
+def get_scene_namespaces():
+    """
+    Gets all namespaces in the scene.
+    """
+    IGNORED_NAMESPACES = [":UI", ":shared", ":root"]
+    spaces = om.MNamespace.getNamespaces(recurse=True)
+    for ignored in IGNORED_NAMESPACES:
+        if ignored in spaces:
+            spaces.remove(ignored)
+
+    return spaces
+
+
+def get_scene_path():
+    """
+    Get the file path of the current scene.
+
+    Returns:
+        str: path of the current open scene file
+    """
+    return cmds.file(query=True, sceneName=True)
+
 
 if __name__ == "__main__":
     if sys.version_info[0] == 2: