diff --git a/addons/unidot_importer/.gitrepo b/addons/unidot_importer/.gitrepo index 7e64e2bc..d59b6df1 100644 --- a/addons/unidot_importer/.gitrepo +++ b/addons/unidot_importer/.gitrepo @@ -5,8 +5,8 @@ ; [subrepo] remote = https://github.com/V-Sekai/unidot_importer.git - branch = main - commit = 8fdd0423f960dc11391bd7b829aa196a45d5c7c8 - parent = 8bfbc8eebf5e9ec6bc002d2ac0e5151577cd2472 + branch = dfab16fa87a13fc9d084d6b5bd3660378e9f680a + commit = dfab16fa87a13fc9d084d6b5bd3660378e9f680a + parent = dc31b82cfdb953a884ba0277e48cad677ae38819 method = merge cmdver = 0.4.6 diff --git a/addons/unidot_importer/asset_adapter.gd b/addons/unidot_importer/asset_adapter.gd index fdca3270..b575afed 100644 --- a/addons/unidot_importer/asset_adapter.gd +++ b/addons/unidot_importer/asset_adapter.gd @@ -23,6 +23,7 @@ const ASSET_TYPE_SCENE = 7 const ASSET_TYPE_UNKNOWN = 8 const SHOULD_CONVERT_TO_GLB: bool = false +const USE_BUILTIN_FBX: bool = false # true const SILHOUETTE_FIX_THRESHOLD: float = 3.0 # 28.0 @@ -77,14 +78,6 @@ class AssetHandler: func was_modified() -> bool: return modified - static func file_exists_hack(dres: DirAccess, fn: String): - if not dres.file_exists(fn): - return false - var fa = FileAccess.open(fn, FileAccess.READ) - if fa == null: - return false - return fa.get_length() != 0 - func calc_existing_md5(fname: String) -> PackedByteArray: var dres: DirAccess = DirAccess.open("res://") if not dres.file_exists(fname): @@ -117,12 +110,9 @@ class AssetHandler: pkgasset.data_md5 = calc_md5(data_buf) if pkgasset.existing_data_md5 != pkgasset.data_md5: var outfile: FileAccess = FileAccess.open(tmpdir + "/" + path, FileAccess.WRITE_READ) - if outfile == null: - pkgasset.log_fail("Unable to open file " + str(tmpdir + "/"+ path) + " for writing!") - else: - outfile.store_buffer(data_buf) - outfile.flush() - outfile.close() + outfile.store_buffer(data_buf) + outfile.flush() + outfile.close() outfile = null output_path = pkgasset.pathname if len(output_path) == 0: @@ -150,9 +140,6 @@ class AssetHandler: func preprocess_asset(pkgasset: Object, tmpdir: String, thread_subdir: String, path: String, data_buf: PackedByteArray, unique_texture_map: Dictionary = {}) -> String: return "" - func about_to_import(pkgasset: Object): - pass - func finished_import(pkgasset: Object, res: Resource): if res == null: var dres = DirAccess.open("res://") @@ -182,7 +169,7 @@ class ImageHandler: if not is_png and path.get_extension().to_lower() == "png": pkgasset.log_debug("I am a JPG pretending to be a " + str(path.get_extension()) + " " + str(path)) full_output_path = full_output_path.get_basename() + ".jpg" - elif (is_png or is_tiff or is_psd) and path.get_extension().to_lower() != "png": + elif is_png and path.get_extension().to_lower() != "png": pkgasset.log_debug("I am a PNG pretending to be a " + str(path.get_extension()) + " " + str(path)) full_output_path = full_output_path.get_basename() + ".png" pkgasset.log_debug("PREPROCESS_IMAGE " + str(is_tiff or is_psd) + "/" + str(is_png) + " path " + str(path) + " to " + str(full_output_path)) @@ -199,25 +186,13 @@ class ImageHandler: var addon_path: String = "convert" # ImageMagick installed system-wide. 
if OS.get_name() == "Windows": addon_path = post_import_material_remap_script.resource_path.get_base_dir().path_join("convert.exe") - if d.file_exists(addon_path): - addon_path = ProjectSettings.globalize_path(addon_path) - else: - pkgasset.log_warn("Not converting tiff to png because convert.exe is not present.") - addon_path = "convert.exe" - # [0] forces multi-layer files to only output the first layer. Otherwise filenames may be -0, -1, -2... - var convert_src: String = temp_output_path.get_basename() + ext + "[0]" - var convert_dst: String = temp_output_path - var convert_args: Array = [convert_src, convert_dst] - var ret = OS.execute(addon_path, convert_args, stdout) - for i in range(5): - OS.delay_msec(500) - OS.delay_msec(500) - # Hack, but I don't know what to do about this for now. The .close() is async or something. - if ret == 1 or "".join(stdout).strip_edges().find("@ error") != -1: - pkgasset.log_warn("Attempt to rerun FBX2glTF to mitigate windows file close race " + str(i) + ".") - ret = OS.execute(addon_path, convert_args, stdout) - pkgasset.log_debug("convert " + str(addon_path) + " " + str(convert_args) + " => result " + str(ret)) - pkgasset.log_debug(str(stdout)) + if addon_path.begins_with("res://"): + if not d.file_exists(addon_path): + pkgasset.log_warn("Not converting tiff to png because convert.exe is not present.") + return "" + if OS.get_name() == "Windows": + addon_path = "convert.exe" + var ret = OS.execute(addon_path, [temp_output_path.get_basename() + ext, temp_output_path], stdout) d.remove(temp_output_path.get_basename() + ext) var res_file: FileAccess = FileAccess.open(temp_output_path, FileAccess.READ) pkgasset.data_md5 = calc_md5(res_file.get_buffer(res_file.get_length())) @@ -641,8 +616,7 @@ class BaseModelHandler: # The importer seems to operate in terms of frames but no indication of time here.... cfile.set_value_compare("params", "animation/import", importer.animation_import) # Humanoid animations in particular are sensitive to immutable tracks being shared across rigs. - cfile.set_value_compare("params", "animation/remove_immutable_tracks", (importer.keys.get("animationType", 2) == 3 or pkgasset.parsed_meta.is_force_humanoid())) - cfile.set_value_compare("params", "animation/import_rest_as_RESET", true) + cfile.set_value_compare("params", "animation/remove_immutable_tracks", false) # FIXME: Godot has a major bug if light baking is used: # it leaves a file ".glb.unwrap_cache" open and causes future imports to fail. @@ -652,7 +626,7 @@ class BaseModelHandler: cfile.set_value_compare("params", "meshes/create_shadow_meshes", false) # Until visual artifacts with shadow meshes get fixed cfile.set_value_compare("params", "nodes/root_scale", pkgasset.parsed_meta.internal_data.get("scale_correction_factor", 1.0)) cfile.set_value_compare("params", "nodes/apply_root_scale", true) - cfile.set_value_compare("params", "nodes/root_name", "") + cfile.set_value_compare("params", "nodes/root_name", "Root Scene") # addCollider???? TODO # ??? animation/optimizer setting seems to be missing?
@@ -666,9 +640,8 @@ class BaseModelHandler: var fps_ratio: float = pkgasset.parsed_meta.internal_data.get("anim_frames_fps_ratio", 1.0) var used_animation_names: Dictionary = pkgasset.parsed_meta.internal_data.get("used_animation_names", {}) - var orig_to_godot_sanitized_anim_takes: Dictionary = pkgasset.parsed_meta.internal_data.get("orig_to_godot_sanitized_anim_takes", {}) for anim_clip in anim_clips: - var take_name = orig_to_godot_sanitized_anim_takes.get(anim_clip["take_name"], anim_clip["take_name"]) + var take_name = anim_clip["take_name"] if not used_animation_names.has(take_name): for key in used_animation_names: pkgasset.log_warn("Substituting requested takeName " + str(take_name) + " for first animation " + str(key)) @@ -697,7 +670,6 @@ class BaseModelHandler: if not subresources["nodes"].has("PATH:AnimationPlayer"): subresources["nodes"]["PATH:AnimationPlayer"] = {} if importer.keys.get("animationType", 2) == 3 or pkgasset.parsed_meta.is_force_humanoid(): - cfile.set_value_compare("params", "nodes/import_as_skeleton_bones", true) if not subresources["nodes"].has("PATH:Skeleton3D"): subresources["nodes"]["PATH:Skeleton3D"] = {} var bone_map: BoneMap = importer.generate_bone_map_from_human() @@ -705,18 +677,8 @@ class BaseModelHandler: subresources["nodes"]["PATH:Skeleton3D"]["retarget/bone_map"] = bone_map # FIXME: Disabled fix_silhouette because the pre-silhouette matrix is not being calculated yet # This would break skins and unpacked prefabs. - if pkgasset.parsed_meta.is_using_builtin_ufbx(): - var silhouette_anim: Animation = pkgasset.parsed_meta.internal_data.get("silhouette_anim", null) - if silhouette_anim != null and pkgasset.parsed_meta.internal_data.get("copy_avatar_src", []): - subresources["nodes"]["PATH:Skeleton3D"]["rest_pose/load_pose"] = 2 - subresources["nodes"]["PATH:Skeleton3D"]["rest_pose/external_animation_library"] = silhouette_anim - subresources["nodes"]["PATH:Skeleton3D"]["retarget/rest_fixer/fix_silhouette/enable"] = false - subresources["nodes"]["PATH:Skeleton3D"]["retarget/rest_fixer/fix_silhouette/enable"] = true - subresources["nodes"]["PATH:Skeleton3D"]["retarget/rest_fixer/fix_silhouette/filter"] = [&"LeftFoot", &"RightFoot", &"LeftToes", &"RightToes"] - subresources["nodes"]["PATH:Skeleton3D"]["retarget/rest_fixer/fix_silhouette/threshold"] = SILHOUETTE_FIX_THRESHOLD - subresources["nodes"]["PATH:Skeleton3D"]["retarget/rest_fixer/reset_all_bone_poses_after_import"] = false - else: - subresources["nodes"]["PATH:Skeleton3D"]["retarget/rest_fixer/fix_silhouette/enable"] = false + subresources["nodes"]["PATH:Skeleton3D"]["retarget/rest_fixer/fix_silhouette/enable"] = false + #subresources["nodes"]["PATH:Skeleton3D"]["retarget/rest_fixer/fix_silhouette/threshold"] = SILHOUETTE_FIX_THRESHOLD var anim_player_settings: Dictionary = subresources["nodes"]["PATH:AnimationPlayer"] var optim_setting: Dictionary = importer.animation_optimizer_settings() anim_player_settings["optimizer/enabled"] = optim_setting.get("enabled", false) @@ -732,28 +694,7 @@ class BaseModelHandler: cfile.save("res://" + pkgasset.pathname + ".import") return cfile.was_modified() - func about_to_import(pkgasset: Object): - var dres := DirAccess.open("res://") - for tex_name in pkgasset.parsed_meta.unique_texture_map: - var tex_uri = pkgasset.parsed_meta.unique_texture_map[tex_name] - var tex_pathname: String = "res://".path_join(pkgasset.pathname.get_base_dir().path_join(tex_name)) - if not file_exists_hack(dres, tex_pathname): - var src_tex: Texture2D = 
ResourceLoader.load("res://".path_join(pkgasset.pathname.get_base_dir().path_join(tex_uri))) as Texture2D - if src_tex != null: - var tmp_tex: Texture2D = src_tex.duplicate() - tmp_tex.set_meta(&"src_tex", src_tex) - tmp_tex.resource_name = tex_name - if not tex_pathname.begins_with("res://"): - tex_pathname = "res://" + tex_pathname - pkgasset.log_debug("Would reference non-existent texture " + str(tex_pathname) + " from " + str(tex_name) + " rather than " + str(tex_uri)) - tmp_tex.take_over_path(tex_pathname) - pkgasset.parsed_meta.taken_over_import_references[tex_name] = tmp_tex - else: - pkgasset.log_debug("Would reference existing texture " + str(tex_pathname) + " from " + str(tex_name) + " rather than " + str(tex_uri)) - func finished_import(pkgasset: Object, res: Resource): - pkgasset.log_debug("taken_over_import_references: " + str(pkgasset.parsed_meta.taken_over_import_references)) - pkgasset.parsed_meta.taken_over_import_references.clear() super.finished_import(pkgasset, res) if res != null: pass @@ -807,13 +748,6 @@ class BaseModelHandler: subresources["materials"][material_name] = {} subresources["materials"][material_name]["use_external/enabled"] = true subresources["materials"][material_name]["use_external/path"] = pkgasset.parsed_meta.imported_material_paths[material_name] - if pkgasset.parsed_meta.imported_material_paths.has(""): - for i in range(pkgasset.parsed_meta.internal_data.get("unnamed_material_count", 0)): - var material_name: String = "@MATERIAL:" + str(i) - if not (subresources["materials"].has(material_name)): - subresources["materials"][material_name] = {} - subresources["materials"][material_name]["use_external/enabled"] = true - subresources["materials"][material_name]["use_external/path"] = pkgasset.parsed_meta.imported_material_paths[""] cfile.set_value("params", "_subresources", subresources) #cfile.set_value_compare("params", "animation/optimizer/enabled", optim_setting.get("enabled")) #cfile.set_value_compare("params", "animation/optimizer/max_linear_error", optim_setting.get("max_linear_error")) @@ -821,10 +755,10 @@ class BaseModelHandler: cfile.save(import_file_path) func write_godot_asset(pkgasset: Object, temp_path: String) -> bool: - var dres = DirAccess.open("res://") # super.write_godot_asset(pkgasset, temp_path) # Duplicate code since super causes a weird nonsensical error cannot call "importer()" function... - if pkgasset.existing_data_md5 != pkgasset.data_md5: + if USE_BUILTIN_FBX or pkgasset.existing_data_md5 != pkgasset.data_md5: + var dres = DirAccess.open("res://") pkgasset.log_debug("Renaming " + temp_path + " to " + pkgasset.pathname) dres.rename(temp_path, pkgasset.pathname) if temp_path.ends_with(".gltf"): @@ -889,8 +823,7 @@ class FbxHandler: var output_scale: float = desired_scale # FBX2glTF does not implement scale correctly, so we must set it to 1.0 and correct it later # Godot's new built-in FBX importer works correctly and does not need this correction. 
- if not pkgasset.parsed_meta.is_using_builtin_ufbx(): - output_scale = 1.0 + output_scale = 1.0 pkgasset.parsed_meta.internal_data["scale_correction_factor"] = desired_scale / output_scale pkgasset.parsed_meta.internal_data["output_fbx_scale"] = output_scale return output_scale @@ -973,171 +906,6 @@ class FbxHandler: strlist.append(buffer_as_ascii.substr(nextpos + 1, lastquote - nextpos - 1)) return strlist - func _extract_fbx_object_offsets_binary(obj_type: String, pkgasset: Object, fbx_file_binary: PackedByteArray) -> Dictionary: - var spb: StreamPeerBuffer = StreamPeerBuffer.new() - spb.data_array = fbx_file_binary - spb.big_endian = false - var str_offsets: Dictionary - - # Binary FBX serializes ids such as Model::SomeNodeName as S__\x00\x00SomeNodeName\x00\x01Model - var modelname_needle_buf: PackedByteArray = (".\u0001" + obj_type).to_ascii_buffer() - modelname_needle_buf[0] = 0 - - var modelname_needle_pos: int = find_in_buffer(fbx_file_binary, modelname_needle_buf) - while modelname_needle_pos != -1: - var length_prefix_pos: int = modelname_needle_pos - 1 - while fbx_file_binary[length_prefix_pos] != 0: - if length_prefix_pos < 4: - push_error("Failed to find beginning of Model name") - return str_offsets - length_prefix_pos -= 1 - var nextpos: int = find_in_buffer(fbx_file_binary, modelname_needle_buf, modelname_needle_pos + 7) - spb.seek(length_prefix_pos - 3) - var strlen: int = spb.get_32() - #print(strlen + 1 + length_prefix_pos) - #print(modelname_needle_pos) - assert(strlen + 1 + length_prefix_pos == modelname_needle_pos + 2 + len(obj_type)) - if strlen >= len(obj_type) + 2 and strlen < 1024: - var fn_utf8: PackedByteArray = spb.get_data(strlen - len(obj_type) - 2)[1] # NOTE: Do we need to detect charset? FBX should be unicode - var mesh_name: String = fn_utf8.get_string_from_utf8() - if not str_offsets.has(mesh_name): - str_offsets[mesh_name] = [] - str_offsets[mesh_name].append(Vector2i(spb.get_position(), nextpos)) - modelname_needle_pos = nextpos - return str_offsets - - func _extract_fbx_object_offsets_ascii(obj_type: String, pkgasset: Object, buffer_as_utf8: String) -> Dictionary: - var str_offsets: Dictionary - var modelname_needle: String = '"' + obj_type + '::' - var modelname_needle_pos: int = buffer_as_utf8.find(modelname_needle) - print("ASCII " + modelname_needle + " " + str(modelname_needle_pos)) - while modelname_needle_pos != -1: - var modelname_needle_end: int = buffer_as_utf8.find('"', modelname_needle_pos + len(obj_type) + 3) - print("ASCII QUOT " + str(modelname_needle_end)) - if modelname_needle_end == -1: - print("ASCII EMD " + str(modelname_needle_end)) - break - var model_needle_end: int = buffer_as_utf8.find(modelname_needle, modelname_needle_end) - if model_needle_end == -1: - model_needle_end = buffer_as_utf8.find("::", modelname_needle_end) - var str_utf8: String = buffer_as_utf8.substr(modelname_needle_pos + len(obj_type) + 3, modelname_needle_end - modelname_needle_pos - len(obj_type) - 3) - if not str_offsets.has(str_utf8): - str_offsets[str_utf8] = [] - str_offsets[str_utf8].append(Vector2i(modelname_needle_end, model_needle_end)) - modelname_needle_pos = buffer_as_utf8.find(modelname_needle, modelname_needle_end) - print("ASCII DO " + str(modelname_needle_pos)) - return str_offsets - - func _extract_fbx_objects_binary(obj_type: String, pkgasset: Object, fbx_file_binary: PackedByteArray) -> PackedStringArray: - return PackedStringArray(_extract_fbx_object_offsets_binary(obj_type, pkgasset, fbx_file_binary).keys()) - - func 
_extract_fbx_objects_ascii(obj_type: String, pkgasset: Object, buffer_as_utf8: String) -> PackedStringArray: - return PackedStringArray(_extract_fbx_object_offsets_ascii(obj_type, pkgasset, buffer_as_utf8).keys()) - - func _extract_fbx_geometry_material_order_binary(pkgasset: Object, fbx_file_binary: PackedByteArray) -> Dictionary: - var spb: StreamPeerBuffer = StreamPeerBuffer.new() - spb.data_array = fbx_file_binary - spb.big_endian = false - var str_offsets: Dictionary = _extract_fbx_object_offsets_binary("Geometry", pkgasset, fbx_file_binary) - var material_order_by_mesh: Dictionary - var duplicate_map: Dictionary - for mesh_name in str_offsets: - for which_mesh in str_offsets[mesh_name]: - var start_mesh: int = which_mesh.x - var end_mesh: int = which_mesh.y - # print("Found binary mesh " + str(mesh_name) + ": " + str(start_mesh) + "/" + str(end_mesh)) - var layer_element_material_pos: int = find_in_buffer(fbx_file_binary, ("\u0014LayerElementMaterial").to_ascii_buffer(), start_mesh, end_mesh) - if layer_element_material_pos == -1: - continue - var materials_key_pos: int = find_in_buffer(fbx_file_binary, ("\u0009Materialsi").to_ascii_buffer(), layer_element_material_pos, end_mesh) - if materials_key_pos == -1: - continue - spb.seek(materials_key_pos + 11) - var face_count: int = spb.get_u32() - var encoding: int = spb.get_u32() # 0 = raw; 1 = deflate - var compressed_size: int = spb.get_u32() - # print("comp " + str(compressed_size) + " face count " + str(face_count) + " one " + str(one_int)) - if face_count > 250000000: # one_int != 1 or (sometimes this is 0) - push_error("Incorrect compression tag for Materials " + str(encoding) + " face count " + str(face_count)) - continue - if end_mesh != -1 and compressed_size > end_mesh - materials_key_pos - 11: - push_error("Invalid compressed size " + str(compressed_size)) - continue - var compressed_data = spb.get_data(compressed_size)[1] - var face_indices: PackedInt32Array - if encoding == 1: - face_indices = compressed_data.decompress(face_count * 4, FileAccess.COMPRESSION_DEFLATE).to_int32_array() - else: - face_indices = compressed_data.to_int32_array() - if len(face_indices) != face_count: - push_warning("incorrect number of decompressed face indices") - continue - var material_order: PackedInt32Array - var found_face_indices: PackedInt32Array - for idx in face_indices: - if idx > face_count or idx < 0: - push_error("Incorrect material index " + str(idx)) - if idx >= len(found_face_indices): - found_face_indices.resize(idx + 1) - if found_face_indices[idx] == 0: - material_order.append(idx) - found_face_indices[idx] = 1 - if not material_order_by_mesh.has(mesh_name): - material_order_by_mesh[mesh_name] = [] - material_order_by_mesh[mesh_name].append(material_order) - return material_order_by_mesh - - func _extract_fbx_geometry_material_order_ascii(pkgasset: Object, buffer_as_utf8: String) -> Dictionary: - var str_offsets: Dictionary = _extract_fbx_object_offsets_ascii("Geometry", pkgasset, buffer_as_utf8) - var material_order_by_mesh: Dictionary - for mesh_name in str_offsets: - for which_mesh in str_offsets[mesh_name]: - var start_mesh: int = which_mesh.x - var end_mesh: int = which_mesh.y - var mat_pos: int = buffer_as_utf8.find("LayerElementMaterial:", start_mesh) - if mat_pos == -1 or mat_pos > end_mesh: - print("Cont 1 " + str(mat_pos) + " " + str(end_mesh)) - continue - var materials_pos: int = buffer_as_utf8.find("Materials:", mat_pos + 21) - if materials_pos == -1 or materials_pos > end_mesh: - print("Cont 2 " + str(mat_pos) + 
" " + str(end_mesh) + " " + str(materials_pos)) - continue - var close_brace_pos: int = buffer_as_utf8.find("}", materials_pos) - if close_brace_pos == -1 or close_brace_pos > end_mesh: - print("Cont 3 " + str(mat_pos) + " " + str(close_brace_pos) + " " + str(materials_pos)) - continue - var open_brace_pos: int = buffer_as_utf8.find("{", materials_pos) - if open_brace_pos == -1 or open_brace_pos > close_brace_pos: - print("Cont 4 " + str(mat_pos) + " " + str(open_brace_pos) + " " + str(close_brace_pos)) - continue - var star_pos: int = buffer_as_utf8.find("*", materials_pos) - if star_pos == -1 or star_pos > open_brace_pos: - print("Cont 5 " + str(mat_pos) + " " + str(star_pos) + " " + str(open_brace_pos)) - continue - var face_count: int = buffer_as_utf8.substr(star_pos + 1, open_brace_pos - star_pos - 1).strip_edges().to_int() - var array_start = buffer_as_utf8.find(":", open_brace_pos) - if array_start == -1 or array_start > close_brace_pos: - print("Cont 6 " + str(mat_pos) + " " + str(array_start) + " " + str(close_brace_pos)) - continue - var face_index_strs: PackedStringArray = buffer_as_utf8.substr(array_start + 1, close_brace_pos - array_start - 1).split(",") - var material_order: PackedInt32Array - var found_face_indices: PackedInt32Array - for idx_str in face_index_strs: - var idx: int = idx_str.strip_edges().to_int() - if idx > face_count or idx < 0: - push_error("Incorrect material index " + str(idx)) - if idx >= len(found_face_indices): - found_face_indices.resize(idx + 1) - if found_face_indices[idx] == 0: - material_order.append(idx) - found_face_indices[idx] = 1 - if not material_order_by_mesh.has(mesh_name): - material_order_by_mesh[mesh_name] = [] - print(material_order) - material_order_by_mesh[mesh_name].append(material_order) - return material_order_by_mesh - - func _preprocess_fbx_scale_binary(pkgasset: Object, fbx_file_binary: PackedByteArray, useFileScale: bool, globalScale: float) -> PackedByteArray: if useFileScale and is_equal_approx(globalScale, 1.0): pkgasset.log_debug("TODO: when we switch to the Godot FBX implementation, we can short-circuit this code and return early.") @@ -1308,12 +1076,12 @@ class FbxHandler: var filename: String = pkgasset.pathname var time_mode_pos: int = buffer_as_ascii.find('"TimeMode"') if time_mode_pos == -1: - pkgasset.log_fail(filename + ": Failed to find TimeMode in ASCII FBX.") + pkgasset.log_fail(filename + ": Failed to find UnitScaleFactor in ASCII FBX.") return 30 var newline_pos: int = buffer_as_ascii.find("\n", time_mode_pos) var comma_pos: int = buffer_as_ascii.rfind(",", newline_pos) if newline_pos == -1 or comma_pos == -1: - pkgasset.log_fail(filename + ": Failed to find value for TimeMode in ASCII FBX.") + pkgasset.log_fail(filename + ": Failed to find value for UnitScaleFactor in ASCII FBX.") return 30 var fps: float = 1 @@ -1327,12 +1095,12 @@ class FbxHandler: var framerate_pos: int = buffer_as_ascii.find('"CustomFrameRate"') if framerate_pos == -1: - pkgasset.log_fail(filename + ": Failed to find CustomFrameRate in ASCII FBX.") + pkgasset.log_fail(filename + ": Failed to find UnitScaleFactor in ASCII FBX.") return 30 newline_pos = buffer_as_ascii.find("\n", framerate_pos) comma_pos = buffer_as_ascii.rfind(",", newline_pos) if newline_pos == -1 or comma_pos == -1: - pkgasset.log_fail(filename + ": Failed to find value for CustomFrameRate in ASCII FBX.") + pkgasset.log_fail(filename + ": Failed to find value for UnitScaleFactor in ASCII FBX.") return 30 var custom_fps_str: String = buffer_as_ascii.substr(comma_pos + 1, 
newline_pos - comma_pos - 1).strip_edges() @@ -1394,7 +1162,7 @@ class FbxHandler: var full_tmpdir: String = tmpdir + "/" + thread_subdir var input_path: String = thread_subdir + "/" + "input.fbx" var temp_input_path: String = tmpdir + "/" + input_path - if pkgasset.parsed_meta.is_using_builtin_ufbx(): + if USE_BUILTIN_FBX: input_path = pkgasset.pathname temp_input_path = input_path var importer = pkgasset.parsed_meta.importer @@ -1411,27 +1179,15 @@ class FbxHandler: var is_binary: bool = _is_fbx_binary(fbx_file) var fps: float var texture_name_list: PackedStringArray = PackedStringArray() - var node_name_list: PackedStringArray - var mesh_name_list: PackedStringArray - var animation_name_list: PackedStringArray - var material_order_by_mesh: Dictionary if is_binary: texture_name_list = _extract_fbx_textures_binary(pkgasset, fbx_file) fps = _preprocess_fbx_anim_fps_binary(pkgasset, fbx_file) - #node_name_list = _extract_fbx_objects_binary("Model", pkgasset, fbx_file) - #mesh_name_list = _extract_fbx_objects_binary("Geometry", pkgasset, fbx_file) - #animation_name_list = _extract_fbx_objects_binary("AnimStack", pkgasset, fbx_file) - material_order_by_mesh = _extract_fbx_geometry_material_order_binary(pkgasset, fbx_file) fbx_file = _preprocess_fbx_scale_binary(pkgasset, fbx_file, importer.keys.get("meshes", {}).get("useFileScale", 0) == 1, importer.keys.get("meshes", {}).get("globalScale", 1)) else: var buffer_as_utf8: String = fbx_file.get_string_from_utf8() # may contain unicode texture_name_list = _extract_fbx_textures_ascii(pkgasset, buffer_as_utf8) var buffer_as_ascii: String = fbx_file.get_string_from_ascii() # match 1-to-1 with bytes fps = _preprocess_fbx_anim_fps_ascii(pkgasset, buffer_as_ascii) - #node_name_list = _extract_fbx_objects_ascii("Model", pkgasset, buffer_as_utf8) - #mesh_name_list = _extract_fbx_objects_ascii("Geometry", pkgasset, buffer_as_utf8) - #animation_name_list = _extract_fbx_objects_ascii("AnimStack", pkgasset, buffer_as_utf8) - material_order_by_mesh = _extract_fbx_geometry_material_order_ascii(pkgasset, buffer_as_utf8) fbx_file = _preprocess_fbx_scale_ascii(pkgasset, fbx_file, buffer_as_ascii, importer.keys.get("meshes", {}).get("useFileScale", 0) == 1, importer.keys.get("meshes", {}).get("globalScale", 1)) var d := DirAccess.open("res://") var closest_bake_fps: float = 30 @@ -1458,10 +1214,8 @@ class FbxHandler: retry_count += 1 outfile = null var unique_texture_map: Dictionary = {} - var texture_dirname: String = "" - if not pkgasset.parsed_meta.is_using_builtin_ufbx(): - texture_dirname = full_tmpdir - var output_dirname: String = pkgasset.pathname.get_base_dir() + var texture_dirname = full_tmpdir + var output_dirname = pkgasset.pathname.get_base_dir() pkgasset.log_debug("Referenced texture list: " + str(texture_name_list)) for fn in texture_name_list: var fn_filename: String = fn.get_file() @@ -1472,7 +1226,7 @@ class FbxHandler: pkgasset.log_debug("Referenced textures: " + str(unique_texture_map.keys())) var tex_not_exists = {} for fn in unique_texture_map.keys(): - if not texture_dirname.is_empty() and not d.file_exists(texture_dirname + "/" + fn): + if not d.file_exists(texture_dirname + "/" + fn): pkgasset.log_debug("Creating dummy texture: " + str(texture_dirname + "/" + fn)) var tmpf = FileAccess.open(texture_dirname + "/" + fn, FileAccess.WRITE_READ) tmpf.close() @@ -1481,7 +1235,7 @@ class FbxHandler: var tex_exists: bool = false for candidate_fn in candidate_texture_dict: #pkgasset.log_debug("candidate " + str(candidate_fn) + " INPKG=" + 
str(pkgasset.packagefile.path_to_pkgasset.has(candidate_fn)) + " FILEEXIST=" + str(d.file_exists(candidate_fn))) - if pkgasset.packagefile.path_to_pkgasset.has(candidate_fn) or file_exists_hack(d, candidate_fn): + if pkgasset.packagefile.path_to_pkgasset.has(candidate_fn) or d.file_exists(candidate_fn): unique_texture_map[fn] = candidate_texture_dict[candidate_fn] tex_exists = true break @@ -1498,26 +1252,13 @@ class FbxHandler: for texname in tex_not_exists.keys(): if not tex_not_exists[texname].is_empty(): unique_texture_map[texname] = _make_relative_to(tex_not_exists[texname], output_dirname) - var extracted_assets_dir: String = post_import_material_remap_script.get_extracted_assets_dir(pkgasset.pathname) - if not DirAccess.dir_exists_absolute(extracted_assets_dir): - DirAccess.make_dir_absolute(extracted_assets_dir) - - var extra_data = { - "unique_texture_map": unique_texture_map, - #"node_name_list": node_name_list, - #"mesh_name_list": mesh_name_list, - #"animation_name_list": animation_name_list, - "material_order_by_mesh": material_order_by_mesh, - } - pkgasset.parsed_meta.unique_texture_map = unique_texture_map - var output_path: String = self.preprocess_asset(pkgasset, tmpdir, thread_subdir, input_path, fbx_file, extra_data) + var output_path: String = self.preprocess_asset(pkgasset, tmpdir, thread_subdir, input_path, fbx_file, unique_texture_map) #if len(output_path) == 0: # output_path = path - if not pkgasset.parsed_meta.is_using_builtin_ufbx(): + if not USE_BUILTIN_FBX: d.remove(temp_input_path) # delete "input.fbx" - if not texture_dirname.is_empty(): - for fn in unique_texture_map.keys(): - d.remove(texture_dirname + "/" + fn) + for fn in unique_texture_map.keys(): + d.remove(texture_dirname + "/" + fn) pkgasset.log_debug("Updating file at " + output_path) return output_path @@ -1710,380 +1451,7 @@ class FbxHandler: func encode_uri_components(path: String): return path.replace("%", "%25").replace("+", "%2B") - func preprocess_asset(pkgasset: Object, tmpdir: String, thread_subdir: String, path: String, data_buf: PackedByteArray, extra_data: Dictionary = {}) -> String: - if pkgasset.parsed_meta.is_using_builtin_ufbx(): - return preprocess_asset_ufbx(pkgasset, tmpdir, thread_subdir, path, data_buf, extra_data) - else: - return preprocess_asset_fbx2gltf(pkgasset, tmpdir, thread_subdir, path, data_buf, extra_data) - - func create_rest_pose_anim(nodes: Array, hip_node_idx: int) -> Animation: - var anim := Animation.new() - for node in nodes: - var trk: int = anim.add_track(Animation.TYPE_ROTATION_3D) - anim.track_set_path(trk, NodePath("Skeleton3D:" + node.resource_name)) - anim.rotation_track_insert_key(trk, 0.0, node.rotation) - var par_idx: int = node.parent - var hip_descendent: bool = false - while par_idx != -1: - if par_idx == hip_node_idx: - hip_descendent = true - par_idx = nodes[par_idx].parent - if not hip_descendent: - trk = anim.add_track(Animation.TYPE_POSITION_3D) - anim.track_set_path(trk, NodePath("Skeleton3D:" + node.resource_name)) - anim.position_track_insert_key(trk, 0.0, node.position) - return anim - - func preprocess_asset_ufbx(pkgasset: Object, tmpdir: String, thread_subdir: String, path: String, data_buf: PackedByteArray, extra_data: Dictionary = {}) -> String: - # I think we should depend on relative paths for this, for example make dummy textures to get it to generate the materials, then move them back. - # We can do the FBXDocument step in preprocess, so we will know in advance if this works. 
- var unique_texture_map: Dictionary = extra_data["unique_texture_map"] - var material_order_by_mesh: Dictionary = extra_data["material_order_by_mesh"] - var used_animation_names: Dictionary - var orig_to_godot_sanitized_anim_takes: Dictionary - - var godot_sanitized_to_orig_remap: Dictionary = {"bone_name": {}, "nodes": {}, "meshes": {}, "animations": {}} - # Anything after this point will be using sanitized names, and should go through godot_sanitized_to_orig_remap / bone_map_dict - # for key in ["scenes", "nodes", "meshes", "skins", "images", "materials", "animations"]: - - var doc = ClassDB.instantiate(&"FBXDocument") - var state = ClassDB.instantiate(&"FBXState") - state.import_as_skeleton_bones = true - doc.append_from_file(tmpdir + "/" + path, state) - # Name unmangling: - var godot_to_fbx_node_name: Dictionary - var godot_to_meta_node_name: Dictionary - var nodes = state.get_nodes() - var meshes = state.get_meshes() - var materials = state.get_materials() - var animations = state.get_animations() - pkgasset.log_debug("Preprocessing ufbx nodes " + str(len(nodes))) - - var used_names: Dictionary - var used_godot_skel_names: Dictionary - for node_i in range(len(nodes)): - var node: GLTFNode = nodes[node_i] - var godot_mangled_name: String = node.resource_name - var orig_name: String = node.original_name - pkgasset.log_debug("Found node " + str(node.parent) + "/" + str(node_i) + " " + str(godot_mangled_name) + " " + str(orig_name)) - if node_i == 0: - if orig_name == "": - continue - else: - pkgasset.log_debug("Found a different node " + str(godot_mangled_name) + "/" + str(orig_name) + " instead of Root") - var next_num: int = used_names.get(orig_name, 1) - var try_name: String = orig_name - while used_names.has(try_name): - try_name = "%s %d" % [orig_name, next_num] - next_num += 1 - used_names[orig_name] = next_num - used_names[try_name] = 1 - var used_type: String = "bone_name" if node.skeleton >= 0 else "nodes" - godot_sanitized_to_orig_remap[used_type][godot_mangled_name] = try_name - if node.mesh >= 0: - var mesh = meshes[node.mesh] - var mesh_mangled_name: String = mesh.resource_name - godot_sanitized_to_orig_remap["meshes"][mesh_mangled_name] = try_name - if node.skeleton >= 0: - # For meshes that are also bones that are also nodes (since the current godot importer makes bones for meshes) - godot_sanitized_to_orig_remap["nodes"][godot_mangled_name.validate_node_name()] = try_name - if node.skeleton >= 0: - used_godot_skel_names[node.resource_name] = 2 - var material_idx_by_mesh = {} - var used_node_names = {} - used_names = {} - var unnamed_material_count: int = 0 - var internal_material_order_by_mesh_rev: Dictionary - for mesh_i in range(len(meshes)): - var mesh: GLTFMesh = meshes[mesh_i] - var godot_mangled_name: String = mesh.resource_name - var orig_name: String = mesh.original_name - var next_num: int = used_names.get(orig_name, 1) - var try_name: String = orig_name - while used_names.has(try_name): - try_name = "%s %d" % [orig_name, next_num] - next_num += 1 - used_names[orig_name] = next_num - used_names[try_name] = 1 - if not godot_sanitized_to_orig_remap["meshes"].has(godot_mangled_name): - godot_sanitized_to_orig_remap["meshes"][godot_mangled_name] = try_name - var imp_mesh: ImporterMesh = mesh.mesh - if imp_mesh != null: - for surf_i in range(imp_mesh.get_surface_count()): - if imp_mesh.get_surface_material(surf_i) == null or imp_mesh.get_surface_material(surf_i).resource_name.is_empty(): - unnamed_material_count += 1 - # Handle material sort orderp - var 
material_key_idx: int = 0 - if material_idx_by_mesh.has(orig_name): - material_key_idx = material_idx_by_mesh[orig_name] - material_idx_by_mesh[orig_name] += 1 - else: - material_idx_by_mesh[orig_name] = 1 - var material_used: PackedInt32Array = PackedInt32Array() - var material_order: PackedInt32Array = PackedInt32Array() - if mesh.mesh != null: - material_used.resize(mesh.mesh.get_surface_count()) - if material_order_by_mesh.has(orig_name): - var material_order_temp: PackedInt32Array = PackedInt32Array() - if len(material_order_by_mesh[orig_name]) > material_key_idx: - material_order_temp = material_order_by_mesh[orig_name][material_key_idx] - else: - material_order_temp = material_order_by_mesh[orig_name][-1] - for mat in material_order_temp: - if mat >= 0 and mat < len(material_used): - if material_used[mat] == 0: - material_used[mat] = -1 - material_order.append(mat) - for mat in range(len(material_used)): - if not material_used[mat]: - material_order.append(mat) - for mat in range(len(material_order)): - material_used[material_order[mat]] = mat - material_used = material_order - internal_material_order_by_mesh_rev[godot_mangled_name] = material_used - pkgasset.parsed_meta.internal_data["material_order_by_mesh_rev"] = internal_material_order_by_mesh_rev - - used_names = {} - for anim_i in range(len(animations)): - var anim = animations[anim_i] - var godot_mangled_name: String = anim.resource_name - var orig_name: String = anim.original_name - var next_num: int = used_names.get(orig_name, 1) - used_animation_names[godot_mangled_name] = anim_i - var try_name: String = orig_name - while used_names.has(try_name): - try_name = "%s %d" % [orig_name, next_num] - next_num += 1 - used_names[orig_name] = next_num - used_names[try_name] = 1 - godot_sanitized_to_orig_remap["animations"][godot_mangled_name] = try_name - orig_to_godot_sanitized_anim_takes[try_name] = godot_mangled_name - - var importer = pkgasset.parsed_meta.importer - var humanoid_original_transforms: Dictionary = {} # name -> Transform3D - var original_rotations: Dictionary = {} # name -> Quaternion - var orig_hip_position: Vector3 - var human_skin_nodes: Array = [] - var is_humanoid: bool = importer.keys.get("animationType", 2) == 3 or pkgasset.parsed_meta.is_force_humanoid() - var bone_map_dict: Dictionary - var copy_avatar: bool = false - var src_ava: Array = [] - var silhouette_anim: Animation - if is_humanoid and (importer.keys.get("avatarSetup", 1) >= 1 or pkgasset.parsed_meta.is_force_humanoid()): - if importer.keys.get("avatarSetup", 1) == 2 or importer.keys.get("copyAvatar", 0) == 1: - src_ava = importer.keys.get("lastHumanDescriptionAvatarSource", [null, 0, "", 0]) - var src_ava_meta = pkgasset.meta_dependencies.get(src_ava[2], null) - if src_ava_meta == null: - pkgasset.log_fail("Unable to lookup meta copy avatar dependency", "lastHumanDescriptionAvatarSource", src_ava) - else: - bone_map_dict = src_ava_meta.importer.generate_bone_map_dict_from_human() - humanoid_original_transforms = src_ava_meta.internal_data.get("humanoid_original_transforms", {}).duplicate() - orig_hip_position = src_ava_meta.internal_data.get("hips_position", Vector3()) - original_rotations = src_ava_meta.internal_data.get("original_rotations", {}).duplicate() - pkgasset.log_debug("Copying from avatar " + str(src_ava_meta.path) + " " + str(src_ava_meta.guid) + " orig transforms " + str(len(src_ava_meta.internal_data.get("humanoid_original_transforms", {})))) - copy_avatar = true - - if not copy_avatar and len(importer.keys.get("humanDescription", 
{}).get("human", [])) < 10: - var skel: Skeleton3D = Skeleton3D.new() - for node in nodes: - var node_name = node.resource_name - skel.add_bone(node_name) - var i: int = 0 - for node in nodes: - if node.parent != -1: - skel.set_bone_parent(i, node.parent) - i += 1 - i = 0 - for node in nodes: - var n:GLTFNode - var xform: Transform3D = node.xform - skel.set_bone_rest(i, xform) - skel.set_bone_pose_position(i, xform.origin) - skel.set_bone_pose_rotation(i, xform.basis.get_rotation_quaternion()) - skel.set_bone_pose_scale(i, xform.basis.get_scale()) - i += 1 - pkgasset.parsed_meta.autodetected_bone_map_dict = bone_map_editor_plugin.auto_mapping_process_dictionary(skel, pkgasset.log_debug, pkgasset.parsed_meta.is_force_humanoid()) - # The above fails if Hips could not be found. If forcing humanoid, try anyway in case this is an outfit. - if pkgasset.parsed_meta.is_force_humanoid() and pkgasset.parsed_meta.autodetected_bone_map_dict.is_empty(): - pkgasset.parsed_meta.autodetected_bone_map_dict = bone_map_editor_plugin.auto_mapping_process_dictionary(skel, pkgasset.log_debug, true, true) - var counter: int = 0 - if pkgasset.parsed_meta.autodetected_bone_map_dict.is_empty(): - for root_idx in range(len(nodes)): - if nodes[root_idx].parent != -1: - continue - counter ++ 1 - if counter == 100: - break - var try_bone_map = bone_map_editor_plugin.auto_mapping_process_dictionary(skel, pkgasset.log_debug, true, true, root_idx) - if len(try_bone_map) > len(pkgasset.parsed_meta.autodetected_bone_map_dict): - pkgasset.parsed_meta.autodetected_bone_map_dict = try_bone_map - skel.free() - - if not copy_avatar: - pkgasset.log_debug(str(importer.keys.get("humanDescription", {}).get("human", []))) - pkgasset.log_debug("AAAA set to humanoid and has nodes") - bone_map_dict = importer.generate_bone_map_dict_from_human() - pkgasset.log_debug(str(bone_map_dict)) - - # Discover missing Root bone if any, and correct for name conflicts. - var node_idx: int = 0 - var human_skin_set: Dictionary - for node in nodes: - var node_name = node.resource_name - if bone_map_dict.has(node_name): - var godot_human_name: String = bone_map_dict[node_name] - human_skin_nodes.push_back(node_idx) - human_skin_set[node_idx] = true - node_idx += 1 - - var root_bone_name: String = "" - for key in bone_map_dict: - if bone_map_dict[key] == "Root": - root_bone_name = key - var cur_human_node_idx: int = -1 if human_skin_nodes.is_empty() else human_skin_nodes[-1] # Doesn't matter which...just need to find a common ancestor. - pkgasset.log_debug("cur_human_node_idx=" + str(cur_human_node_idx) + " / " + str(nodes[cur_human_node_idx])) - # Add up to three levels up into the skeleton. Our goal is to make the toplevel Armature node be a skeleton, so that we are guaranteed a root bone. 
- while nodes[cur_human_node_idx].parent != -1: - var new_root_idx = nodes[cur_human_node_idx].parent - var node = nodes[new_root_idx] - if node.original_name.is_empty() or node.skeleton < 0: - break - pkgasset.log_debug("Adding node to skin " + str(new_root_idx) + " parent of " + str(cur_human_node_idx)) - pkgasset.parsed_meta.internal_data["humanoid_root_bone"] = node.resource_name - pkgasset.log_debug("Attempt root bone name: " + str(node.resource_name) + " in map: " + str(bone_map_dict.has(node.resource_name))) - if not bone_map_dict.has(node.resource_name): - root_bone_name = node.resource_name - if not human_skin_set.has(new_root_idx): - human_skin_nodes.push_back(new_root_idx) - human_skin_set[new_root_idx] = true - cur_human_node_idx = new_root_idx - pkgasset.log_debug("Root bone name: " + str(root_bone_name)) - if root_bone_name != "": - bone_map_dict[root_bone_name] = "Root" - - var hip_parent_node_idx = -1 - var hip_node_idx = -1 - - for node in nodes: - if bone_map_dict.get(node.resource_name, "") == "Hips": - hip_parent_node_idx = node.parent - while hip_parent_node_idx != -1: - hip_parent_node_idx = nodes[hip_parent_node_idx].parent - - var prof_bone_names = {} - var prof = SkeletonProfileHumanoid.new() - for prof_i in range(prof.bone_size): - prof_bone_names[prof.get_bone_name(prof_i)] = 2 - used_names = used_godot_skel_names - for godot_bone_name in godot_sanitized_to_orig_remap["bone_name"].keys().duplicate(): - if not bone_map_dict.has(godot_bone_name): - #if mapped_bone_name != godot_bone_name: - if prof_bone_names.has(godot_bone_name): - var next_num: int = used_names.get(godot_bone_name, 2) - var try_name: String = godot_bone_name - while used_names.has(try_name) or prof_bone_names.has(try_name): - try_name = "%s%d" % [godot_bone_name, next_num] # Godot naming convention - next_num += 1 - used_names[godot_bone_name] = next_num - used_names[try_name] = 2 - godot_sanitized_to_orig_remap["bone_name"][try_name] = godot_sanitized_to_orig_remap["bone_name"][godot_bone_name] - - # Finally, record the original post-silhouette transforms for transform_fileid_to_rotation_delta - for node in nodes: - var node_name = node.resource_name - if bone_map_dict.has(node_name): - var godot_human_name: String = bone_map_dict[node_name] - if godot_human_name not in humanoid_original_transforms: - humanoid_original_transforms[godot_human_name] = node.xform - elif node_name not in humanoid_original_transforms: - humanoid_original_transforms[node_name] = node.xform - - pkgasset.parsed_meta.internal_data["humanoid_original_transforms"] = humanoid_original_transforms - - - # Now we correct the silhouette, either by copying from another model, or applying silhouette fixer. 
- if copy_avatar: - for x_node_idx in range(len(nodes)): - var node: GLTFNode = nodes[x_node_idx] - var node_name: String = node.resource_name - if original_rotations.has(node_name): - var quat: Quaternion = original_rotations[node_name] - #node["rotation"] = [quat.x, quat.y, quat.z, quat.w] - if bone_map_dict.get(node_name, "") == "Hips": - #node["translation"] = [orig_hip_position.x, orig_hip_position.y, orig_hip_position.z] - hip_parent_node_idx = node.parent - hip_node_idx = x_node_idx - else: - ## bone_map_editor_plugin.silhouette_fix_gltf(json, importer.generate_bone_map_from_human(), SILHOUETTE_FIX_THRESHOLD) - for node in nodes: - var node_name: String = node.resource_name - original_rotations[node_name] = node.rotation - #for node in nodes: - #var tmp: Variant = node.get_additional_data("GODOT_rest_transform") - #if typeof(tmp) == TYPE_TRANSFORM3D: - #node.xform = tmp - #node.position = node.xform.origin - #node.rotation = node.xform.basis.get_rotation_quaternion() - #node.scale = node.xform.basis.get_scale() - for node in nodes: - var node_name: String = node.resource_name - if bone_map_dict.get(node_name, "") == "Hips": - var pos: Vector3 = node.position - var par_id: int = node.parent - var global_xform: Transform3D - while par_id != -1: - if nodes[par_id].skeleton < 0: - break - var n: GLTFNode = nodes[par_id] - pos = n.xform * pos - n.xform = Transform3D(n.xform.basis, Vector3.ZERO) - n.position = Vector3.ZERO - global_xform = n.xform * global_xform - par_id = n.parent - pos = Vector3(0.0, pos.y, 0.0) - pos = global_xform.affine_inverse() * pos - node.position = pos - node.xform = Transform3D(node.xform.basis, pos) - pkgasset.parsed_meta.internal_data["hips_position"] = pos - - pkgasset.parsed_meta.internal_data["original_rotations"] = original_rotations - - if silhouette_anim == null: - silhouette_anim = create_rest_pose_anim(nodes, hip_node_idx) - - # Adding missing tracks just to the T-Pose animation after silhouette fix generates a T-Pose - #add_empty_animation(json, "_T-Pose_") - #add_missing_humanoid_tracks_to_animations(pkgasset, json, bindata, bone_map_dict) - pkgasset.parsed_meta.internal_data["silhouette_anim"] = silhouette_anim - if copy_avatar: - pkgasset.parsed_meta.internal_data["copy_avatar_src"] = src_ava - - var skinned_parents: Dictionary - for node in nodes: - if node.mesh != -1 and node.skin != -1: - var parent_name := "" - if node.parent != -1: - var par_node = nodes[node.parent] - parent_name = godot_sanitized_to_orig_remap.get("bone_name", {}).get( - par_node.resource_name, godot_sanitized_to_orig_remap.get("nodes", {}).get( - par_node.resource_name, par_node.original_name)) - if not skinned_parents.has(parent_name): - skinned_parents[parent_name] = [] - skinned_parents[parent_name].append(godot_sanitized_to_orig_remap.get("bone_name", {}).get( - node.resource_name, godot_sanitized_to_orig_remap.get("nodes", {}).get( - node.resource_name, node.original_name))) - - pkgasset.parsed_meta.internal_data["skinned_parents"] = skinned_parents - pkgasset.parsed_meta.internal_data["godot_sanitized_to_orig_remap"] = godot_sanitized_to_orig_remap - pkgasset.parsed_meta.internal_data["used_animation_names"] = used_animation_names - pkgasset.parsed_meta.internal_data["orig_to_godot_sanitized_anim_takes"] = orig_to_godot_sanitized_anim_takes - pkgasset.parsed_meta.internal_data["unnamed_material_count"] = unnamed_material_count - - # TODO: materials and images/textures - return pkgasset.pathname - - func preprocess_asset_fbx2gltf(pkgasset: Object, tmpdir: String, 
thread_subdir: String, path: String, data_buf: PackedByteArray, extra_data: Dictionary = {}) -> String: - var unique_texture_map: Dictionary = extra_data["unique_texture_map"] + func preprocess_asset(pkgasset: Object, tmpdir: String, thread_subdir: String, path: String, data_buf: PackedByteArray, unique_texture_map: Dictionary = {}) -> String: var user_path_base: String = OS.get_user_data_dir() pkgasset.log_debug("I am an FBX " + str(path)) var full_output_path: String = tmpdir + "/" + pkgasset.pathname @@ -2095,6 +1463,11 @@ class FbxHandler: var tmp_bin_output_path: String = tmpdir + "/" + thread_subdir + "/buffer.bin" if SHOULD_CONVERT_TO_GLB: output_path = full_output_path.get_basename() + ".glb" + var extracted_assets_dir: String = post_import_material_remap_script.get_extracted_assets_dir(pkgasset.pathname) + if not DirAccess.dir_exists_absolute(extracted_assets_dir): + DirAccess.make_dir_absolute(extracted_assets_dir) + if USE_BUILTIN_FBX: + return pkgasset.pathname var stdout: Array = [].duplicate() var d = DirAccess.open("res://") var addon_path: String = editor_interface.get_editor_settings().get_setting("filesystem/import/fbx/fbx2gltf_path") @@ -2207,10 +1580,9 @@ class FbxHandler: var is_humanoid: bool = importer.keys.get("animationType", 2) == 3 or pkgasset.parsed_meta.is_force_humanoid() var bone_map_dict: Dictionary var copy_avatar: bool = false - var src_ava: Array if is_humanoid and json.has("nodes") and (importer.keys.get("avatarSetup", 1) >= 1 or pkgasset.parsed_meta.is_force_humanoid()): if importer.keys.get("avatarSetup", 1) == 2 or importer.keys.get("copyAvatar", 0) == 1: - src_ava = importer.keys.get("lastHumanDescriptionAvatarSource", [null, 0, "", 0]) + var src_ava = importer.keys.get("lastHumanDescriptionAvatarSource", [null, 0, "", 0]) var src_ava_meta = pkgasset.meta_dependencies.get(src_ava[2], null) if src_ava_meta == null: pkgasset.log_fail("Unable to lookup meta copy avatar dependency", "lastHumanDescriptionAvatarSource", src_ava) @@ -2348,8 +1720,6 @@ class FbxHandler: json["skins"] = [] json["skins"].append({"joints": human_skin_nodes}) - if copy_avatar: - pkgasset.parsed_meta.internal_data["copy_avatar_src"] = src_ava # skinned_parents use the original gltf names before the remap. 
pkgasset.parsed_meta.internal_data["skinned_parents"] = assign_skinned_parents({}.duplicate(), json["nodes"], "", json["scenes"][json.get("scene", 0)]["nodes"]) pkgasset.parsed_meta.internal_data["godot_sanitized_to_orig_remap"] = {"bone_name": {}} @@ -2660,12 +2030,6 @@ func preprocess_asset_stage2(pkgasset: Object, tmpdir: String, guid_to_pkgasset: pkgasset.parsed_meta.log_debug(0, "Asset " + str(obj.keys.get("m_Name", "")) + " did not have a roughness texture") -func about_to_import(pkgasset: Object): - var path = pkgasset.orig_pathname - var asset_handler: AssetHandler = file_handlers.get(path.get_extension().to_lower(), file_handlers.get("default")) - asset_handler.about_to_import(pkgasset) - - func write_additional_import_dependencies(pkgasset: Object, guid_to_pkgasset: Dictionary) -> Array[String]: var dependencies: Array[String] for extra_tex_filename in pkgasset.parsed_meta.internal_data.get("extra_textures", []): @@ -2685,6 +2049,8 @@ func write_additional_import_dependencies(pkgasset: Object, guid_to_pkgasset: Di continue extra_tex_data.erase("temp_path") extra_tex_data.erase("source_meta_guid") + var path = pkgasset.orig_pathname + var asset_handler: AssetHandler = file_handlers.get(path.get_extension().to_lower(), file_handlers.get("default")) var cfile_source := ConfigFile.new() var cfile := ConfigFile.new() # AssetHandler.ConfigFileCompare.new(pkgasset) diff --git a/addons/unidot_importer/asset_database.gd b/addons/unidot_importer/asset_database.gd index 3644400a..3e5a848b 100644 --- a/addons/unidot_importer/asset_database.gd +++ b/addons/unidot_importer/asset_database.gd @@ -26,7 +26,6 @@ var log_message_holder = asset_meta_class.LogMessageHolder.new() @export var enable_verbose_logs: bool = false @export var set_animation_trees_active: bool = true @export var vrm_spring_bones: bool = true -@export var convert_fbx_to_gltf: bool = false var vrm_integration_plugin var log_limit_per_guid: int = 100000 diff --git a/addons/unidot_importer/asset_meta.gd b/addons/unidot_importer/asset_meta.gd index 7c81448e..919d9a78 100644 --- a/addons/unidot_importer/asset_meta.gd +++ b/addons/unidot_importer/asset_meta.gd @@ -67,7 +67,6 @@ var prefab_fileid_to_skeleton_bone = {} # int -> string var prefab_fileid_to_utype = {} # int -> int var prefab_type_to_fileids = {} # int -> int var prefab_fileid_to_gameobject_fileid: Dictionary = {} # int -> int -var prefab_fileid_to_material_order_rev: Dictionary = {} var fileid_to_component_fileids: Dictionary = {} # int -> int # TODO: remove @export @export var prefab_gameobject_name_to_fileid_and_children: Dictionary = {} # {null: 400000, "SomeName": {null: 1234, "SomeName2": ...} @@ -84,7 +83,6 @@ var fileid_to_component_fileids: Dictionary = {} # int -> int @export var fileid_to_skeleton_bone: Dictionary = {} # int -> string: scene_node_state.add_fileID_to_skeleton_bone @export var fileid_to_utype: Dictionary = {} # int -> int: parse_binary_asset/parse_asset @export var fileid_to_gameobject_fileid: Dictionary = {} # int -> int: parse_binary_asset/parse_asset -@export var fileid_to_material_order_rev: Dictionary = {} # Mesh OR MeshRenderer fileID int -> PackedInt32Array @export var type_to_fileids: Dictionary = {} # string -> Array[int]: parse_binary_asset/parse_asset @export var godot_resources: Dictionary = {} # int -> Resource: insert_resource/override_resource @export var main_object_id: int = 0 # e.g. 
2100000 for .mat; 100000 for .fbx or GameObject; 100100000 for .prefab
@@ -106,9 +104,6 @@ var fileid_to_component_fileids: Dictionary = {} # int -> int
 @export var imported_mesh_paths: Dictionary # mesh_name (with and without "Root Scene_") -> file_path
 @export var imported_material_paths: Dictionary # material_name -> file_path
-var taken_over_import_references: Dictionary = {} # String -> Resource
-@export var unique_texture_map: Dictionary
-
 
 class ParsedAsset:
 	extends RefCounted
 	var local_id_alias: Dictionary = {} # type*100000 + file_index*2 -> real fileId
@@ -255,12 +250,6 @@ func is_humanoid() -> bool:
 			return false
 	return true
 
-func is_using_builtin_ufbx() -> bool:
-	# if Engine.get_version_info().hex >= 0x040300:
-	if ClassDB.class_exists(&"FBXDocument") and ClassDB.class_exists(&"FBXState"):
-		return not log_database_holder.database.convert_fbx_to_gltf
-	return false
-
 
 # Set to false to debug or avoid auto-playing animations
 func setting_animtree_active() -> bool:
@@ -428,10 +417,6 @@ func remap_prefab_fileids(prefab_fileid: int, target_prefab_meta: Resource):
 		self.prefab_transform_fileid_to_scale_signs[xor_or_stripped(target_fileid, prefab_fileid)] = target_prefab_meta.transform_fileid_to_scale_signs.get(target_fileid)
 	for target_fileid in target_prefab_meta.prefab_transform_fileid_to_scale_signs:
 		self.prefab_transform_fileid_to_scale_signs[xor_or_stripped(target_fileid, prefab_fileid)] = target_prefab_meta.prefab_transform_fileid_to_scale_signs.get(target_fileid)
-	for target_fileid in target_prefab_meta.fileid_to_material_order_rev:
-		self.prefab_fileid_to_material_order_rev[xor_or_stripped(target_fileid, prefab_fileid)] = target_prefab_meta.fileid_to_material_order_rev.get(target_fileid)
-	for target_fileid in target_prefab_meta.prefab_fileid_to_material_order_rev:
-		self.prefab_fileid_to_material_order_rev[xor_or_stripped(target_fileid, prefab_fileid)] = target_prefab_meta.prefab_fileid_to_material_order_rev.get(target_fileid)
 
 
 func calculate_prefab_nodepaths_recursive():
diff --git a/addons/unidot_importer/convert_scene.gd b/addons/unidot_importer/convert_scene.gd
index 85f68b58..a63f2fa9 100644
--- a/addons/unidot_importer/convert_scene.gd
+++ b/addons/unidot_importer/convert_scene.gd
@@ -158,9 +158,7 @@ func pack_scene(pkgasset, is_prefab) -> PackedScene:
 				env.fog_light_energy = max_c
 			if asset.keys.get("m_FogMode", 3) == 1: # Linear
 				const TARGET_FOG_DENSITY = 0.05
-				# It's not possible to emulate linear fog exactly with the level of control we currently have.
-				# Multiply by 0.1 because the fog seemed too aggressive.
-				env.fog_density = 0.1 * -log(TARGET_FOG_DENSITY) / asset.keys.get("m_LinearFogEnd", 0.0)
+				env.fog_density = -log(TARGET_FOG_DENSITY) / asset.keys.get("m_LinearFogEnd", 0.0)
 			else:
 				env.fog_density = asset.keys.get("m_FogDensity", 0.0)
 			var sun: Array = asset.keys.get("m_Sun", [null, 0, null, null])
diff --git a/addons/unidot_importer/object_adapter.gd b/addons/unidot_importer/object_adapter.gd
index 99071668..d423a69f 100644
--- a/addons/unidot_importer/object_adapter.gd
+++ b/addons/unidot_importer/object_adapter.gd
@@ -305,7 +305,7 @@ class UnidotObject:
 		last_material = node.get_active_material(old_surface_count - 1)
 		var last_extra_material: Resource = last_material
 
-		var current_materials_raw: Array = [].duplicate()
+		var current_materials: Array = [].duplicate()
 
 		var prefix: String = material_prefix + str(old_surface_count - 1) + ":"
 		var new_prefix: String = prefix
@@ -315,18 +315,9 @@ class UnidotObject:
 			var mat: Resource = node.get_active_material(material_idx)
 			if mat != null and str(mat.resource_name).begins_with(prefix):
 				break
-			current_materials_raw.push_back(mat)
+			current_materials.push_back(mat)
 			material_idx += 1
 
-		var mat_slots: PackedInt32Array = meta.fileid_to_material_order_rev.get(fileID, meta.prefab_fileid_to_material_order_rev.get(fileID, PackedInt32Array()))
-		var current_materials: Array
-		current_materials.resize(len(mat_slots))
-		for i in range(len(mat_slots)):
-			current_materials[i] = current_materials_raw[mat_slots[i]]
-		for i in range(len(mat_slots), len(current_materials_raw)):
-			current_materials.append(current_materials_raw[i])
-		log_debug("Current mat slots " + str(mat_slots) + " materials before: " + str(current_materials))
-
 		while last_extra_material != null and (str(last_extra_material.resource_name).begins_with(prefix) or str(last_extra_material.resource_name).begins_with(new_prefix)):
 			if str(last_extra_material.resource_name).begins_with(new_prefix):
 				prefix = new_prefix
@@ -344,11 +335,6 @@ class UnidotObject:
 		var new_materials_size = props.get("_materials_size", material_idx)
 
 		if props.has("_mesh"):
-			var mesh_ref: Array = props["_mesh_ref"]
-			var mesh_meta: Resource = meta.lookup_meta(mesh_ref)
-			if mesh_meta != null:
-				meta.fileid_to_material_order_rev[fileID] = mesh_meta.fileid_to_material_order_rev.get(mesh_ref[1], PackedInt32Array())
-				log_debug("GET " + str(fileID) + ": " + str(mesh_ref[1]) + " from " + str(mesh_meta.fileid_to_material_order_rev.get(mesh_ref[1])))
 			node.mesh = props.get("_mesh")
 			node.material_override = null
 
@@ -370,15 +356,9 @@ class UnidotObject:
 
 				# GI_MODE_DISABLED seems buggy and ignores light probes.
 				node.gi_mode = GeometryInstance3D.GI_MODE_DYNAMIC
-
-			mat_slots = meta.fileid_to_material_order_rev.get(fileID, meta.prefab_fileid_to_material_order_rev.get(fileID, PackedInt32Array()))
 			current_materials.resize(new_materials_size)
 			for i in range(new_materials_size):
-				var idx = mat_slots[i] if i < len(mat_slots) else i
-				if idx < new_materials_size:
-					current_materials[idx] = props.get("_materials/" + str(i), current_materials[i])
-
-			log_debug("After mat slots " + str(mat_slots) + " materials after: " + str(current_materials))
+				current_materials[i] = props.get("_materials/" + str(i), current_materials[i])
 
 		var new_surface_count: int = 0 if node.mesh == null else node.mesh.get_surface_count()
 		if new_surface_count != 0 and node.mesh != null:
@@ -4362,9 +4342,7 @@ class UnidotPrefabInstance:
 			instanced_scene.scene_file_path = ""
 			set_owner_rec(instanced_scene, state.owner)
 		var anim_player: AnimationPlayer = instanced_scene.get_node_or_null("AnimationPlayer") as AnimationPlayer
-		# Scenes with only a RESET but not a unidot-created _T-Pose_ are following the Godot convention of rest-pose as RESET.
-		# In Godot engine versions which support rest-as-RESET, the model will already be posed correctly and we do not want this.
-		if anim_player != null and anim_player.has_animation(&"RESET") and anim_player.has_animation(&"_T-Pose_"):
+		if anim_player != null and anim_player.has_animation(&"RESET"):
 			var root_node: Node = anim_player.get_node(anim_player.root_node)
 			var reset_anim: Animation = anim_player.get_animation(&"RESET")
 			if reset_anim != null:
@@ -5453,7 +5431,6 @@ class UnidotMeshFilter:
 			var new_mesh: Mesh = meta.get_godot_resource(mesh_ref)
 			log_debug("MeshFilter " + str(self) + " ref " + str(mesh_ref) + " new mesh " + str(new_mesh) + " old mesh " + str(node.mesh))
 			outdict["_mesh"] = new_mesh # property track?
-			outdict["_mesh_ref"] = mesh_ref # property track?
 		return outdict
 
 	func get_filter_mesh() -> Array: # UnidotRef
@@ -5536,11 +5513,7 @@ class UnidotMeshRenderer:
 		assign_object_meta(new_node)
 		if meta.get_database().enable_unidot_keys:
 			new_node.editor_description = str(self)
-		var mesh_ref: Array = self.get_mesh()
-		var mesh_meta: Resource = meta.lookup_meta(mesh_ref)
-		if mesh_meta != null:
-			new_node.mesh = meta.get_godot_resource(mesh_ref)
-			meta.fileid_to_material_order_rev[fileID] = mesh_meta.fileid_to_material_order_rev.get(mesh_ref[1], PackedInt32Array())
+		new_node.mesh = meta.get_godot_resource(self.get_mesh())
 
 		if is_stripped or gameObject.is_stripped:
 			log_fail("Oh no i am stripped MeshRenderer create_godot_node_orig")
@@ -5548,9 +5521,8 @@ class UnidotMeshRenderer:
 		if mf != null:
 			state.add_fileID(new_node, mf)
 		var idx: int = 0
-		var mat_slots: PackedInt32Array = meta.fileid_to_material_order_rev.get(fileID, meta.prefab_fileid_to_material_order_rev.get(fileID, PackedInt32Array()))
 		for m in keys.get("m_Materials", []):
-			new_node.set_surface_override_material(mat_slots[idx] if idx < len(mat_slots) else idx, meta.get_godot_resource(m))
+			new_node.set_surface_override_material(idx, meta.get_godot_resource(m))
 			idx += 1
 		return new_node
 
@@ -5676,7 +5648,7 @@ class UnidotSkinnedMeshRenderer:
 			var skin_rotation_delta: Transform3D = skin_humanoid_rotation_delta.get(skin.get_bind_name(idx), Transform3D.IDENTITY)
 			var rotation_delta: Transform3D = meta.transform_fileid_to_rotation_delta.get(bone_fileID, meta.prefab_transform_fileid_to_rotation_delta.get(bone_fileID))
 			if !rotation_delta.is_equal_approx(skin_rotation_delta):
-				log_debug("skin " + str(idx) + " : This fileID is a humanoid bone rotation offset=" + str(rotation_delta.basis.get_rotation_quaternion()) + " scale " + str(rotation_delta.basis.get_scale()) + " pos " + str(rotation_delta.origin))
+				log_debug("skin " + str(idx) + " : This fileID is a humanoid bone rotation offset=" + str(rotation_delta.basis.get_rotation_quaternion()) + " scale " + str(rotation_delta.basis.get_scale()))
 				skin.set_bind_pose(idx, rotation_delta * skin_rotation_delta.affine_inverse() * skin.get_bind_pose(idx))
 		return skin
 
@@ -5684,7 +5656,6 @@ class UnidotSkinnedMeshRenderer:
 		var outdict = super.convert_properties(node, uprops)
 		if uprops.has("m_Mesh"):
 			var mesh_ref: Array = get_ref(uprops, "m_Mesh")
-			outdict["_mesh_ref"] = mesh_ref
 			var new_mesh: Mesh = meta.get_godot_resource(mesh_ref)
 			outdict["_mesh"] = new_mesh # property track?
 			var skin_ref: Array = mesh_ref
diff --git a/addons/unidot_importer/package_import_dialog.gd b/addons/unidot_importer/package_import_dialog.gd
index 51b1fddc..f787d446 100644
--- a/addons/unidot_importer/package_import_dialog.gd
+++ b/addons/unidot_importer/package_import_dialog.gd
@@ -87,7 +87,6 @@ var debug_disable_silhouette_fix_checkbox: CheckBox
 var force_humanoid_checkbox: CheckBox
 var enable_verbose_log_checkbox: CheckBox
 var enable_vrm_spring_bones_checkbox: CheckBox
-var convert_fbx_to_gltf_checkbox: CheckBox
 
 var batch_import_list_widget: ItemList
 var batch_import_add_button: Button
@@ -123,8 +122,6 @@ var asset_yaml_post_model: Array = [].duplicate()
 var asset_prefabs: Array = [].duplicate()
 var asset_scenes: Array = [].duplicate()
 
-var builtin_ufbx_supported: bool = ClassDB.class_exists(&"FBXDocument") and ClassDB.class_exists(&"FBXState")
-
 var result_log_lineedit: TextEdit
 
 var new_editor_plugin := EditorPlugin.new()
@@ -792,12 +789,6 @@ func _selected_package(p_path: String) -> void:
 	enable_vrm_spring_bones_checkbox.toggled.connect(self._enable_vrm_spring_bones_changed)
 	force_humanoid_checkbox = _add_checkbox_option("Import ALL scenes as humanoid retargeted skeletons", true if asset_database.force_humanoid else false)
 	force_humanoid_checkbox.toggled.connect(self._force_humanoid_changed)
-	if builtin_ufbx_supported:
-		convert_fbx_to_gltf_checkbox = _add_advanced_checkbox_option("Convert FBX models to glTF", true if asset_database.convert_fbx_to_gltf else false)
-		convert_fbx_to_gltf_checkbox.toggled.connect(self._convert_fbx_to_gltf_changed)
-	else:
-		convert_fbx_to_gltf_checkbox = _add_checkbox_option("Convert FBX models to glTF\n(Update to Godot 4.3 to disable)", true)
-		convert_fbx_to_gltf_checkbox.disabled = true
 
 	var vspace := Control.new()
 	vspace.custom_minimum_size = Vector2(0, 16)
@@ -957,8 +948,6 @@ func show_importer(ep: EditorPlugin) -> void:
 
 
 func check_fbx2gltf():
-	if builtin_ufbx_supported:
-		return # No need to check for FBX2glTF on engines with native fbx.
 	var d = DirAccess.open("res://")
 	var addon_path: String = new_editor_plugin.get_editor_interface().get_editor_settings().get_setting("filesystem/import/fbx/fbx2gltf_path")
 	if not addon_path.get_file().is_empty():
@@ -1033,9 +1022,6 @@ func _enable_verbose_log_changed(val: bool):
 func _enable_vrm_spring_bones_changed(val: bool):
 	asset_database.vrm_spring_bones = val
 
-func _convert_fbx_to_gltf_changed(val: bool):
-	asset_database.convert_fbx_to_gltf = val
-
 
 func _show_advanced_options_toggled(val: bool):
 	advanced_options_hbox.visible = val
@@ -1347,9 +1333,6 @@ func on_file_completed_godot_import(tw: RefCounted, loaded: bool):
 	asset_work_completed.append(tw)
 
 
-var tmp_material_texture_files: PackedStringArray
-
-
 func do_import_step():
 	if _currently_preprocessing_assets != 0:
 		asset_database.log_fail([null, 0, "", 0], "Import step called during preprocess")
@@ -1403,19 +1386,6 @@ func do_import_step():
 		asset_work_waiting_write.reverse()
 		asset_models = [].duplicate()
 	elif tree_dialog_state == STATE_IMPORTING_MODELS:
-		# Clear stale dummy texture file placeholders.
-		var dres = DirAccess.open("res://")
-		for file_path in tmp_material_texture_files:
-			var fa := FileAccess.open(file_path, FileAccess.READ)
-			if fa.get_length() != 0:
-				asset_database.log_debug([null, 0, "", 0], "Dummy texture " + str(file_path) + " is not very dummy")
-				continue
-			fa.close()
-			asset_database.log_debug([null, 0, "", 0], "Removing dummy empty texture " + str(file_path))
-			dres.remove(file_path)
-			dres.remove(file_path + ".import")
-		tmp_material_texture_files.clear()
-
 		update_progress_bar(10)
 		tree_dialog_state = STATE_IMPORTING_YAML_POST_MODEL
 		for tw in asset_yaml_post_model:
@@ -1495,7 +1465,6 @@ func do_import_step():
 		for tw in asset_work:
 			asset_work_currently_importing.push_back(tw)
 			if asset_adapter.uses_godot_importer(tw.asset):
-				asset_adapter.about_to_import(tw.asset)
 				tw.asset.log_debug("asset " + str(tw.asset) + " uses godot import")
 				asset_database.log_debug([null, 0, "", 0], "Importing " + str(tw.asset.pathname) + " with the godot importer...")
 				files_to_reimport.append("res://" + tw.asset.pathname)
@@ -1563,34 +1532,6 @@ func _done_preprocessing_assets():
 	import_worker2.start_threads(THREAD_COUNT)
 
 	#asset_adapter.write_sentinel_png(generate_sentinel_png_filename())
-	# Write temporary dummy texture file placeholders relative to .fbx files so the FBX importer will find them.
-	var dres = DirAccess.open("res://")
-	asset_database.log_debug([null, 0, "", 0], "Go through assets " + str(pkg.guid_to_pkgasset) + ".")
-	for guid in pkg.guid_to_pkgasset:
-		var pkgasset = pkg.guid_to_pkgasset.get(guid, null)
-		asset_database.log_debug([null, 0, "", 0], str(guid) + " " + str(pkgasset) + " " + str(pkgasset.pathname))
-		var dbgref = [null, 0, "", 0] # [null, pkgasset.parsed_meta.main_object_id, guid, 0]
-		if not pkgasset:
-			continue
-		asset_database.log_debug(dbgref, str(pkgasset.parsed_meta.unique_texture_map))
-		for tex_name in pkgasset.parsed_meta.unique_texture_map:
-			var tex_pathname: String = pkgasset.pathname.get_base_dir().path_join(tex_name)
-			if dres.file_exists(tex_pathname + ".import") or dres.file_exists(tex_pathname):
-				asset_database.log_debug(dbgref, "letsgooo already exists " + str(tex_pathname))
-				tmp_material_texture_files.append(tex_pathname)
-				continue
-			var fa := FileAccess.open(tex_pathname + ".import", FileAccess.WRITE_READ)
-			if fa == null:
-				continue
-			asset_database.log_debug(dbgref, "Writing " + str(tex_pathname))
-			fa.store_buffer("[remap]\n\nimporter=\"skip\"\n".to_utf8_buffer())
-			fa.close()
-			fa = FileAccess.open(tex_pathname, FileAccess.WRITE_READ)
-			asset_database.log_debug(dbgref, "Writing " + str(tex_pathname))
-			fa.close()
-			asset_database.log_debug(dbgref, "letsgooo")
-			tmp_material_texture_files.append(tex_pathname)
-
 	status_bar.text = "Converting textures to metal roughness..."
 	import_worker2.set_stage2(pkg.guid_to_pkgasset)
 	var visited = {}.duplicate()
@@ -1632,8 +1573,6 @@ func start_godot_import(tw: Object):
 			ti.erase_button(0, 0)
 			ti.set_custom_color(0, Color("#22bb66"))
 			ti.set_icon(0, status_success_icon)
-		if tw.asset.parsed_meta != null:
-			tw.asset.parsed_meta.taken_over_import_references.clear()
 		return
 
 	if asset_adapter.uses_godot_importer(tw.asset):
@@ -1954,7 +1893,7 @@ func _asset_tree_window_confirmed():
 		asset_database.log_limit_per_guid = 20000
 	else:
 		asset_database.log_limit_per_guid = 100000
-	status_bar.text = "Preprocessing FBX and reading assets ..."
+	status_bar.text = "Preprocessing and converting FBX2glTF..."
 	_preprocessing_second_pass = second_pass
 	if _currently_preprocessing_assets == 0:
 		_preprocess_second_pass()
diff --git a/addons/unidot_importer/post_import_model.gd b/addons/unidot_importer/post_import_model.gd
index 8620e4cd..2ec167a8 100644
--- a/addons/unidot_importer/post_import_model.gd
+++ b/addons/unidot_importer/post_import_model.gd
@@ -35,7 +35,6 @@ class ParseState:
 	var object_adapter: Object
 	var scene: Node
 	var toplevel_node: Node
-	var root_bone_idx: int = -1
 	var metaobj: Resource
 	var source_file_path: String
 	var extracted_assets_basename: String
@@ -50,11 +49,9 @@ class ParseState:
 	var external_objects_by_type_name: Dictionary = {}.duplicate() # type -> name -> UnidotRef Array
 	var material_to_texture_name: Dictionary = {}.duplicate() # for Extract Legacy Materials / By Base Texture Name
 	var animation_to_take_name: Dictionary = {}.duplicate()
-	var material_order_by_mesh_rev: Dictionary
 
 	var saved_materials_by_name: Dictionary = {}.duplicate()
 	var saved_meshes_by_name: Dictionary = {}.duplicate()
-	var saved_mesh_fileids_by_name: Dictionary = {}.duplicate()
 	var saved_skins_by_name: Dictionary = {}.duplicate()
 	var saved_animations_by_name: Dictionary = {}.duplicate()
 
@@ -159,20 +156,16 @@ class ParseState:
 		# metaobj.log_debug(0, "Lookup bone name " + str(p_obj_name) + " -> " + str(obj_name))
 		return self.godot_sanitized_to_orig_remap.get(obj_gltf_type, {}).get(obj_name, obj_name)
 
-	func get_orig_bone_or_node(p_obj_name: String) -> String:
-		return self.godot_sanitized_to_orig_remap.get("nodes", {}).get(p_obj_name,
-				self.godot_sanitized_to_orig_remap.get("bone_name", {}).get(p_obj_name, p_obj_name))
-
 	func build_skinned_name_to_node_map(node: Node, p_name_to_node_dict: Dictionary) -> Dictionary:
 		var name_to_node_dict: Dictionary = p_name_to_node_dict
-		var node_name = get_orig_bone_or_node(node.name)
+		var node_name = get_orig_name("nodes", node.name)
 		for child in node.get_children():
 			name_to_node_dict = build_skinned_name_to_node_map(child, name_to_node_dict)
-		metaobj.log_debug(0, "node.name " + str(node_name) + ": " + str(name_to_node_dict))
+		# metaobj.log_debug(0, "node.name " + str(node_name) + ": " + str(name_to_node_dict))
 		if node is MeshInstance3D:
 			if node.skin != null:
 				name_to_node_dict[node_name] = node
-				metaobj.log_debug(0, "adding " + str(node_name) + ": " + str(name_to_node_dict))
+				# metaobj.log_debug(0, "adding " + str(node_name) + ": " + str(name_to_node_dict))
 		return name_to_node_dict
 
 	func get_resource_path(sanitized_name: String, extension: String) -> String:
@@ -218,7 +211,7 @@ class ParseState:
 			if child_node is Skeleton3D:
 				if len(child_node.get_parentless_bones()) > 1:
 					return false
-				root_bone_idx = child_node.get_parentless_bones()[0]
+				var root_bone_idx: int = child_node.get_parentless_bones()[0]
 				root_rotation_delta = root_rotation_delta * child_node.transform * child_node.get_bone_pose(root_bone_idx)
 				toplevel_node = child_node
 				return true
@@ -228,7 +221,7 @@ class ParseState:
 				return true
 		return false
 
-	func register_component(node: Node, p_path: PackedStringArray, p_component: String, fileId_go: int = 0, p_bone_idx: int = -1, parent_transform_id: int = 0) -> int:
+	func register_component(node: Node, p_path: PackedStringArray, p_component: String, fileId_go: int = 0, p_bone_idx: int = -1, parent_transform_id: int = 0):
 		#???
 		#if node == toplevel_node:
 		#	return # GameObject nodes always point to the toplevel node.
@@ -279,9 +272,7 @@ class ParseState:
 			if p_component == "Transform":
 				fileid_to_skeleton_bone[fileId_go] = og_bone_name
 				#metaobj.log_debug(0, "fileid_go:" + str(fileId_go) + '/ ' + str(all_name_map[fileId_go]))
-		if p_component == "Transform":
-			return fileId_go
-		return fileId_comp
+		return fileId_go
 
 	func register_resource(p_resource: Resource, p_name: String, p_type: String, fileId_object: int, p_aux_resource: Variant = null):
 		# Using : Variant for argument 5 to workaround the following GDScript bug:
@@ -308,24 +299,21 @@ class ParseState:
 		var fileId_transform: int = all_name_map[fileId_go][4]
 		var bone_name: String = node.get_bone_name(p_skel_bone)
 
-		if p_skel_bone == root_bone_idx:
-			p_pre_retarget_global_rest *= (node.transform * node.get_bone_pose(root_bone_idx)).affine_inverse()
-
 		# ParOrigT * ChildOrigT = GodotHumanT * X * ChildOrigT
 		# GodotHumanT = ParOrigT * GodotCorrectionT
 		# We want to solve for X = GodotCorrectionT.inv
 		# GodotCorrectionT = ParOrigT.inv * GodotHumanT
 		if humanoid_original_transforms.has(bone_name):
-			p_pre_retarget_global_rest *= Transform3D(humanoid_original_transforms.get(bone_name).basis, p_pre_retarget_global_rest.basis.inverse() * p_global_rest.basis * node.get_bone_pose(p_skel_bone).origin)
-			metaobj.log_debug(0, "Humanoid Bone " + str(bone_name) + ": " + str(humanoid_original_transforms.get(bone_name).basis.get_rotation_quaternion()) + " origin: " + str(humanoid_original_transforms.get(bone_name).origin))
+			p_pre_retarget_global_rest *= Transform3D(humanoid_original_transforms.get(bone_name).basis, p_pre_retarget_global_rest.basis.inverse() * p_global_rest.basis * node.get_bone_rest(p_skel_bone).origin)
+			metaobj.log_debug(0, "Humanoid Bone " + str(bone_name) + ": " + str(humanoid_original_transforms.get(bone_name).basis.get_rotation_quaternion()))
 			if bone_name == "Hips":
-				humanoid_skeleton_hip_position = node.get_bone_pose(p_skel_bone).origin
+				humanoid_skeleton_hip_position = node.get_bone_rest(p_skel_bone).origin
 				humanoid_skeleton_hip_position.y = node.motion_scale
 		else:
-			p_pre_retarget_global_rest *= node.get_bone_pose(p_skel_bone)
-			# metaobj.log_debug(0, "Non-humanoid Bone " + str(bone_name) + ": " + str(node.get_bone_pose(p_skel_bone).basis.get_rotation_quaternion()))
-		p_global_rest *= node.get_bone_pose(p_skel_bone)
-		# metaobj.log_debug(0, "global rest " + str(bone_name) + ": " + str(node.get_bone_pose(p_skel_bone).basis.get_rotation_quaternion()))
+			p_pre_retarget_global_rest *= node.get_bone_rest(p_skel_bone)
+			# metaobj.log_debug(0, "Non-humanoid Bone " + str(bone_name) + ": " + str(node.get_bone_rest(p_skel_bone).basis.get_rotation_quaternion()))
+		p_global_rest *= node.get_bone_rest(p_skel_bone)
+		# metaobj.log_debug(0, "global rest " + str(bone_name) + ": " + str(node.get_bone_rest(p_skel_bone).basis.get_rotation_quaternion()))
 
 		if not p_global_rest.is_equal_approx(p_pre_retarget_global_rest):
 			# metaobj.log_debug(0, "bone " + bone_name + " rest " + str(p_global_rest) + " pre ret " + str(p_pre_retarget_global_rest))
@@ -346,14 +334,12 @@ class ParseState:
 		var attachment_node: Node = p_attachment
 		for child in attachment_node.get_children():
 			if child is MeshInstance3D:
-				var renderer_fileId: int
 				if child.get_blend_shape_count() > 0: # if skin != null
-					renderer_fileId = register_component(child, p_path, "SkinnedMeshRenderer", fileId_go, p_skel_bone)
+					register_component(child, p_path, "SkinnedMeshRenderer", fileId_go, p_skel_bone)
 				else:
 					register_component(child, p_path, "MeshFilter", fileId_go, p_skel_bone)
-					renderer_fileId = register_component(child, p_path, "MeshRenderer", fileId_go, p_skel_bone)
-				var mesh_fileId = process_mesh_instance(child)
-				metaobj.fileid_to_material_order_rev[renderer_fileId] = metaobj.fileid_to_material_order_rev.get(mesh_fileId, PackedInt32Array())
+					register_component(child, p_path, "MeshRenderer", fileId_go, p_skel_bone)
+				process_mesh_instance(child)
 			elif child is Camera3D:
 				register_component(child, p_path, "Camera", fileId_go, p_skel_bone)
 			elif child is Light3D:
@@ -395,7 +381,7 @@ class ParseState:
 					skinned_parent_to_node.erase(key)
 				for child in skinned_children:
 					metaobj.log_debug(0, "Skinned parent " + str(node.name) + ": " + str(child.name))
-					var orig_child_name: String = get_orig_bone_or_node(child.name)
+					var orig_child_name: String = get_orig_name("nodes", child.name)
 					var new_id: int = 0
 					if len(p_path) == 1:
 						# If not preserve_hierarchy and we determined we can fold root node, that means skinned parents is empty.
@@ -430,14 +416,12 @@ class ParseState:
 			if node.skin != null and not skinned_parent_to_node.is_empty() and not from_skinned_parent:
 				metaobj.log_debug(0, "Already recursed " + str(node.name))
 				return 0 # We already recursed into this skinned mesh.
-			var renderer_fileId: int
 			if from_skinned_parent or node.get_blend_shape_count() > 0: # has_obj_id("SkinnedMeshRenderer", node_name):
-				renderer_fileId = register_component(node, p_path, "SkinnedMeshRenderer", fileId_go)
+				register_component(node, p_path, "SkinnedMeshRenderer", fileId_go)
 			else:
 				register_component(node, p_path, "MeshFilter", fileId_go)
-				renderer_fileId = register_component(node, p_path, "MeshRenderer", fileId_go)
-			var mesh_fileId = process_mesh_instance(node)
-			metaobj.fileid_to_material_order_rev[renderer_fileId] = metaobj.fileid_to_material_order_rev.get(mesh_fileId, PackedInt32Array())
+				register_component(node, p_path, "MeshRenderer", fileId_go)
+			process_mesh_instance(node)
 		elif node is Camera3D:
 			register_component(node, p_path, "Camera", fileId_go)
 		elif node is Light3D:
@@ -537,7 +521,7 @@ class ParseState:
 					skinned_parent_to_node.erase(key)
 				for child in skinned_children:
 					metaobj.log_debug(0, "Skinned parent from non-bone " + str(node.name) + ": " + str(child.name))
-					var orig_child_name: String = get_orig_bone_or_node(child.name)
+					var orig_child_name: String = get_orig_name("nodes", child.name)
 					var new_id: int = 0
 					if len(p_path) == 1:
 						# If not preserve_hierarchy and we determined we can fold root node, that means skinned parents is empty.
@@ -602,7 +586,7 @@ class ParseState:
 			# metaobj.log_debug(0, "AnimationPlayer " + str(scene.get_path_to(node)) + " / Anim " + str(i) + " anim_name: " + anim_name + " resource_name: " + str(anim.resource_name))
 			i += 1
 
-	func process_mesh_instance(node: MeshInstance3D) -> int:
+	func process_mesh_instance(node: MeshInstance3D):
 		metaobj.log_debug(0, "Process mesh instance: " + str(node.name))
 		if node.skin == null and node.skeleton != NodePath():
 			metaobj.log_fail(0, "A Skeleton exists for MeshRenderer " + str(node.name))
@@ -610,7 +594,7 @@ class ParseState:
 			metaobj.log_fail(0, "No Skeleton exists for SkinnedMeshRenderer " + str(node.name))
 		var mesh: Mesh = node.mesh
 		if mesh == null:
-			return 0
+			return
 		# FIXME: mesh_name is broken on master branch, maybe 3.2 as well.
 		var godot_mesh_name: String = str(mesh.resource_name)
 		if godot_mesh_name.begins_with("Root Scene_"):
@@ -618,10 +602,8 @@ class ParseState:
 		if is_obj:
 			godot_mesh_name = default_obj_mesh_name
 		var mesh_name: String = get_orig_name("meshes", godot_mesh_name)
-		var mesh_fileId: int = 0
 		if saved_meshes_by_name.has(godot_mesh_name):
 			mesh = saved_meshes_by_name.get(godot_mesh_name)
-			mesh_fileId = saved_mesh_fileids_by_name[godot_mesh_name]
 			if mesh != null:
 				node.mesh = mesh
 			if node.skin != null:
@@ -634,7 +616,7 @@ class ParseState:
 				continue
 			var godot_mat_name: String = mat.resource_name
 			var mat_name: String = get_orig_name("materials", godot_mat_name)
-			if mat_name == "DefaultMaterial" or mat_name == "":
+			if mat_name == "DefaultMaterial":
 				mat_name = "No Name"
 			if is_obj:
 				mat_name = default_obj_mesh_name + "Mat" # obj files seem to use this rule
@@ -663,8 +645,6 @@ class ParseState:
 			elif importMaterials and extractLegacyMaterials:
 				# Exmaple [S*S]kitsune_men.material -> [S_S]kitsune_men.material
 				var legacy_material_name: String = sanitize_filename(godot_mat_name, "_") # It's unclear what should happen if this contains a "." character.
-				if legacy_material_name.is_empty():
-					legacy_material_name = "No Name"
 				if legacy_material_name_setting == 0:
 					legacy_material_name = sanitize_filename(material_to_texture_name.get(godot_mat_name, godot_mat_name), "_")
 				if legacy_material_name_setting == 2:
@@ -702,27 +682,12 @@ class ParseState:
 				else:
 					metaobj.log_fail(fileId, "Material " + str(legacy_material_name) + " was not found. using default")
 			if new_mat == null and mat != null:
-				var respath: String
-				if godot_mat_name.is_empty():
-					respath = get_resource_path("DefaultMaterial", ".material")
-				else:
-					respath = get_resource_path(godot_mat_name, ".material")
+				var respath: String = get_resource_path(godot_mat_name, ".material")
 				metaobj.log_debug(fileId, "Before save " + str(mat_name) + " " + str(mat.resource_name) + "@" + str(respath) + " from " + str(mat.resource_path))
 				if mat.albedo_texture != null:
 					metaobj.log_debug(fileId, " albedo = " + str(mat.albedo_texture.resource_name) + " / " + str(mat.albedo_texture.resource_path))
 				if mat.normal_texture != null:
 					metaobj.log_debug(fileId, " normal = " + str(mat.normal_texture.resource_name) + " / " + str(mat.normal_texture.resource_path))
-				for prop in mat.get_property_list():
-					var prop_name: String = prop["name"]
-					if (prop["usage"] & PROPERTY_USAGE_STORAGE) != 0 and prop["hint_string"].contains("Texture"):
-						var tex: Texture2D = mat.get(prop_name) as Texture2D
-						# metaobj.log_debug(fileId, "Tex " + str(tex) + " for prop " + str(prop_name))
-						if tex != null and tex.has_meta(&"src_tex"):
-							# metaobj.log_debug(fileId, "tmp tex " + str(tex.resource_path) + " meta list " + str(tex.get_meta_list()) + " " + str(tex.get_meta(&"src_tex")))
-							var src_tex: Texture2D = tex.get_meta(&"src_tex") as Texture2D
-							if src_tex != null:
-								metaobj.log_debug(fileId, " " + str(prop_name) + " = " + str(tex.resource_path) + " => " + str(src_tex.resource_path))
-								mat.set(prop_name, src_tex)
 				unidot_utils.save_resource(mat, respath)
 				mat = load(respath)
 				metaobj.log_debug(fileId, "Save-and-load material object " + str(mat_name) + " " + str(mat.resource_name) + "@" + str(mat.resource_path))
@@ -745,7 +710,6 @@ class ParseState:
 			if fileId == 0:
 				metaobj.log_fail(0, "Missing fileId for Mesh " + str(mesh_name) + " (" + str(godot_mesh_name) + ")")
 			else:
-				metaobj.fileid_to_material_order_rev[fileId] = material_order_by_mesh_rev.get(godot_mesh_name, PackedInt32Array())
 				var skin: Skin = node.skin
 				if external_objects_by_id.has(fileId):
 					mesh = metaobj.get_godot_resource(external_objects_by_id.get(fileId))
@@ -767,8 +731,6 @@ class ParseState:
 				if mesh != null:
 					node.mesh = mesh
 					saved_meshes_by_name[godot_mesh_name] = mesh
-					mesh_fileId = fileId
-					saved_mesh_fileids_by_name[godot_mesh_name] = mesh_fileId
 					metaobj.imported_mesh_paths[godot_mesh_name] = str(mesh.resource_path)
 					metaobj.imported_mesh_paths["Root Scene_" + godot_mesh_name] = str(mesh.resource_path)
 					register_resource(mesh, mesh_name, "Mesh", fileId, skin)
@@ -776,7 +738,6 @@ class ParseState:
 				node.skin = skin
 				saved_skins_by_name[godot_mesh_name] = skin
 		is_obj = false
-		return mesh_fileId
 
 func _post_import(p_scene: Node) -> Object:
 	var source_file_path: String = get_source_file()
@@ -813,7 +774,6 @@ func _post_import(p_scene: Node) -> Object:
 	ps.extracted_assets_basename = get_extracted_assets_dir(source_file_path).path_join(source_file_path.get_basename().get_file())
 	ps.metaobj = metaobj
 	ps.asset_database = asset_database
-	ps.material_order_by_mesh_rev = metaobj.internal_data.get("material_order_by_mesh_rev", {})
 	ps.material_to_texture_name = metaobj.internal_data.get("material_to_texture_name", {})
 	ps.godot_sanitized_to_orig_remap = metaobj.internal_data.get("godot_sanitized_to_orig_remap", {})
 	ps.humanoid_original_transforms = metaobj.internal_data.get("humanoid_original_transforms", {})
diff --git a/addons/unidot_importer/vrm_integration.gd b/addons/unidot_importer/vrm_integration.gd
index 998a71fb..377b95c4 100644
--- a/addons/unidot_importer/vrm_integration.gd
+++ b/addons/unidot_importer/vrm_integration.gd
@@ -395,7 +395,7 @@ func setup_post_scene(pkgasset: RefCounted, root_objects: Array, root_skelleys:
 
 
 func initialize_skelleys(state: RefCounted, objs: Array, is_prefab: bool):
 	if vrm_collider_group_class == null or vrm_collider_class == null:
-		# print("Failed to import vrm classes")
+		print("Failed to import vrm classes")
 		return null
 	var some_obj: RefCounted = null