/src/assimp/code/AssetLib/FBX/FBXConverter.cpp
Line | Count | Source |
1 | | /* |
2 | | Open Asset Import Library (assimp) |
3 | | ---------------------------------------------------------------------- |
4 | | |
5 | | Copyright (c) 2006-2025, assimp team |
6 | | |
7 | | All rights reserved. |
8 | | |
9 | | Redistribution and use of this software in source and binary forms, |
10 | | with or without modification, are permitted provided that the |
11 | | following conditions are met: |
12 | | |
13 | | * Redistributions of source code must retain the above |
14 | | copyright notice, this list of conditions and the |
15 | | following disclaimer. |
16 | | |
17 | | * Redistributions in binary form must reproduce the above |
18 | | copyright notice, this list of conditions and the |
19 | | following disclaimer in the documentation and/or other |
20 | | materials provided with the distribution. |
21 | | |
22 | | * Neither the name of the assimp team, nor the names of its |
23 | | contributors may be used to endorse or promote products |
24 | | derived from this software without specific prior |
25 | | written permission of the assimp team. |
26 | | |
27 | | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
28 | | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
29 | | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
30 | | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
31 | | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
32 | | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
33 | | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
34 | | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
35 | | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
36 | | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
37 | | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
38 | | |
39 | | ---------------------------------------------------------------------- |
40 | | */ |
41 | | |
42 | | /** @file FBXConverter.cpp |
43 | | * @brief Implementation of the FBX DOM -> aiScene converter |
44 | | */ |
45 | | |
46 | | #ifndef ASSIMP_BUILD_NO_FBX_IMPORTER |
47 | | |
48 | | #include "FBXConverter.h" |
49 | | #include "FBXDocument.h" |
50 | | #include "FBXImporter.h" |
51 | | #include "FBXMeshGeometry.h" |
52 | | #include "FBXParser.h" |
53 | | #include "FBXProperties.h" |
54 | | #include "FBXUtil.h" |
55 | | |
56 | | #include <assimp/MathFunctions.h> |
57 | | #include <assimp/StringComparison.h> |
58 | | #include <assimp/scene.h> |
59 | | #include <assimp/CreateAnimMesh.h> |
60 | | #include <assimp/StringUtils.h> |
61 | | #include <assimp/commonMetaData.h> |
62 | | |
63 | | #include <stdlib.h> |
64 | | #include <cstdint> |
65 | | #include <iomanip> |
66 | | #include <iterator> |
67 | | #include <memory> |
68 | | #include <sstream> |
69 | | |
70 | | namespace Assimp { |
71 | | namespace FBX { |
72 | | |
73 | | using namespace Util; |
74 | | |
75 | 0 | #define MAGIC_NODE_TAG "_$AssimpFbx$" |
76 | | |
77 | 0 | #define CONVERT_FBX_TIME(time) static_cast<double>(time) / 46186158000LL |
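For reference, a minimal sketch of what this macro does (FbxTicksToSeconds is a hypothetical helper, not part of this file): FBX key times are FbxTime ticks, with 46,186,158,000 ticks per second, so dividing by that constant yields seconds.

    #include <cstdint>

    // Same constant as CONVERT_FBX_TIME above: FbxTime resolution is 1/46186158000 s.
    static double FbxTicksToSeconds(int64_t ticks) {
        return static_cast<double>(ticks) / 46186158000LL;
    }
    // e.g. FbxTicksToSeconds(46186158000) == 1.0, FbxTicksToSeconds(23093079000) == 0.5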
78 | | |
79 | 0 | static void correctRootTransform(const aiScene *scene) { |
80 | 0 | if (scene == nullptr) { |
81 | 0 | return; |
82 | 0 | } |
83 | | |
84 | 0 | if (scene->mMetaData == nullptr) { |
85 | 0 | return; |
86 | 0 | } |
87 | | |
88 | 0 | int32_t UpAxis = 1, UpAxisSign = 1, FrontAxis = 2, FrontAxisSign = 1, CoordAxis = 0, CoordAxisSign = 1; |
89 | 0 | double UnitScaleFactor = 1.0; |
90 | 0 | for (unsigned MetadataIndex = 0; MetadataIndex < scene->mMetaData->mNumProperties; ++MetadataIndex) { |
91 | 0 | if (strcmp(scene->mMetaData->mKeys[MetadataIndex].C_Str(), "UpAxis") == 0) { |
92 | 0 | scene->mMetaData->Get<int32_t>(MetadataIndex, UpAxis); |
93 | 0 | } |
94 | 0 | if (strcmp(scene->mMetaData->mKeys[MetadataIndex].C_Str(), "UpAxisSign") == 0) { |
95 | 0 | scene->mMetaData->Get<int32_t>(MetadataIndex, UpAxisSign); |
96 | 0 | } |
97 | 0 | if (strcmp(scene->mMetaData->mKeys[MetadataIndex].C_Str(), "FrontAxis") == 0) { |
98 | 0 | scene->mMetaData->Get<int32_t>(MetadataIndex, FrontAxis); |
99 | 0 | } |
100 | 0 | if (strcmp(scene->mMetaData->mKeys[MetadataIndex].C_Str(), "FrontAxisSign") == 0) { |
101 | 0 | scene->mMetaData->Get<int32_t>(MetadataIndex, FrontAxisSign); |
102 | 0 | } |
103 | 0 | if (strcmp(scene->mMetaData->mKeys[MetadataIndex].C_Str(), "CoordAxis") == 0) { |
104 | 0 | scene->mMetaData->Get<int32_t>(MetadataIndex, CoordAxis); |
105 | 0 | } |
106 | 0 | if (strcmp(scene->mMetaData->mKeys[MetadataIndex].C_Str(), "CoordAxisSign") == 0) { |
107 | 0 | scene->mMetaData->Get<int32_t>(MetadataIndex, CoordAxisSign); |
108 | 0 | } |
109 | 0 | if (strcmp(scene->mMetaData->mKeys[MetadataIndex].C_Str(), "UnitScaleFactor") == 0) { |
110 | 0 | scene->mMetaData->Get<double>(MetadataIndex, UnitScaleFactor); |
111 | 0 | } |
112 | 0 | } |
113 | |
|
114 | 0 | aiVector3D upVec, forwardVec, rightVec; |
115 | 0 | upVec[UpAxis] = UpAxisSign * static_cast<float>(UnitScaleFactor); |
116 | 0 | forwardVec[FrontAxis] = FrontAxisSign * static_cast<float>(UnitScaleFactor); |
117 | 0 | rightVec[CoordAxis] = CoordAxisSign * (float)UnitScaleFactor; |
118 | |
|
119 | 0 | aiMatrix4x4 mat(rightVec.x, rightVec.y, rightVec.z, 0.0f, |
120 | 0 | upVec.x, upVec.y, upVec.z, 0.0f, |
121 | 0 | forwardVec.x, forwardVec.y, forwardVec.z, 0.0f, |
122 | 0 | 0.0f, 0.0f, 0.0f, 1.0f); |
123 | 0 | scene->mRootNode->mTransformation *= mat; |
124 | 0 | } |
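As a sanity check on the basis built above, a small sketch (DefaultAxisBasis is a hypothetical helper): with the default FBX metadata of UpAxis=1, FrontAxis=2, CoordAxis=0, positive signs and UnitScaleFactor=1, the three rows are the standard axes, so multiplying the root transform by this matrix is a no-op.

    #include <assimp/matrix4x4.h>
    #include <assimp/vector3.h>

    // Default FBX axes: right = +X, up = +Y, forward = +Z, unit scale -> identity matrix.
    static aiMatrix4x4 DefaultAxisBasis() {
        const aiVector3D right(1.f, 0.f, 0.f), up(0.f, 1.f, 0.f), forward(0.f, 0.f, 1.f);
        return aiMatrix4x4(right.x,   right.y,   right.z,   0.f,
                           up.x,      up.y,      up.z,      0.f,
                           forward.x, forward.y, forward.z, 0.f,
                           0.f,       0.f,       0.f,       1.f);
    }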
125 | | |
126 | | FBXConverter::FBXConverter(aiScene *out, const Document &doc, bool removeEmptyBones) : |
127 | | defaultMaterialIndex(), |
128 | 0 | mMeshes(), |
129 | 0 | lights(), |
130 | 0 | cameras(), |
131 | 0 | textures(), |
132 | 0 | materials_converted(), |
133 | 0 | textures_converted(), |
134 | 0 | meshes_converted(), |
135 | 0 | node_anim_chain_bits(), |
136 | 0 | mNodeNames(), |
137 | | anim_fps(), |
138 | 0 | mSceneOut(out), |
139 | 0 | doc(doc), |
140 | 0 | mRemoveEmptyBones(removeEmptyBones) { |
141 | | |
142 | | |
143 | | // animations need to be converted first since this will |
144 | | // populate the node_anim_chain_bits map, which is needed |
145 | | // to determine which nodes need to be generated. |
146 | 0 | ConvertAnimations(); |
147 | | // Embedded textures in FBX may be connected to nothing but themselves, |
148 | | // e.g. a Texture -> Video connection only, without a link to the main graph. |
149 | | // The idea here is to traverse all objects to find such textures and convert them, |
150 | | // so that material conversion later finds the converted texture in the textures_converted array. |
151 | 0 | if (doc.Settings().readTextures) { |
152 | 0 | ConvertOrphanedEmbeddedTextures(); |
153 | 0 | } |
154 | 0 | ConvertRootNode(); |
155 | |
|
156 | 0 | if (doc.Settings().readAllMaterials) { |
157 | | // unfortunately this means we have to evaluate all objects |
158 | 0 | for (const ObjectMap::value_type &v : doc.Objects()) { |
159 | |
|
160 | 0 | const Object *ob = v.second->Get(); |
161 | 0 | if (!ob) { |
162 | 0 | continue; |
163 | 0 | } |
164 | | |
165 | 0 | const Material *mat = dynamic_cast<const Material *>(ob); |
166 | 0 | if (mat) { |
167 | |
|
168 | 0 | if (materials_converted.find(mat) == materials_converted.end()) { |
169 | 0 | ConvertMaterial(*mat, nullptr); |
170 | 0 | } |
171 | 0 | } |
172 | 0 | } |
173 | 0 | } |
174 | |
|
175 | 0 | ConvertGlobalSettings(); |
176 | 0 | TransferDataToScene(); |
177 | | |
178 | | // if we didn't read any meshes, set AI_SCENE_FLAGS_INCOMPLETE |
179 | | // to make sure the scene passes assimp's validation. FBX files |
180 | | // need not contain geometry (e.g. camera animations, raw armatures). |
181 | 0 | if (out->mNumMeshes == 0) { |
182 | 0 | out->mFlags |= AI_SCENE_FLAGS_INCOMPLETE; |
183 | 0 | } else { |
184 | | // Apply the FBX axis metadata unless requested not to |
185 | 0 | if (!doc.Settings().ignoreUpDirection) |
186 | 0 | correctRootTransform(mSceneOut); |
187 | 0 | } |
188 | 0 | } |
189 | | |
190 | 0 | FBXConverter::~FBXConverter() { |
191 | 0 | std::for_each(mMeshes.begin(), mMeshes.end(), Util::delete_fun<aiMesh>()); |
192 | 0 | std::for_each(materials.begin(), materials.end(), Util::delete_fun<aiMaterial>()); |
193 | 0 | std::for_each(animations.begin(), animations.end(), Util::delete_fun<aiAnimation>()); |
194 | 0 | std::for_each(lights.begin(), lights.end(), Util::delete_fun<aiLight>()); |
195 | 0 | std::for_each(cameras.begin(), cameras.end(), Util::delete_fun<aiCamera>()); |
196 | 0 | std::for_each(textures.begin(), textures.end(), Util::delete_fun<aiTexture>()); |
197 | 0 | } |
198 | | |
199 | 0 | void FBXConverter::ConvertRootNode() { |
200 | 0 | mSceneOut->mRootNode = new aiNode(); |
201 | 0 | std::string unique_name; |
202 | 0 | GetUniqueName("RootNode", unique_name); |
203 | 0 | mSceneOut->mRootNode->mName.Set(unique_name); |
204 | | |
205 | | // root has ID 0 |
206 | 0 | ConvertNodes(0L, mSceneOut->mRootNode, mSceneOut->mRootNode); |
207 | 0 | } |
208 | | |
209 | 0 | static std::string getAncestorBaseName(const aiNode *node) { |
210 | 0 | const char *nodeName = nullptr; |
211 | 0 | size_t length = 0; |
212 | 0 | while (node && (!nodeName || length == 0)) { |
213 | 0 | nodeName = node->mName.C_Str(); |
214 | 0 | length = node->mName.length; |
215 | 0 | node = node->mParent; |
216 | 0 | } |
217 | |
|
218 | 0 | if (!nodeName || length == 0) { |
219 | 0 | return {}; |
220 | 0 | } |
221 | | // could be std::string_view if c++17 available |
222 | 0 | return std::string(nodeName, length); |
223 | 0 | } |
224 | | |
225 | | // Make unique name |
226 | 0 | std::string FBXConverter::MakeUniqueNodeName(const Model *const model, const aiNode &parent) { |
227 | 0 | std::string original_name = FixNodeName(model->Name()); |
228 | 0 | if (original_name.empty()) { |
229 | 0 | original_name = getAncestorBaseName(&parent); |
230 | 0 | } |
231 | 0 | std::string unique_name; |
232 | 0 | GetUniqueName(original_name, unique_name); |
233 | 0 | return unique_name; |
234 | 0 | } |
235 | | |
236 | | /// This struct manages nodes which may or may not end up in the node hierarchy. |
237 | | /// When a node becomes a child of another node, the parent takes ownership and mOwnership should be released. |
238 | | struct FBXConverter::PotentialNode { |
239 | 0 | PotentialNode() : mOwnership(new aiNode), mNode(mOwnership.get()) {} |
240 | 0 | PotentialNode(const std::string& name) : mOwnership(new aiNode(name)), mNode(mOwnership.get()) {} |
241 | 0 | aiNode* operator->() { return mNode; } |
242 | | std::unique_ptr<aiNode> mOwnership; |
243 | | aiNode* mNode; |
244 | | }; |
245 | | |
246 | | /// todo: pre-build node hierarchy |
247 | | /// todo: get bone from stack |
248 | | /// todo: make map of aiBone* to aiNode* |
249 | | /// then update convert clusters to the new format |
250 | 0 | void FBXConverter::ConvertNodes(uint64_t id, aiNode *parent, aiNode *root_node, const aiMatrix4x4& parent_transform) { |
251 | 0 | const std::vector<const Connection *> &conns = doc.GetConnectionsByDestinationSequenced(id, "Model"); |
252 | |
|
253 | 0 | std::vector<PotentialNode> nodes; |
254 | 0 | nodes.reserve(conns.size()); |
255 | |
|
256 | 0 | std::vector<PotentialNode> nodes_chain; |
257 | 0 | std::vector<PotentialNode> post_nodes_chain; |
258 | |
|
259 | 0 | for (const Connection *con : conns) { |
260 | | // ignore object-property links |
261 | 0 | if (con->PropertyName().length()) { |
262 | | // really important we document why this is ignored. |
263 | 0 | FBXImporter::LogInfo("ignoring property link - no docs on why this is ignored"); |
264 | 0 | continue; //? |
265 | 0 | } |
266 | | |
267 | | // convert connection source object into Object base class |
268 | 0 | const Object *const object = con->SourceObject(); |
269 | 0 | if (nullptr == object) { |
270 | 0 | FBXImporter::LogError("failed to convert source object for Model link"); |
271 | 0 | continue; |
272 | 0 | } |
273 | | |
274 | | // FBX Model::Cube, Model::Bone001, etc elements |
275 | | // This detects if we can cast the object into this model structure. |
276 | 0 | const Model *const model = dynamic_cast<const Model *>(object); |
277 | |
|
278 | 0 | if (nullptr != model) { |
279 | 0 | nodes_chain.clear(); |
280 | 0 | post_nodes_chain.clear(); |
281 | 0 | aiMatrix4x4 new_abs_transform = parent_transform; |
282 | 0 | std::string node_name = FixNodeName(model->Name()); |
283 | | // even though there is only a single input node, the design of |
284 | | // assimp (or rather: the complicated transformation chain that |
285 | | // is employed by fbx) means that we may need multiple aiNode's |
286 | | // to represent a fbx node's transformation. |
287 | | |
288 | | // generate node transforms - this includes pivot data |
289 | | // if need_additional_node is true, the generated chain has no node carrying the model's own name yet, so one is appended below |
290 | 0 | const bool need_additional_node = GenerateTransformationNodeChain(*model, node_name, nodes_chain, post_nodes_chain); |
291 | | |
292 | | // assert that for the current node we must have at least a single transform |
293 | 0 | ai_assert(nodes_chain.size()); |
294 | |
|
295 | 0 | if (need_additional_node) { |
296 | 0 | nodes_chain.emplace_back(node_name); |
297 | 0 | } |
298 | | |
299 | | //setup metadata on newest node |
300 | 0 | SetupNodeMetadata(*model, *nodes_chain.back().mNode); |
301 | | |
302 | | // link all nodes in a row |
303 | 0 | aiNode *last_parent = parent; |
304 | 0 | for (PotentialNode& child : nodes_chain) { |
305 | 0 | ai_assert(child.mNode); |
306 | |
|
307 | 0 | if (last_parent != parent) { |
308 | 0 | last_parent->mNumChildren = 1; |
309 | 0 | last_parent->mChildren = new aiNode *[1]; |
310 | 0 | last_parent->mChildren[0] = child.mOwnership.release(); |
311 | 0 | } |
312 | |
|
313 | 0 | child->mParent = last_parent; |
314 | 0 | last_parent = child.mNode; |
315 | |
|
316 | 0 | new_abs_transform *= child->mTransformation; |
317 | 0 | } |
318 | | |
319 | | // attach geometry |
320 | 0 | ConvertModel(*model, nodes_chain.back().mNode, root_node, new_abs_transform); |
321 | | |
322 | | // check if there will be any child nodes |
323 | 0 | const std::vector<const Connection *> &child_conns = doc.GetConnectionsByDestinationSequenced(model->ID(), "Model"); |
324 | | |
325 | | // if so, link the geometric transform inverse nodes |
326 | | // before we attach any child nodes |
327 | 0 | if (child_conns.size()) { |
328 | 0 | for (PotentialNode& postnode : post_nodes_chain) { |
329 | 0 | ai_assert(postnode.mNode); |
330 | |
|
331 | 0 | if (last_parent != parent) { |
332 | 0 | last_parent->mNumChildren = 1; |
333 | 0 | last_parent->mChildren = new aiNode *[1]; |
334 | 0 | last_parent->mChildren[0] = postnode.mOwnership.release(); |
335 | 0 | } |
336 | |
|
337 | 0 | postnode->mParent = last_parent; |
338 | 0 | last_parent = postnode.mNode; |
339 | |
|
340 | 0 | new_abs_transform *= postnode->mTransformation; |
341 | 0 | } |
342 | 0 | } else { |
343 | | // free the nodes we allocated as we don't need them |
344 | 0 | post_nodes_chain.clear(); |
345 | 0 | } |
346 | | |
347 | | // recursive call - child nodes |
348 | 0 | ConvertNodes(model->ID(), last_parent, root_node, new_abs_transform); |
349 | |
|
350 | 0 | if (doc.Settings().readLights) { |
351 | 0 | ConvertLights(*model, node_name); |
352 | 0 | } |
353 | |
|
354 | 0 | if (doc.Settings().readCameras) { |
355 | 0 | ConvertCameras(*model, node_name); |
356 | 0 | } |
357 | |
|
358 | 0 | nodes.push_back(std::move(nodes_chain.front())); |
359 | 0 | nodes_chain.clear(); |
360 | 0 | } |
361 | 0 | } |
362 | |
|
363 | 0 | if (nodes.empty()) { |
364 | 0 | parent->mNumChildren = 0; |
365 | 0 | parent->mChildren = nullptr; |
366 | 0 | } else { |
367 | 0 | parent->mChildren = new aiNode *[nodes.size()](); |
368 | 0 | parent->mNumChildren = static_cast<unsigned int>(nodes.size()); |
369 | 0 | for (unsigned int i = 0; i < nodes.size(); ++i) { |
370 | 0 | parent->mChildren[i] = nodes[i].mOwnership.release(); |
371 | 0 | } |
372 | 0 | } |
373 | 0 | } |
374 | | |
375 | 0 | void FBXConverter::ConvertLights(const Model &model, const std::string &orig_name) { |
376 | 0 | const std::vector<const NodeAttribute *> &node_attrs = model.GetAttributes(); |
377 | 0 | for (const NodeAttribute *attr : node_attrs) { |
378 | 0 | const Light *const light = dynamic_cast<const Light *>(attr); |
379 | 0 | if (light) { |
380 | 0 | ConvertLight(*light, orig_name); |
381 | 0 | } |
382 | 0 | } |
383 | 0 | } |
384 | | |
385 | 0 | void FBXConverter::ConvertCameras(const Model &model, const std::string &orig_name) { |
386 | 0 | const std::vector<const NodeAttribute *> &node_attrs = model.GetAttributes(); |
387 | 0 | for (const NodeAttribute *attr : node_attrs) { |
388 | 0 | const Camera *const cam = dynamic_cast<const Camera *>(attr); |
389 | 0 | if (cam) { |
390 | 0 | ConvertCamera(*cam, orig_name); |
391 | 0 | } |
392 | 0 | } |
393 | 0 | } |
394 | | |
395 | 0 | void FBXConverter::ConvertLight(const Light &light, const std::string &orig_name) { |
396 | 0 | lights.push_back(new aiLight()); |
397 | 0 | aiLight *const out_light = lights.back(); |
398 | |
|
399 | 0 | out_light->mName.Set(orig_name); |
400 | |
|
401 | 0 | const float intensity = light.Intensity() / 100.0f; |
402 | 0 | const aiVector3D &col = light.Color(); |
403 | |
|
404 | 0 | out_light->mColorDiffuse = aiColor3D(col.x, col.y, col.z); |
405 | 0 | out_light->mColorDiffuse.r *= intensity; |
406 | 0 | out_light->mColorDiffuse.g *= intensity; |
407 | 0 | out_light->mColorDiffuse.b *= intensity; |
408 | |
|
409 | 0 | out_light->mColorSpecular = out_light->mColorDiffuse; |
410 | | |
411 | | //lights are defined along negative y direction |
412 | 0 | out_light->mPosition = aiVector3D(0.0f); |
413 | 0 | out_light->mDirection = aiVector3D(0.0f, -1.0f, 0.0f); |
414 | 0 | out_light->mUp = aiVector3D(0.0f, 0.0f, -1.0f); |
415 | |
|
416 | 0 | switch (light.LightType()) { |
417 | 0 | case Light::Type_Point: |
418 | 0 | out_light->mType = aiLightSource_POINT; |
419 | 0 | break; |
420 | | |
421 | 0 | case Light::Type_Directional: |
422 | 0 | out_light->mType = aiLightSource_DIRECTIONAL; |
423 | 0 | break; |
424 | | |
425 | 0 | case Light::Type_Spot: |
426 | 0 | out_light->mType = aiLightSource_SPOT; |
427 | 0 | out_light->mAngleOuterCone = AI_DEG_TO_RAD(light.OuterAngle()); |
428 | 0 | out_light->mAngleInnerCone = AI_DEG_TO_RAD(light.InnerAngle()); |
429 | 0 | break; |
430 | | |
431 | 0 | case Light::Type_Area: |
432 | 0 | FBXImporter::LogWarn("cannot represent area light, set to UNDEFINED"); |
433 | 0 | out_light->mType = aiLightSource_UNDEFINED; |
434 | 0 | break; |
435 | | |
436 | 0 | case Light::Type_Volume: |
437 | 0 | FBXImporter::LogWarn("cannot represent volume light, set to UNDEFINED"); |
438 | 0 | out_light->mType = aiLightSource_UNDEFINED; |
439 | 0 | break; |
440 | 0 | default: |
441 | 0 | FBXImporter::LogError("Not handled light type: ", light.LightType()); |
442 | 0 | break; |
443 | 0 | } |
444 | | |
445 | 0 | float decay = light.DecayStart(); |
446 | 0 | switch (light.DecayType()) { |
447 | 0 | case Light::Decay_None: |
448 | 0 | out_light->mAttenuationConstant = decay; |
449 | 0 | out_light->mAttenuationLinear = 0.0f; |
450 | 0 | out_light->mAttenuationQuadratic = 0.0f; |
451 | 0 | break; |
452 | 0 | case Light::Decay_Linear: |
453 | 0 | out_light->mAttenuationConstant = 0.0f; |
454 | 0 | out_light->mAttenuationLinear = 2.0f / decay; |
455 | 0 | out_light->mAttenuationQuadratic = 0.0f; |
456 | 0 | break; |
457 | 0 | case Light::Decay_Quadratic: |
458 | 0 | out_light->mAttenuationConstant = 0.0f; |
459 | 0 | out_light->mAttenuationLinear = 0.0f; |
460 | 0 | out_light->mAttenuationQuadratic = 2.0f / (decay * decay); |
461 | 0 | break; |
462 | 0 | case Light::Decay_Cubic: |
463 | 0 | FBXImporter::LogWarn("cannot represent cubic attenuation, set to Quadratic"); |
464 | 0 | out_light->mAttenuationQuadratic = 1.0f; |
465 | 0 | break; |
466 | 0 | default: |
467 | 0 | FBXImporter::LogError("Not handled light decay type: ", light.DecayType()); |
468 | 0 | break; |
469 | 0 | } |
470 | 0 | } |
471 | | |
472 | 0 | void FBXConverter::ConvertCamera(const Camera &cam, const std::string &orig_name) { |
473 | 0 | cameras.push_back(new aiCamera()); |
474 | 0 | aiCamera *const out_camera = cameras.back(); |
475 | |
|
476 | 0 | out_camera->mName.Set(orig_name); |
477 | |
|
478 | 0 | out_camera->mAspect = cam.AspectWidth() / cam.AspectHeight(); |
479 | | |
480 | | // NOTE: Camera mPosition, mLookAt and mUp must be set to default here. |
481 | | // All transformations to the camera will be handled by its node in the scenegraph. |
482 | 0 | out_camera->mPosition = aiVector3D(0.0f); |
483 | 0 | out_camera->mLookAt = aiVector3D(1.0f, 0.0f, 0.0f); |
484 | 0 | out_camera->mUp = aiVector3D(0.0f, 1.0f, 0.0f); |
485 | | |
486 | | // NOTE: Some software (maya) does not put FieldOfView in FBX, so we compute |
487 | | // mHorizontalFOV from FocalLength and FilmWidth with unit conversion. |
488 | | |
489 | | // TODO: This is not a complete solution for how FBX cameras can be stored. |
490 | | // TODO: Incorporate non-square pixel aspect ratio. |
491 | | // TODO: FBX aperture mode might be storing vertical FOV in need of conversion with aspect ratio. |
492 | |
|
493 | 0 | float fov_deg = cam.FieldOfView(); |
494 | | // If FOV not specified in file, compute using FilmWidth and FocalLength. |
495 | 0 | if (fov_deg == kFovUnknown) { |
496 | 0 | float film_width_inches = cam.FilmWidth(); |
497 | 0 | float focal_length_mm = cam.FocalLength(); |
498 | 0 | ASSIMP_LOG_VERBOSE_DEBUG("FBX FOV unspecified. Computing from FilmWidth (", film_width_inches, "inches) and FocalLength (", focal_length_mm, "mm)."); |
499 | 0 | double half_fov_rad = std::atan2(film_width_inches * 25.4 * 0.5, focal_length_mm); |
500 | 0 | out_camera->mHorizontalFOV = static_cast<float>(half_fov_rad); |
501 | 0 | } else { |
502 | | // FBX fov is full-view degrees. We want half-view radians. |
503 | 0 | out_camera->mHorizontalFOV = AI_DEG_TO_RAD(fov_deg) * 0.5f; |
504 | 0 | } |
505 | |
|
506 | 0 | out_camera->mClipPlaneNear = cam.NearPlane(); |
507 | 0 | out_camera->mClipPlaneFar = cam.FarPlane(); |
508 | 0 | } |
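A short sketch of the fallback FOV math used above (HalfHorizontalFov is a hypothetical helper; it assumes, as the code does, that FilmWidth is in inches and FocalLength in millimetres): the half horizontal FOV of a pinhole camera is atan(half film width / focal length).

    #include <cmath>

    // Half horizontal FOV (radians) from film back width (inches) and focal length (mm).
    static double HalfHorizontalFov(double filmWidthInches, double focalLengthMm) {
        const double filmWidthMm = filmWidthInches * 25.4; // inches -> millimetres
        return std::atan2(filmWidthMm * 0.5, focalLengthMm);
    }
    // e.g. a 36 mm film back (about 1.417 in) at 50 mm focal length gives ~0.346 rad (~19.8 deg),
    // i.e. the familiar ~39.6 deg full horizontal FOV of a 50 mm lens on full frame.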
509 | | |
510 | 0 | void FBXConverter::GetUniqueName(const std::string &name, std::string &uniqueName) { |
511 | 0 | uniqueName = name; |
512 | 0 | auto it_pair = mNodeNames.insert({ name, 0 }); // duplicate node name instance count |
513 | 0 | unsigned int &i = it_pair.first->second; |
514 | 0 | while (!it_pair.second) { |
515 | 0 | ++i; |
516 | 0 | std::ostringstream ext; |
517 | 0 | ext << name << std::setfill('0') << std::setw(3) << i; |
518 | 0 | uniqueName = ext.str(); |
519 | 0 | it_pair = mNodeNames.insert({ uniqueName, 0 }); |
520 | 0 | } |
521 | 0 | } |
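For reference, a tiny sketch of the collision suffix produced above (MakeSuffixedName and the name "Cube" are hypothetical): duplicates get a zero-padded three-digit counter appended.

    #include <iomanip>
    #include <sstream>
    #include <string>

    // Reproduces the suffix format used by GetUniqueName: base name + zero-padded counter.
    static std::string MakeSuffixedName(const std::string &base, unsigned int i) {
        std::ostringstream ext;
        ext << base << std::setfill('0') << std::setw(3) << i;
        return ext.str();
    }
    // MakeSuffixedName("Cube", 1) == "Cube001", MakeSuffixedName("Cube", 12) == "Cube012"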
522 | | |
523 | 0 | const char *FBXConverter::NameTransformationComp(TransformationComp comp) { |
524 | 0 | switch (comp) { |
525 | 0 | case TransformationComp_Translation: |
526 | 0 | return "Translation"; |
527 | 0 | case TransformationComp_RotationOffset: |
528 | 0 | return "RotationOffset"; |
529 | 0 | case TransformationComp_RotationPivot: |
530 | 0 | return "RotationPivot"; |
531 | 0 | case TransformationComp_PreRotation: |
532 | 0 | return "PreRotation"; |
533 | 0 | case TransformationComp_Rotation: |
534 | 0 | return "Rotation"; |
535 | 0 | case TransformationComp_PostRotation: |
536 | 0 | return "PostRotation"; |
537 | 0 | case TransformationComp_RotationPivotInverse: |
538 | 0 | return "RotationPivotInverse"; |
539 | 0 | case TransformationComp_ScalingOffset: |
540 | 0 | return "ScalingOffset"; |
541 | 0 | case TransformationComp_ScalingPivot: |
542 | 0 | return "ScalingPivot"; |
543 | 0 | case TransformationComp_Scaling: |
544 | 0 | return "Scaling"; |
545 | 0 | case TransformationComp_ScalingPivotInverse: |
546 | 0 | return "ScalingPivotInverse"; |
547 | 0 | case TransformationComp_GeometricScaling: |
548 | 0 | return "GeometricScaling"; |
549 | 0 | case TransformationComp_GeometricRotation: |
550 | 0 | return "GeometricRotation"; |
551 | 0 | case TransformationComp_GeometricTranslation: |
552 | 0 | return "GeometricTranslation"; |
553 | 0 | case TransformationComp_GeometricScalingInverse: |
554 | 0 | return "GeometricScalingInverse"; |
555 | 0 | case TransformationComp_GeometricRotationInverse: |
556 | 0 | return "GeometricRotationInverse"; |
557 | 0 | case TransformationComp_GeometricTranslationInverse: |
558 | 0 | return "GeometricTranslationInverse"; |
559 | 0 | case TransformationComp_MAXIMUM: // this is to silence compiler warnings |
560 | 0 | default: |
561 | 0 | break; |
562 | 0 | } |
563 | | |
564 | 0 | ai_assert(false); |
565 | |
|
566 | 0 | return nullptr; |
567 | 0 | } |
568 | | |
569 | 0 | const char *FBXConverter::NameTransformationCompProperty(TransformationComp comp) { |
570 | 0 | switch (comp) { |
571 | 0 | case TransformationComp_Translation: |
572 | 0 | return "Lcl Translation"; |
573 | 0 | case TransformationComp_RotationOffset: |
574 | 0 | return "RotationOffset"; |
575 | 0 | case TransformationComp_RotationPivot: |
576 | 0 | return "RotationPivot"; |
577 | 0 | case TransformationComp_PreRotation: |
578 | 0 | return "PreRotation"; |
579 | 0 | case TransformationComp_Rotation: |
580 | 0 | return "Lcl Rotation"; |
581 | 0 | case TransformationComp_PostRotation: |
582 | 0 | return "PostRotation"; |
583 | 0 | case TransformationComp_RotationPivotInverse: |
584 | 0 | return "RotationPivotInverse"; |
585 | 0 | case TransformationComp_ScalingOffset: |
586 | 0 | return "ScalingOffset"; |
587 | 0 | case TransformationComp_ScalingPivot: |
588 | 0 | return "ScalingPivot"; |
589 | 0 | case TransformationComp_Scaling: |
590 | 0 | return "Lcl Scaling"; |
591 | 0 | case TransformationComp_ScalingPivotInverse: |
592 | 0 | return "ScalingPivotInverse"; |
593 | 0 | case TransformationComp_GeometricScaling: |
594 | 0 | return "GeometricScaling"; |
595 | 0 | case TransformationComp_GeometricRotation: |
596 | 0 | return "GeometricRotation"; |
597 | 0 | case TransformationComp_GeometricTranslation: |
598 | 0 | return "GeometricTranslation"; |
599 | 0 | case TransformationComp_GeometricScalingInverse: |
600 | 0 | return "GeometricScalingInverse"; |
601 | 0 | case TransformationComp_GeometricRotationInverse: |
602 | 0 | return "GeometricRotationInverse"; |
603 | 0 | case TransformationComp_GeometricTranslationInverse: |
604 | 0 | return "GeometricTranslationInverse"; |
605 | 0 | case TransformationComp_MAXIMUM: |
606 | 0 | break; |
607 | 0 | } |
608 | | |
609 | 0 | ai_assert(false); |
610 | |
|
611 | 0 | return nullptr; |
612 | 0 | } |
613 | | |
614 | 0 | aiVector3D FBXConverter::TransformationCompDefaultValue(TransformationComp comp) { |
615 | | // XXX a neat way to solve the never-ending special cases for scaling |
616 | | // would be to do everything in log space! |
617 | 0 | return comp == TransformationComp_Scaling ? aiVector3D(1.f, 1.f, 1.f) : aiVector3D(); |
618 | 0 | } |
619 | | |
620 | 0 | void FBXConverter::GetRotationMatrix(Model::RotOrder mode, const aiVector3D &rotation, aiMatrix4x4 &out) { |
621 | 0 | if (mode == Model::RotOrder_SphericXYZ) { |
622 | 0 | FBXImporter::LogError("Unsupported RotationMode: SphericXYZ"); |
623 | 0 | out = aiMatrix4x4(); |
624 | 0 | return; |
625 | 0 | } |
626 | | |
627 | 0 | const float angle_epsilon = Math::getEpsilon<float>(); |
628 | |
|
629 | 0 | out = aiMatrix4x4(); |
630 | |
|
631 | 0 | bool is_id[3] = { true, true, true }; |
632 | |
|
633 | 0 | aiMatrix4x4 temp[3]; |
634 | 0 | const auto rot = AI_DEG_TO_RAD(rotation); |
635 | 0 | if (std::fabs(rot.z) > angle_epsilon) { |
636 | 0 | aiMatrix4x4::RotationZ(rot.z, temp[2]); |
637 | 0 | is_id[2] = false; |
638 | 0 | } |
639 | 0 | if (std::fabs(rot.y) > angle_epsilon) { |
640 | 0 | aiMatrix4x4::RotationY(rot.y, temp[1]); |
641 | 0 | is_id[1] = false; |
642 | 0 | } |
643 | 0 | if (std::fabs(rot.x) > angle_epsilon) { |
644 | 0 | aiMatrix4x4::RotationX(rot.x, temp[0]); |
645 | 0 | is_id[0] = false; |
646 | 0 | } |
647 | |
|
648 | 0 | int order[3] = { -1, -1, -1 }; |
649 | | |
650 | | // note: rotation order is inverted since we're left multiplying as is usual in assimp |
651 | 0 | switch (mode) { |
652 | 0 | case Model::RotOrder_EulerXYZ: |
653 | 0 | order[0] = 2; |
654 | 0 | order[1] = 1; |
655 | 0 | order[2] = 0; |
656 | 0 | break; |
657 | | |
658 | 0 | case Model::RotOrder_EulerXZY: |
659 | 0 | order[0] = 1; |
660 | 0 | order[1] = 2; |
661 | 0 | order[2] = 0; |
662 | 0 | break; |
663 | | |
664 | 0 | case Model::RotOrder_EulerYZX: |
665 | 0 | order[0] = 0; |
666 | 0 | order[1] = 2; |
667 | 0 | order[2] = 1; |
668 | 0 | break; |
669 | | |
670 | 0 | case Model::RotOrder_EulerYXZ: |
671 | 0 | order[0] = 2; |
672 | 0 | order[1] = 0; |
673 | 0 | order[2] = 1; |
674 | 0 | break; |
675 | | |
676 | 0 | case Model::RotOrder_EulerZXY: |
677 | 0 | order[0] = 1; |
678 | 0 | order[1] = 0; |
679 | 0 | order[2] = 2; |
680 | 0 | break; |
681 | | |
682 | 0 | case Model::RotOrder_EulerZYX: |
683 | 0 | order[0] = 0; |
684 | 0 | order[1] = 1; |
685 | 0 | order[2] = 2; |
686 | 0 | break; |
687 | | |
688 | 0 | default: |
689 | 0 | ai_assert(false); |
690 | 0 | break; |
691 | 0 | } |
692 | | |
693 | 0 | ai_assert(order[0] >= 0); |
694 | 0 | ai_assert(order[0] <= 2); |
695 | 0 | ai_assert(order[1] >= 0); |
696 | 0 | ai_assert(order[1] <= 2); |
697 | 0 | ai_assert(order[2] >= 0); |
698 | 0 | ai_assert(order[2] <= 2); |
699 | |
|
700 | 0 | if (!is_id[order[0]]) { |
701 | 0 | out = temp[order[0]]; |
702 | 0 | } |
703 | |
|
704 | 0 | if (!is_id[order[1]]) { |
705 | 0 | out = out * temp[order[1]]; |
706 | 0 | } |
707 | |
|
708 | 0 | if (!is_id[order[2]]) { |
709 | 0 | out = out * temp[order[2]]; |
710 | 0 | } |
711 | 0 | } |
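A worked example of the inverted composition order noted above (EulerXYZ is a hypothetical helper): for RotOrder_EulerXYZ the table yields order = {2, 1, 0}, i.e. out = Rz * Ry * Rx, which applies the X rotation first when multiplying column vectors.

    #include <assimp/matrix4x4.h>

    // EulerXYZ: X is applied first, so with column vectors the factors compose right-to-left.
    static aiMatrix4x4 EulerXYZ(float rx, float ry, float rz) {
        aiMatrix4x4 Rx, Ry, Rz;
        aiMatrix4x4::RotationX(rx, Rx);
        aiMatrix4x4::RotationY(ry, Ry);
        aiMatrix4x4::RotationZ(rz, Rz);
        return Rz * Ry * Rx; // same product GetRotationMatrix builds for RotOrder_EulerXYZ
    }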
712 | | |
713 | 0 | bool FBXConverter::NeedsComplexTransformationChain(const Model &model) { |
714 | 0 | const PropertyTable &props = model.Props(); |
715 | |
|
716 | 0 | const auto zero_epsilon = Math::getEpsilon<ai_real>(); |
717 | 0 | const aiVector3D all_ones(1.0f, 1.0f, 1.0f); |
718 | 0 | for (size_t i = 0; i < TransformationComp_MAXIMUM; ++i) { |
719 | 0 | const TransformationComp comp = static_cast<TransformationComp>(i); |
720 | |
|
721 | 0 | if (comp == TransformationComp_Rotation || comp == TransformationComp_Scaling || comp == TransformationComp_Translation) { |
722 | 0 | continue; |
723 | 0 | } |
724 | | |
725 | 0 | bool scale_compare = (comp == TransformationComp_GeometricScaling || comp == TransformationComp_Scaling); |
726 | |
|
727 | 0 | bool ok = true; |
728 | 0 | const aiVector3D &v = PropertyGet<aiVector3D>(props, NameTransformationCompProperty(comp), ok); |
729 | 0 | if (ok && scale_compare) { |
730 | 0 | if ((v - all_ones).SquareLength() > zero_epsilon) { |
731 | 0 | return true; |
732 | 0 | } |
733 | 0 | } else if (ok) { |
734 | 0 | if (v.SquareLength() > zero_epsilon) { |
735 | 0 | return true; |
736 | 0 | } |
737 | 0 | } |
738 | 0 | } |
739 | | |
740 | 0 | return false; |
741 | 0 | } |
742 | | |
743 | 0 | std::string FBXConverter::NameTransformationChainNode(const std::string &name, TransformationComp comp) { |
744 | 0 | return name + std::string(MAGIC_NODE_TAG) + "_" + NameTransformationComp(comp); |
745 | 0 | } |
746 | | |
747 | | bool FBXConverter::GenerateTransformationNodeChain(const Model &model, const std::string &name, std::vector<PotentialNode> &output_nodes, |
748 | 0 | std::vector<PotentialNode> &post_output_nodes) { |
749 | 0 | const PropertyTable &props = model.Props(); |
750 | 0 | const Model::RotOrder rot = model.RotationOrder(); |
751 | |
|
752 | 0 | bool ok; |
753 | |
|
754 | 0 | aiMatrix4x4 chain[TransformationComp_MAXIMUM]; |
755 | |
|
756 | 0 | ai_assert(TransformationComp_MAXIMUM < 32); |
757 | 0 | std::uint32_t chainBits = 0; |
758 | | // A node won't need a node chain if it only has these. |
759 | 0 | const std::uint32_t chainMaskSimple = (1 << TransformationComp_Translation) + (1 << TransformationComp_Scaling) + (1 << TransformationComp_Rotation); |
760 | | // A node will need a node chain if it has any of these. |
761 | 0 | const std::uint32_t chainMaskComplex = ((1 << (TransformationComp_MAXIMUM)) - 1) - chainMaskSimple; |
762 | |
|
763 | 0 | std::fill_n(chain, static_cast<unsigned int>(TransformationComp_MAXIMUM), aiMatrix4x4()); |
764 | | |
765 | | // generate transformation matrices for all the different transformation components |
766 | 0 | const float zero_epsilon = Math::getEpsilon<float>(); |
767 | 0 | const aiVector3D all_ones(1.0f, 1.0f, 1.0f); |
768 | |
|
769 | 0 | const aiVector3D &PreRotation = PropertyGet<aiVector3D>(props, "PreRotation", ok); |
770 | 0 | if (ok && PreRotation.SquareLength() > zero_epsilon) { |
771 | 0 | chainBits = chainBits | (1 << TransformationComp_PreRotation); |
772 | |
|
773 | 0 | GetRotationMatrix(Model::RotOrder::RotOrder_EulerXYZ, PreRotation, chain[TransformationComp_PreRotation]); |
774 | 0 | } |
775 | |
|
776 | 0 | const aiVector3D &PostRotation = PropertyGet<aiVector3D>(props, "PostRotation", ok); |
777 | 0 | if (ok && PostRotation.SquareLength() > zero_epsilon) { |
778 | 0 | chainBits = chainBits | (1 << TransformationComp_PostRotation); |
779 | |
|
780 | 0 | GetRotationMatrix(Model::RotOrder::RotOrder_EulerXYZ, PostRotation, chain[TransformationComp_PostRotation]); |
781 | 0 | } |
782 | |
|
783 | 0 | const aiVector3D &RotationPivot = PropertyGet<aiVector3D>(props, "RotationPivot", ok); |
784 | 0 | if (ok && RotationPivot.SquareLength() > zero_epsilon) { |
785 | 0 | chainBits = chainBits | (1 << TransformationComp_RotationPivot) | (1 << TransformationComp_RotationPivotInverse); |
786 | |
|
787 | 0 | aiMatrix4x4::Translation(RotationPivot, chain[TransformationComp_RotationPivot]); |
788 | 0 | aiMatrix4x4::Translation(-RotationPivot, chain[TransformationComp_RotationPivotInverse]); |
789 | 0 | } |
790 | |
|
791 | 0 | const aiVector3D &RotationOffset = PropertyGet<aiVector3D>(props, "RotationOffset", ok); |
792 | 0 | if (ok && RotationOffset.SquareLength() > zero_epsilon) { |
793 | 0 | chainBits = chainBits | (1 << TransformationComp_RotationOffset); |
794 | |
|
795 | 0 | aiMatrix4x4::Translation(RotationOffset, chain[TransformationComp_RotationOffset]); |
796 | 0 | } |
797 | |
|
798 | 0 | const aiVector3D &ScalingOffset = PropertyGet<aiVector3D>(props, "ScalingOffset", ok); |
799 | 0 | if (ok && ScalingOffset.SquareLength() > zero_epsilon) { |
800 | 0 | chainBits = chainBits | (1 << TransformationComp_ScalingOffset); |
801 | |
|
802 | 0 | aiMatrix4x4::Translation(ScalingOffset, chain[TransformationComp_ScalingOffset]); |
803 | 0 | } |
804 | |
|
805 | 0 | const aiVector3D &ScalingPivot = PropertyGet<aiVector3D>(props, "ScalingPivot", ok); |
806 | 0 | if (ok && ScalingPivot.SquareLength() > zero_epsilon) { |
807 | 0 | chainBits = chainBits | (1 << TransformationComp_ScalingPivot) | (1 << TransformationComp_ScalingPivotInverse); |
808 | |
|
809 | 0 | aiMatrix4x4::Translation(ScalingPivot, chain[TransformationComp_ScalingPivot]); |
810 | 0 | aiMatrix4x4::Translation(-ScalingPivot, chain[TransformationComp_ScalingPivotInverse]); |
811 | 0 | } |
812 | |
|
813 | 0 | const aiVector3D &Translation = PropertyGet<aiVector3D>(props, "Lcl Translation", ok); |
814 | 0 | if (ok && Translation.SquareLength() > zero_epsilon) { |
815 | 0 | chainBits = chainBits | (1 << TransformationComp_Translation); |
816 | |
|
817 | 0 | aiMatrix4x4::Translation(Translation, chain[TransformationComp_Translation]); |
818 | 0 | } |
819 | |
|
820 | 0 | const aiVector3D &Scaling = PropertyGet<aiVector3D>(props, "Lcl Scaling", ok); |
821 | 0 | if (ok && (Scaling - all_ones).SquareLength() > zero_epsilon) { |
822 | 0 | chainBits = chainBits | (1 << TransformationComp_Scaling); |
823 | |
|
824 | 0 | aiMatrix4x4::Scaling(Scaling, chain[TransformationComp_Scaling]); |
825 | 0 | } |
826 | |
|
827 | 0 | const aiVector3D &Rotation = PropertyGet<aiVector3D>(props, "Lcl Rotation", ok); |
828 | 0 | if (ok && Rotation.SquareLength() > zero_epsilon) { |
829 | 0 | chainBits = chainBits | (1 << TransformationComp_Rotation); |
830 | |
|
831 | 0 | GetRotationMatrix(rot, Rotation, chain[TransformationComp_Rotation]); |
832 | 0 | } |
833 | |
|
834 | 0 | const aiVector3D &GeometricScaling = PropertyGet<aiVector3D>(props, "GeometricScaling", ok); |
835 | 0 | if (ok && (GeometricScaling - all_ones).SquareLength() > zero_epsilon) { |
836 | 0 | chainBits = chainBits | (1 << TransformationComp_GeometricScaling); |
837 | 0 | aiMatrix4x4::Scaling(GeometricScaling, chain[TransformationComp_GeometricScaling]); |
838 | 0 | aiVector3D GeometricScalingInverse = GeometricScaling; |
839 | 0 | bool canscale = true; |
840 | 0 | for (unsigned int i = 0; i < 3; ++i) { |
841 | 0 | if (std::fabs(GeometricScalingInverse[i]) > zero_epsilon) { |
842 | 0 | GeometricScalingInverse[i] = 1.0f / GeometricScaling[i]; |
843 | 0 | } else { |
844 | 0 | FBXImporter::LogError("cannot invert geometric scaling matrix with a 0.0 scale component"); |
845 | 0 | canscale = false; |
846 | 0 | break; |
847 | 0 | } |
848 | 0 | } |
849 | 0 | if (canscale) { |
850 | 0 | chainBits = chainBits | (1 << TransformationComp_GeometricScalingInverse); |
851 | 0 | aiMatrix4x4::Scaling(GeometricScalingInverse, chain[TransformationComp_GeometricScalingInverse]); |
852 | 0 | } |
853 | 0 | } |
854 | |
|
855 | 0 | const aiVector3D &GeometricRotation = PropertyGet<aiVector3D>(props, "GeometricRotation", ok); |
856 | 0 | if (ok && GeometricRotation.SquareLength() > zero_epsilon) { |
857 | 0 | chainBits = chainBits | (1 << TransformationComp_GeometricRotation) | (1 << TransformationComp_GeometricRotationInverse); |
858 | 0 | GetRotationMatrix(rot, GeometricRotation, chain[TransformationComp_GeometricRotation]); |
859 | 0 | GetRotationMatrix(rot, GeometricRotation, chain[TransformationComp_GeometricRotationInverse]); |
860 | 0 | chain[TransformationComp_GeometricRotationInverse].Inverse(); |
861 | 0 | } |
862 | |
|
863 | 0 | const aiVector3D &GeometricTranslation = PropertyGet<aiVector3D>(props, "GeometricTranslation", ok); |
864 | 0 | if (ok && GeometricTranslation.SquareLength() > zero_epsilon) { |
865 | 0 | chainBits = chainBits | (1 << TransformationComp_GeometricTranslation) | (1 << TransformationComp_GeometricTranslationInverse); |
866 | 0 | aiMatrix4x4::Translation(GeometricTranslation, chain[TransformationComp_GeometricTranslation]); |
867 | 0 | aiMatrix4x4::Translation(-GeometricTranslation, chain[TransformationComp_GeometricTranslationInverse]); |
868 | 0 | } |
869 | | |
870 | | // now, if we have more than just Translation, Scaling and Rotation, |
871 | | // we need to generate a full node chain to compensate for assimp's |
872 | | // inability to express pivots and offsets. |
873 | 0 | if ((chainBits & chainMaskComplex) && doc.Settings().preservePivots) { |
874 | 0 | FBXImporter::LogInfo("generating full transformation chain for node: ", name); |
875 | | |
876 | | // query the anim_chain_bits dictionary to find out which chain elements |
877 | | // have associated node animation channels. These can not be dropped |
878 | | // even if they have identity transform in bind pose. |
879 | 0 | NodeAnimBitMap::const_iterator it = node_anim_chain_bits.find(name); |
880 | 0 | const unsigned int anim_chain_bitmask = (it == node_anim_chain_bits.end() ? 0 : (*it).second); |
881 | |
|
882 | 0 | unsigned int bit = 0x1; |
883 | 0 | for (size_t i = 0; i < TransformationComp_MAXIMUM; ++i, bit <<= 1) { |
884 | 0 | const TransformationComp comp = static_cast<TransformationComp>(i); |
885 | |
|
886 | 0 | if ((chainBits & bit) == 0 && (anim_chain_bitmask & bit) == 0) { |
887 | 0 | continue; |
888 | 0 | } |
889 | | |
890 | 0 | if (comp == TransformationComp_PostRotation) { |
891 | 0 | chain[i] = chain[i].Inverse(); |
892 | 0 | } |
893 | |
|
894 | 0 | PotentialNode nd; |
895 | 0 | nd->mName.Set(NameTransformationChainNode(name, comp)); |
896 | 0 | nd->mTransformation = chain[i]; |
897 | | |
898 | | // geometric inverses go in a post-node chain |
899 | 0 | if (comp == TransformationComp_GeometricScalingInverse || |
900 | 0 | comp == TransformationComp_GeometricRotationInverse || |
901 | 0 | comp == TransformationComp_GeometricTranslationInverse) { |
902 | 0 | post_output_nodes.emplace_back(std::move(nd)); |
903 | 0 | } else { |
904 | 0 | output_nodes.emplace_back(std::move(nd)); |
905 | 0 | } |
906 | 0 | } |
907 | |
|
908 | 0 | ai_assert(output_nodes.size()); |
909 | 0 | return true; |
910 | 0 | } |
911 | | |
912 | | // else, we can just multiply the matrices together |
913 | 0 | PotentialNode nd; |
914 | | |
915 | | // name passed to the method is already unique |
916 | 0 | nd->mName.Set(name); |
917 | | // for (const auto &transform : chain) { |
918 | | // skip inverse chain for no preservePivots |
919 | 0 | for (unsigned int i = TransformationComp_Translation; i < TransformationComp_MAXIMUM; i++) { |
920 | 0 | nd->mTransformation = nd->mTransformation * chain[i]; |
921 | 0 | } |
922 | 0 | output_nodes.push_back(std::move(nd)); |
923 | 0 | return false; |
924 | 0 | } |
925 | | |
926 | 0 | void FBXConverter::SetupNodeMetadata(const Model &model, aiNode &nd) { |
927 | 0 | const PropertyTable &props = model.Props(); |
928 | 0 | DirectPropertyMap unparsedProperties = props.GetUnparsedProperties(); |
929 | | |
930 | | // create metadata on node |
931 | 0 | const std::size_t numStaticMetaData = 2; |
932 | 0 | aiMetadata *data = aiMetadata::Alloc(static_cast<unsigned int>(unparsedProperties.size() + numStaticMetaData)); |
933 | 0 | nd.mMetaData = data; |
934 | 0 | int index = 0; |
935 | | |
936 | | // find user defined properties (3ds Max) |
937 | 0 | data->Set(index++, "UserProperties", aiString(PropertyGet<std::string>(props, "UDP3DSMAX", ""))); |
938 | | // preserve the info that a node was marked as Null node in the original file. |
939 | 0 | data->Set(index++, "IsNull", model.IsNull() ? true : false); |
940 | | |
941 | | // add unparsed properties to the node's metadata |
942 | 0 | for (const DirectPropertyMap::value_type &prop : unparsedProperties) { |
943 | | // Interpret the property as a concrete type |
944 | 0 | if (const TypedProperty<bool> *interpretedBool = prop.second->As<TypedProperty<bool>>()) { |
945 | 0 | data->Set(index++, prop.first, interpretedBool->Value()); |
946 | 0 | } else if (const TypedProperty<int> *interpretedInt = prop.second->As<TypedProperty<int>>()) { |
947 | 0 | data->Set(index++, prop.first, interpretedInt->Value()); |
948 | 0 | } else if (const TypedProperty<uint32_t> *interpretedUInt = prop.second->As<TypedProperty<uint32_t>>()) { |
949 | 0 | data->Set(index++, prop.first, interpretedUInt->Value()); |
950 | 0 | } else if (const TypedProperty<uint64_t> *interpretedUint64 = prop.second->As<TypedProperty<uint64_t>>()) { |
951 | 0 | data->Set(index++, prop.first, interpretedUint64->Value()); |
952 | 0 | } else if (const TypedProperty<int64_t> *interpretedint64 = prop.second->As<TypedProperty<int64_t>>()) { |
953 | 0 | data->Set(index++, prop.first, interpretedint64->Value()); |
954 | 0 | } else if (const TypedProperty<float> *interpretedFloat = prop.second->As<TypedProperty<float>>()) { |
955 | 0 | data->Set(index++, prop.first, interpretedFloat->Value()); |
956 | 0 | } else if (const TypedProperty<std::string> *interpretedString = prop.second->As<TypedProperty<std::string>>()) { |
957 | 0 | data->Set(index++, prop.first, aiString(interpretedString->Value())); |
958 | 0 | } else if (const TypedProperty<aiVector3D> *interpretedVec3 = prop.second->As<TypedProperty<aiVector3D>>()) { |
959 | 0 | data->Set(index++, prop.first, interpretedVec3->Value()); |
960 | 0 | } else { |
961 | 0 | ai_assert(false); |
962 | 0 | } |
963 | 0 | } |
964 | 0 | } |
965 | | |
966 | 0 | void FBXConverter::ConvertModel(const Model &model, aiNode *parent, aiNode *root_node, const aiMatrix4x4 &absolute_transform) { |
967 | 0 | const std::vector<const Geometry *> &geos = model.GetGeometry(); |
968 | |
|
969 | 0 | std::vector<unsigned int> meshes; |
970 | 0 | meshes.reserve(geos.size()); |
971 | |
|
972 | 0 | for (const Geometry *geo : geos) { |
973 | 0 | const MeshGeometry *const mesh = dynamic_cast<const MeshGeometry *>(geo); |
974 | 0 | const LineGeometry *const line = dynamic_cast<const LineGeometry *>(geo); |
975 | 0 | if (mesh) { |
976 | 0 | const std::vector<unsigned int> &indices = ConvertMesh(*mesh, model, parent, root_node, absolute_transform); |
977 | 0 | std::copy(indices.begin(), indices.end(), std::back_inserter(meshes)); |
978 | 0 | } else if (line) { |
979 | 0 | const std::vector<unsigned int> &indices = ConvertLine(*line, root_node); |
980 | 0 | std::copy(indices.begin(), indices.end(), std::back_inserter(meshes)); |
981 | 0 | } else if (geo) { |
982 | 0 | FBXImporter::LogWarn("ignoring unrecognized geometry: ", geo->Name()); |
983 | 0 | } else { |
984 | 0 | FBXImporter::LogWarn("skipping null geometry"); |
985 | 0 | } |
986 | 0 | } |
987 | |
|
988 | 0 | if (meshes.size()) { |
989 | 0 | parent->mMeshes = new unsigned int[meshes.size()](); |
990 | 0 | parent->mNumMeshes = static_cast<unsigned int>(meshes.size()); |
991 | |
|
992 | 0 | std::swap_ranges(meshes.begin(), meshes.end(), parent->mMeshes); |
993 | 0 | } |
994 | 0 | } |
995 | | |
996 | | std::vector<unsigned int> |
997 | 0 | FBXConverter::ConvertMesh(const MeshGeometry &mesh, const Model &model, aiNode *parent, aiNode *root_node, const aiMatrix4x4 &absolute_transform) { |
998 | 0 | std::vector<unsigned int> temp; |
999 | |
|
1000 | 0 | MeshMap::const_iterator it = meshes_converted.find(&mesh); |
1001 | 0 | if (it != meshes_converted.end()) { |
1002 | 0 | std::copy((*it).second.begin(), (*it).second.end(), std::back_inserter(temp)); |
1003 | 0 | return temp; |
1004 | 0 | } |
1005 | | |
1006 | 0 | const std::vector<aiVector3D> &vertices = mesh.GetVertices(); |
1007 | 0 | const std::vector<unsigned int> &faces = mesh.GetFaceIndexCounts(); |
1008 | 0 | if (vertices.empty() || faces.empty()) { |
1009 | 0 | FBXImporter::LogWarn("ignoring empty geometry: ", mesh.Name()); |
1010 | 0 | return temp; |
1011 | 0 | } |
1012 | | |
1013 | | // one material per mesh maps easily to aiMesh. Multiple material |
1014 | | // meshes need to be split. |
1015 | 0 | const MatIndexArray &mindices = mesh.GetMaterialIndices(); |
1016 | 0 | if (doc.Settings().readMaterials && !mindices.empty()) { |
1017 | 0 | const MatIndexArray::value_type base = mindices[0]; |
1018 | 0 | for (MatIndexArray::value_type index : mindices) { |
1019 | 0 | if (index != base) { |
1020 | 0 | return ConvertMeshMultiMaterial(mesh, model, absolute_transform, parent, root_node); |
1021 | 0 | } |
1022 | 0 | } |
1023 | 0 | } |
1024 | | |
1025 | | // faster code-path, just copy the data |
1026 | 0 | temp.push_back(ConvertMeshSingleMaterial(mesh, model, absolute_transform, parent, root_node)); |
1027 | 0 | return temp; |
1028 | 0 | } |
1029 | | |
1030 | 0 | std::vector<unsigned int> FBXConverter::ConvertLine(const LineGeometry &line, aiNode *root_node) { |
1031 | 0 | std::vector<unsigned int> temp; |
1032 | |
|
1033 | 0 | const std::vector<aiVector3D> &vertices = line.GetVertices(); |
1034 | 0 | const std::vector<int> &indices = line.GetIndices(); |
1035 | 0 | if (vertices.empty() || indices.empty()) { |
1036 | 0 | FBXImporter::LogWarn("ignoring empty line: ", line.Name()); |
1037 | 0 | return temp; |
1038 | 0 | } |
1039 | | |
1040 | 0 | aiMesh *const out_mesh = SetupEmptyMesh(line, root_node); |
1041 | 0 | out_mesh->mPrimitiveTypes |= aiPrimitiveType_LINE; |
1042 | | |
1043 | | // copy vertices |
1044 | 0 | out_mesh->mNumVertices = static_cast<unsigned int>(vertices.size()); |
1045 | 0 | out_mesh->mVertices = new aiVector3D[out_mesh->mNumVertices]; |
1046 | 0 | std::copy(vertices.begin(), vertices.end(), out_mesh->mVertices); |
1047 | | |
1048 | | //Number of line segments (faces) is "Number of Points - Number of Endpoints" |
1049 | | //N.B.: Endpoints in FbxLine are denoted by negative indices. |
1050 | | //If such an Index is encountered, add 1 and multiply by -1 to get the real index. |
1051 | 0 | unsigned int epcount = 0; |
1052 | 0 | for (unsigned i = 0; i < indices.size(); i++) { |
1053 | 0 | if (indices[i] < 0) { |
1054 | 0 | epcount++; |
1055 | 0 | } |
1056 | 0 | } |
1057 | 0 | unsigned int pcount = static_cast<unsigned int>(indices.size()); |
1058 | 0 | unsigned int scount = out_mesh->mNumFaces = pcount - epcount; |
1059 | |
|
1060 | 0 | aiFace *fac = out_mesh->mFaces = new aiFace[scount](); |
1061 | 0 | for (unsigned int i = 0; i < pcount; ++i) { |
1062 | 0 | if (indices[i] < 0) continue; |
1063 | 0 | aiFace &f = *fac++; |
1064 | 0 | f.mNumIndices = 2; //2 == aiPrimitiveType_LINE |
1065 | 0 | f.mIndices = new unsigned int[2]; |
1066 | 0 | f.mIndices[0] = indices[i]; |
1067 | 0 | int segid = indices[(i + 1 == pcount ? 0 : i + 1)]; //If we have reached the last point, wrap around |
1068 | 0 | f.mIndices[1] = (segid < 0 ? (segid + 1) * -1 : segid); //Convert EndPoint Index to normal Index |
1069 | 0 | } |
1070 | 0 | temp.push_back(static_cast<unsigned int>(mMeshes.size() - 1)); |
1071 | 0 | return temp; |
1072 | 0 | } |
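A small sketch of the FbxLine endpoint decoding used above (DecodeLinePointIndex is a hypothetical helper): a point that ends a segment is stored with index -(i + 1), so negative values are mapped back with (stored + 1) * -1.

    // FbxLine marks segment endpoints by storing the real index i as -(i + 1).
    static int DecodeLinePointIndex(int stored) {
        return stored < 0 ? (stored + 1) * -1 : stored;
    }
    // e.g. DecodeLinePointIndex(-6) == 5, DecodeLinePointIndex(3) == 3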
1073 | | |
1074 | 0 | aiMesh *FBXConverter::SetupEmptyMesh(const Geometry &mesh, aiNode *parent) { |
1075 | 0 | aiMesh *const out_mesh = new aiMesh(); |
1076 | 0 | mMeshes.push_back(out_mesh); |
1077 | 0 | meshes_converted[&mesh].push_back(static_cast<unsigned int>(mMeshes.size() - 1)); |
1078 | | |
1079 | | // set name |
1080 | 0 | std::string name = mesh.Name(); |
1081 | 0 | if (name.substr(0, 10) == "Geometry::") { |
1082 | 0 | name = name.substr(10); |
1083 | 0 | } |
1084 | |
|
1085 | 0 | if (name.length()) { |
1086 | 0 | out_mesh->mName.Set(name); |
1087 | 0 | } else { |
1088 | 0 | out_mesh->mName = parent->mName; |
1089 | 0 | } |
1090 | |
|
1091 | 0 | return out_mesh; |
1092 | 0 | } |
1093 | | |
1094 | 0 | static aiSkeleton *createAiSkeleton(SkeletonBoneContainer &sbc) { |
1095 | 0 | if (sbc.MeshArray.empty() || sbc.SkeletonBoneToMeshLookup.empty()) { |
1096 | 0 | return nullptr; |
1097 | 0 | } |
1098 | | |
1099 | 0 | aiSkeleton *skeleton = new aiSkeleton; |
1100 | 0 | for (auto *mesh : sbc.MeshArray) { |
1101 | 0 | auto it = sbc.SkeletonBoneToMeshLookup.find(mesh); |
1102 | 0 | if (it == sbc.SkeletonBoneToMeshLookup.end()) { |
1103 | 0 | continue; |
1104 | 0 | } |
1105 | 0 | SkeletonBoneArray *ba = it->second; |
1106 | 0 | if (ba == nullptr) { |
1107 | 0 | continue; |
1108 | 0 | } |
1109 | | |
1110 | 0 | skeleton->mNumBones = static_cast<unsigned int>(ba->size()); |
1111 | 0 | skeleton->mBones = new aiSkeletonBone*[skeleton->mNumBones]; |
1112 | 0 | size_t index = 0; |
1113 | 0 | for (auto bone : (* ba)) { |
1114 | 0 | skeleton->mBones[index] = bone; |
1115 | 0 | ++index; |
1116 | 0 | } |
1117 | 0 | } |
1118 | |
|
1119 | 0 | return skeleton; |
1120 | 0 | } |
1121 | | |
1122 | | unsigned int FBXConverter::ConvertMeshSingleMaterial(const MeshGeometry &mesh, const Model &model, const aiMatrix4x4 &absolute_transform, |
1123 | 0 | aiNode *parent, aiNode *) { |
1124 | 0 | const MatIndexArray &mindices = mesh.GetMaterialIndices(); |
1125 | 0 | aiMesh *const out_mesh = SetupEmptyMesh(mesh, parent); |
1126 | |
|
1127 | 0 | const std::vector<aiVector3D> &vertices = mesh.GetVertices(); |
1128 | 0 | const std::vector<unsigned int> &faces = mesh.GetFaceIndexCounts(); |
1129 | | |
1130 | | // copy vertices |
1131 | 0 | out_mesh->mNumVertices = static_cast<unsigned int>(vertices.size()); |
1132 | 0 | out_mesh->mVertices = new aiVector3D[vertices.size()]; |
1133 | |
|
1134 | 0 | std::copy(vertices.begin(), vertices.end(), out_mesh->mVertices); |
1135 | | |
1136 | | // generate dummy faces |
1137 | 0 | out_mesh->mNumFaces = static_cast<unsigned int>(faces.size()); |
1138 | 0 | aiFace *fac = out_mesh->mFaces = new aiFace[faces.size()](); |
1139 | |
|
1140 | 0 | unsigned int cursor = 0; |
1141 | 0 | for (unsigned int pcount : faces) { |
1142 | 0 | aiFace &f = *fac++; |
1143 | 0 | f.mNumIndices = pcount; |
1144 | 0 | f.mIndices = new unsigned int[pcount]; |
1145 | 0 | switch (pcount) { |
1146 | 0 | case 1: |
1147 | 0 | out_mesh->mPrimitiveTypes |= aiPrimitiveType_POINT; |
1148 | 0 | break; |
1149 | 0 | case 2: |
1150 | 0 | out_mesh->mPrimitiveTypes |= aiPrimitiveType_LINE; |
1151 | 0 | break; |
1152 | 0 | case 3: |
1153 | 0 | out_mesh->mPrimitiveTypes |= aiPrimitiveType_TRIANGLE; |
1154 | 0 | break; |
1155 | 0 | default: |
1156 | 0 | out_mesh->mPrimitiveTypes |= aiPrimitiveType_POLYGON; |
1157 | 0 | break; |
1158 | 0 | } |
1159 | 0 | for (unsigned int i = 0; i < pcount; ++i) { |
1160 | 0 | f.mIndices[i] = cursor++; |
1161 | 0 | } |
1162 | 0 | } |
1163 | | |
1164 | | // copy normals |
1165 | 0 | const std::vector<aiVector3D> &normals = mesh.GetNormals(); |
1166 | 0 | if (normals.size()) { |
1167 | 0 | ai_assert(normals.size() == vertices.size()); |
1168 | |
|
1169 | 0 | out_mesh->mNormals = new aiVector3D[vertices.size()]; |
1170 | 0 | std::copy(normals.begin(), normals.end(), out_mesh->mNormals); |
1171 | 0 | } |
1172 | | |
1173 | | // copy tangents - assimp requires both tangents and bitangents (binormals) |
1174 | | // to be present, or neither of them. Compute binormals from normals |
1175 | | // and tangents if needed. |
1176 | 0 | const std::vector<aiVector3D> &tangents = mesh.GetTangents(); |
1177 | 0 | const std::vector<aiVector3D> *binormals = &mesh.GetBinormals(); |
1178 | |
|
1179 | 0 | if (tangents.size()) { |
1180 | 0 | std::vector<aiVector3D> tempBinormals; |
1181 | 0 | if (!binormals->size()) { |
1182 | 0 | if (normals.size()) { |
1183 | 0 | tempBinormals.resize(normals.size()); |
1184 | 0 | for (unsigned int i = 0; i < tangents.size(); ++i) { |
1185 | 0 | tempBinormals[i] = normals[i] ^ tangents[i]; |
1186 | 0 | } |
1187 | |
|
1188 | 0 | binormals = &tempBinormals; |
1189 | 0 | } else { |
1190 | 0 | binormals = nullptr; |
1191 | 0 | } |
1192 | 0 | } |
1193 | |
|
1194 | 0 | if (binormals) { |
1195 | 0 | ai_assert(tangents.size() == vertices.size()); |
1196 | 0 | ai_assert(binormals->size() == vertices.size()); |
1197 | |
|
1198 | 0 | out_mesh->mTangents = new aiVector3D[vertices.size()]; |
1199 | 0 | std::copy(tangents.begin(), tangents.end(), out_mesh->mTangents); |
1200 | |
|
1201 | 0 | out_mesh->mBitangents = new aiVector3D[vertices.size()]; |
1202 | 0 | std::copy(binormals->begin(), binormals->end(), out_mesh->mBitangents); |
1203 | 0 | } |
1204 | 0 | } |
1205 | | |
1206 | | // copy texture coords |
1207 | 0 | for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) { |
1208 | 0 | const std::vector<aiVector2D> &uvs = mesh.GetTextureCoords(i); |
1209 | 0 | if (uvs.empty()) { |
1210 | 0 | break; |
1211 | 0 | } |
1212 | | |
1213 | 0 | aiVector3D *out_uv = out_mesh->mTextureCoords[i] = new aiVector3D[vertices.size()]; |
1214 | 0 | for (const aiVector2D &v : uvs) { |
1215 | 0 | *out_uv++ = aiVector3D(v.x, v.y, 0.0f); |
1216 | 0 | } |
1217 | |
|
1218 | 0 | out_mesh->SetTextureCoordsName(i, aiString(mesh.GetTextureCoordChannelName(i))); |
1219 | |
|
1220 | 0 | out_mesh->mNumUVComponents[i] = 2; |
1221 | 0 | } |
1222 | | |
1223 | | // copy vertex colors |
1224 | 0 | for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_COLOR_SETS; ++i) { |
1225 | 0 | const std::vector<aiColor4D> &colors = mesh.GetVertexColors(i); |
1226 | 0 | if (colors.empty()) { |
1227 | 0 | break; |
1228 | 0 | } |
1229 | | |
1230 | 0 | out_mesh->mColors[i] = new aiColor4D[vertices.size()]; |
1231 | 0 | std::copy(colors.begin(), colors.end(), out_mesh->mColors[i]); |
1232 | 0 | } |
1233 | |
|
1234 | 0 | if (!doc.Settings().readMaterials || mindices.empty()) { |
1235 | 0 | FBXImporter::LogError("no material assigned to mesh, setting default material"); |
1236 | 0 | out_mesh->mMaterialIndex = GetDefaultMaterial(); |
1237 | 0 | } else { |
1238 | 0 | ConvertMaterialForMesh(out_mesh, model, mesh, mindices[0]); |
1239 | 0 | } |
1240 | |
|
1241 | 0 | if (doc.Settings().readWeights && mesh.DeformerSkin() != nullptr && !doc.Settings().useSkeleton) { |
1242 | 0 | ConvertWeights(out_mesh, mesh, absolute_transform, parent, NO_MATERIAL_SEPARATION, nullptr); |
1243 | 0 | } else if (doc.Settings().readWeights && mesh.DeformerSkin() != nullptr && doc.Settings().useSkeleton) { |
1244 | 0 | SkeletonBoneContainer sbc; |
1245 | 0 | ConvertWeightsToSkeleton(out_mesh, mesh, absolute_transform, parent, NO_MATERIAL_SEPARATION, nullptr, sbc); |
1246 | 0 | aiSkeleton *skeleton = createAiSkeleton(sbc); |
1247 | 0 | if (skeleton != nullptr) { |
1248 | 0 | mSkeletons.emplace_back(skeleton); |
1249 | 0 | } |
1250 | 0 | } |
1251 | |
|
1252 | 0 | std::vector<aiAnimMesh *> animMeshes; |
1253 | 0 | for (const BlendShape *blendShape : mesh.GetBlendShapes()) { |
1254 | 0 | for (const BlendShapeChannel *blendShapeChannel : blendShape->BlendShapeChannels()) { |
1255 | 0 | const auto& shapeGeometries = blendShapeChannel->GetShapeGeometries(); |
1256 | 0 | for (const ShapeGeometry *shapeGeometry : shapeGeometries) { |
1257 | 0 | const auto &curNormals = shapeGeometry->GetNormals(); |
1258 | 0 | aiAnimMesh *animMesh = aiCreateAnimMesh(out_mesh, true, !curNormals.empty()); |
1259 | 0 | const auto &curVertices = shapeGeometry->GetVertices(); |
1260 | 0 | const auto &curIndices = shapeGeometry->GetIndices(); |
1261 | | // using shapeGeometry->Name() alone would lose the blend shape channel name; |
1262 | | // if the blendShapeChannel name is empty or contains no ".", append the geometry name |
1263 | 0 | auto aniName = FixAnimMeshName(blendShapeChannel->Name()); |
1264 | 0 | auto geoMetryName = FixAnimMeshName(shapeGeometry->Name()); |
1265 | 0 | if (aniName.empty()) { |
1266 | 0 | aniName = geoMetryName; |
1267 | 0 | } |
1268 | 0 | else if (aniName.find('.') == aniName.npos) { |
1269 | 0 | aniName += "." + geoMetryName; |
1270 | 0 | } |
1271 | 0 | animMesh->mName.Set(aniName); |
1272 | 0 | for (size_t j = 0; j < curIndices.size(); j++) { |
1273 | 0 | const unsigned int curIndex = curIndices.at(j); |
1274 | 0 | aiVector3D vertex = curVertices.at(j); |
1275 | 0 | aiVector3D normal = curNormals.empty() ? aiVector3D() : curNormals.at(j); |
1276 | 0 | unsigned int count = 0; |
1277 | 0 | const unsigned int *outIndices = mesh.ToOutputVertexIndex(curIndex, count); |
1278 | 0 | for (unsigned int k = 0; k < count; k++) { |
1279 | 0 | unsigned int index = outIndices[k]; |
1280 | 0 | animMesh->mVertices[index] += vertex; |
1281 | 0 | if (animMesh->mNormals != nullptr) { |
1282 | 0 | animMesh->mNormals[index] += normal; |
1283 | 0 | animMesh->mNormals[index].NormalizeSafe(); |
1284 | 0 | } |
1285 | 0 | } |
1286 | 0 | } |
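 | | // DeformPercent is given in percent; normalize it to a 0..1 weight, using full weight when the channel has a single target shape |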
1287 | 0 | animMesh->mWeight = shapeGeometries.size() > 1 ? blendShapeChannel->DeformPercent() / 100.0f : 1.0f; |
1288 | 0 | animMeshes.push_back(animMesh); |
1289 | 0 | } |
1290 | 0 | } |
1291 | 0 | } |
1292 | 0 | const size_t numAnimMeshes = animMeshes.size(); |
1293 | 0 | if (numAnimMeshes > 0) { |
1294 | 0 | out_mesh->mNumAnimMeshes = static_cast<unsigned int>(numAnimMeshes); |
1295 | 0 | out_mesh->mAnimMeshes = new aiAnimMesh *[numAnimMeshes]; |
1296 | 0 | for (size_t i = 0; i < numAnimMeshes; i++) { |
1297 | 0 | out_mesh->mAnimMeshes[i] = animMeshes.at(i); |
1298 | 0 | } |
1299 | 0 | } |
1300 | 0 | return static_cast<unsigned int>(mMeshes.size() - 1); |
1301 | 0 | } |
1302 | | |
1303 | | std::vector<unsigned int> |
1304 | | FBXConverter::ConvertMeshMultiMaterial(const MeshGeometry &mesh, const Model &model, const aiMatrix4x4 &absolute_transform, aiNode *parent, |
1305 | 0 | aiNode *root_node) { |
1306 | 0 | const MatIndexArray &mindices = mesh.GetMaterialIndices(); |
1307 | 0 | ai_assert(mindices.size()); |
1308 | |
1309 | 0 | std::set<MatIndexArray::value_type> had; |
1310 | 0 | std::vector<unsigned int> indices; |
1311 | |
1312 | 0 | for (MatIndexArray::value_type index : mindices) { |
1313 | 0 | if (had.find(index) == had.end()) { |
1314 | |
1315 | 0 | indices.push_back(ConvertMeshMultiMaterial(mesh, model, absolute_transform, index, parent, root_node)); |
1316 | 0 | had.insert(index); |
1317 | 0 | } |
1318 | 0 | } |
1319 | |
1320 | 0 | return indices; |
1321 | 0 | } |
1322 | | |
1323 | | unsigned int FBXConverter::ConvertMeshMultiMaterial(const MeshGeometry &mesh, const Model &model, const aiMatrix4x4 &absolute_transform, |
1324 | 0 | MatIndexArray::value_type index, aiNode *parent, aiNode *) { |
1325 | 0 | aiMesh *const out_mesh = SetupEmptyMesh(mesh, parent); |
1326 | |
1327 | 0 | const MatIndexArray &mindices = mesh.GetMaterialIndices(); |
1328 | 0 | const std::vector<aiVector3D> &vertices = mesh.GetVertices(); |
1329 | 0 | const std::vector<unsigned int> &faces = mesh.GetFaceIndexCounts(); |
1330 | |
1331 | 0 | const bool process_weights = doc.Settings().readWeights && mesh.DeformerSkin() != nullptr; |
1332 | |
1333 | 0 | unsigned int count_faces = 0; |
1334 | 0 | unsigned int count_vertices = 0; |
1335 | | |
1336 | | // count faces |
1337 | 0 | std::vector<unsigned int>::const_iterator itf = faces.begin(); |
1338 | 0 | for (MatIndexArray::const_iterator it = mindices.begin(), |
1339 | 0 | end = mindices.end(); |
1340 | 0 | it != end; ++it, ++itf) { |
1341 | 0 | if ((*it) != index) { |
1342 | 0 | continue; |
1343 | 0 | } |
1344 | 0 | ++count_faces; |
1345 | 0 | count_vertices += *itf; |
1346 | 0 | } |
1347 | |
1348 | 0 | ai_assert(count_faces); |
1349 | 0 | ai_assert(count_vertices); |
1350 | | |
1351 | | // mapping from output indices to DOM indexing, needed to resolve weights or blendshapes |
1352 | 0 | std::vector<unsigned int> reverseMapping; |
1353 | 0 | std::map<unsigned int, unsigned int> translateIndexMap; |
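 | | // reverseMapping goes from output vertex index back to the DOM index; translateIndexMap goes the other way and is used to remap blend shape targets below |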
1354 | 0 | if (process_weights || mesh.GetBlendShapes().size() > 0) { |
1355 | 0 | reverseMapping.resize(count_vertices); |
1356 | 0 | } |
1357 | | |
1358 | | // allocate output data arrays, but don't fill them yet |
1359 | 0 | out_mesh->mNumVertices = count_vertices; |
1360 | 0 | out_mesh->mVertices = new aiVector3D[count_vertices]; |
1361 | |
1362 | 0 | out_mesh->mNumFaces = count_faces; |
1363 | 0 | aiFace *fac = out_mesh->mFaces = new aiFace[count_faces](); |
1364 | | |
1365 | | // allocate normals |
1366 | 0 | const std::vector<aiVector3D> &normals = mesh.GetNormals(); |
1367 | 0 | if (normals.size()) { |
1368 | 0 | ai_assert(normals.size() == vertices.size()); |
1369 | 0 | out_mesh->mNormals = new aiVector3D[count_vertices]; |
1370 | 0 | } |
1371 | | |
1372 | | // allocate tangents, binormals. |
1373 | 0 | const std::vector<aiVector3D> &tangents = mesh.GetTangents(); |
1374 | 0 | const std::vector<aiVector3D> *binormals = &mesh.GetBinormals(); |
1375 | 0 | std::vector<aiVector3D> tempBinormals; |
1376 | |
1377 | 0 | if (tangents.size()) { |
1378 | 0 | if (!binormals->size()) { |
1379 | 0 | if (normals.size()) { |
1380 | | // XXX this computes the binormals for the entire mesh, not only |
1381 | | // the part for which we need them. |
1382 | 0 | tempBinormals.resize(normals.size()); |
1383 | 0 | for (unsigned int i = 0; i < tangents.size(); ++i) { |
1384 | 0 | tempBinormals[i] = normals[i] ^ tangents[i]; |
1385 | 0 | } |
1386 | |
1387 | 0 | binormals = &tempBinormals; |
1388 | 0 | } else { |
1389 | 0 | binormals = nullptr; |
1390 | 0 | } |
1391 | 0 | } |
1392 | |
1393 | 0 | if (binormals) { |
1394 | 0 | ai_assert(tangents.size() == vertices.size()); |
1395 | 0 | ai_assert(binormals->size() == vertices.size()); |
1396 | |
1397 | 0 | out_mesh->mTangents = new aiVector3D[count_vertices]; |
1398 | 0 | out_mesh->mBitangents = new aiVector3D[count_vertices]; |
1399 | 0 | } |
1400 | 0 | } |
1401 | | |
1402 | | // allocate texture coords |
1403 | 0 | unsigned int num_uvs = 0; |
1404 | 0 | for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i, ++num_uvs) { |
1405 | 0 | const std::vector<aiVector2D> &uvs = mesh.GetTextureCoords(i); |
1406 | 0 | if (uvs.empty()) { |
1407 | 0 | break; |
1408 | 0 | } |
1409 | | |
1410 | 0 | out_mesh->mTextureCoords[i] = new aiVector3D[count_vertices]; |
1411 | 0 | out_mesh->mNumUVComponents[i] = 2; |
1412 | 0 | } |
1413 | | |
1414 | | // allocate vertex colors |
1415 | 0 | unsigned int num_vcs = 0; |
1416 | 0 | for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_COLOR_SETS; ++i, ++num_vcs) { |
1417 | 0 | const std::vector<aiColor4D> &colors = mesh.GetVertexColors(i); |
1418 | 0 | if (colors.empty()) { |
1419 | 0 | break; |
1420 | 0 | } |
1421 | | |
1422 | 0 | out_mesh->mColors[i] = new aiColor4D[count_vertices]; |
1423 | 0 | } |
1424 | |
1425 | 0 | unsigned int cursor = 0, in_cursor = 0; |
1426 | |
1427 | 0 | itf = faces.begin(); |
1428 | 0 | for (MatIndexArray::const_iterator it = mindices.begin(), end = mindices.end(); it != end; ++it, ++itf) { |
1429 | 0 | const unsigned int pcount = *itf; |
1430 | 0 | if ((*it) != index) { |
1431 | 0 | in_cursor += pcount; |
1432 | 0 | continue; |
1433 | 0 | } |
1434 | | |
1435 | 0 | aiFace &f = *fac++; |
1436 | |
1437 | 0 | f.mNumIndices = pcount; |
1438 | 0 | f.mIndices = new unsigned int[pcount]; |
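 | | // record the primitive type implied by the face's vertex count |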
1439 | 0 | switch (pcount) { |
1440 | 0 | case 1: |
1441 | 0 | out_mesh->mPrimitiveTypes |= aiPrimitiveType_POINT; |
1442 | 0 | break; |
1443 | 0 | case 2: |
1444 | 0 | out_mesh->mPrimitiveTypes |= aiPrimitiveType_LINE; |
1445 | 0 | break; |
1446 | 0 | case 3: |
1447 | 0 | out_mesh->mPrimitiveTypes |= aiPrimitiveType_TRIANGLE; |
1448 | 0 | break; |
1449 | 0 | default: |
1450 | 0 | out_mesh->mPrimitiveTypes |= aiPrimitiveType_POLYGON; |
1451 | 0 | break; |
1452 | 0 | } |
1453 | 0 | for (unsigned int i = 0; i < pcount; ++i, ++cursor, ++in_cursor) { |
1454 | 0 | f.mIndices[i] = cursor; |
1455 | |
1456 | 0 | if (reverseMapping.size()) { |
1457 | 0 | reverseMapping[cursor] = in_cursor; |
1458 | 0 | translateIndexMap[in_cursor] = cursor; |
1459 | 0 | } |
1460 | |
1461 | 0 | out_mesh->mVertices[cursor] = vertices[in_cursor]; |
1462 | |
1463 | 0 | if (out_mesh->mNormals) { |
1464 | 0 | out_mesh->mNormals[cursor] = normals[in_cursor]; |
1465 | 0 | } |
1466 | |
1467 | 0 | if (out_mesh->mTangents) { |
1468 | 0 | out_mesh->mTangents[cursor] = tangents[in_cursor]; |
1469 | 0 | out_mesh->mBitangents[cursor] = (*binormals)[in_cursor]; |
1470 | 0 | } |
1471 | |
1472 | 0 | for (unsigned int j = 0; j < num_uvs; ++j) { |
1473 | 0 | const std::vector<aiVector2D> &uvs = mesh.GetTextureCoords(j); |
1474 | 0 | out_mesh->mTextureCoords[j][cursor] = aiVector3D(uvs[in_cursor].x, uvs[in_cursor].y, 0.0f); |
1475 | 0 | } |
1476 | |
1477 | 0 | for (unsigned int j = 0; j < num_vcs; ++j) { |
1478 | 0 | const std::vector<aiColor4D> &cols = mesh.GetVertexColors(j); |
1479 | 0 | out_mesh->mColors[j][cursor] = cols[in_cursor]; |
1480 | 0 | } |
1481 | 0 | } |
1482 | 0 | } |
1483 | | |
1484 | 0 | ConvertMaterialForMesh(out_mesh, model, mesh, index); |
1485 | |
1486 | 0 | if (process_weights) { |
1487 | 0 | ConvertWeights(out_mesh, mesh, absolute_transform, parent, index, &reverseMapping); |
1488 | 0 | } |
1489 | |
1490 | 0 | std::vector<aiAnimMesh *> animMeshes; |
1491 | 0 | for (const BlendShape *blendShape : mesh.GetBlendShapes()) { |
1492 | 0 | for (const BlendShapeChannel *blendShapeChannel : blendShape->BlendShapeChannels()) { |
1493 | 0 | const auto& shapeGeometries = blendShapeChannel->GetShapeGeometries(); |
1494 | 0 | for (const ShapeGeometry *shapeGeometry : shapeGeometries) { |
1495 | 0 | const auto& curNormals = shapeGeometry->GetNormals(); |
1496 | 0 | aiAnimMesh *animMesh = aiCreateAnimMesh(out_mesh, true, !curNormals.empty()); |
1497 | 0 | const auto& curVertices = shapeGeometry->GetVertices(); |
1498 | 0 | const auto& curIndices = shapeGeometry->GetIndices(); |
1499 | 0 | animMesh->mName.Set(FixAnimMeshName(shapeGeometry->Name())); |
1500 | 0 | for (size_t j = 0; j < curIndices.size(); j++) { |
1501 | 0 | unsigned int curIndex = curIndices.at(j); |
1502 | 0 | aiVector3D vertex = curVertices.at(j); |
1503 | 0 | aiVector3D normal = curNormals.empty() ? aiVector3D() : curNormals.at(j); |
1504 | 0 | unsigned int count = 0; |
1505 | 0 | const unsigned int *outIndices = mesh.ToOutputVertexIndex(curIndex, count); |
1506 | 0 | for (unsigned int k = 0; k < count; k++) { |
1507 | 0 | unsigned int outIndex = outIndices[k]; |
1508 | 0 | if (translateIndexMap.find(outIndex) == translateIndexMap.end()) |
1509 | 0 | continue; |
1510 | 0 | unsigned int transIndex = translateIndexMap[outIndex]; |
1511 | 0 | animMesh->mVertices[transIndex] += vertex; |
1512 | 0 | if (animMesh->mNormals != nullptr) { |
1513 | 0 | animMesh->mNormals[transIndex] += normal; |
1514 | 0 | animMesh->mNormals[transIndex].NormalizeSafe(); |
1515 | 0 | } |
1516 | 0 | } |
1517 | 0 | } |
1518 | 0 | animMesh->mWeight = shapeGeometries.size() > 1 ? blendShapeChannel->DeformPercent() / 100.0f : 1.0f; |
1519 | 0 | animMeshes.push_back(animMesh); |
1520 | 0 | } |
1521 | 0 | } |
1522 | 0 | } |
1523 | |
1524 | 0 | const size_t numAnimMeshes = animMeshes.size(); |
1525 | 0 | if (numAnimMeshes > 0) { |
1526 | 0 | out_mesh->mNumAnimMeshes = static_cast<unsigned int>(numAnimMeshes); |
1527 | 0 | out_mesh->mAnimMeshes = new aiAnimMesh *[numAnimMeshes]; |
1528 | 0 | for (size_t i = 0; i < numAnimMeshes; i++) { |
1529 | 0 | out_mesh->mAnimMeshes[i] = animMeshes.at(i); |
1530 | 0 | } |
1531 | 0 | } |
1532 | |
1533 | 0 | return static_cast<unsigned int>(mMeshes.size() - 1); |
1534 | 0 | } |
1535 | | |
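 | | // the skeleton bone shares the aiBone's weight array and offset matrix; the parent index is left at -1 here |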
1536 | 0 | static void copyBoneToSkeletonBone(aiMesh *mesh, aiBone *bone, aiSkeletonBone *skeletonBone ) { |
1537 | 0 | skeletonBone->mNumnWeights = bone->mNumWeights; |
1538 | 0 | skeletonBone->mWeights = bone->mWeights; |
1539 | 0 | skeletonBone->mOffsetMatrix = bone->mOffsetMatrix; |
1540 | 0 | skeletonBone->mMeshId = mesh; |
1541 | 0 | #ifndef ASSIMP_BUILD_NO_ARMATUREPOPULATE_PROCESS |
1542 | 0 | skeletonBone->mNode = bone->mNode; |
1543 | 0 | #endif |
1544 | 0 | skeletonBone->mParent = -1; |
1545 | 0 | } |
1546 | | |
1547 | | void FBXConverter::ConvertWeightsToSkeleton(aiMesh *out, const MeshGeometry &geo, const aiMatrix4x4 &absolute_transform, aiNode *parent, unsigned int materialIndex, |
1548 | 0 | std::vector<unsigned int> *outputVertStartIndices, SkeletonBoneContainer &skeletonContainer) { |
1549 | |
1550 | 0 | if (skeletonContainer.SkeletonBoneToMeshLookup.find(out) != skeletonContainer.SkeletonBoneToMeshLookup.end()) { |
1551 | 0 | return; |
1552 | 0 | } |
1553 | | |
1554 | 0 | ConvertWeights(out, geo, absolute_transform, parent, materialIndex, outputVertStartIndices); |
1555 | 0 | skeletonContainer.MeshArray.emplace_back(out); |
1556 | 0 | SkeletonBoneArray *ba = new SkeletonBoneArray; |
1557 | 0 | for (size_t i = 0; i < out->mNumBones; ++i) { |
1558 | 0 | aiBone *bone = out->mBones[i]; |
1559 | 0 | if (bone == nullptr) { |
1560 | 0 | continue; |
1561 | 0 | } |
1562 | 0 | aiSkeletonBone *skeletonBone = new aiSkeletonBone; |
1563 | 0 | copyBoneToSkeletonBone(out, bone, skeletonBone); |
1564 | 0 | ba->emplace_back(skeletonBone); |
1565 | 0 | } |
1566 | 0 | skeletonContainer.SkeletonBoneToMeshLookup[out] = ba; |
1567 | 0 | } |
1568 | | |
1569 | | void FBXConverter::ConvertWeights(aiMesh *out, const MeshGeometry &geo, const aiMatrix4x4 &absolute_transform, |
1570 | | aiNode *parent, unsigned int materialIndex, |
1571 | 0 | std::vector<unsigned int> *outputVertStartIndices) { |
1572 | 0 | ai_assert(geo.DeformerSkin()); |
1573 | |
1574 | 0 | std::vector<size_t> out_indices, index_out_indices, count_out_indices; |
1575 | |
1576 | 0 | const Skin &sk = *geo.DeformerSkin(); |
1577 | |
1578 | 0 | std::vector<aiBone*> bones; |
1579 | 0 | const bool no_mat_check = materialIndex == NO_MATERIAL_SEPARATION; |
1580 | 0 | ai_assert(no_mat_check || outputVertStartIndices); |
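 | | // NO_MATERIAL_SEPARATION means the mesh was not split per material; otherwise output indices are translated through outputVertStartIndices |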
1581 | |
1582 | 0 | try { |
1583 | | // iterate over the sub deformers |
1584 | 0 | for (const Cluster *cluster : sk.Clusters()) { |
1585 | 0 | ai_assert(cluster); |
1586 | |
1587 | 0 | const WeightIndexArray &indices = cluster->GetIndices(); |
1588 | |
1589 | 0 | const MatIndexArray &mats = geo.GetMaterialIndices(); |
1590 | |
1591 | 0 | const size_t no_index_sentinel = std::numeric_limits<size_t>::max(); |
1592 | |
1593 | 0 | count_out_indices.clear(); |
1594 | 0 | index_out_indices.clear(); |
1595 | 0 | out_indices.clear(); |
1596 | | |
1597 | | // now check if *any* of these weights is contained in the output mesh, |
1598 | | // taking notes so we don't need to do it twice. |
1599 | 0 | for (WeightIndexArray::value_type index : indices) { |
1600 | |
1601 | 0 | unsigned int count = 0; |
1602 | 0 | const unsigned int *const out_idx = geo.ToOutputVertexIndex(index, count); |
1603 | | // ToOutputVertexIndex only returns nullptr if index is out of bounds |
1604 | | // which should never happen |
1605 | 0 | ai_assert(out_idx != nullptr); |
1606 | |
1607 | 0 | index_out_indices.push_back(no_index_sentinel); |
1608 | 0 | count_out_indices.push_back(0); |
1609 | |
1610 | 0 | for (unsigned int i = 0; i < count; ++i) { |
1611 | 0 | if (no_mat_check || static_cast<size_t>(mats[geo.FaceForVertexIndex(out_idx[i])]) == materialIndex) { |
1612 | |
1613 | 0 | if (index_out_indices.back() == no_index_sentinel) { |
1614 | 0 | index_out_indices.back() = out_indices.size(); |
1615 | 0 | } |
1616 | |
1617 | 0 | if (no_mat_check) { |
1618 | 0 | out_indices.push_back(out_idx[i]); |
1619 | 0 | } else { |
1620 | | // this extra lookup is in O(logn), so the entire algorithm becomes O(nlogn) |
1621 | 0 | const std::vector<unsigned int>::iterator it = std::lower_bound( |
1622 | 0 | outputVertStartIndices->begin(), |
1623 | 0 | outputVertStartIndices->end(), |
1624 | 0 | out_idx[i]); |
1625 | |
1626 | 0 | out_indices.push_back(std::distance(outputVertStartIndices->begin(), it)); |
1627 | 0 | } |
1628 | |
1629 | 0 | ++count_out_indices.back(); |
1630 | 0 | } |
1631 | 0 | } |
1632 | 0 | } |
1633 | | |
1634 | | // if we found at least one, generate the output bones |
1635 | | // XXX this could be heavily simplified by collecting the bone |
1636 | | // data in a single step. |
1637 | 0 | ConvertCluster(bones, cluster, out_indices, index_out_indices, |
1638 | 0 | count_out_indices, absolute_transform, parent); |
1639 | 0 | } |
1640 | |
1641 | 0 | bone_map.clear(); |
1642 | 0 | } catch (std::exception &) { |
1643 | 0 | std::for_each(bones.begin(), bones.end(), Util::delete_fun<aiBone>()); |
1644 | 0 | throw; |
1645 | 0 | } |
1646 | | |
1647 | 0 | if (bones.empty()) { |
1648 | 0 | out->mBones = nullptr; |
1649 | 0 | out->mNumBones = 0; |
1650 | 0 | return; |
1651 | 0 | } |
1652 | | |
1653 | 0 | out->mBones = new aiBone *[bones.size()](); |
1654 | 0 | out->mNumBones = static_cast<unsigned int>(bones.size()); |
1655 | 0 | std::swap_ranges(bones.begin(), bones.end(), out->mBones); |
1656 | 0 | } |
1657 | | |
1658 | | void FBXConverter::ConvertCluster(std::vector<aiBone*> &local_mesh_bones, const Cluster *cluster, |
1659 | | std::vector<size_t> &out_indices, std::vector<size_t> &index_out_indices, |
1660 | | std::vector<size_t> &count_out_indices, const aiMatrix4x4 &absolute_transform, |
1661 | 0 | aiNode *) { |
1662 | 0 | ai_assert(cluster != nullptr); // make sure cluster valid |
1663 | |
1664 | 0 | std::string deformer_name = cluster->TargetNode()->Name(); |
1665 | 0 | aiString bone_name = aiString(FixNodeName(deformer_name)); |
1666 | |
1667 | 0 | aiBone *bone = nullptr; |
1668 | |
1669 | 0 | if (bone_map.count(deformer_name)) { |
1670 | 0 | ASSIMP_LOG_VERBOSE_DEBUG("retrieved bone from lookup ", bone_name.C_Str(), ". Deformer:", deformer_name); |
1671 | 0 | bone = bone_map[deformer_name]; |
1672 | 0 | } else { |
1673 | 0 | ASSIMP_LOG_VERBOSE_DEBUG("created new bone ", bone_name.C_Str(), ". Deformer: ", deformer_name); |
1674 | 0 | bone = new aiBone(); |
1675 | 0 | bone->mName = bone_name; |
1676 | | |
1677 | | //bone->mOffsetMatrix = cluster->Transform(); |
1678 | | // store local transform link for post processing |
1679 | |
1680 | 0 | bone->mOffsetMatrix = cluster->TransformLink(); |
1681 | 0 | bone->mOffsetMatrix.Inverse(); |
1682 | |
1683 | 0 | const aiMatrix4x4 matrix = (aiMatrix4x4)absolute_transform; |
1684 | |
1685 | 0 | bone->mOffsetMatrix = bone->mOffsetMatrix * matrix; // * mesh_offset |
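 | | // net result: mOffsetMatrix = inverse(cluster->TransformLink()) * absolute_transform |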
1686 | | |
1687 | | // |
1688 | | // Now calculate the aiVertexWeights |
1689 | | // |
1690 | |
1691 | 0 | aiVertexWeight *cursor = nullptr; |
1692 | |
1693 | 0 | bone->mNumWeights = static_cast<unsigned int>(out_indices.size()); |
1694 | 0 | cursor = bone->mWeights = new aiVertexWeight[out_indices.size()]; |
1695 | |
1696 | 0 | const size_t no_index_sentinel = std::numeric_limits<size_t>::max(); |
1697 | 0 | const WeightArray &weights = cluster->GetWeights(); |
1698 | |
1699 | 0 | const size_t c = index_out_indices.size(); |
1700 | 0 | for (size_t i = 0; i < c; ++i) { |
1701 | 0 | const size_t index_index = index_out_indices[i]; |
1702 | |
1703 | 0 | if (index_index == no_index_sentinel) { |
1704 | 0 | continue; |
1705 | 0 | } |
1706 | | |
1707 | 0 | const size_t cc = count_out_indices[i]; |
1708 | 0 | for (size_t j = 0; j < cc; ++j) { |
1709 | | // the cursor advances over consecutive output weights; the vertex ids are |
1710 | | // taken from out_indices starting at this entry's offset (index_index). |
1711 | 0 | aiVertexWeight &out_weight = *cursor++; |
1712 | |
1713 | 0 | out_weight.mVertexId = static_cast<unsigned int>(out_indices[index_index + j]); |
1714 | 0 | out_weight.mWeight = weights[i]; |
1715 | 0 | } |
1716 | 0 | } |
1717 | |
1718 | 0 | bone_map.insert(std::pair<const std::string, aiBone *>(deformer_name, bone)); |
1719 | 0 | } |
1720 | |
1721 | 0 | ASSIMP_LOG_DEBUG("bone research: Indices size: ", out_indices.size()); |
1722 | | |
1723 | | // push into the caller's list so the bone is cleaned up if something goes wrong later; |
1724 | | // this is also how the bone ends up assigned to the mesh instance outside |
1725 | 0 | local_mesh_bones.push_back(bone); |
1726 | 0 | } |
1727 | | |
1728 | | void FBXConverter::ConvertMaterialForMesh(aiMesh *out, const Model &model, const MeshGeometry &geo, |
1729 | 0 | MatIndexArray::value_type materialIndex) { |
1730 | | // locate source materials for this mesh |
1731 | 0 | const std::vector<const Material *> &mats = model.GetMaterials(); |
1732 | 0 | if (static_cast<unsigned int>(materialIndex) >= mats.size() || materialIndex < 0) { |
1733 | 0 | FBXImporter::LogError("material index out of bounds, setting default material"); |
1734 | 0 | out->mMaterialIndex = GetDefaultMaterial(); |
1735 | 0 | return; |
1736 | 0 | } |
1737 | | |
1738 | 0 | const Material *const mat = mats[materialIndex]; |
1739 | 0 | MaterialMap::const_iterator it = materials_converted.find(mat); |
1740 | 0 | if (it != materials_converted.end()) { |
1741 | 0 | out->mMaterialIndex = (*it).second; |
1742 | 0 | return; |
1743 | 0 | } |
1744 | | |
1745 | 0 | out->mMaterialIndex = ConvertMaterial(*mat, &geo); |
1746 | 0 | materials_converted[mat] = out->mMaterialIndex; |
1747 | 0 | } |
1748 | | |
1749 | 0 | unsigned int FBXConverter::GetDefaultMaterial() { |
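 | | // defaultMaterialIndex stores the index + 1 so that 0 can mean "not created yet" |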
1750 | 0 | if (defaultMaterialIndex) { |
1751 | 0 | return defaultMaterialIndex - 1; |
1752 | 0 | } |
1753 | | |
1754 | 0 | aiMaterial *out_mat = new aiMaterial(); |
1755 | 0 | materials.push_back(out_mat); |
1756 | |
1757 | 0 | const aiColor3D diffuse = aiColor3D(0.8f, 0.8f, 0.8f); |
1758 | 0 | out_mat->AddProperty(&diffuse, 1, AI_MATKEY_COLOR_DIFFUSE); |
1759 | |
1760 | 0 | aiString s; |
1761 | 0 | s.Set(AI_DEFAULT_MATERIAL_NAME); |
1762 | |
1763 | 0 | out_mat->AddProperty(&s, AI_MATKEY_NAME); |
1764 | |
1765 | 0 | defaultMaterialIndex = static_cast<unsigned int>(materials.size()); |
1766 | 0 | return defaultMaterialIndex - 1; |
1767 | 0 | } |
1768 | | |
1769 | 0 | unsigned int FBXConverter::ConvertMaterial(const Material &material, const MeshGeometry *const mesh) { |
1770 | 0 | const PropertyTable &props = material.Props(); |
1771 | | |
1772 | | // generate empty output material |
1773 | 0 | aiMaterial *out_mat = new aiMaterial(); |
1774 | 0 | materials_converted[&material] = static_cast<unsigned int>(materials.size()); |
1775 | |
1776 | 0 | materials.push_back(out_mat); |
1777 | |
1778 | 0 | aiString str; |
1779 | | |
1780 | | // strip Material:: prefix |
1781 | 0 | std::string name = material.Name(); |
1782 | 0 | if (name.substr(0, 10) == "Material::") { |
1783 | 0 | name = name.substr(10); |
1784 | 0 | } |
1785 | | |
1786 | | // set material name if not empty - this could happen |
1787 | | // and there should be no key for it in this case. |
1788 | 0 | if (name.length()) { |
1789 | 0 | str.Set(name); |
1790 | 0 | out_mat->AddProperty(&str, AI_MATKEY_NAME); |
1791 | 0 | } |
1792 | | |
1793 | | // Set the shading mode as best we can: The FBX specification only mentions Lambert and Phong, and only Phong is mentioned in Assimp's aiShadingMode enum. |
1794 | 0 | if (material.GetShadingModel() == "phong") { |
1795 | 0 | aiShadingMode shadingMode = aiShadingMode_Phong; |
1796 | 0 | out_mat->AddProperty<aiShadingMode>(&shadingMode, 1, AI_MATKEY_SHADING_MODEL); |
1797 | 0 | } |
1798 | | |
1799 | | // shading stuff and colors |
1800 | 0 | SetShadingPropertiesCommon(out_mat, props); |
1801 | 0 | SetShadingPropertiesRaw(out_mat, props, material.Textures(), mesh); |
1802 | | |
1803 | | // texture assignments |
1804 | 0 | SetTextureProperties(out_mat, material.Textures(), mesh); |
1805 | 0 | SetTextureProperties(out_mat, material.LayeredTextures(), mesh); |
1806 | |
1807 | 0 | return static_cast<unsigned int>(materials.size() - 1); |
1808 | 0 | } |
1809 | | |
1810 | 0 | unsigned int FBXConverter::ConvertVideo(const Video &video) { |
1811 | | // generate empty output texture |
1812 | 0 | aiTexture *out_tex = new aiTexture(); |
1813 | 0 | textures.push_back(out_tex); |
1814 | | |
1815 | | // assuming the texture is compressed |
1816 | 0 | out_tex->mWidth = static_cast<unsigned int>(video.ContentLength()); // total data size |
1817 | 0 | out_tex->mHeight = 0; // fixed to 0 |
1818 | | |
1819 | | // steal the data from the Video to avoid an additional copy |
1820 | 0 | out_tex->pcData = reinterpret_cast<aiTexel *>(const_cast<Video &>(video).RelinquishContent()); |
1821 | | |
1822 | | // try to extract a hint from the file extension |
1823 | 0 | const std::string &filename = video.RelativeFilename().empty() ? video.FileName() : video.RelativeFilename(); |
1824 | 0 | std::string ext = BaseImporter::GetExtension(filename); |
1825 | |
1826 | 0 | if (ext == "jpeg") { |
1827 | 0 | ext = "jpg"; |
1828 | 0 | } |
1829 | |
1830 | 0 | if (ext.size() <= 3) { |
1831 | 0 | memcpy(out_tex->achFormatHint, ext.c_str(), ext.size()); |
1832 | 0 | } |
1833 | |
1834 | 0 | out_tex->mFilename.Set(filename.c_str()); |
1835 | |
1836 | 0 | return static_cast<unsigned int>(textures.size() - 1); |
1837 | 0 | } |
1838 | | |
1839 | 0 | aiString FBXConverter::GetTexturePath(const Texture *tex) { |
1840 | 0 | aiString path; |
1841 | 0 | path.Set(tex->RelativeFilename()); |
1842 | |
1843 | 0 | const Video *media = tex->Media(); |
1844 | 0 | if (media != nullptr) { |
1845 | 0 | bool textureReady = false; // tells whether the texture is ready (it was either just loaded or already converted) |
1846 | 0 | unsigned int index=0; |
1847 | |
1848 | 0 | VideoMap::const_iterator it = textures_converted.find(media); |
1849 | 0 | if (it != textures_converted.end()) { |
1850 | 0 | index = (*it).second; |
1851 | 0 | textureReady = true; |
1852 | 0 | } else { |
1853 | 0 | if (media->ContentLength() > 0) { |
1854 | 0 | index = ConvertVideo(*media); |
1855 | 0 | textures_converted[media] = index; |
1856 | 0 | textureReady = true; |
1857 | 0 | } |
1858 | 0 | } |
1859 | | |
1860 | | // setup texture reference string (copied from ColladaLoader::FindFilenameForEffectTexture), if the texture is ready |
1861 | 0 | if (doc.Settings().useLegacyEmbeddedTextureNaming) { |
1862 | 0 | if (textureReady) { |
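 | | // embedded textures are referenced by a "*<index>" string pointing into the scene's texture array |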
1863 | | // TODO: check the possibility of using the flag "AI_CONFIG_IMPORT_FBX_EMBEDDED_TEXTURES_LEGACY_NAMING" |
1864 | | // In FBX files, textures are now stored internally by Assimp with their filename included, |
1865 | | // so Assimp can look the loaded textures up once all data has been processed. |
1866 | | // All textures must be loaded before they are referenced, because the FBX file order may reference a texture before it is loaded. |
1867 | | // That may apply in this case too; it needs further study. |
1868 | 0 | path.data[0] = '*'; |
1869 | 0 | path.length = 1 + ASSIMP_itoa10(path.data + 1, AI_MAXLEN - 1, index); |
1870 | 0 | } |
1871 | 0 | } |
1872 | 0 | } |
1873 | |
1874 | 0 | return path; |
1875 | 0 | } |
1876 | | |
1877 | | void FBXConverter::TrySetTextureProperties(aiMaterial *out_mat, const TextureMap &_textures, |
1878 | | const std::string &propName, |
1879 | 0 | aiTextureType target, const MeshGeometry *const mesh) { |
1880 | 0 | TextureMap::const_iterator it = _textures.find(propName); |
1881 | 0 | if (it == _textures.end()) { |
1882 | 0 | return; |
1883 | 0 | } |
1884 | | |
1885 | 0 | const Texture *const tex = (*it).second; |
1886 | 0 | if (tex != nullptr) { |
1887 | 0 | aiString path = GetTexturePath(tex); |
1888 | 0 | out_mat->AddProperty(&path, _AI_MATKEY_TEXTURE_BASE, target, 0); |
1889 | |
1890 | 0 | aiUVTransform uvTrafo; |
1891 | | // XXX handle all kinds of UV transformations |
1892 | 0 | uvTrafo.mScaling = tex->UVScaling(); |
1893 | 0 | uvTrafo.mTranslation = tex->UVTranslation(); |
1894 | 0 | uvTrafo.mRotation = tex->UVRotation(); |
1895 | 0 | out_mat->AddProperty(&uvTrafo, 1, _AI_MATKEY_UVTRANSFORM_BASE, target, 0); |
1896 | |
1897 | 0 | const PropertyTable &props = tex->Props(); |
1898 | |
1899 | 0 | int uvIndex = 0; |
1900 | |
1901 | 0 | bool ok; |
1902 | 0 | const std::string &uvSet = PropertyGet<std::string>(props, "UVSet", ok); |
1903 | 0 | if (ok) { |
1904 | | // "default" is the name which usually appears in the FbxFileTexture template |
1905 | 0 | if (uvSet != "default" && uvSet.length()) { |
1906 | | // this is a bit awkward - we need to find a mesh that uses this |
1907 | | // material and scan its UV channels for the given UV name because |
1908 | | // assimp references UV channels by index, not by name. |
1909 | | |
1910 | | // XXX: the case that UV channels may appear in different orders |
1911 | | // in meshes is unhandled. A possible solution would be to sort |
1912 | | // the UV channels alphabetically, but this would have the side |
1913 | | // effect that the primary (first) UV channel would sometimes |
1914 | | // be moved, causing trouble when users read only the first |
1915 | | // UV channel and ignore UV channel assignments altogether. |
1916 | |
1917 | 0 | const unsigned int matIndex = static_cast<unsigned int>(std::distance(materials.begin(), |
1918 | 0 | std::find(materials.begin(), materials.end(), out_mat))); |
1919 | |
1920 | 0 | uvIndex = -1; |
1921 | 0 | if (!mesh) { |
1922 | 0 | for (const MeshMap::value_type &v : meshes_converted) { |
1923 | 0 | const MeshGeometry *const meshGeom = dynamic_cast<const MeshGeometry *>(v.first); |
1924 | 0 | if (!meshGeom) { |
1925 | 0 | continue; |
1926 | 0 | } |
1927 | | |
1928 | 0 | const MatIndexArray &mats = meshGeom->GetMaterialIndices(); |
1929 | 0 | MatIndexArray::const_iterator curIt = std::find(mats.begin(), mats.end(), (int) matIndex); |
1930 | 0 | if (curIt == mats.end()) { |
1931 | 0 | continue; |
1932 | 0 | } |
1933 | | |
1934 | 0 | int index = -1; |
1935 | 0 | for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) { |
1936 | 0 | if (meshGeom->GetTextureCoords(i).empty()) { |
1937 | 0 | break; |
1938 | 0 | } |
1939 | 0 | const std::string &name = meshGeom->GetTextureCoordChannelName(i); |
1940 | 0 | if (name == uvSet) { |
1941 | 0 | index = static_cast<int>(i); |
1942 | 0 | break; |
1943 | 0 | } |
1944 | 0 | } |
1945 | 0 | if (index == -1) { |
1946 | 0 | FBXImporter::LogWarn("did not find UV channel named ", uvSet, " in a mesh using this material"); |
1947 | 0 | continue; |
1948 | 0 | } |
1949 | | |
1950 | 0 | if (uvIndex == -1) { |
1951 | 0 | uvIndex = index; |
1952 | 0 | } else { |
1953 | 0 | FBXImporter::LogWarn("the UV channel named ", uvSet, |
1954 | 0 | " appears at different positions in meshes, results will be wrong"); |
1955 | 0 | } |
1956 | 0 | } |
1957 | 0 | } else { |
1958 | 0 | int index = -1; |
1959 | 0 | for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) { |
1960 | 0 | if (mesh->GetTextureCoords(i).empty()) { |
1961 | 0 | break; |
1962 | 0 | } |
1963 | 0 | const std::string &name = mesh->GetTextureCoordChannelName(i); |
1964 | 0 | if (name == uvSet) { |
1965 | 0 | index = static_cast<int>(i); |
1966 | 0 | break; |
1967 | 0 | } |
1968 | 0 | } |
1969 | 0 | if (index == -1) { |
1970 | 0 | FBXImporter::LogWarn("did not find UV channel named ", uvSet, " in a mesh using this material"); |
1971 | 0 | } |
1972 | |
1973 | 0 | if (uvIndex == -1) { |
1974 | 0 | uvIndex = index; |
1975 | 0 | } |
1976 | 0 | } |
1977 | |
1978 | 0 | if (uvIndex == -1) { |
1979 | 0 | FBXImporter::LogWarn("failed to resolve UV channel ", uvSet, ", using first UV channel"); |
1980 | 0 | uvIndex = 0; |
1981 | 0 | } |
1982 | 0 | } |
1983 | 0 | } |
1984 | |
1985 | 0 | out_mat->AddProperty(&uvIndex, 1, _AI_MATKEY_UVWSRC_BASE, target, 0); |
1986 | 0 | } |
1987 | 0 | } |
1988 | | |
1989 | | void FBXConverter::TrySetTextureProperties(aiMaterial *out_mat, const LayeredTextureMap &layeredTextures, |
1990 | | const std::string &propName, |
1991 | 0 | aiTextureType target, const MeshGeometry *const mesh) { |
1992 | 0 | LayeredTextureMap::const_iterator it = layeredTextures.find(propName); |
1993 | 0 | if (it == layeredTextures.end()) { |
1994 | 0 | return; |
1995 | 0 | } |
1996 | | |
1997 | 0 | int texCount = (*it).second->textureCount(); |
1998 | | |
1999 | | // Set the blend mode for layered textures |
2000 | 0 | int blendmode = (*it).second->GetBlendMode(); |
2001 | 0 | out_mat->AddProperty(&blendmode, 1, _AI_MATKEY_TEXOP_BASE, target, 0); |
2002 | |
2003 | 0 | for (int texIndex = 0; texIndex < texCount; texIndex++) { |
2004 | |
2005 | 0 | const Texture *const tex = (*it).second->getTexture(texIndex); |
2006 | |
2007 | 0 | aiString path = GetTexturePath(tex); |
2008 | 0 | out_mat->AddProperty(&path, _AI_MATKEY_TEXTURE_BASE, target, texIndex); |
2009 | |
2010 | 0 | aiUVTransform uvTrafo; |
2011 | | // XXX handle all kinds of UV transformations |
2012 | 0 | uvTrafo.mScaling = tex->UVScaling(); |
2013 | 0 | uvTrafo.mTranslation = tex->UVTranslation(); |
2014 | 0 | uvTrafo.mRotation = tex->UVRotation(); |
2015 | 0 | out_mat->AddProperty(&uvTrafo, 1, _AI_MATKEY_UVTRANSFORM_BASE, target, texIndex); |
2016 | |
2017 | 0 | const PropertyTable &props = tex->Props(); |
2018 | |
2019 | 0 | int uvIndex = 0; |
2020 | |
2021 | 0 | bool ok; |
2022 | 0 | const std::string &uvSet = PropertyGet<std::string>(props, "UVSet", ok); |
2023 | 0 | if (ok) { |
2024 | | // "default" is the name which usually appears in the FbxFileTexture template |
2025 | 0 | if (uvSet != "default" && uvSet.length()) { |
2026 | | // this is a bit awkward - we need to find a mesh that uses this |
2027 | | // material and scan its UV channels for the given UV name because |
2028 | | // assimp references UV channels by index, not by name. |
2029 | | |
2030 | | // XXX: the case that UV channels may appear in different orders |
2031 | | // in meshes is unhandled. A possible solution would be to sort |
2032 | | // the UV channels alphabetically, but this would have the side |
2033 | | // effect that the primary (first) UV channel would sometimes |
2034 | | // be moved, causing trouble when users read only the first |
2035 | | // UV channel and ignore UV channel assignments altogether. |
2036 | |
2037 | 0 | const unsigned int matIndex = static_cast<unsigned int>(std::distance(materials.begin(), |
2038 | 0 | std::find(materials.begin(), materials.end(), out_mat))); |
2039 | |
2040 | 0 | uvIndex = -1; |
2041 | 0 | if (!mesh) { |
2042 | 0 | for (const MeshMap::value_type &v : meshes_converted) { |
2043 | 0 | const MeshGeometry *const meshGeom = dynamic_cast<const MeshGeometry *>(v.first); |
2044 | 0 | if (!meshGeom) { |
2045 | 0 | continue; |
2046 | 0 | } |
2047 | | |
2048 | 0 | const MatIndexArray &mats = meshGeom->GetMaterialIndices(); |
2049 | 0 | MatIndexArray::const_iterator curIt = std::find(mats.begin(), mats.end(), (int) matIndex); |
2050 | 0 | if ( curIt == mats.end()) { |
2051 | 0 | continue; |
2052 | 0 | } |
2053 | | |
2054 | 0 | int index = -1; |
2055 | 0 | for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) { |
2056 | 0 | if (meshGeom->GetTextureCoords(i).empty()) { |
2057 | 0 | break; |
2058 | 0 | } |
2059 | 0 | const std::string &name = meshGeom->GetTextureCoordChannelName(i); |
2060 | 0 | if (name == uvSet) { |
2061 | 0 | index = static_cast<int>(i); |
2062 | 0 | break; |
2063 | 0 | } |
2064 | 0 | } |
2065 | 0 | if (index == -1) { |
2066 | 0 | FBXImporter::LogWarn("did not find UV channel named ", uvSet, " in a mesh using this material"); |
2067 | 0 | continue; |
2068 | 0 | } |
2069 | | |
2070 | 0 | if (uvIndex == -1) { |
2071 | 0 | uvIndex = index; |
2072 | 0 | } else { |
2073 | 0 | FBXImporter::LogWarn("the UV channel named ", uvSet, |
2074 | 0 | " appears at different positions in meshes, results will be wrong"); |
2075 | 0 | } |
2076 | 0 | } |
2077 | 0 | } else { |
2078 | 0 | int index = -1; |
2079 | 0 | for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) { |
2080 | 0 | if (mesh->GetTextureCoords(i).empty()) { |
2081 | 0 | break; |
2082 | 0 | } |
2083 | 0 | const std::string &name = mesh->GetTextureCoordChannelName(i); |
2084 | 0 | if (name == uvSet) { |
2085 | 0 | index = static_cast<int>(i); |
2086 | 0 | break; |
2087 | 0 | } |
2088 | 0 | } |
2089 | 0 | if (index == -1) { |
2090 | 0 | FBXImporter::LogWarn("did not find UV channel named ", uvSet, " in a mesh using this material"); |
2091 | 0 | } |
2092 | |
2093 | 0 | if (uvIndex == -1) { |
2094 | 0 | uvIndex = index; |
2095 | 0 | } |
2096 | 0 | } |
2097 | |
2098 | 0 | if (uvIndex == -1) { |
2099 | 0 | FBXImporter::LogWarn("failed to resolve UV channel ", uvSet, ", using first UV channel"); |
2100 | 0 | uvIndex = 0; |
2101 | 0 | } |
2102 | 0 | } |
2103 | 0 | } |
2104 | |
2105 | 0 | out_mat->AddProperty(&uvIndex, 1, _AI_MATKEY_UVWSRC_BASE, target, texIndex); |
2106 | 0 | } |
2107 | 0 | } |
2108 | | |
2109 | 0 | void FBXConverter::SetTextureProperties(aiMaterial *out_mat, const TextureMap &_textures, const MeshGeometry *const mesh) { |
2110 | 0 | TrySetTextureProperties(out_mat, _textures, "DiffuseColor", aiTextureType_DIFFUSE, mesh); |
2111 | 0 | TrySetTextureProperties(out_mat, _textures, "AmbientColor", aiTextureType_AMBIENT, mesh); |
2112 | 0 | TrySetTextureProperties(out_mat, _textures, "EmissiveColor", aiTextureType_EMISSIVE, mesh); |
2113 | 0 | TrySetTextureProperties(out_mat, _textures, "SpecularColor", aiTextureType_SPECULAR, mesh); |
2114 | 0 | TrySetTextureProperties(out_mat, _textures, "SpecularFactor", aiTextureType_SPECULAR, mesh); |
2115 | 0 | TrySetTextureProperties(out_mat, _textures, "TransparentColor", aiTextureType_OPACITY, mesh); |
2116 | 0 | TrySetTextureProperties(out_mat, _textures, "ReflectionColor", aiTextureType_REFLECTION, mesh); |
2117 | 0 | TrySetTextureProperties(out_mat, _textures, "DisplacementColor", aiTextureType_DISPLACEMENT, mesh); |
2118 | 0 | TrySetTextureProperties(out_mat, _textures, "NormalMap", aiTextureType_NORMALS, mesh); |
2119 | 0 | TrySetTextureProperties(out_mat, _textures, "Bump", aiTextureType_HEIGHT, mesh); |
2120 | 0 | TrySetTextureProperties(out_mat, _textures, "ShininessExponent", aiTextureType_SHININESS, mesh); |
2121 | 0 | TrySetTextureProperties(out_mat, _textures, "TransparencyFactor", aiTextureType_OPACITY, mesh); |
2122 | 0 | TrySetTextureProperties(out_mat, _textures, "EmissiveFactor", aiTextureType_EMISSIVE, mesh); |
2123 | 0 | TrySetTextureProperties(out_mat, _textures, "ReflectionFactor", aiTextureType_METALNESS, mesh); |
2124 | | //Maya counterparts |
2125 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|DiffuseTexture", aiTextureType_DIFFUSE, mesh); |
2126 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|NormalTexture", aiTextureType_NORMALS, mesh); |
2127 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|SpecularTexture", aiTextureType_SPECULAR, mesh); |
2128 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|FalloffTexture", aiTextureType_OPACITY, mesh); |
2129 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|ReflectionMapTexture", aiTextureType_REFLECTION, mesh); |
2130 | | |
2131 | | // Maya PBR |
2132 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|baseColor", aiTextureType_BASE_COLOR, mesh); |
2133 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|normalCamera", aiTextureType_NORMAL_CAMERA, mesh); |
2134 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|emissionColor", aiTextureType_EMISSION_COLOR, mesh); |
2135 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|metalness", aiTextureType_METALNESS, mesh); |
2136 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|diffuseRoughness", aiTextureType_DIFFUSE_ROUGHNESS, mesh); |
2137 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|base", aiTextureType_MAYA_BASE, mesh); |
2138 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|specular", aiTextureType_MAYA_SPECULAR, mesh); |
2139 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|specularColor", aiTextureType_MAYA_SPECULAR_COLOR, mesh); |
2140 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|specularRoughness", aiTextureType_MAYA_SPECULAR_ROUGHNESS, mesh); |
2141 | | |
2142 | | // Maya stingray |
2143 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|TEX_color_map", aiTextureType_BASE_COLOR, mesh); |
2144 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|TEX_normal_map", aiTextureType_NORMAL_CAMERA, mesh); |
2145 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|TEX_emissive_map", aiTextureType_EMISSION_COLOR, mesh); |
2146 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|TEX_metallic_map", aiTextureType_METALNESS, mesh); |
2147 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|TEX_roughness_map", aiTextureType_DIFFUSE_ROUGHNESS, mesh); |
2148 | 0 | TrySetTextureProperties(out_mat, _textures, "Maya|TEX_ao_map", aiTextureType_AMBIENT_OCCLUSION, mesh); |
2149 | | |
2150 | | // 3DSMax Physical material |
2151 | 0 | TrySetTextureProperties(out_mat, _textures, "3dsMax|Parameters|base_color_map", aiTextureType_BASE_COLOR, mesh); |
2152 | 0 | TrySetTextureProperties(out_mat, _textures, "3dsMax|Parameters|bump_map", aiTextureType_NORMAL_CAMERA, mesh); |
2153 | 0 | TrySetTextureProperties(out_mat, _textures, "3dsMax|Parameters|emission_map", aiTextureType_EMISSION_COLOR, mesh); |
2154 | 0 | TrySetTextureProperties(out_mat, _textures, "3dsMax|Parameters|metalness_map", aiTextureType_METALNESS, mesh); |
2155 | 0 | TrySetTextureProperties(out_mat, _textures, "3dsMax|Parameters|roughness_map", aiTextureType_DIFFUSE_ROUGHNESS, mesh); |
2156 | | |
2157 | | // 3DSMax PBR materials |
2158 | 0 | TrySetTextureProperties(out_mat, _textures, "3dsMax|main|base_color_map", aiTextureType_BASE_COLOR, mesh); |
2159 | 0 | TrySetTextureProperties(out_mat, _textures, "3dsMax|main|norm_map", aiTextureType_NORMAL_CAMERA, mesh); |
2160 | 0 | TrySetTextureProperties(out_mat, _textures, "3dsMax|main|emit_color_map", aiTextureType_EMISSION_COLOR, mesh); |
2161 | 0 | TrySetTextureProperties(out_mat, _textures, "3dsMax|main|ao_map", aiTextureType_AMBIENT_OCCLUSION, mesh); |
2162 | 0 | TrySetTextureProperties(out_mat, _textures, "3dsMax|main|opacity_map", aiTextureType_OPACITY, mesh); |
2163 | | // Metalness/Roughness material type |
2164 | 0 | TrySetTextureProperties(out_mat, _textures, "3dsMax|main|metalness_map", aiTextureType_METALNESS, mesh); |
2165 | | // Specular/Gloss material type |
2166 | 0 | TrySetTextureProperties(out_mat, _textures, "3dsMax|main|specular_map", aiTextureType_SPECULAR, mesh); |
2167 | | |
2168 | | // Glossiness vs roughness in 3ds Max Pbr Materials |
2169 | 0 | int useGlossiness; |
2170 | 0 | if (out_mat->Get("$raw.3dsMax|main|useGlossiness", aiTextureType_NONE, 0, useGlossiness) == aiReturn_SUCCESS) { |
2171 | | // These textures swap meaning if ((useGlossiness == 1) != (material type is Specular/Gloss)) |
2172 | 0 | if (useGlossiness == 1) { |
2173 | 0 | TrySetTextureProperties(out_mat, _textures, "3dsMax|main|roughness_map", aiTextureType_SHININESS, mesh); |
2174 | 0 | TrySetTextureProperties(out_mat, _textures, "3dsMax|main|glossiness_map", aiTextureType_SHININESS, mesh); |
2175 | 0 | } |
2176 | 0 | else if (useGlossiness == 2) { |
2177 | 0 | TrySetTextureProperties(out_mat, _textures, "3dsMax|main|roughness_map", aiTextureType_DIFFUSE_ROUGHNESS, mesh); |
2178 | 0 | TrySetTextureProperties(out_mat, _textures, "3dsMax|main|glossiness_map", aiTextureType_DIFFUSE_ROUGHNESS, mesh); |
2179 | 0 | } |
2180 | 0 | else { |
2181 | 0 | FBXImporter::LogWarn("A 3dsMax Pbr Material must have a useGlossiness value to correctly interpret roughness and glossiness textures."); |
2182 | 0 | } |
2183 | 0 | } |
2184 | 0 | } |
2185 | | |
2186 | 0 | void FBXConverter::SetTextureProperties(aiMaterial *out_mat, const LayeredTextureMap &layeredTextures, const MeshGeometry *const mesh) { |
2187 | 0 | TrySetTextureProperties(out_mat, layeredTextures, "DiffuseColor", aiTextureType_DIFFUSE, mesh); |
2188 | 0 | TrySetTextureProperties(out_mat, layeredTextures, "AmbientColor", aiTextureType_AMBIENT, mesh); |
2189 | 0 | TrySetTextureProperties(out_mat, layeredTextures, "EmissiveColor", aiTextureType_EMISSIVE, mesh); |
2190 | 0 | TrySetTextureProperties(out_mat, layeredTextures, "SpecularColor", aiTextureType_SPECULAR, mesh); |
2191 | 0 | TrySetTextureProperties(out_mat, layeredTextures, "SpecularFactor", aiTextureType_SPECULAR, mesh); |
2192 | 0 | TrySetTextureProperties(out_mat, layeredTextures, "TransparentColor", aiTextureType_OPACITY, mesh); |
2193 | 0 | TrySetTextureProperties(out_mat, layeredTextures, "ReflectionColor", aiTextureType_REFLECTION, mesh); |
2194 | 0 | TrySetTextureProperties(out_mat, layeredTextures, "DisplacementColor", aiTextureType_DISPLACEMENT, mesh); |
2195 | 0 | TrySetTextureProperties(out_mat, layeredTextures, "NormalMap", aiTextureType_NORMALS, mesh); |
2196 | 0 | TrySetTextureProperties(out_mat, layeredTextures, "Bump", aiTextureType_HEIGHT, mesh); |
2197 | 0 | TrySetTextureProperties(out_mat, layeredTextures, "ShininessExponent", aiTextureType_SHININESS, mesh); |
2198 | 0 | TrySetTextureProperties(out_mat, layeredTextures, "EmissiveFactor", aiTextureType_EMISSIVE, mesh); |
2199 | 0 | TrySetTextureProperties(out_mat, layeredTextures, "TransparencyFactor", aiTextureType_OPACITY, mesh); |
2200 | 0 | TrySetTextureProperties(out_mat, layeredTextures, "ReflectionFactor", aiTextureType_METALNESS, mesh); |
2201 | 0 | } |
2202 | | |
2203 | | aiColor3D FBXConverter::GetColorPropertyFactored(const PropertyTable &props, const std::string &colorName, |
2204 | 0 | const std::string &factorName, bool &result, bool useTemplate) { |
2205 | 0 | result = true; |
2206 | |
2207 | 0 | bool ok; |
2208 | 0 | aiVector3D BaseColor = PropertyGet<aiVector3D>(props, colorName, ok, useTemplate); |
2209 | 0 | if (!ok) { |
2210 | 0 | result = false; |
2211 | 0 | return aiColor3D(0.0f, 0.0f, 0.0f); |
2212 | 0 | } |
2213 | | |
2214 | | // if no factor name, return the colour as is |
2215 | 0 | if (factorName.empty()) { |
2216 | 0 | return aiColor3D(BaseColor.x, BaseColor.y, BaseColor.z); |
2217 | 0 | } |
2218 | | |
2219 | | // otherwise it should be multiplied by the factor, if found. |
2220 | 0 | float factor = PropertyGet<float>(props, factorName, ok, useTemplate); |
2221 | 0 | if (ok) { |
2222 | 0 | BaseColor *= factor; |
2223 | 0 | } |
2224 | 0 | return aiColor3D(BaseColor.x, BaseColor.y, BaseColor.z); |
2225 | 0 | } |
2226 | | |
2227 | | aiColor3D FBXConverter::GetColorPropertyFromMaterial(const PropertyTable &props, const std::string &baseName, |
2228 | 0 | bool &result) { |
2229 | 0 | return GetColorPropertyFactored(props, baseName + "Color", baseName + "Factor", result, true); |
2230 | 0 | } |
2231 | | |
2232 | | aiColor3D FBXConverter::GetColorProperty(const PropertyTable &props, const std::string &colorName, |
2233 | 0 | bool &result, bool useTemplate) { |
2234 | 0 | result = true; |
2235 | 0 | bool ok; |
2236 | 0 | const aiVector3D &ColorVec = PropertyGet<aiVector3D>(props, colorName, ok, useTemplate); |
2237 | 0 | if (!ok) { |
2238 | 0 | result = false; |
2239 | 0 | return aiColor3D(0.0f, 0.0f, 0.0f); |
2240 | 0 | } |
2241 | 0 | return aiColor3D(ColorVec.x, ColorVec.y, ColorVec.z); |
2242 | 0 | } |
2243 | | |
2244 | 0 | void FBXConverter::SetShadingPropertiesCommon(aiMaterial *out_mat, const PropertyTable &props) { |
2245 | | // Set shading properties. |
2246 | | // Modern FBX Files have two separate systems for defining these, |
2247 | | // with only the more comprehensive one described in the property template. |
2248 | | // Likely the other values are a legacy system, |
2249 | | // which is still always exported by the official FBX SDK. |
2250 | | // |
2251 | | // Blender's FBX import and export mostly ignore this legacy system, |
2252 | | // and as we only support recent versions of FBX anyway, we can do the same. |
2253 | 0 | bool ok; |
2254 | |
2255 | 0 | const aiColor3D &Diffuse = GetColorPropertyFromMaterial(props, "Diffuse", ok); |
2256 | 0 | if (ok) { |
2257 | 0 | out_mat->AddProperty(&Diffuse, 1, AI_MATKEY_COLOR_DIFFUSE); |
2258 | 0 | } |
2259 | |
2260 | 0 | const aiColor3D &Emissive = GetColorPropertyFromMaterial(props, "Emissive", ok); |
2261 | 0 | if (ok) { |
2262 | 0 | out_mat->AddProperty(&Emissive, 1, AI_MATKEY_COLOR_EMISSIVE); |
2263 | 0 | } else { |
2264 | 0 | const aiColor3D &emissiveColor = GetColorProperty(props, "Maya|emissive", ok); |
2265 | 0 | if (ok) { |
2266 | 0 | out_mat->AddProperty(&emissiveColor, 1, AI_MATKEY_COLOR_EMISSIVE); |
2267 | 0 | } |
2268 | 0 | } |
2269 | |
2270 | 0 | const aiColor3D &Ambient = GetColorPropertyFromMaterial(props, "Ambient", ok); |
2271 | 0 | if (ok) { |
2272 | 0 | out_mat->AddProperty(&Ambient, 1, AI_MATKEY_COLOR_AMBIENT); |
2273 | 0 | } |
2274 | | |
2275 | | // we store specular factor as SHININESS_STRENGTH, so just get the color |
2276 | 0 | const aiColor3D &Specular = GetColorProperty(props, "SpecularColor", ok, true); |
2277 | 0 | if (ok) { |
2278 | 0 | out_mat->AddProperty(&Specular, 1, AI_MATKEY_COLOR_SPECULAR); |
2279 | 0 | } |
2280 | | |
2281 | | // and also try to get SHININESS_STRENGTH |
2282 | 0 | const float SpecularFactor = PropertyGet<float>(props, "SpecularFactor", ok, true); |
2283 | 0 | if (ok) { |
2284 | 0 | out_mat->AddProperty(&SpecularFactor, 1, AI_MATKEY_SHININESS_STRENGTH); |
2285 | 0 | } |
2286 | | |
2287 | | // and the specular exponent |
2288 | 0 | const float ShininessExponent = PropertyGet<float>(props, "ShininessExponent", ok); |
2289 | 0 | if (ok) { |
2290 | 0 | out_mat->AddProperty(&ShininessExponent, 1, AI_MATKEY_SHININESS); |
2291 | | // Match Blender behavior to extract roughness when only shininess is present |
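 | | // equivalently: ShininessExponent = 100 * (1 - roughness)^2 |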
2292 | 0 | const float roughness = 1.0f - (sqrt(ShininessExponent) / 10.0f); |
2293 | 0 | out_mat->AddProperty(&roughness, 1, AI_MATKEY_ROUGHNESS_FACTOR); |
2294 | 0 | } |
2295 | | |
2296 | | // TransparentColor / TransparencyFactor... gee thanks FBX :rolleyes: |
2297 | 0 | const aiColor3D &Transparent = GetColorPropertyFactored(props, "TransparentColor", "TransparencyFactor", ok); |
2298 | 0 | float CalculatedOpacity = 1.0f; |
2299 | 0 | if (ok) { |
2300 | 0 | out_mat->AddProperty(&Transparent, 1, AI_MATKEY_COLOR_TRANSPARENT); |
2301 | | // as calculated by FBX SDK 2017: |
2302 | 0 | CalculatedOpacity = 1.0f - ((Transparent.r + Transparent.g + Transparent.b) / 3.0f); |
2303 | 0 | } |
2304 | | |
2305 | | // try to get the transparency factor |
2306 | 0 | const float TransparencyFactor = PropertyGet<float>(props, "TransparencyFactor", ok); |
2307 | 0 | if (ok) { |
2308 | 0 | out_mat->AddProperty(&TransparencyFactor, 1, AI_MATKEY_TRANSPARENCYFACTOR); |
2309 | 0 | } |
2310 | | |
2311 | | // use of TransparencyFactor is inconsistent. |
2312 | | // Maya always stores it as 1.0, |
2313 | | // so we can't use it to set AI_MATKEY_OPACITY. |
2314 | | // Blender is more sensible and stores it as the alpha value. |
2315 | | // However both the FBX SDK and Blender always write an additional |
2316 | | // legacy "Opacity" field, so we can try to use that. |
2317 | | // |
2318 | | // If we can't find it, |
2319 | | // we can fall back to the value which the FBX SDK calculates |
2320 | | // from transparency colour (RGB) and factor (F) as |
2321 | | // 1.0 - F*((R+G+B)/3). |
2322 | | // |
2323 | | // There's no consistent way to interpret this opacity value, |
2324 | | // so it's up to clients to do the correct thing. |
2325 | 0 | const float Opacity = PropertyGet<float>(props, "Opacity", ok); |
2326 | 0 | if (ok) { |
2327 | 0 | out_mat->AddProperty(&Opacity, 1, AI_MATKEY_OPACITY); |
2328 | 0 | } else if (CalculatedOpacity != 1.0) { |
2329 | 0 | out_mat->AddProperty(&CalculatedOpacity, 1, AI_MATKEY_OPACITY); |
2330 | 0 | } |
2331 | | |
2332 | | // reflection color and factor are stored separately |
2333 | 0 | const aiColor3D &Reflection = GetColorProperty(props, "ReflectionColor", ok, true); |
2334 | 0 | if (ok) { |
2335 | 0 | out_mat->AddProperty(&Reflection, 1, AI_MATKEY_COLOR_REFLECTIVE); |
2336 | 0 | } |
2337 | |
2338 | 0 | float ReflectionFactor = PropertyGet<float>(props, "ReflectionFactor", ok, true); |
2339 | 0 | if (ok) { |
2340 | 0 | out_mat->AddProperty(&ReflectionFactor, 1, AI_MATKEY_REFLECTIVITY); |
2341 | 0 | } |
2342 | |
2343 | 0 | const float BumpFactor = PropertyGet<float>(props, "BumpFactor", ok); |
2344 | 0 | if (ok) { |
2345 | 0 | out_mat->AddProperty(&BumpFactor, 1, AI_MATKEY_BUMPSCALING); |
2346 | 0 | } |
2347 | |
2348 | 0 | const float DispFactor = PropertyGet<float>(props, "DisplacementFactor", ok); |
2349 | 0 | if (ok) { |
2350 | 0 | out_mat->AddProperty(&DispFactor, 1, "$mat.displacementscaling", 0, 0); |
2351 | 0 | } |
2352 | | |
2353 | | // PBR material information |
2354 | 0 | const aiColor3D &baseColor = GetColorProperty(props, "Maya|base_color", ok); |
2355 | 0 | if (ok) { |
2356 | 0 | out_mat->AddProperty(&baseColor, 1, AI_MATKEY_BASE_COLOR); |
2357 | 0 | } |
2358 | |
2359 | 0 | const float useColorMap = PropertyGet<float>(props, "Maya|use_color_map", ok); |
2360 | 0 | if (ok) { |
2361 | 0 | out_mat->AddProperty(&useColorMap, 1, AI_MATKEY_USE_COLOR_MAP); |
2362 | 0 | } |
2363 | |
2364 | 0 | const float useMetallicMap = PropertyGet<float>(props, "Maya|use_metallic_map", ok); |
2365 | 0 | if (ok) { |
2366 | 0 | out_mat->AddProperty(&useMetallicMap, 1, AI_MATKEY_USE_METALLIC_MAP); |
2367 | 0 | } |
2368 | |
2369 | 0 | const float metallicFactor = PropertyGet<float>(props, "Maya|metallic", ok); |
2370 | 0 | if (ok) { |
2371 | 0 | out_mat->AddProperty(&metallicFactor, 1, AI_MATKEY_METALLIC_FACTOR); |
2372 | 0 | } |
2373 | |
2374 | 0 | const float useRoughnessMap = PropertyGet<float>(props, "Maya|use_roughness_map", ok); |
2375 | 0 | if (ok) { |
2376 | 0 | out_mat->AddProperty(&useRoughnessMap, 1, AI_MATKEY_USE_ROUGHNESS_MAP); |
2377 | 0 | } |
2378 | |
2379 | 0 | const float roughnessFactor = PropertyGet<float>(props, "Maya|roughness", ok); |
2380 | 0 | if (ok) { |
2381 | 0 | out_mat->AddProperty(&roughnessFactor, 1, AI_MATKEY_ROUGHNESS_FACTOR); |
2382 | 0 | } |
2383 | |
2384 | 0 | const float useEmissiveMap = PropertyGet<float>(props, "Maya|use_emissive_map", ok); |
2385 | 0 | if (ok) { |
2386 | 0 | out_mat->AddProperty(&useEmissiveMap, 1, AI_MATKEY_USE_EMISSIVE_MAP); |
2387 | 0 | } |
2388 | |
2389 | 0 | const float emissiveIntensity = PropertyGet<float>(props, "Maya|emissive_intensity", ok); |
2390 | 0 | if (ok) { |
2391 | 0 | out_mat->AddProperty(&emissiveIntensity, 1, AI_MATKEY_EMISSIVE_INTENSITY); |
2392 | 0 | } |
2393 | |
2394 | 0 | const float useAOMap = PropertyGet<float>(props, "Maya|use_ao_map", ok); |
2395 | 0 | if (ok) { |
2396 | 0 | out_mat->AddProperty(&useAOMap, 1, AI_MATKEY_USE_AO_MAP); |
2397 | 0 | } |
2398 | 0 | } |
2399 | | |
2400 | 0 | void FBXConverter::SetShadingPropertiesRaw(aiMaterial *out_mat, const PropertyTable &props, const TextureMap &_textures, const MeshGeometry *const mesh) { |
2401 | | // Add all the unparsed properties with a "$raw." prefix |
2402 | |
2403 | 0 | const std::string prefix = "$raw."; |
2404 | |
2405 | 0 | for (const DirectPropertyMap::value_type &prop : props.GetUnparsedProperties()) { |
2406 | |
2407 | 0 | std::string name = prefix + prop.first; |
2408 | |
2409 | 0 | if (const TypedProperty<aiVector3D> *interpretedVec3 = prop.second->As<TypedProperty<aiVector3D>>()) { |
2410 | 0 | out_mat->AddProperty(&interpretedVec3->Value(), 1, name.c_str(), 0, 0); |
2411 | 0 | } else if (const TypedProperty<aiColor3D> *interpretedCol3 = prop.second->As<TypedProperty<aiColor3D>>()) { |
2412 | 0 | out_mat->AddProperty(&interpretedCol3->Value(), 1, name.c_str(), 0, 0); |
2413 | 0 | } else if (const TypedProperty<aiColor4D> *interpretedCol4 = prop.second->As<TypedProperty<aiColor4D>>()) { |
2414 | 0 | out_mat->AddProperty(&interpretedCol4->Value(), 1, name.c_str(), 0, 0); |
2415 | 0 | } else if (const TypedProperty<float> *interpretedFloat = prop.second->As<TypedProperty<float>>()) { |
2416 | 0 | out_mat->AddProperty(&interpretedFloat->Value(), 1, name.c_str(), 0, 0); |
2417 | 0 | } else if (const TypedProperty<int> *interpretedInt = prop.second->As<TypedProperty<int>>()) { |
2418 | 0 | out_mat->AddProperty(&interpretedInt->Value(), 1, name.c_str(), 0, 0); |
2419 | 0 | } else if (const TypedProperty<bool> *interpretedBool = prop.second->As<TypedProperty<bool>>()) { |
2420 | 0 | int value = interpretedBool->Value() ? 1 : 0; |
2421 | 0 | out_mat->AddProperty(&value, 1, name.c_str(), 0, 0); |
2422 | 0 | } else if (const TypedProperty<std::string> *interpretedString = prop.second->As<TypedProperty<std::string>>()) { |
2423 | 0 | const aiString value = aiString(interpretedString->Value()); |
2424 | 0 | out_mat->AddProperty(&value, name.c_str(), 0, 0); |
2425 | 0 | } |
2426 | 0 | } |
2427 | | |
2428 | | // Add the textures' properties |
2429 | |
|
2430 | 0 | for (TextureMap::const_iterator it = _textures.begin(); it != _textures.end(); ++it) { |
2431 | |
|
2432 | 0 | std::string name = prefix + it->first; |
2433 | |
|
2434 | 0 | const Texture *const tex = it->second; |
2435 | 0 | if (tex != nullptr) { |
2436 | 0 | aiString path; |
2437 | 0 | path.Set(tex->RelativeFilename()); |
2438 | |
|
2439 | 0 | const Video *media = tex->Media(); |
2440 | 0 | if (media != nullptr && media->ContentLength() > 0) { |
2441 | 0 | unsigned int index; |
2442 | |
|
2443 | 0 | VideoMap::const_iterator videoIt = textures_converted.find(media); |
2444 | 0 | if (videoIt != textures_converted.end()) { |
2445 | 0 | index = videoIt->second; |
2446 | 0 | } else { |
2447 | 0 | index = ConvertVideo(*media); |
2448 | 0 | textures_converted[media] = index; |
2449 | 0 | } |
2450 | | |
2451 | | // setup texture reference string (copied from ColladaLoader::FindFilenameForEffectTexture) |
2452 | 0 | path.data[0] = '*'; |
2453 | 0 | path.length = 1 + ASSIMP_itoa10(path.data + 1, AI_MAXLEN - 1, index); |
2454 | 0 | } |
2455 | |
|
2456 | 0 | out_mat->AddProperty(&path, (name + "|file").c_str(), aiTextureType_UNKNOWN, 0); |
2457 | |
|
2458 | 0 | aiUVTransform uvTrafo; |
2459 | | // XXX handle all kinds of UV transformations |
2460 | 0 | uvTrafo.mScaling = tex->UVScaling(); |
2461 | 0 | uvTrafo.mTranslation = tex->UVTranslation(); |
2462 | 0 | uvTrafo.mRotation = tex->UVRotation(); |
2463 | 0 | out_mat->AddProperty(&uvTrafo, 1, (name + "|uvtrafo").c_str(), aiTextureType_UNKNOWN, 0); |
2464 | |
|
2465 | 0 | int uvIndex = 0; |
2466 | |
|
2467 | 0 | bool uvFound = false; |
2468 | 0 | const std::string &uvSet = PropertyGet<std::string>(tex->Props(), "UVSet", uvFound); |
2469 | 0 | if (uvFound) { |
2470 | | // "default" is the name which usually appears in the FbxFileTexture template |
2471 | 0 | if (uvSet != "default" && uvSet.length()) { |
2472 | | // this is a bit awkward - we need to find a mesh that uses this |
2473 | | // material and scan its UV channels for the given UV name because |
2474 | | // assimp references UV channels by index, not by name. |
2475 | | |
2476 | | // XXX: the case that UV channels may appear in different orders |
2477 | | // in meshes is unhandled. A possible solution would be to sort |
2478 | | // the UV channels alphabetically, but this would have the side |
2479 | | // effect that the primary (first) UV channel would sometimes |
2480 | | // be moved, causing trouble when users read only the first |
2481 | | // UV channel and ignore UV channel assignments altogether. |
2482 | |
|
2483 | 0 | std::vector<aiMaterial *>::iterator materialIt = std::find(materials.begin(), materials.end(), out_mat); |
2484 | 0 | const unsigned int matIndex = static_cast<unsigned int>(std::distance(materials.begin(), materialIt)); |
2485 | |
|
2486 | 0 | uvIndex = -1; |
2487 | 0 | if (!mesh) { |
2488 | 0 | for (const MeshMap::value_type &v : meshes_converted) { |
2489 | 0 | const MeshGeometry *const meshGeom = dynamic_cast<const MeshGeometry *>(v.first); |
2490 | 0 | if (!meshGeom) { |
2491 | 0 | continue; |
2492 | 0 | } |
2493 | | |
2494 | 0 | const MatIndexArray &mats = meshGeom->GetMaterialIndices(); |
2495 | 0 | if (std::find(mats.begin(), mats.end(), (int)matIndex) == mats.end()) { |
2496 | 0 | continue; |
2497 | 0 | } |
2498 | | |
2499 | 0 | int index = -1; |
2500 | 0 | for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) { |
2501 | 0 | if (meshGeom->GetTextureCoords(i).empty()) { |
2502 | 0 | break; |
2503 | 0 | } |
2504 | 0 | const std::string &curName = meshGeom->GetTextureCoordChannelName(i); |
2505 | 0 | if (curName == uvSet) { |
2506 | 0 | index = static_cast<int>(i); |
2507 | 0 | break; |
2508 | 0 | } |
2509 | 0 | } |
2510 | 0 | if (index == -1) { |
2511 | 0 | FBXImporter::LogWarn("did not find UV channel named ", uvSet, " in a mesh using this material"); |
2512 | 0 | continue; |
2513 | 0 | } |
2514 | | |
2515 | 0 | if (uvIndex == -1) { |
2516 | 0 | uvIndex = index; |
2517 | 0 | } else { |
2518 | 0 | FBXImporter::LogWarn("the UV channel named ", uvSet, " appears at different positions in meshes, results will be wrong"); |
2519 | 0 | } |
2520 | 0 | } |
2521 | 0 | } else { |
2522 | 0 | int index = -1; |
2523 | 0 | for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) { |
2524 | 0 | if (mesh->GetTextureCoords(i).empty()) { |
2525 | 0 | break; |
2526 | 0 | } |
2527 | 0 | const std::string &curName = mesh->GetTextureCoordChannelName(i); |
2528 | 0 | if (curName == uvSet) { |
2529 | 0 | index = static_cast<int>(i); |
2530 | 0 | break; |
2531 | 0 | } |
2532 | 0 | } |
2533 | 0 | if (index == -1) { |
2534 | 0 | FBXImporter::LogWarn("did not find UV channel named ", uvSet, " in a mesh using this material"); |
2535 | 0 | } |
2536 | |
|
2537 | 0 | if (uvIndex == -1) { |
2538 | 0 | uvIndex = index; |
2539 | 0 | } |
2540 | 0 | } |
2541 | |
|
2542 | 0 | if (uvIndex == -1) { |
2543 | 0 | FBXImporter::LogWarn("failed to resolve UV channel ", uvSet, ", using first UV channel"); |
2544 | 0 | uvIndex = 0; |
2545 | 0 | } |
2546 | 0 | } |
2547 | 0 | } |
2548 | |
|
2549 | 0 | out_mat->AddProperty(&uvIndex, 1, (name + "|uvwsrc").c_str(), aiTextureType_UNKNOWN, 0); |
2550 | 0 | } |
2551 | 0 | } |
2552 | 0 | } |
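// Usage sketch (not part of the original source): how a consumer might read one of the
// "$raw." keys back from the generated aiMaterial. The key name below is a made-up
// example; the real keys depend on which FBX properties were left unparsed.
//
//   aiString path;
//   if (out_mat->Get("$raw.DiffuseTexture|file", aiTextureType_UNKNOWN, 0, path) == AI_SUCCESS) {
//       // a value of the form "*<n>" refers to scene->mTextures[n]; anything else is a file path
//   }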
2553 | | |
2554 | 0 | double FBXConverter::FrameRateToDouble(FileGlobalSettings::FrameRate fp, double customFPSVal) { |
2555 | 0 | switch (fp) { |
2556 | 0 | case FileGlobalSettings::FrameRate_DEFAULT: |
2557 | 0 | return 1.0; |
2558 | | |
2559 | 0 | case FileGlobalSettings::FrameRate_120: |
2560 | 0 | return 120.0; |
2561 | | |
2562 | 0 | case FileGlobalSettings::FrameRate_100: |
2563 | 0 | return 100.0; |
2564 | | |
2565 | 0 | case FileGlobalSettings::FrameRate_60: |
2566 | 0 | return 60.0; |
2567 | | |
2568 | 0 | case FileGlobalSettings::FrameRate_50: |
2569 | 0 | return 50.0; |
2570 | | |
2571 | 0 | case FileGlobalSettings::FrameRate_48: |
2572 | 0 | return 48.0; |
2573 | | |
2574 | 0 | case FileGlobalSettings::FrameRate_30: |
2575 | 0 | case FileGlobalSettings::FrameRate_30_DROP: |
2576 | 0 | return 30.0; |
2577 | | |
2578 | 0 | case FileGlobalSettings::FrameRate_NTSC_DROP_FRAME: |
2579 | 0 | case FileGlobalSettings::FrameRate_NTSC_FULL_FRAME: |
2580 | 0 | return 30.0 / 1.001; // NTSC drop/full frame: 29.97002997... fps
2581 | | |
2582 | 0 | case FileGlobalSettings::FrameRate_PAL: |
2583 | 0 | return 25.0; |
2584 | | |
2585 | 0 | case FileGlobalSettings::FrameRate_CINEMA: |
2586 | 0 | return 24.0; |
2587 | | |
2588 | 0 | case FileGlobalSettings::FrameRate_1000: |
2589 | 0 | return 1000.0; |
2590 | | |
2591 | 0 | case FileGlobalSettings::FrameRate_CINEMA_ND: |
2592 | 0 | return 23.976; |
2593 | | |
2594 | 0 | case FileGlobalSettings::FrameRate_CUSTOM: |
2595 | 0 | return customFPSVal; |
2596 | | |
2597 | 0 | case FileGlobalSettings::FrameRate_MAX: // this is to silence compiler warnings |
2598 | 0 | break; |
2599 | 0 | } |
2600 | | |
2601 | 0 | ai_assert(false); |
2602 | |
|
2603 | 0 | return -1.0;
2604 | 0 | } |
2605 | | |
2606 | 0 | void FBXConverter::ConvertAnimations() { |
2607 | | // first of all determine framerate |
2608 | 0 | const FileGlobalSettings::FrameRate fps = doc.GlobalSettings().TimeMode(); |
2609 | 0 | const float custom = doc.GlobalSettings().CustomFrameRate(); |
2610 | 0 | anim_fps = FrameRateToDouble(fps, custom); |
2611 | |
|
2612 | 0 | const std::vector<const AnimationStack *> &curAnimations = doc.AnimationStacks(); |
2613 | 0 | for (const AnimationStack *stack : curAnimations) { |
2614 | 0 | ConvertAnimationStack(*stack); |
2615 | 0 | } |
2616 | 0 | } |
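// Note on units: key times produced by the animation conversion are in frames. Raw FBX
// tick values are turned into seconds with CONVERT_FBX_TIME() and then multiplied by
// anim_fps, while aiAnimation::mTicksPerSecond is set to anim_fps so consumers can recover
// seconds. Worked example (assuming a 30 fps stack): a key one second into the stack gets
// mTime = 30.0 and the animation reports mTicksPerSecond = 30.0.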
2617 | | |
2618 | 0 | std::string FBXConverter::FixNodeName(const std::string &name) { |
2619 | | // strip the "Model::" prefix. Note that stripping maps "Model::" and ""
2620 | | // to the same empty identifier, so the result can be ambiguous for such
2621 | | // degenerate names. Make sure the behaviour stays consistent across
2622 | | // multiple calls to FixNodeName().
2623 | 0 | if (name.substr(0, 7) == "Model::") { |
2624 | 0 | std::string temp = name.substr(7); |
2625 | 0 | return temp; |
2626 | 0 | } |
2627 | | |
2628 | 0 | return name; |
2629 | 0 | } |
2630 | | |
2631 | 0 | std::string FBXConverter::FixAnimMeshName(const std::string &name) { |
2632 | 0 | if (name.length()) { |
2633 | 0 | size_t indexOf = name.find("::"); // look for the "::" substring, not the first ':' character
2634 | 0 | if (indexOf != std::string::npos && indexOf < name.size() - 2) { |
2635 | 0 | return name.substr(indexOf + 2); |
2636 | 0 | } |
2637 | 0 | } |
2638 | 0 | return name.length() ? name : "AnimMesh"; |
2639 | 0 | } |
2640 | | |
2641 | 0 | void FBXConverter::ConvertAnimationStack(const AnimationStack &st) { |
2642 | 0 | const AnimationLayerList &layers = st.Layers(); |
2643 | 0 | if (layers.empty()) { |
2644 | 0 | return; |
2645 | 0 | } |
2646 | | |
2647 | 0 | aiAnimation *const anim = new aiAnimation(); |
2648 | 0 | animations.push_back(anim); |
2649 | | |
2650 | | // strip AnimationStack:: prefix |
2651 | 0 | std::string name = st.Name(); |
2652 | 0 | if (name.substr(0, 16) == "AnimationStack::") { |
2653 | 0 | name = name.substr(16); |
2654 | 0 | } else if (name.substr(0, 11) == "AnimStack::") { |
2655 | 0 | name = name.substr(11); |
2656 | 0 | } |
2657 | |
|
2658 | 0 | anim->mName.Set(name); |
2659 | | |
2660 | | // need to find all nodes for which we need to generate node animations - |
2661 | | // it may happen that we need to merge multiple layers, though. |
2662 | 0 | NodeMap node_map; |
2663 | | |
2664 | | // reverse mapping from curves to layers, much faster than querying |
2665 | | // the FBX DOM for it. |
2666 | 0 | LayerMap layer_map; |
2667 | |
|
2668 | 0 | const char *prop_whitelist[] = { |
2669 | 0 | "Lcl Scaling", |
2670 | 0 | "Lcl Rotation", |
2671 | 0 | "Lcl Translation", |
2672 | 0 | "DeformPercent" |
2673 | 0 | }; |
2674 | |
|
2675 | 0 | std::map<std::string, morphAnimData *> morphAnimDatas; |
2676 | |
|
2677 | 0 | for (const AnimationLayer *layer : layers) { |
2678 | 0 | ai_assert(layer); |
2679 | 0 | const AnimationCurveNodeList &nodes = layer->Nodes(prop_whitelist, 4); |
2680 | 0 | for (const AnimationCurveNode *node : nodes) { |
2681 | 0 | ai_assert(node); |
2682 | 0 | const Model *const model = dynamic_cast<const Model *>(node->Target()); |
2683 | 0 | if (model) { |
2684 | 0 | const std::string &curName = FixNodeName(model->Name()); |
2685 | 0 | node_map[curName].push_back(node); |
2686 | 0 | layer_map[node] = layer; |
2687 | 0 | continue; |
2688 | 0 | } |
2689 | 0 | const BlendShapeChannel *const bsc = dynamic_cast<const BlendShapeChannel *>(node->Target()); |
2690 | 0 | if (bsc) { |
2691 | 0 | ProcessMorphAnimDatas(&morphAnimDatas, bsc, node); |
2692 | 0 | } |
2693 | 0 | } |
2694 | 0 | } |
2695 | | |
2696 | | // generate node animations |
2697 | 0 | std::vector<aiNodeAnim *> node_anims; |
2698 | |
|
2699 | 0 | double min_time = 1e10; |
2700 | 0 | double max_time = -1e10; |
2701 | |
|
2702 | 0 | int64_t start_time = st.LocalStart(); |
2703 | 0 | int64_t stop_time = st.LocalStop(); |
2704 | 0 | bool has_local_startstop = start_time != 0 || stop_time != 0; |
2705 | 0 | if (!has_local_startstop) { |
2706 | | // no time range given, so accept every keyframe and use the actual min/max time |
2707 | | // the values are close to INT64_MIN/MAX; the 20000 margin keeps the +/- 10000 tick padding applied by GetKeyframeList() from overflowing
2708 | 0 | start_time = -9223372036854775807ll + 20000; |
2709 | 0 | stop_time = 9223372036854775807ll - 20000; |
2710 | 0 | } |
2711 | |
|
2712 | 0 | try { |
2713 | 0 | for (const NodeMap::value_type &kv : node_map) { |
2714 | 0 | GenerateNodeAnimations(node_anims, |
2715 | 0 | kv.first, |
2716 | 0 | kv.second, |
2717 | 0 | layer_map, |
2718 | 0 | start_time, stop_time, |
2719 | 0 | max_time, |
2720 | 0 | min_time); |
2721 | 0 | } |
2722 | 0 | } catch (std::exception &) { |
2723 | 0 | std::for_each(node_anims.begin(), node_anims.end(), Util::delete_fun<aiNodeAnim>()); |
2724 | 0 | throw; |
2725 | 0 | } |
2726 | | |
2727 | 0 | if (node_anims.size() || morphAnimDatas.size()) { |
2728 | 0 | if (node_anims.size()) { |
2729 | 0 | anim->mChannels = new aiNodeAnim *[node_anims.size()](); |
2730 | 0 | anim->mNumChannels = static_cast<unsigned int>(node_anims.size()); |
2731 | 0 | std::swap_ranges(node_anims.begin(), node_anims.end(), anim->mChannels); |
2732 | 0 | } |
2733 | 0 | if (morphAnimDatas.size()) { |
2734 | 0 | unsigned int numMorphMeshChannels = static_cast<unsigned int>(morphAnimDatas.size()); |
2735 | 0 | anim->mMorphMeshChannels = new aiMeshMorphAnim *[numMorphMeshChannels]; |
2736 | 0 | anim->mNumMorphMeshChannels = numMorphMeshChannels; |
2737 | 0 | unsigned int i = 0; |
2738 | 0 | for (const auto &morphAnimIt : morphAnimDatas) { |
2739 | 0 | morphAnimData *animData = morphAnimIt.second; |
2740 | 0 | unsigned int numKeys = static_cast<unsigned int>(animData->size()); |
2741 | 0 | aiMeshMorphAnim *meshMorphAnim = new aiMeshMorphAnim(); |
2742 | 0 | meshMorphAnim->mName.Set(morphAnimIt.first); |
2743 | 0 | meshMorphAnim->mNumKeys = numKeys; |
2744 | 0 | meshMorphAnim->mKeys = new aiMeshMorphKey[numKeys]; |
2745 | 0 | unsigned int j = 0; |
2746 | 0 | for (auto &animIt : *animData) { |
2747 | 0 | morphKeyData *keyData = animIt.second; |
2748 | 0 | unsigned int numValuesAndWeights = static_cast<unsigned int>(keyData->values.size()); |
2749 | 0 | meshMorphAnim->mKeys[j].mNumValuesAndWeights = numValuesAndWeights; |
2750 | 0 | meshMorphAnim->mKeys[j].mValues = new unsigned int[numValuesAndWeights]; |
2751 | 0 | meshMorphAnim->mKeys[j].mWeights = new double[numValuesAndWeights]; |
2752 | 0 | meshMorphAnim->mKeys[j].mTime = CONVERT_FBX_TIME(animIt.first) * anim_fps; |
2753 | 0 | for (unsigned int k = 0; k < numValuesAndWeights; k++) { |
2754 | 0 | meshMorphAnim->mKeys[j].mValues[k] = keyData->values.at(k); |
2755 | 0 | meshMorphAnim->mKeys[j].mWeights[k] = keyData->weights.at(k); |
2756 | 0 | } |
2757 | 0 | j++; |
2758 | 0 | } |
2759 | 0 | anim->mMorphMeshChannels[i++] = meshMorphAnim; |
2760 | 0 | } |
2761 | 0 | } |
2762 | 0 | } else { |
2763 | | // empty animations would fail validation, so drop them |
2764 | 0 | delete anim; |
2765 | 0 | animations.pop_back(); |
2766 | 0 | FBXImporter::LogInfo("ignoring empty AnimationStack (using IK?): ", name); |
2767 | 0 | return; |
2768 | 0 | } |
2769 | | |
2770 | 0 | double start_time_fps = has_local_startstop ? (CONVERT_FBX_TIME(start_time) * anim_fps) : min_time; |
2771 | 0 | double stop_time_fps = has_local_startstop ? (CONVERT_FBX_TIME(stop_time) * anim_fps) : max_time; |
2772 | | |
2773 | | // adjust relative timing for animation |
2774 | 0 | for (unsigned int c = 0; c < anim->mNumChannels; c++) { |
2775 | 0 | aiNodeAnim *channel = anim->mChannels[c]; |
2776 | 0 | for (uint32_t i = 0; i < channel->mNumPositionKeys; i++) { |
2777 | 0 | channel->mPositionKeys[i].mTime -= start_time_fps; |
2778 | 0 | } |
2779 | 0 | for (uint32_t i = 0; i < channel->mNumRotationKeys; i++) { |
2780 | 0 | channel->mRotationKeys[i].mTime -= start_time_fps; |
2781 | 0 | } |
2782 | 0 | for (uint32_t i = 0; i < channel->mNumScalingKeys; i++) { |
2783 | 0 | channel->mScalingKeys[i].mTime -= start_time_fps; |
2784 | 0 | } |
2785 | 0 | } |
2786 | 0 | for (unsigned int c = 0; c < anim->mNumMorphMeshChannels; c++) { |
2787 | 0 | aiMeshMorphAnim *channel = anim->mMorphMeshChannels[c]; |
2788 | 0 | for (uint32_t i = 0; i < channel->mNumKeys; i++) { |
2789 | 0 | channel->mKeys[i].mTime -= start_time_fps; |
2790 | 0 | } |
2791 | 0 | } |
2792 | | |
2793 | | // for some mysterious reason, mDuration is simply the maximum key -- the |
2794 | | // validator always assumes animations to start at zero. |
2795 | 0 | anim->mDuration = stop_time_fps - start_time_fps; |
2796 | 0 | anim->mTicksPerSecond = anim_fps; |
2797 | 0 | } |
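// Note on relative timing: all keys above were shifted by start_time_fps so every exported
// animation starts at t = 0 and mDuration covers the start/stop window. Worked example
// (assumed values): a stack with a local start/stop of frames 10..50 produces keys in
// [0, 40] and mDuration = 40.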
2798 | | |
2799 | | // ------------------------------------------------------------------------------------------------ |
2800 | 0 | void FBXConverter::ProcessMorphAnimDatas(std::map<std::string, morphAnimData *> *morphAnimDatas, const BlendShapeChannel *bsc, const AnimationCurveNode *node) { |
2801 | 0 | std::vector<const Connection *> bscConnections = doc.GetConnectionsBySourceSequenced(bsc->ID(), "Deformer"); |
2802 | 0 | for (const Connection *bscConnection : bscConnections) { |
2803 | 0 | auto bs = dynamic_cast<const BlendShape *>(bscConnection->DestinationObject()); |
2804 | 0 | if (bs) { |
2805 | 0 | auto channelIt = std::find(bs->BlendShapeChannels().begin(), bs->BlendShapeChannels().end(), bsc); |
2806 | 0 | if (channelIt != bs->BlendShapeChannels().end()) { |
2807 | 0 | auto channelIndex = static_cast<unsigned int>(std::distance(bs->BlendShapeChannels().begin(), channelIt)); |
2808 | 0 | std::vector<const Connection *> bsConnections = doc.GetConnectionsBySourceSequenced(bs->ID(), "Geometry"); |
2809 | 0 | for (const Connection *bsConnection : bsConnections) { |
2810 | 0 | auto geo = dynamic_cast<const Geometry *>(bsConnection->DestinationObject()); |
2811 | 0 | if (geo) { |
2812 | 0 | std::vector<const Connection *> geoConnections = doc.GetConnectionsBySourceSequenced(geo->ID(), "Model"); |
2813 | 0 | for (const Connection *geoConnection : geoConnections) { |
2814 | 0 | auto model = dynamic_cast<const Model *>(geoConnection->DestinationObject()); |
2815 | 0 | if (model) { |
2816 | 0 | auto geoIt = std::find(model->GetGeometry().begin(), model->GetGeometry().end(), geo); |
2817 | 0 | auto geoIndex = static_cast<unsigned int>(std::distance(model->GetGeometry().begin(), geoIt)); |
2818 | 0 | auto name = aiString(FixNodeName(model->Name() + "*")); |
2819 | 0 | name.length += ASSIMP_itoa10(name.data + name.length, AI_MAXLEN - name.length, geoIndex);
2820 | 0 | morphAnimData *animData; |
2821 | 0 | auto animIt = morphAnimDatas->find(name.C_Str()); |
2822 | 0 | if (animIt == morphAnimDatas->end()) { |
2823 | 0 | animData = new morphAnimData(); |
2824 | 0 | morphAnimDatas->insert(std::make_pair(name.C_Str(), animData)); |
2825 | 0 | } else { |
2826 | 0 | animData = animIt->second; |
2827 | 0 | } |
2828 | 0 | for (std::pair<std::string, const AnimationCurve *> curvesIt : node->Curves()) { |
2829 | 0 | if (curvesIt.first == "d|DeformPercent") { |
2830 | 0 | const AnimationCurve *animationCurve = curvesIt.second; |
2831 | 0 | const KeyTimeList &keys = animationCurve->GetKeys(); |
2832 | 0 | const KeyValueList &values = animationCurve->GetValues(); |
2833 | 0 | unsigned int k = 0; |
2834 | 0 | for (auto key : keys) { |
2835 | 0 | morphKeyData *keyData; |
2836 | 0 | auto keyIt = animData->find(key); |
2837 | 0 | if (keyIt == animData->end()) { |
2838 | 0 | keyData = new morphKeyData(); |
2839 | 0 | animData->insert(std::make_pair(key, keyData)); |
2840 | 0 | } else { |
2841 | 0 | keyData = keyIt->second; |
2842 | 0 | } |
2843 | 0 | keyData->values.push_back(channelIndex); |
2844 | 0 | keyData->weights.push_back(values.at(k) / 100.0f); |
2845 | 0 | k++; |
2846 | 0 | } |
2847 | 0 | } |
2848 | 0 | } |
2849 | 0 | } |
2850 | 0 | } |
2851 | 0 | } |
2852 | 0 | } |
2853 | 0 | } |
2854 | 0 | } |
2855 | 0 | } |
2856 | 0 | } |
2857 | | |
2858 | | // ------------------------------------------------------------------------------------------------ |
2859 | | #ifdef ASSIMP_BUILD_DEBUG |
2860 | | // ------------------------------------------------------------------------------------------------ |
2861 | | // sanity check whether the input is ok |
2862 | | static void validateAnimCurveNodes(const std::vector<const AnimationCurveNode *> &curves, |
2863 | 0 | bool strictMode) { |
2864 | 0 | const Object *target(nullptr); |
2865 | 0 | for (const AnimationCurveNode *node : curves) { |
2866 | 0 | if (!target) { |
2867 | 0 | target = node->Target(); |
2868 | 0 | } |
2869 | 0 | if (node->Target() != target) { |
2870 | 0 | FBXImporter::LogWarn("animation curve nodes in this list target different objects");
2871 | 0 | } |
2872 | 0 | if (strictMode) { |
2873 | 0 | ai_assert(node->Target() == target); |
2874 | 0 | } |
2875 | 0 | } |
2876 | 0 | } |
2877 | | #endif // ASSIMP_BUILD_DEBUG |
2878 | | |
2879 | | // ------------------------------------------------------------------------------------------------ |
2880 | | void FBXConverter::GenerateNodeAnimations(std::vector<aiNodeAnim *> &node_anims, |
2881 | | const std::string &fixed_name, |
2882 | | const std::vector<const AnimationCurveNode *> &curves, |
2883 | | const LayerMap &layer_map, |
2884 | | int64_t start, int64_t stop, |
2885 | | double &max_time, |
2886 | 0 | double &min_time) { |
2887 | |
|
2888 | 0 | NodeMap node_property_map; |
2889 | 0 | ai_assert(curves.size()); |
2890 | |
|
2891 | 0 | #ifdef ASSIMP_BUILD_DEBUG |
2892 | 0 | validateAnimCurveNodes(curves, doc.Settings().strictMode); |
2893 | 0 | #endif |
2894 | 0 | const AnimationCurveNode *curve_node = nullptr; |
2895 | 0 | for (const AnimationCurveNode *node : curves) { |
2896 | 0 | ai_assert(node); |
2897 | |
|
2898 | 0 | if (node->TargetProperty().empty()) { |
2899 | 0 | FBXImporter::LogWarn("target property for animation curve not set: ", node->Name()); |
2900 | 0 | continue; |
2901 | 0 | } |
2902 | | |
2903 | 0 | curve_node = node; |
2904 | 0 | if (node->Curves().empty()) { |
2905 | 0 | FBXImporter::LogWarn("no animation curves assigned to AnimationCurveNode: ", node->Name()); |
2906 | 0 | continue; |
2907 | 0 | } |
2908 | | |
2909 | 0 | node_property_map[node->TargetProperty()].push_back(node); |
2910 | 0 | } |
2911 | |
|
2912 | 0 | ai_assert(curve_node); |
2913 | 0 | ai_assert(curve_node->TargetAsModel()); |
2914 | |
|
2915 | 0 | const Model &target = *curve_node->TargetAsModel(); |
2916 | | |
2917 | | // check for all possible transformation components |
2918 | 0 | NodeMap::const_iterator chain[TransformationComp_MAXIMUM]; |
2919 | |
|
2920 | 0 | bool has_any = false; |
2921 | 0 | bool has_complex = false; |
2922 | |
|
2923 | 0 | for (size_t i = 0; i < TransformationComp_MAXIMUM; ++i) { |
2924 | 0 | const TransformationComp comp = static_cast<TransformationComp>(i); |
2925 | | |
2926 | | // inverse pivots don't exist in the input, we just generate them |
2927 | 0 | if (comp == TransformationComp_RotationPivotInverse || comp == TransformationComp_ScalingPivotInverse) { |
2928 | 0 | chain[i] = node_property_map.end(); |
2929 | 0 | continue; |
2930 | 0 | } |
2931 | | |
2932 | 0 | chain[i] = node_property_map.find(NameTransformationCompProperty(comp)); |
2933 | 0 | if (chain[i] != node_property_map.end()) { |
2934 | | |
2935 | | // check if this curves contains redundant information by looking |
2936 | | // up the corresponding node's transformation chain. |
2937 | 0 | if (doc.Settings().optimizeEmptyAnimationCurves && |
2938 | 0 | IsRedundantAnimationData(target, comp, (chain[i]->second))) { |
2939 | |
|
2940 | 0 | FBXImporter::LogVerboseDebug("dropping redundant animation channel for node ", target.Name()); |
2941 | 0 | continue; |
2942 | 0 | } |
2943 | | |
2944 | 0 | has_any = true; |
2945 | |
|
2946 | 0 | if (comp != TransformationComp_Rotation && comp != TransformationComp_Scaling && comp != TransformationComp_Translation) { |
2947 | 0 | has_complex = true; |
2948 | 0 | } |
2949 | 0 | } |
2950 | 0 | } |
2951 | |
|
2952 | 0 | if (!has_any) { |
2953 | 0 | FBXImporter::LogWarn("ignoring node animation, did not find any transformation key frames"); |
2954 | 0 | return; |
2955 | 0 | } |
2956 | | |
2957 | | // this needs to play nicely with GenerateTransformationNodeChain() which will |
2958 | | // be invoked _later_ (animations come first). If this node has only rotation, |
2959 | | // scaling and translation _and_ there are no animated other components either, |
2960 | | // we can use a single node and also a single node animation channel. |
2961 | 0 | if( !has_complex && !NeedsComplexTransformationChain(target)) { |
2962 | 0 | aiNodeAnim* const nd = GenerateSimpleNodeAnim(fixed_name, target, chain, |
2963 | 0 | node_property_map.end(), |
2964 | 0 | start, stop, |
2965 | 0 | max_time, |
2966 | 0 | min_time |
2967 | 0 | ); |
2968 | |
|
2969 | 0 | ai_assert(nd); |
2970 | 0 | if (nd->mNumPositionKeys == 0 && nd->mNumRotationKeys == 0 && nd->mNumScalingKeys == 0) { |
2971 | 0 | delete nd; |
2972 | 0 | } else { |
2973 | 0 | node_anims.push_back(nd); |
2974 | 0 | } |
2975 | 0 | return; |
2976 | 0 | } |
2977 | | |
2978 | | // otherwise, things get gruesome and we need separate animation channels |
2979 | | // for each part of the transformation chain. Remember which channels |
2980 | | // we generated and pass this information to the node conversion |
2981 | | // code to avoid nodes that have identity transform, but non-identity |
2982 | | // animations, being dropped. |
2983 | 0 | unsigned int flags = 0, bit = 0x1; |
2984 | 0 | for (size_t i = 0; i < TransformationComp_MAXIMUM; ++i, bit <<= 1) { |
2985 | 0 | const TransformationComp comp = static_cast<TransformationComp>(i); |
2986 | |
|
2987 | 0 | if (chain[i] != node_property_map.end()) { |
2988 | 0 | flags |= bit; |
2989 | |
|
2990 | 0 | ai_assert(comp != TransformationComp_RotationPivotInverse); |
2991 | 0 | ai_assert(comp != TransformationComp_ScalingPivotInverse); |
2992 | |
|
2993 | 0 | const std::string &chain_name = NameTransformationChainNode(fixed_name, comp); |
2994 | |
|
2995 | 0 | aiNodeAnim *na = nullptr; |
2996 | 0 | switch (comp) { |
2997 | 0 | case TransformationComp_Rotation: |
2998 | 0 | case TransformationComp_PreRotation: |
2999 | 0 | case TransformationComp_PostRotation: |
3000 | 0 | case TransformationComp_GeometricRotation: |
3001 | 0 | na = GenerateRotationNodeAnim(chain_name, |
3002 | 0 | target, |
3003 | 0 | (*chain[i]).second, |
3004 | 0 | layer_map, |
3005 | 0 | start, stop, |
3006 | 0 | max_time, |
3007 | 0 | min_time); |
3008 | |
|
3009 | 0 | break; |
3010 | | |
3011 | 0 | case TransformationComp_RotationOffset: |
3012 | 0 | case TransformationComp_RotationPivot: |
3013 | 0 | case TransformationComp_ScalingOffset: |
3014 | 0 | case TransformationComp_ScalingPivot: |
3015 | 0 | case TransformationComp_Translation: |
3016 | 0 | case TransformationComp_GeometricTranslation: |
3017 | 0 | na = GenerateTranslationNodeAnim(chain_name, |
3018 | 0 | target, |
3019 | 0 | (*chain[i]).second, |
3020 | 0 | layer_map, |
3021 | 0 | start, stop, |
3022 | 0 | max_time, |
3023 | 0 | min_time); |
3024 | | |
3025 | | // pivoting requires us to generate an implicit inverse channel to undo the pivot translation |
3026 | 0 | if (comp == TransformationComp_RotationPivot) { |
3027 | 0 | const std::string &invName = NameTransformationChainNode(fixed_name, |
3028 | 0 | TransformationComp_RotationPivotInverse); |
3029 | |
|
3030 | 0 | aiNodeAnim *const inv = GenerateTranslationNodeAnim(invName, |
3031 | 0 | target, |
3032 | 0 | (*chain[i]).second, |
3033 | 0 | layer_map, |
3034 | 0 | start, stop, |
3035 | 0 | max_time, |
3036 | 0 | min_time, |
3037 | 0 | true); |
3038 | |
|
3039 | 0 | ai_assert(inv); |
3040 | 0 | if (inv->mNumPositionKeys == 0 && inv->mNumRotationKeys == 0 && inv->mNumScalingKeys == 0) { |
3041 | 0 | delete inv; |
3042 | 0 | } else { |
3043 | 0 | node_anims.push_back(inv); |
3044 | 0 | } |
3045 | |
|
3046 | 0 | ai_assert(TransformationComp_RotationPivotInverse > i); |
3047 | 0 | flags |= bit << (TransformationComp_RotationPivotInverse - i); |
3048 | 0 | } else if (comp == TransformationComp_ScalingPivot) { |
3049 | 0 | const std::string &invName = NameTransformationChainNode(fixed_name, |
3050 | 0 | TransformationComp_ScalingPivotInverse); |
3051 | |
|
3052 | 0 | aiNodeAnim *const inv = GenerateTranslationNodeAnim(invName, |
3053 | 0 | target, |
3054 | 0 | (*chain[i]).second, |
3055 | 0 | layer_map, |
3056 | 0 | start, stop, |
3057 | 0 | max_time, |
3058 | 0 | min_time, |
3059 | 0 | true); |
3060 | |
|
3061 | 0 | ai_assert(inv); |
3062 | 0 | if (inv->mNumPositionKeys == 0 && inv->mNumRotationKeys == 0 && inv->mNumScalingKeys == 0) { |
3063 | 0 | delete inv; |
3064 | 0 | } else { |
3065 | 0 | node_anims.push_back(inv); |
3066 | 0 | } |
3067 | |
|
3068 | 0 | ai_assert(TransformationComp_ScalingPivotInverse > i);
3069 | 0 | flags |= bit << (TransformationComp_ScalingPivotInverse - i);
3070 | 0 | } |
3071 | |
|
3072 | 0 | break; |
3073 | | |
3074 | 0 | case TransformationComp_Scaling: |
3075 | 0 | case TransformationComp_GeometricScaling: |
3076 | 0 | na = GenerateScalingNodeAnim(chain_name, |
3077 | 0 | target, |
3078 | 0 | (*chain[i]).second, |
3079 | 0 | layer_map, |
3080 | 0 | start, stop, |
3081 | 0 | max_time, |
3082 | 0 | min_time); |
3083 | |
|
3084 | 0 | break; |
3085 | | |
3086 | 0 | default: |
3087 | 0 | ai_assert(false); |
3088 | 0 | } |
3089 | | |
3090 | 0 | ai_assert(na); |
3091 | 0 | if (na->mNumPositionKeys == 0 && na->mNumRotationKeys == 0 && na->mNumScalingKeys == 0) { |
3092 | 0 | delete na; |
3093 | 0 | } else { |
3094 | 0 | node_anims.push_back(na); |
3095 | 0 | } |
3096 | 0 | continue; |
3097 | 0 | } |
3098 | 0 | } |
3099 | | |
3100 | 0 | node_anim_chain_bits[fixed_name] = flags; |
3101 | 0 | } |
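// Background for the implicit inverse pivot channels generated above: rotating (or scaling)
// about a pivot point P is expressed in the transformation chain as T(P) * R * T(-P), so for
// every animated RotationPivot/ScalingPivot channel a mirror channel with negated translation
// keys is emitted via GenerateTranslationNodeAnim(..., inverse = true). Illustrative sketch
// for a single point v (assuming column vectors):
//
//   v' = T(P) * R * T(-P) * v;   // rotate v about P instead of about the origin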
3102 | | |
3103 | | bool FBXConverter::IsRedundantAnimationData(const Model &target, |
3104 | | TransformationComp comp, |
3105 | 0 | const std::vector<const AnimationCurveNode *> &curves) { |
3106 | 0 | ai_assert(curves.size()); |
3107 | | |
3108 | | // look for animation nodes with |
3109 | | // * sub channels for all relevant components set |
3110 | | // * one key/value pair per component |
3111 | | // * combined values match up the corresponding value in the bind pose node transformation |
3112 | | // only such nodes are 'redundant' for this function. |
3113 | |
|
3114 | 0 | if (curves.size() > 1) { |
3115 | 0 | return false; |
3116 | 0 | } |
3117 | | |
3118 | 0 | const AnimationCurveNode &nd = *curves.front(); |
3119 | 0 | const AnimationCurveMap &sub_curves = nd.Curves(); |
3120 | |
|
3121 | 0 | const AnimationCurveMap::const_iterator dx = sub_curves.find("d|X"); |
3122 | 0 | const AnimationCurveMap::const_iterator dy = sub_curves.find("d|Y"); |
3123 | 0 | const AnimationCurveMap::const_iterator dz = sub_curves.find("d|Z"); |
3124 | |
|
3125 | 0 | if (dx == sub_curves.end() || dy == sub_curves.end() || dz == sub_curves.end()) { |
3126 | 0 | return false; |
3127 | 0 | } |
3128 | | |
3129 | 0 | const KeyValueList &vx = (*dx).second->GetValues(); |
3130 | 0 | const KeyValueList &vy = (*dy).second->GetValues(); |
3131 | 0 | const KeyValueList &vz = (*dz).second->GetValues(); |
3132 | |
|
3133 | 0 | if (vx.size() != 1 || vy.size() != 1 || vz.size() != 1) { |
3134 | 0 | return false; |
3135 | 0 | } |
3136 | | |
3137 | 0 | const aiVector3D dyn_val = aiVector3D(vx[0], vy[0], vz[0]); |
3138 | 0 | const aiVector3D &static_val = PropertyGet<aiVector3D>(target.Props(), |
3139 | 0 | NameTransformationCompProperty(comp), |
3140 | 0 | TransformationCompDefaultValue(comp)); |
3141 | |
|
3142 | 0 | const float epsilon = Math::getEpsilon<float>(); |
3143 | 0 | return (dyn_val - static_val).SquareLength() < epsilon; |
3144 | 0 | } |
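// Example of a channel this treats as redundant: a "Lcl Translation" curve node whose
// d|X, d|Y and d|Z curves each hold exactly one key and whose combined value equals the
// node's static "Lcl Translation" from the bind pose. Such a channel cannot change the
// result and is dropped when optimizeEmptyAnimationCurves is enabled.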
3145 | | |
3146 | | aiNodeAnim *FBXConverter::GenerateRotationNodeAnim(const std::string &name, |
3147 | | const Model &target, |
3148 | | const std::vector<const AnimationCurveNode *> &curves, |
3149 | | const LayerMap &layer_map, |
3150 | | int64_t start, int64_t stop, |
3151 | | double &max_time, |
3152 | 0 | double &min_time) { |
3153 | 0 | std::unique_ptr<aiNodeAnim> na(new aiNodeAnim()); |
3154 | 0 | na->mNodeName.Set(name); |
3155 | |
|
3156 | 0 | ConvertRotationKeys(na.get(), curves, layer_map, start, stop, max_time, min_time, target.RotationOrder()); |
3157 | | |
3158 | | // dummy scaling key |
3159 | 0 | na->mScalingKeys = new aiVectorKey[1]; |
3160 | 0 | na->mNumScalingKeys = 1; |
3161 | |
|
3162 | 0 | na->mScalingKeys[0].mTime = 0.; |
3163 | 0 | na->mScalingKeys[0].mValue = aiVector3D(1.0f, 1.0f, 1.0f); |
3164 | | |
3165 | | // dummy position key |
3166 | 0 | na->mPositionKeys = new aiVectorKey[1]; |
3167 | 0 | na->mNumPositionKeys = 1; |
3168 | |
|
3169 | 0 | na->mPositionKeys[0].mTime = 0.; |
3170 | 0 | na->mPositionKeys[0].mValue = aiVector3D(); |
3171 | |
|
3172 | 0 | return na.release(); |
3173 | 0 | } |
3174 | | |
3175 | | aiNodeAnim *FBXConverter::GenerateScalingNodeAnim(const std::string &name, |
3176 | | const Model & /*target*/, |
3177 | | const std::vector<const AnimationCurveNode *> &curves, |
3178 | | const LayerMap &layer_map, |
3179 | | int64_t start, int64_t stop, |
3180 | | double &max_time, |
3181 | 0 | double &min_time) { |
3182 | 0 | std::unique_ptr<aiNodeAnim> na(new aiNodeAnim()); |
3183 | 0 | na->mNodeName.Set(name); |
3184 | |
|
3185 | 0 | ConvertScaleKeys(na.get(), curves, layer_map, start, stop, max_time, min_time); |
3186 | | |
3187 | | // dummy rotation key |
3188 | 0 | na->mRotationKeys = new aiQuatKey[1]; |
3189 | 0 | na->mNumRotationKeys = 1; |
3190 | |
|
3191 | 0 | na->mRotationKeys[0].mTime = 0.; |
3192 | 0 | na->mRotationKeys[0].mValue = aiQuaternion(); |
3193 | | |
3194 | | // dummy position key |
3195 | 0 | na->mPositionKeys = new aiVectorKey[1]; |
3196 | 0 | na->mNumPositionKeys = 1; |
3197 | |
|
3198 | 0 | na->mPositionKeys[0].mTime = 0.; |
3199 | 0 | na->mPositionKeys[0].mValue = aiVector3D(); |
3200 | |
|
3201 | 0 | return na.release(); |
3202 | 0 | } |
3203 | | |
3204 | | aiNodeAnim *FBXConverter::GenerateTranslationNodeAnim(const std::string &name, |
3205 | | const Model & /*target*/, |
3206 | | const std::vector<const AnimationCurveNode *> &curves, |
3207 | | const LayerMap &layer_map, |
3208 | | int64_t start, int64_t stop, |
3209 | | double &max_time, |
3210 | | double &min_time, |
3211 | 0 | bool inverse) { |
3212 | 0 | std::unique_ptr<aiNodeAnim> na(new aiNodeAnim()); |
3213 | 0 | na->mNodeName.Set(name); |
3214 | |
|
3215 | 0 | ConvertTranslationKeys(na.get(), curves, layer_map, start, stop, max_time, min_time); |
3216 | |
|
3217 | 0 | if (inverse) { |
3218 | 0 | for (unsigned int i = 0; i < na->mNumPositionKeys; ++i) { |
3219 | 0 | na->mPositionKeys[i].mValue *= -1.0f; |
3220 | 0 | } |
3221 | 0 | } |
3222 | | |
3223 | | // dummy scaling key |
3224 | 0 | na->mScalingKeys = new aiVectorKey[1]; |
3225 | 0 | na->mNumScalingKeys = 1; |
3226 | |
|
3227 | 0 | na->mScalingKeys[0].mTime = 0.; |
3228 | 0 | na->mScalingKeys[0].mValue = aiVector3D(1.0f, 1.0f, 1.0f); |
3229 | | |
3230 | | // dummy rotation key |
3231 | 0 | na->mRotationKeys = new aiQuatKey[1]; |
3232 | 0 | na->mNumRotationKeys = 1; |
3233 | |
|
3234 | 0 | na->mRotationKeys[0].mTime = 0.; |
3235 | 0 | na->mRotationKeys[0].mValue = aiQuaternion(); |
3236 | |
|
3237 | 0 | return na.release(); |
3238 | 0 | } |
3239 | | |
3240 | | aiNodeAnim* FBXConverter::GenerateSimpleNodeAnim(const std::string& name, |
3241 | | const Model& target, |
3242 | | NodeMap::const_iterator chain[TransformationComp_MAXIMUM], |
3243 | | NodeMap::const_iterator iterEnd, |
3244 | | int64_t start, int64_t stop, |
3245 | | double& maxTime, |
3246 | | double& minTime) |
3247 | 0 | { |
3248 | 0 | std::unique_ptr<aiNodeAnim> na(new aiNodeAnim()); |
3249 | 0 | na->mNodeName.Set(name); |
3250 | |
|
3251 | 0 | const PropertyTable &props = target.Props(); |
3252 | | |
3253 | | // collect unique times and keyframe lists |
3254 | 0 | KeyFrameListList keyframeLists[TransformationComp_MAXIMUM]; |
3255 | 0 | KeyTimeList keytimes; |
3256 | |
|
3257 | 0 | for (size_t i = 0; i < TransformationComp_MAXIMUM; ++i) { |
3258 | 0 | if (chain[i] == iterEnd) |
3259 | 0 | continue; |
3260 | | |
3261 | 0 | if (i == TransformationComp_Rotation || i == TransformationComp_PreRotation |
3262 | 0 | || i == TransformationComp_PostRotation || i == TransformationComp_GeometricRotation) { |
3263 | 0 | keyframeLists[i] = GetRotationKeyframeList((*chain[i]).second, start, stop); |
3264 | 0 | } else { |
3265 | 0 | keyframeLists[i] = GetKeyframeList((*chain[i]).second, start, stop); |
3266 | 0 | } |
3267 | |
|
3268 | 0 | for (KeyFrameListList::const_iterator it = keyframeLists[i].begin(); it != keyframeLists[i].end(); ++it) { |
3269 | 0 | const KeyTimeList& times = *std::get<0>(*it); |
3270 | 0 | keytimes.insert(keytimes.end(), times.begin(), times.end()); |
3271 | 0 | } |
3272 | | |
3273 | | // remove duplicates |
3274 | 0 | std::sort(keytimes.begin(), keytimes.end()); |
3275 | |
|
3276 | 0 | auto last = std::unique(keytimes.begin(), keytimes.end()); |
3277 | 0 | keytimes.erase(last, keytimes.end()); |
3278 | 0 | } |
3279 | |
|
3280 | 0 | const Model::RotOrder rotOrder = target.RotationOrder(); |
3281 | 0 | const size_t keyCount = keytimes.size(); |
3282 | |
|
3283 | 0 | aiVector3D defTranslate = PropertyGet(props, "Lcl Translation", aiVector3D(0.f, 0.f, 0.f)); |
3284 | 0 | aiVector3D defRotation = PropertyGet(props, "Lcl Rotation", aiVector3D(0.f, 0.f, 0.f)); |
3285 | 0 | aiVector3D defScale = PropertyGet(props, "Lcl Scaling", aiVector3D(1.f, 1.f, 1.f)); |
3286 | |
|
3287 | 0 | aiVectorKey* outTranslations = new aiVectorKey[keyCount]; |
3288 | 0 | aiQuatKey* outRotations = new aiQuatKey[keyCount]; |
3289 | 0 | aiVectorKey* outScales = new aiVectorKey[keyCount]; |
3290 | |
|
3291 | 0 | if (keyframeLists[TransformationComp_Translation].size() > 0) { |
3292 | 0 | InterpolateKeys(outTranslations, keytimes, keyframeLists[TransformationComp_Translation], defTranslate, maxTime, minTime); |
3293 | 0 | } else { |
3294 | 0 | for (size_t i = 0; i < keyCount; ++i) { |
3295 | 0 | outTranslations[i].mTime = CONVERT_FBX_TIME(keytimes[i]) * anim_fps; |
3296 | 0 | outTranslations[i].mValue = defTranslate; |
3297 | 0 | } |
3298 | 0 | } |
3299 | |
|
3300 | 0 | if (keyframeLists[TransformationComp_Rotation].size() > 0) { |
3301 | 0 | InterpolateKeys(outRotations, keytimes, keyframeLists[TransformationComp_Rotation], defRotation, maxTime, minTime, rotOrder); |
3302 | 0 | } else { |
3303 | 0 | aiQuaternion defQuat = EulerToQuaternion(defRotation, rotOrder); |
3304 | 0 | for (size_t i = 0; i < keyCount; ++i) { |
3305 | 0 | outRotations[i].mTime = CONVERT_FBX_TIME(keytimes[i]) * anim_fps; |
3306 | 0 | outRotations[i].mValue = defQuat; |
3307 | 0 | } |
3308 | 0 | } |
3309 | |
|
3310 | 0 | if (keyframeLists[TransformationComp_Scaling].size() > 0) { |
3311 | 0 | InterpolateKeys(outScales, keytimes, keyframeLists[TransformationComp_Scaling], defScale, maxTime, minTime); |
3312 | 0 | } else { |
3313 | 0 | for (size_t i = 0; i < keyCount; ++i) { |
3314 | 0 | outScales[i].mTime = CONVERT_FBX_TIME(keytimes[i]) * anim_fps; |
3315 | 0 | outScales[i].mValue = defScale; |
3316 | 0 | } |
3317 | 0 | } |
3318 | |
|
3319 | 0 | bool ok = false; |
3320 | |
|
3321 | 0 | const auto zero_epsilon = ai_epsilon; |
3322 | |
|
3323 | 0 | const aiVector3D& preRotation = PropertyGet<aiVector3D>(props, "PreRotation", ok); |
3324 | 0 | if (ok && preRotation.SquareLength() > zero_epsilon) { |
3325 | 0 | const aiQuaternion preQuat = EulerToQuaternion(preRotation, Model::RotOrder_EulerXYZ); |
3326 | 0 | for (size_t i = 0; i < keyCount; ++i) { |
3327 | 0 | outRotations[i].mValue = preQuat * outRotations[i].mValue; |
3328 | 0 | } |
3329 | 0 | } |
3330 | |
|
3331 | 0 | const aiVector3D& postRotation = PropertyGet<aiVector3D>(props, "PostRotation", ok); |
3332 | 0 | if (ok && postRotation.SquareLength() > zero_epsilon) { |
3333 | 0 | const aiQuaternion postQuat = EulerToQuaternion(postRotation, Model::RotOrder_EulerXYZ); |
3334 | 0 | for (size_t i = 0; i < keyCount; ++i) { |
3335 | 0 | outRotations[i].mValue = outRotations[i].mValue * postQuat; |
3336 | 0 | } |
3337 | 0 | } |
3338 | | |
3339 | | // convert TRS to SRT |
3340 | 0 | for (size_t i = 0; i < keyCount; ++i) { |
3341 | 0 | aiQuaternion& r = outRotations[i].mValue; |
3342 | 0 | aiVector3D& s = outScales[i].mValue; |
3343 | 0 | aiVector3D& t = outTranslations[i].mValue; |
3344 | |
|
3345 | 0 | aiMatrix4x4 mat, temp; |
3346 | 0 | aiMatrix4x4::Translation(t, mat); |
3347 | 0 | mat *= aiMatrix4x4(r.GetMatrix()); |
3348 | 0 | mat *= aiMatrix4x4::Scaling(s, temp); |
3349 | |
|
3350 | 0 | mat.Decompose(s, r, t); |
3351 | 0 | } |
3352 | |
|
3353 | 0 | na->mNumScalingKeys = static_cast<unsigned int>(keyCount); |
3354 | 0 | na->mNumRotationKeys = na->mNumScalingKeys; |
3355 | 0 | na->mNumPositionKeys = na->mNumScalingKeys; |
3356 | |
|
3357 | 0 | na->mScalingKeys = outScales; |
3358 | 0 | na->mRotationKeys = outRotations; |
3359 | 0 | na->mPositionKeys = outTranslations; |
3360 | |
|
3361 | 0 | return na.release(); |
3362 | 0 | } |
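// GenerateSimpleNodeAnim emits a single channel whose position, rotation and scaling keys all
// share one merged list of key times. Hedged micro-example (assumed values): with translation
// keys at t = {0, 2} and rotation keys at t = {1}, the merged times are {0, 1, 2}; missing
// samples are interpolated by InterpolateKeys() or, if a component has no curve at all,
// filled with the node's static Lcl default.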
3363 | | |
3364 | 0 | FBXConverter::KeyFrameListList FBXConverter::GetKeyframeList(const std::vector<const AnimationCurveNode *> &nodes, int64_t start, int64_t stop) { |
3365 | 0 | KeyFrameListList inputs; |
3366 | 0 | inputs.reserve(nodes.size() * 3); |
3367 | | |
3368 | | //give some breathing room for rounding errors |
3369 | 0 | int64_t adj_start = start - 10000; |
3370 | 0 | int64_t adj_stop = stop + 10000; |
3371 | |
|
3372 | 0 | for (const AnimationCurveNode *node : nodes) { |
3373 | 0 | ai_assert(node); |
3374 | |
|
3375 | 0 | const AnimationCurveMap &curves = node->Curves(); |
3376 | 0 | for (const AnimationCurveMap::value_type &kv : curves) { |
3377 | |
|
3378 | 0 | unsigned int mapto; |
3379 | 0 | if (kv.first == "d|X") { |
3380 | 0 | mapto = 0; |
3381 | 0 | } else if (kv.first == "d|Y") { |
3382 | 0 | mapto = 1; |
3383 | 0 | } else if (kv.first == "d|Z") { |
3384 | 0 | mapto = 2; |
3385 | 0 | } else { |
3386 | 0 | FBXImporter::LogWarn("ignoring animation curve, did not recognize target component");
3387 | 0 | continue; |
3388 | 0 | } |
3389 | | |
3390 | 0 | const AnimationCurve *const curve = kv.second; |
3391 | 0 | ai_assert(curve->GetKeys().size() == curve->GetValues().size()); |
3392 | 0 | ai_assert(curve->GetKeys().size()); |
3393 | | |
3394 | | //get values within the start/stop time window |
3395 | 0 | std::shared_ptr<KeyTimeList> Keys(new KeyTimeList()); |
3396 | 0 | std::shared_ptr<KeyValueList> Values(new KeyValueList()); |
3397 | 0 | const size_t count = curve->GetKeys().size(); |
3398 | 0 | Keys->reserve(count); |
3399 | 0 | Values->reserve(count); |
3400 | 0 | for (size_t n = 0; n < count; n++) { |
3401 | 0 | int64_t k = curve->GetKeys().at(n); |
3402 | 0 | if (k >= adj_start && k <= adj_stop) { |
3403 | 0 | Keys->push_back(k); |
3404 | 0 | Values->push_back(curve->GetValues().at(n)); |
3405 | 0 | } |
3406 | 0 | } |
3407 | |
|
3408 | 0 | inputs.emplace_back(Keys, Values, mapto); |
3409 | 0 | } |
3410 | 0 | } |
3411 | 0 | return inputs; // pray for NRVO :-) |
3412 | 0 | } |
3413 | | |
3414 | | FBXConverter::KeyFrameListList FBXConverter::GetRotationKeyframeList(const std::vector<const AnimationCurveNode *> &nodes, |
3415 | 0 | int64_t start, int64_t stop) { |
3416 | 0 | KeyFrameListList inputs; |
3417 | 0 | inputs.reserve(nodes.size() * 3); |
3418 | | |
3419 | | // give some breathing room for rounding errors |
3420 | 0 | const int64_t adj_start = start - 10000; |
3421 | 0 | const int64_t adj_stop = stop + 10000; |
3422 | |
|
3423 | 0 | for (const AnimationCurveNode *node : nodes) { |
3424 | 0 | ai_assert(node); |
3425 | |
|
3426 | 0 | const AnimationCurveMap &curves = node->Curves(); |
3427 | 0 | for (const AnimationCurveMap::value_type &kv : curves) { |
3428 | |
|
3429 | 0 | unsigned int mapto; |
3430 | 0 | if (kv.first == "d|X") { |
3431 | 0 | mapto = 0; |
3432 | 0 | } else if (kv.first == "d|Y") { |
3433 | 0 | mapto = 1; |
3434 | 0 | } else if (kv.first == "d|Z") { |
3435 | 0 | mapto = 2; |
3436 | 0 | } else { |
3437 | 0 | FBXImporter::LogWarn("ignoring rotation animation curve, did not recognize target component");
3438 | 0 | continue; |
3439 | 0 | } |
3440 | | |
3441 | 0 | const AnimationCurve *const curve = kv.second; |
3442 | 0 | ai_assert(curve->GetKeys().size() == curve->GetValues().size()); |
3443 | 0 | ai_assert(curve->GetKeys().size()); |
3444 | | |
3445 | | // get values within the start/stop time window |
3446 | 0 | std::shared_ptr<KeyTimeList> Keys(new KeyTimeList()); |
3447 | 0 | std::shared_ptr<KeyValueList> Values(new KeyValueList()); |
3448 | 0 | const size_t count = curve->GetKeys().size(); |
3449 | |
|
3450 | 0 | int64_t tp = curve->GetKeys().at(0); |
3451 | 0 | float vp = curve->GetValues().at(0); |
3452 | 0 | Keys->push_back(tp); |
3453 | 0 | Values->push_back(vp); |
3454 | 0 | if (count > 1) { |
3455 | 0 | int64_t tc = curve->GetKeys().at(1); |
3456 | 0 | float vc = curve->GetValues().at(1); |
3457 | 0 | for (size_t n = 1; n < count; n++) { |
3458 | 0 | while (std::abs(vc - vp) >= 180.0f) { |
3459 | 0 | double step = std::floor(double(tc - tp) / std::abs(vc - vp) * 179.0f); |
3460 | 0 | int64_t tnew = tp + int64_t(step); |
3461 | 0 | float vnew = vp + (vc - vp) * float(step / (tc - tp)); |
3462 | 0 | if (tnew >= adj_start && tnew <= adj_stop) { |
3463 | 0 | Keys->push_back(tnew); |
3464 | 0 | Values->push_back(vnew); |
3465 | 0 | } else { |
3466 | | // interpolated key fell outside the padded start/stop window, stop unwrapping
3467 | 0 | break; |
3468 | 0 | } |
3469 | 0 | tp = tnew; |
3470 | 0 | vp = vnew; |
3471 | 0 | } |
3472 | 0 | if (tc >= adj_start && tc <= adj_stop) { |
3473 | 0 | Keys->push_back(tc); |
3474 | 0 | Values->push_back(vc); |
3475 | 0 | } |
3476 | 0 | if (n + 1 < count) { |
3477 | 0 | tp = tc; |
3478 | 0 | vp = vc; |
3479 | 0 | tc = curve->GetKeys().at(n + 1); |
3480 | 0 | vc = curve->GetValues().at(n + 1); |
3481 | 0 | } |
3482 | 0 | } |
3483 | 0 | } |
3484 | 0 | inputs.emplace_back(Keys, Values, mapto); |
3485 | 0 | } |
3486 | 0 | } |
3487 | 0 | return inputs; |
3488 | 0 | } |
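// The loop above "unwraps" Euler rotation curves: whenever two consecutive keys differ by
// 180 degrees or more, intermediate keys (at most 179 degrees apart) are inserted so that the
// later per-key Euler-to-quaternion conversion cannot flip to the shorter arc and spin the
// wrong way. Worked example: keys of 0 and 350 degrees become roughly 0, 179, 350 instead of
// one jump that quaternion interpolation would traverse as -10 degrees.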
3489 | | |
3490 | 0 | KeyTimeList FBXConverter::GetKeyTimeList(const KeyFrameListList &inputs) { |
3491 | 0 | ai_assert(!inputs.empty()); |
3492 | | |
3493 | | // reserve some space upfront - it is likely that the key-frame lists |
3494 | | // have matching time values, so max(of all key-frame lists) should |
3495 | | // be a good estimate. |
3496 | 0 | KeyTimeList keys; |
3497 | |
|
3498 | 0 | size_t estimate = 0; |
3499 | 0 | for (const KeyFrameList &kfl : inputs) { |
3500 | 0 | estimate = std::max(estimate, std::get<0>(kfl)->size()); |
3501 | 0 | } |
3502 | |
|
3503 | 0 | keys.reserve(estimate); |
3504 | |
|
3505 | 0 | std::vector<unsigned int> next_pos; |
3506 | 0 | next_pos.resize(inputs.size(), 0); |
3507 | |
|
3508 | 0 | const size_t count = inputs.size(); |
3509 | 0 | while (true) { |
3510 | |
|
3511 | 0 | int64_t min_tick = std::numeric_limits<int64_t>::max(); |
3512 | 0 | for (size_t i = 0; i < count; ++i) { |
3513 | 0 | const KeyFrameList &kfl = inputs[i]; |
3514 | |
|
3515 | 0 | if (std::get<0>(kfl)->size() > next_pos[i] && std::get<0>(kfl)->at(next_pos[i]) < min_tick) { |
3516 | 0 | min_tick = std::get<0>(kfl)->at(next_pos[i]); |
3517 | 0 | } |
3518 | 0 | } |
3519 | |
|
3520 | 0 | if (min_tick == std::numeric_limits<int64_t>::max()) { |
3521 | 0 | break; |
3522 | 0 | } |
3523 | 0 | keys.push_back(min_tick); |
3524 | |
|
3525 | 0 | for (size_t i = 0; i < count; ++i) { |
3526 | 0 | const KeyFrameList &kfl = inputs[i]; |
3527 | |
|
3528 | 0 | while (std::get<0>(kfl)->size() > next_pos[i] && std::get<0>(kfl)->at(next_pos[i]) == min_tick) { |
3529 | 0 | ++next_pos[i]; |
3530 | 0 | } |
3531 | 0 | } |
3532 | 0 | } |
3533 | |
|
3534 | 0 | return keys; |
3535 | 0 | } |
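// GetKeyTimeList is effectively a k-way merge: it advances through all per-component key
// time lists in parallel and returns the sorted union of their times without duplicates.
// Example (assumed inputs): {0, 10, 20} and {0, 15, 20} merge to {0, 10, 15, 20}.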
3536 | | |
3537 | | void FBXConverter::InterpolateKeys(aiVectorKey *valOut, const KeyTimeList &keys, const KeyFrameListList &inputs, |
3538 | | const aiVector3D &def_value, |
3539 | | double &max_time, |
3540 | 0 | double &min_time) { |
3541 | 0 | ai_assert(!keys.empty()); |
3542 | 0 | ai_assert(nullptr != valOut); |
3543 | |
|
3544 | 0 | std::vector<unsigned int> next_pos; |
3545 | 0 | const size_t count(inputs.size()); |
3546 | |
|
3547 | 0 | next_pos.resize(inputs.size(), 0); |
3548 | |
|
3549 | 0 | for (KeyTimeList::value_type time : keys) { |
3550 | 0 | ai_real result[3] = { def_value.x, def_value.y, def_value.z }; |
3551 | |
|
3552 | 0 | for (size_t i = 0; i < count; ++i) { |
3553 | 0 | const KeyFrameList &kfl = inputs[i]; |
3554 | |
|
3555 | 0 | const size_t ksize = std::get<0>(kfl)->size(); |
3556 | 0 | if (ksize == 0) { |
3557 | 0 | continue; |
3558 | 0 | } |
3559 | 0 | if (ksize > next_pos[i] && std::get<0>(kfl)->at(next_pos[i]) == time) { |
3560 | 0 | ++next_pos[i]; |
3561 | 0 | } |
3562 | |
|
3563 | 0 | const size_t id0 = next_pos[i] > 0 ? next_pos[i] - 1 : 0; |
3564 | 0 | const size_t id1 = next_pos[i] == ksize ? ksize - 1 : next_pos[i]; |
3565 | | |
3566 | | // use lerp for interpolation |
3567 | 0 | const KeyValueList::value_type valueA = std::get<1>(kfl)->at(id0); |
3568 | 0 | const KeyValueList::value_type valueB = std::get<1>(kfl)->at(id1); |
3569 | |
|
3570 | 0 | const KeyTimeList::value_type timeA = std::get<0>(kfl)->at(id0); |
3571 | 0 | const KeyTimeList::value_type timeB = std::get<0>(kfl)->at(id1); |
3572 | |
|
3573 | 0 | const ai_real factor = timeB == timeA ? ai_real(0.) : static_cast<ai_real>((time - timeA)) / (timeB - timeA); |
3574 | 0 | const ai_real interpValue = static_cast<ai_real>(valueA + (valueB - valueA) * factor); |
3575 | |
|
3576 | 0 | result[std::get<2>(kfl)] = interpValue; |
3577 | 0 | } |
3578 | | |
3579 | | // CONVERT_FBX_TIME() converts FBX ticks to seconds; multiplying by anim_fps expresses the key time in frames
3580 | 0 | valOut->mTime = CONVERT_FBX_TIME(time) * anim_fps; |
3581 | |
|
3582 | 0 | min_time = std::min(min_time, valOut->mTime); |
3583 | 0 | max_time = std::max(max_time, valOut->mTime); |
3584 | |
|
3585 | 0 | valOut->mValue.x = result[0]; |
3586 | 0 | valOut->mValue.y = result[1]; |
3587 | 0 | valOut->mValue.z = result[2]; |
3588 | |
|
3589 | 0 | ++valOut; |
3590 | 0 | } |
3591 | 0 | } |
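// For every merged key time, each component is linearly interpolated between the two
// neighbouring keys of its own curve:
//
//   factor = (time - timeA) / (timeB - timeA);   // 0 if the curve has a key exactly here
//   value  = valueA + (valueB - valueA) * factor;
//
// Components that have no curve at all keep the caller-supplied default (def_value).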
3592 | | |
3593 | | void FBXConverter::InterpolateKeys(aiQuatKey *valOut, const KeyTimeList &keys, const KeyFrameListList &inputs, |
3594 | | const aiVector3D &def_value, |
3595 | | double &maxTime, |
3596 | | double &minTime, |
3597 | 0 | Model::RotOrder order) { |
3598 | 0 | ai_assert(!keys.empty()); |
3599 | 0 | ai_assert(nullptr != valOut); |
3600 | |
|
3601 | 0 | std::unique_ptr<aiVectorKey[]> temp(new aiVectorKey[keys.size()]); |
3602 | 0 | InterpolateKeys(temp.get(), keys, inputs, def_value, maxTime, minTime); |
3603 | |
|
3604 | 0 | aiMatrix4x4 m; |
3605 | |
|
3606 | 0 | aiQuaternion lastq; |
3607 | |
|
3608 | 0 | for (size_t i = 0, c = keys.size(); i < c; ++i) { |
3609 | |
|
3610 | 0 | valOut[i].mTime = temp[i].mTime; |
3611 | |
|
3612 | 0 | GetRotationMatrix(order, temp[i].mValue, m); |
3613 | 0 | aiQuaternion quat = aiQuaternion(aiMatrix3x3(m)); |
3614 | | |
3615 | | // take shortest path by checking the inner product |
3616 | | // http://www.3dkingdoms.com/weekly/weekly.php?a=36 |
3617 | 0 | if (quat.x * lastq.x + quat.y * lastq.y + quat.z * lastq.z + quat.w * lastq.w < 0) { |
3618 | 0 | quat.Conjugate(); |
3619 | 0 | quat.w = -quat.w; |
3620 | 0 | } |
3621 | 0 | lastq = quat; |
3622 | |
|
3623 | 0 | valOut[i].mValue = quat; |
3624 | 0 | } |
3625 | 0 | } |
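// The dot-product test above keeps successive quaternion keys on the same hemisphere:
// q and -q encode the same rotation, so when dot(quat, lastq) < 0 the fully negated
// quaternion (Conjugate() flips x/y/z, then w is flipped) is stored instead. This prevents
// consumers that lerp/slerp between keys from taking the long way around.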
3626 | | |
3627 | 0 | aiQuaternion FBXConverter::EulerToQuaternion(const aiVector3D &rot, Model::RotOrder order) { |
3628 | 0 | aiMatrix4x4 m; |
3629 | 0 | GetRotationMatrix(order, rot, m); |
3630 | |
|
3631 | 0 | return aiQuaternion(aiMatrix3x3(m)); |
3632 | 0 | } |
3633 | | |
3634 | | void FBXConverter::ConvertScaleKeys(aiNodeAnim *na, const std::vector<const AnimationCurveNode *> &nodes, const LayerMap & /*layers*/, |
3635 | | int64_t start, int64_t stop, |
3636 | | double &maxTime, |
3637 | 0 | double &minTime) { |
3638 | 0 | ai_assert(nodes.size()); |
3639 | | |
3640 | | // XXX for now, assume scale should be blended geometrically (i.e. two |
3641 | | // layers should be multiplied with each other). There is a FBX |
3642 | | // property in the layer to specify the behaviour, though. |
3643 | |
|
3644 | 0 | const KeyFrameListList &inputs = GetKeyframeList(nodes, start, stop); |
3645 | 0 | const KeyTimeList &keys = GetKeyTimeList(inputs); |
3646 | |
|
3647 | 0 | na->mNumScalingKeys = static_cast<unsigned int>(keys.size()); |
3648 | 0 | na->mScalingKeys = new aiVectorKey[keys.size()]; |
3649 | 0 | if (keys.size() > 0) { |
3650 | 0 | InterpolateKeys(na->mScalingKeys, keys, inputs, aiVector3D(1.0f, 1.0f, 1.0f), maxTime, minTime); |
3651 | 0 | } |
3652 | 0 | } |
3653 | | |
3654 | | void FBXConverter::ConvertTranslationKeys(aiNodeAnim *na, const std::vector<const AnimationCurveNode *> &nodes, |
3655 | | const LayerMap & /*layers*/, |
3656 | | int64_t start, int64_t stop, |
3657 | | double &maxTime, |
3658 | 0 | double &minTime) { |
3659 | 0 | ai_assert(nodes.size()); |
3660 | | |
3661 | | // XXX see notes in ConvertScaleKeys() |
3662 | 0 | const KeyFrameListList &inputs = GetKeyframeList(nodes, start, stop); |
3663 | 0 | const KeyTimeList &keys = GetKeyTimeList(inputs); |
3664 | |
|
3665 | 0 | na->mNumPositionKeys = static_cast<unsigned int>(keys.size()); |
3666 | 0 | na->mPositionKeys = new aiVectorKey[keys.size()]; |
3667 | 0 | if (keys.size() > 0) |
3668 | 0 | InterpolateKeys(na->mPositionKeys, keys, inputs, aiVector3D(0.0f, 0.0f, 0.0f), maxTime, minTime); |
3669 | 0 | } |
3670 | | |
3671 | | void FBXConverter::ConvertRotationKeys(aiNodeAnim *na, const std::vector<const AnimationCurveNode *> &nodes, |
3672 | | const LayerMap & /*layers*/, |
3673 | | int64_t start, int64_t stop, |
3674 | | double &maxTime, |
3675 | | double &minTime, |
3676 | 0 | Model::RotOrder order) { |
3677 | 0 | ai_assert(nodes.size()); |
3678 | | |
3679 | | // XXX see notes in ConvertScaleKeys() |
3680 | 0 | const std::vector<KeyFrameList> &inputs = GetRotationKeyframeList(nodes, start, stop); |
3681 | 0 | const KeyTimeList &keys = GetKeyTimeList(inputs); |
3682 | |
|
3683 | 0 | na->mNumRotationKeys = static_cast<unsigned int>(keys.size()); |
3684 | 0 | na->mRotationKeys = new aiQuatKey[keys.size()]; |
3685 | 0 | if (!keys.empty()) { |
3686 | 0 | InterpolateKeys(na->mRotationKeys, keys, inputs, aiVector3D(0.0f, 0.0f, 0.0f), maxTime, minTime, order); |
3687 | 0 | } |
3688 | 0 | } |
3689 | | |
3690 | 0 | void FBXConverter::ConvertGlobalSettings() { |
3691 | 0 | if (nullptr == mSceneOut) { |
3692 | 0 | return; |
3693 | 0 | } |
3694 | | |
3695 | 0 | const bool hasGenerator = !doc.Creator().empty(); |
3696 | |
|
3697 | 0 | mSceneOut->mMetaData = aiMetadata::Alloc(16 + (hasGenerator ? 1 : 0)); |
3698 | 0 | mSceneOut->mMetaData->Set(0, "UpAxis", doc.GlobalSettings().UpAxis()); |
3699 | 0 | mSceneOut->mMetaData->Set(1, "UpAxisSign", doc.GlobalSettings().UpAxisSign()); |
3700 | 0 | mSceneOut->mMetaData->Set(2, "FrontAxis", doc.GlobalSettings().FrontAxis()); |
3701 | 0 | mSceneOut->mMetaData->Set(3, "FrontAxisSign", doc.GlobalSettings().FrontAxisSign()); |
3702 | 0 | mSceneOut->mMetaData->Set(4, "CoordAxis", doc.GlobalSettings().CoordAxis()); |
3703 | 0 | mSceneOut->mMetaData->Set(5, "CoordAxisSign", doc.GlobalSettings().CoordAxisSign()); |
3704 | 0 | mSceneOut->mMetaData->Set(6, "OriginalUpAxis", doc.GlobalSettings().OriginalUpAxis()); |
3705 | 0 | mSceneOut->mMetaData->Set(7, "OriginalUpAxisSign", doc.GlobalSettings().OriginalUpAxisSign()); |
3706 | | //const double unitScaleFactor = (double)doc.GlobalSettings().UnitScaleFactor(); |
3707 | 0 | mSceneOut->mMetaData->Set(8, "UnitScaleFactor", doc.GlobalSettings().UnitScaleFactor()); |
3708 | 0 | mSceneOut->mMetaData->Set(9, "OriginalUnitScaleFactor", doc.GlobalSettings().OriginalUnitScaleFactor()); |
3709 | 0 | mSceneOut->mMetaData->Set(10, "AmbientColor", doc.GlobalSettings().AmbientColor()); |
3710 | 0 | mSceneOut->mMetaData->Set(11, "FrameRate", (int)doc.GlobalSettings().TimeMode()); |
3711 | 0 | mSceneOut->mMetaData->Set(12, "TimeSpanStart", doc.GlobalSettings().TimeSpanStart()); |
3712 | 0 | mSceneOut->mMetaData->Set(13, "TimeSpanStop", doc.GlobalSettings().TimeSpanStop()); |
3713 | 0 | mSceneOut->mMetaData->Set(14, "CustomFrameRate", doc.GlobalSettings().CustomFrameRate()); |
3714 | 0 | mSceneOut->mMetaData->Set(15, AI_METADATA_SOURCE_FORMAT_VERSION, aiString(ai_to_string(doc.FBXVersion()))); |
3715 | 0 | if (hasGenerator) { |
3716 | 0 | mSceneOut->mMetaData->Set(16, AI_METADATA_SOURCE_GENERATOR, aiString(doc.Creator())); |
3717 | 0 | } |
3718 | 0 | } |
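// Usage sketch (not part of the original source): reading the global settings back from an
// imported scene. Keys match the strings set above; aiMetadata::Get() is type-checked, so the
// variable types below are assumptions and Get() simply returns false on a mismatch.
//
//   int32_t upAxis = 1;
//   float unitScale = 1.0f;
//   if (scene->mMetaData) {
//       scene->mMetaData->Get("UpAxis", upAxis);
//       scene->mMetaData->Get("UnitScaleFactor", unitScale);
//   }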
3719 | | |
3720 | 0 | void FBXConverter::TransferDataToScene() { |
3721 | 0 | ai_assert(!mSceneOut->mMeshes); |
3722 | 0 | ai_assert(!mSceneOut->mNumMeshes); |
3723 | | |
3724 | | // note: the trailing () ensures initialization with nullptr - not |
3725 | | // many C++ users seem to know this, so pointing it out to avoid |
3726 | | // confusion why this code works. |
3727 | |
|
3728 | 0 | if (!mMeshes.empty()) { |
3729 | 0 | mSceneOut->mMeshes = new aiMesh *[mMeshes.size()](); |
3730 | 0 | mSceneOut->mNumMeshes = static_cast<unsigned int>(mMeshes.size()); |
3731 | |
|
3732 | 0 | std::swap_ranges(mMeshes.begin(), mMeshes.end(), mSceneOut->mMeshes); |
3733 | 0 | } |
3734 | |
|
3735 | 0 | if (!materials.empty()) { |
3736 | 0 | mSceneOut->mMaterials = new aiMaterial *[materials.size()](); |
3737 | 0 | mSceneOut->mNumMaterials = static_cast<unsigned int>(materials.size()); |
3738 | |
|
3739 | 0 | std::swap_ranges(materials.begin(), materials.end(), mSceneOut->mMaterials); |
3740 | 0 | } |
3741 | |
|
3742 | 0 | if (!animations.empty()) { |
3743 | 0 | mSceneOut->mAnimations = new aiAnimation *[animations.size()](); |
3744 | 0 | mSceneOut->mNumAnimations = static_cast<unsigned int>(animations.size()); |
3745 | |
|
3746 | 0 | std::swap_ranges(animations.begin(), animations.end(), mSceneOut->mAnimations); |
3747 | 0 | } |
3748 | |
|
3749 | 0 | if (!lights.empty()) { |
3750 | 0 | mSceneOut->mLights = new aiLight *[lights.size()](); |
3751 | 0 | mSceneOut->mNumLights = static_cast<unsigned int>(lights.size()); |
3752 | |
|
3753 | 0 | std::swap_ranges(lights.begin(), lights.end(), mSceneOut->mLights); |
3754 | 0 | } |
3755 | |
|
3756 | 0 | if (!cameras.empty()) { |
3757 | 0 | mSceneOut->mCameras = new aiCamera *[cameras.size()](); |
3758 | 0 | mSceneOut->mNumCameras = static_cast<unsigned int>(cameras.size()); |
3759 | |
|
3760 | 0 | std::swap_ranges(cameras.begin(), cameras.end(), mSceneOut->mCameras); |
3761 | 0 | } |
3762 | |
|
3763 | 0 | if (!textures.empty()) { |
3764 | 0 | mSceneOut->mTextures = new aiTexture *[textures.size()](); |
3765 | 0 | mSceneOut->mNumTextures = static_cast<unsigned int>(textures.size()); |
3766 | |
|
3767 | 0 | std::swap_ranges(textures.begin(), textures.end(), mSceneOut->mTextures); |
3768 | 0 | } |
3769 | |
|
3770 | 0 | if (!mSkeletons.empty()) { |
3771 | 0 | mSceneOut->mSkeletons = new aiSkeleton *[mSkeletons.size()]; |
3772 | 0 | mSceneOut->mNumSkeletons = static_cast<unsigned int>(mSkeletons.size()); |
3773 | 0 | std::swap_ranges(mSkeletons.begin(), mSkeletons.end(), mSceneOut->mSkeletons); |
3774 | 0 | } |
3775 | 0 | } |
3776 | | |
3777 | 0 | void FBXConverter::ConvertOrphanedEmbeddedTextures() { |
3778 | | // with C++17 structured bindings it could be:
3779 | | // for (auto&& [id, object] : objects) |
3780 | 0 | for (auto &&id_and_object : doc.Objects()) { |
3781 | 0 | auto &&id = std::get<0>(id_and_object); |
3782 | 0 | auto &&object = std::get<1>(id_and_object); |
3783 | | // If an object doesn't have parent |
3784 | 0 | if (doc.ConnectionsBySource().count(id) == 0) { |
3785 | 0 | const Texture *realTexture = nullptr; |
3786 | 0 | try { |
3787 | 0 | const auto &element = object->GetElement(); |
3788 | 0 | const Token &key = element.KeyToken(); |
3789 | 0 | const char *obtype = key.begin(); |
3790 | 0 | const size_t length = static_cast<size_t>(key.end() - key.begin()); |
3791 | 0 | if (strncmp(obtype, "Texture", length) == 0) { |
3792 | 0 | if (const Texture *texture = static_cast<const Texture *>(object->Get())) { |
3793 | 0 | if (texture->Media() && texture->Media()->ContentLength() > 0) { |
3794 | 0 | realTexture = texture; |
3795 | 0 | } |
3796 | 0 | } |
3797 | 0 | } |
3798 | 0 | } catch (...) { |
3799 | | // do nothing |
3800 | 0 | } |
3801 | 0 | if (realTexture) { |
3802 | 0 | const Video *media = realTexture->Media(); |
3803 | 0 | unsigned int index = ConvertVideo(*media); |
3804 | 0 | textures_converted[media] = index; |
3805 | 0 | } |
3806 | 0 | } |
3807 | 0 | } |
3808 | 0 | } |
3809 | | |
3810 | | // ------------------------------------------------------------------------------------------------ |
3811 | 0 | void ConvertToAssimpScene(aiScene *out, const Document &doc, bool removeEmptyBones) { |
3812 | 0 | FBXConverter converter(out, doc, removeEmptyBones); |
3813 | 0 | } |
3814 | | |
3815 | | } // namespace FBX |
3816 | | } // namespace Assimp |
3817 | | |
3818 | | #endif |