/src/assimp/code/AssetLib/FBX/FBXExporter.cpp
Line | Count | Source |
1 | | /* |
2 | | Open Asset Import Library (assimp) |
3 | | ---------------------------------------------------------------------- |
4 | | |
5 | | Copyright (c) 2006-2025, assimp team |
6 | | |
7 | | All rights reserved. |
8 | | |
9 | | Redistribution and use of this software in source and binary forms, |
10 | | with or without modification, are permitted provided that the |
11 | | following conditions are met: |
12 | | |
13 | | * Redistributions of source code must retain the above |
14 | | copyright notice, this list of conditions and the |
15 | | following disclaimer. |
16 | | |
17 | | * Redistributions in binary form must reproduce the above |
18 | | copyright notice, this list of conditions and the |
19 | | following disclaimer in the documentation and/or other |
20 | | materials provided with the distribution. |
21 | | |
22 | | * Neither the name of the assimp team, nor the names of its |
23 | | contributors may be used to endorse or promote products |
24 | | derived from this software without specific prior |
25 | | written permission of the assimp team. |
26 | | |
27 | | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
28 | | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
29 | | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
30 | | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
31 | | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
32 | | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
33 | | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
34 | | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
35 | | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
36 | | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
37 | | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
38 | | |
39 | | ---------------------------------------------------------------------- |
40 | | */ |
41 | | #ifndef ASSIMP_BUILD_NO_EXPORT |
42 | | #ifndef ASSIMP_BUILD_NO_FBX_EXPORTER |
43 | | |
44 | | #include "FBXExporter.h" |
45 | | #include "FBXExportNode.h" |
46 | | #include "FBXExportProperty.h" |
47 | | #include "FBXCommon.h" |
48 | | #include "FBXUtil.h" |
49 | | |
50 | | #include <assimp/version.h> // aiGetVersion |
51 | | #include <assimp/IOSystem.hpp> |
52 | | #include <assimp/Exporter.hpp> |
53 | | #include <assimp/DefaultLogger.hpp> |
54 | | #include <assimp/Logger.hpp> |
55 | | #include <assimp/StreamWriter.h> // StreamWriterLE |
56 | | #include <assimp/Exceptional.h> // DeadlyExportError |
57 | | #include <assimp/material.h> // aiTextureType |
58 | | #include <assimp/scene.h> |
59 | | #include <assimp/mesh.h> |
60 | | |
61 | | // Header files, standard library. |
62 | | #include <array> |
63 | | #include <ctime> // localtime, tm_* |
64 | | #include <map> |
65 | | #include <memory> // shared_ptr |
66 | | #include <numeric> |
67 | | #include <set> |
68 | | #include <sstream> // stringstream |
69 | | #include <string> |
70 | | #include <unordered_set> |
71 | | #include <utility> |
72 | | #include <vector> |
73 | | #include <cmath> |
74 | | |
75 | | // RESOURCES: |
76 | | // https://code.blender.org/2013/08/fbx-binary-file-format-specification/ |
77 | | // https://wiki.blender.org/index.php/User:Mont29/Foundation/FBX_File_Structure |
78 | | |
79 | | using namespace Assimp; |
80 | | using namespace Assimp::FBX; |
81 | | |
82 | | // some constants that we'll use for writing metadata |
83 | | namespace Assimp { |
84 | | namespace FBX { |
85 | | const std::string EXPORT_VERSION_STR = "7.5.0"; |
86 | | const uint32_t EXPORT_VERSION_INT = 7500; // 7.5 == 2016+ |
87 | | // FBX files have some hashed values that depend on the creation time field, |
88 | | // but for now we don't actually know how to generate these. |
89 | | // what we can do is set them to a known-working version. |
90 | | // this is the data that Blender uses in their FBX export process. |
91 | | const std::string GENERIC_CTIME = "1970-01-01 10:00:00:000"; |
92 | | const std::string GENERIC_FILEID = |
93 | | "\x28\xb3\x2a\xeb\xb6\x24\xcc\xc2\xbf\xc8\xb0\x2a\xa9\x2b\xfc\xf1"; |
94 | | const std::string GENERIC_FOOTID = |
95 | | "\xfa\xbc\xab\x09\xd0\xc8\xd4\x66\xb1\x76\xfb\x83\x1c\xf7\x26\x7e"; |
96 | | const std::string FOOT_MAGIC = |
97 | | "\xf8\x5a\x8c\x6a\xde\xf5\xd9\x7e\xec\xe9\x0c\xe3\x75\x8f\x29\x0b"; |
98 | | const std::string COMMENT_UNDERLINE = |
99 | | ";------------------------------------------------------------------"; |
100 | | } |
101 | | |
102 | | // --------------------------------------------------------------------- |
103 | | // Worker function for exporting a scene to binary FBX. |
104 | | // Prototyped and registered in Exporter.cpp |
105 | | void ExportSceneFBX ( |
106 | | const char* pFile, |
107 | | IOSystem* pIOSystem, |
108 | | const aiScene* pScene, |
109 | | const ExportProperties* pProperties |
110 | 56 | ){ |
111 | | // initialize the exporter |
112 | 56 | FBXExporter exporter(pScene, pProperties); |
113 | | |
114 | | // perform binary export |
115 | 56 | exporter.ExportBinary(pFile, pIOSystem); |
116 | 56 | } |
117 | | |
118 | | // --------------------------------------------------------------------- |
119 | | // Worker function for exporting a scene to ASCII FBX. |
120 | | // Prototyped and registered in Exporter.cpp |
121 | | void ExportSceneFBXA ( |
122 | | const char* pFile, |
123 | | IOSystem* pIOSystem, |
124 | | const aiScene* pScene, |
125 | | const ExportProperties* pProperties |
126 | | |
127 | 0 | ){ |
128 | | // initialize the exporter |
129 | 0 | FBXExporter exporter(pScene, pProperties); |
130 | | |
131 | | // perform ascii export |
132 | 0 | exporter.ExportAscii(pFile, pIOSystem); |
133 | 0 | } |
134 | | |
135 | | } // end of namespace Assimp |
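Both worker functions are reached through the generic Assimp::Exporter front end. A minimal usage sketch, assuming the "fbx"/"fbxa" format ids under which these workers are typically registered in Exporter.cpp:

    #include <assimp/Exporter.hpp>
    #include <assimp/scene.h>

    // Sketch only: export an already-built aiScene as binary FBX via the worker above.
    bool save_as_fbx(const aiScene *scene, const char *path) {
        Assimp::Exporter exporter;
        // "fbx" selects ExportSceneFBX (binary); "fbxa" would select ExportSceneFBXA (ascii).
        return exporter.Export(scene, "fbx", path) == AI_SUCCESS;
    }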
136 | | |
137 | | FBXExporter::FBXExporter ( const aiScene* pScene, const ExportProperties* pProperties ) |
138 | 56 | : binary(false) |
139 | 56 | , mScene(pScene) |
140 | 56 | , mProperties(pProperties) |
141 | 56 | , outfile() |
142 | 56 | , connections() |
143 | 56 | , mesh_uids() |
144 | 56 | , material_uids() |
145 | 56 | , node_uids() { |
146 | | // will probably need to determine UIDs, connections, etc here. |
147 | | // basically anything that needs to be known |
148 | | // before we start writing sections to the stream. |
149 | 56 | } |
150 | | |
151 | | void FBXExporter::ExportBinary ( |
152 | | const char* pFile, |
153 | | IOSystem* pIOSystem |
154 | 56 | ){ |
155 | | // remember that we're exporting in binary mode |
156 | 56 | binary = true; |
157 | | |
158 | | // we're not currently using these preferences, |
159 | | // but clang will cry about it if we never touch them. |
160 | | // TODO: some of these might be relevant to export |
161 | 56 | (void)mProperties; |
162 | | |
163 | | // open the indicated file for writing (in binary mode) |
164 | 56 | outfile.reset(pIOSystem->Open(pFile,"wb")); |
165 | 56 | if (!outfile) { |
166 | 0 | throw DeadlyExportError( |
167 | 0 | "could not open output .fbx file: " + std::string(pFile) |
168 | 0 | ); |
169 | 0 | } |
170 | | |
171 | | // first a binary-specific file header |
172 | 56 | WriteBinaryHeader(); |
173 | | |
174 | | // the rest of the file is in node entries. |
175 | | // we have to serialize each entry before we write to the output, |
176 | | // as the first thing we write is the byte offset of the _next_ entry. |
177 | | // Either that or we can skip back to write the offset when we finish. |
178 | 56 | WriteAllNodes(); |
179 | | |
180 | | // finally we have a binary footer to the file |
181 | 56 | WriteBinaryFooter(); |
182 | | |
183 | | // explicitly release file pointer, |
184 | | // so we don't have to rely on class destruction. |
185 | 56 | outfile.reset(); |
186 | 56 | } |
187 | | |
188 | | void FBXExporter::ExportAscii ( |
189 | | const char* pFile, |
190 | | IOSystem* pIOSystem |
191 | 0 | ){ |
192 | | // remember that we're exporting in ascii mode |
193 | 0 | binary = false; |
194 | | |
195 | | // open the indicated file for writing in text mode |
196 | 0 | outfile.reset(pIOSystem->Open(pFile,"wt")); |
197 | 0 | if (!outfile) { |
198 | 0 | throw DeadlyExportError( |
199 | 0 | "could not open output .fbx file: " + std::string(pFile) |
200 | 0 | ); |
201 | 0 | } |
202 | | |
203 | | // write the ascii header |
204 | 0 | WriteAsciiHeader(); |
205 | | |
206 | | // write all the sections |
207 | 0 | WriteAllNodes(); |
208 | | |
209 | | // make sure the file ends with a newline. |
210 | | // note: if the file is opened in text mode, |
211 | | // this should do the right cross-platform thing. |
212 | 0 | outfile->Write("\n", 1, 1); |
213 | | |
214 | | // explicitly release file pointer, |
215 | | // so we don't have to rely on class destruction. |
216 | 0 | outfile.reset(); |
217 | 0 | } |
218 | | |
219 | | void FBXExporter::WriteAsciiHeader() |
220 | 0 | { |
221 | | // basically just a comment at the top of the file |
222 | 0 | std::stringstream head; |
223 | 0 | head << "; FBX " << EXPORT_VERSION_STR << " project file\n"; |
224 | 0 | head << "; Created by the Open Asset Import Library (Assimp)\n"; |
225 | 0 | head << "; http://assimp.org\n"; |
226 | 0 | head << "; -------------------------------------------------\n"; |
227 | 0 | const std::string ascii_header = head.str(); |
228 | 0 | outfile->Write(ascii_header.c_str(), ascii_header.size(), 1); |
229 | 0 | } |
230 | | |
231 | | void FBXExporter::WriteAsciiSectionHeader(const std::string& title) |
232 | 0 | { |
233 | 0 | StreamWriterLE outstream(outfile); |
234 | 0 | std::stringstream s; |
235 | 0 | s << "\n\n; " << title << '\n'; |
236 | 0 | s << FBX::COMMENT_UNDERLINE << "\n"; |
237 | 0 | outstream.PutString(s.str()); |
238 | 0 | } |
239 | | |
240 | | void FBXExporter::WriteBinaryHeader() |
241 | 56 | { |
242 | | // first a specific sequence of 23 bytes, always the same |
243 | 56 | const char binary_header[24] = "Kaydara FBX Binary\x20\x20\x00\x1a\x00"; |
244 | 56 | outfile->Write(binary_header, 1, 23); |
245 | | |
246 | | // then FBX version number, "multiplied" by 1000, as little-endian uint32. |
247 | | // so 7.3 becomes 7300 == 0x841C0000, 7.4 becomes 7400 == 0xE81C0000, etc |
248 | 56 | { |
249 | 56 | StreamWriterLE outstream(outfile); |
250 | 56 | outstream.PutU4(EXPORT_VERSION_INT); |
251 | 56 | } // StreamWriter destructor writes the data to the file |
252 | | |
253 | | // after this the node data starts immediately |
254 | | // (probably with the FBXHeaderExtension node) |
255 | 56 | } |
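For reference, a self-contained sketch of the same 27-byte prologue written with plain <fstream> instead of StreamWriterLE (illustrative only; assumes the 7500 version constant used above):

    #include <cstdint>
    #include <fstream>

    // Sketch: binary FBX prologue -- 23 magic bytes, then the version as a little-endian uint32.
    static void write_fbx_prologue(std::ofstream &out, uint32_t version = 7500) {
        const char magic[24] = "Kaydara FBX Binary\x20\x20\x00\x1a\x00"; // 23 bytes + implicit NUL
        out.write(magic, 23);
        const unsigned char le[4] = {
            static_cast<unsigned char>(version & 0xFF),          // 7500 -> 4C 1D 00 00
            static_cast<unsigned char>((version >> 8) & 0xFF),
            static_cast<unsigned char>((version >> 16) & 0xFF),
            static_cast<unsigned char>((version >> 24) & 0xFF),
        };
        out.write(reinterpret_cast<const char *>(le), 4);
    }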
256 | | |
257 | | void FBXExporter::WriteBinaryFooter() |
258 | 56 | { |
259 | 56 | outfile->Write(NULL_RECORD, NumNullRecords, 1); |
260 | | |
261 | 56 | outfile->Write(GENERIC_FOOTID.c_str(), GENERIC_FOOTID.size(), 1); |
262 | | |
263 | | // here some padding is added for alignment to 16 bytes. |
264 | | // if already aligned, a full 16 bytes of padding are added (e.g. pos % 16 == 0 gives pad == 16). |
265 | 56 | size_t pos = outfile->Tell(); |
266 | 56 | size_t pad = 16 - (pos % 16); |
267 | 561 | for (size_t i = 0; i < pad; ++i) { |
268 | 505 | outfile->Write("\x00", 1, 1); |
269 | 505 | } |
270 | | |
271 | | // not sure what this is, but it seems to always be 0 in modern files |
272 | 280 | for (size_t i = 0; i < 4; ++i) { |
273 | 224 | outfile->Write("\x00", 1, 1); |
274 | 224 | } |
275 | | |
276 | | // now the file version again |
277 | 56 | { |
278 | 56 | StreamWriterLE outstream(outfile); |
279 | 56 | outstream.PutU4(EXPORT_VERSION_INT); |
280 | 56 | } // StreamWriter destructor writes the data to the file |
281 | | |
282 | | // and finally some binary footer added to all files |
283 | 6.77k | for (size_t i = 0; i < 120; ++i) { |
284 | 6.72k | outfile->Write("\x00", 1, 1); |
285 | 6.72k | } |
286 | 56 | outfile->Write(FOOT_MAGIC.c_str(), FOOT_MAGIC.size(), 1); |
287 | 56 | } |
288 | | |
289 | | void FBXExporter::WriteAllNodes () |
290 | 56 | { |
291 | | // header |
292 | | // (and fileid, creation time, creator, if binary) |
293 | 56 | WriteHeaderExtension(); |
294 | | |
295 | | // global settings |
296 | 56 | WriteGlobalSettings(); |
297 | | |
298 | | // documents |
299 | 56 | WriteDocuments(); |
300 | | |
301 | | // references |
302 | 56 | WriteReferences(); |
303 | | |
304 | | // definitions |
305 | 56 | WriteDefinitions(); |
306 | | |
307 | | // objects |
308 | 56 | WriteObjects(); |
309 | | |
310 | | // connections |
311 | 56 | WriteConnections(); |
312 | | |
313 | | // WriteTakes? (deprecated since at least 2015 (fbx 7.4)) |
314 | 56 | } |
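Taken together, these calls fix the top-level section order of the exported file. Trimmed to a skeleton (ASCII flavour, a rough sketch rather than verbatim output), the result looks like:

    FBXHeaderExtension:  { FBXHeaderVersion, FBXVersion, CreationTimeStamp, Creator }
    GlobalSettings:      { Version, Properties70 }
    Documents:           { Count: 1, Document: { ..., RootNode: 0 } }
    References:          { }
    Definitions:         { Version: 100, Count, ObjectType ... }
    Objects:             { Geometry, Model, Material, Texture, Deformer, ... }
    Connections:         { C: ... }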
315 | | |
316 | | //FBXHeaderExtension top-level node |
317 | | void FBXExporter::WriteHeaderExtension () |
318 | 56 | { |
319 | 56 | if (!binary) { |
320 | | // no title, follows directly from the top comment |
321 | 0 | } |
322 | 56 | FBX::Node n("FBXHeaderExtension"); |
323 | 56 | StreamWriterLE outstream(outfile); |
324 | 56 | int indent = 0; |
325 | | |
326 | | // begin node |
327 | 56 | n.Begin(outstream, binary, indent); |
328 | | |
329 | | // write properties |
330 | | // (none) |
331 | | |
332 | | // finish properties |
333 | 56 | n.EndProperties(outstream, binary, indent, 0); |
334 | | |
335 | | // begin children |
336 | 56 | n.BeginChildren(outstream, binary, indent); |
337 | | |
338 | 56 | indent = 1; |
339 | | |
340 | | // write child nodes |
341 | 56 | FBX::Node::WritePropertyNode( |
342 | 56 | "FBXHeaderVersion", int32_t(1003), outstream, binary, indent |
343 | 56 | ); |
344 | 56 | FBX::Node::WritePropertyNode( |
345 | 56 | "FBXVersion", int32_t(EXPORT_VERSION_INT), outstream, binary, indent |
346 | 56 | ); |
347 | 56 | if (binary) { |
348 | 56 | FBX::Node::WritePropertyNode( |
349 | 56 | "EncryptionType", int32_t(0), outstream, binary, indent |
350 | 56 | ); |
351 | 56 | } |
352 | | |
353 | 56 | FBX::Node CreationTimeStamp("CreationTimeStamp"); |
354 | 56 | time_t rawtime; |
355 | 56 | time(&rawtime); |
356 | 56 | struct tm * now = localtime(&rawtime); |
357 | 56 | CreationTimeStamp.AddChild("Version", int32_t(1000)); |
358 | 56 | CreationTimeStamp.AddChild("Year", int32_t(now->tm_year + 1900)); |
359 | 56 | CreationTimeStamp.AddChild("Month", int32_t(now->tm_mon + 1)); |
360 | 56 | CreationTimeStamp.AddChild("Day", int32_t(now->tm_mday)); |
361 | 56 | CreationTimeStamp.AddChild("Hour", int32_t(now->tm_hour)); |
362 | 56 | CreationTimeStamp.AddChild("Minute", int32_t(now->tm_min)); |
363 | 56 | CreationTimeStamp.AddChild("Second", int32_t(now->tm_sec)); |
364 | 56 | CreationTimeStamp.AddChild("Millisecond", int32_t(0)); |
365 | 56 | CreationTimeStamp.Dump(outstream, binary, indent); |
366 | | |
367 | 56 | std::stringstream creator; |
368 | 56 | creator << "Open Asset Import Library (Assimp) " << aiGetVersionMajor() |
369 | 56 | << "." << aiGetVersionMinor() << "." << aiGetVersionRevision(); |
370 | 56 | FBX::Node::WritePropertyNode( |
371 | 56 | "Creator", creator.str(), outstream, binary, indent |
372 | 56 | ); |
373 | | |
374 | 56 | indent = 0; |
375 | | |
376 | | // finish node |
377 | 56 | n.End(outstream, binary, indent, true); |
378 | | |
379 | | // that's it for FBXHeaderExtension... |
380 | 56 | if (!binary) { return; } |
381 | | |
382 | | // but binary files also need top-level FileID, CreationTime, Creator: |
383 | 56 | std::vector<uint8_t> raw(GENERIC_FILEID.size()); |
384 | 952 | for (size_t i = 0; i < GENERIC_FILEID.size(); ++i) { |
385 | 896 | raw[i] = uint8_t(GENERIC_FILEID[i]); |
386 | 896 | } |
387 | 56 | FBX::Node::WritePropertyNode( |
388 | 56 | "FileId", std::move(raw), outstream, binary, indent |
389 | 56 | ); |
390 | 56 | FBX::Node::WritePropertyNode( |
391 | 56 | "CreationTime", GENERIC_CTIME, outstream, binary, indent |
392 | 56 | ); |
393 | 56 | FBX::Node::WritePropertyNode( |
394 | 56 | "Creator", creator.str(), outstream, binary, indent |
395 | 56 | ); |
396 | 56 | } |
397 | | |
398 | | // WriteGlobalSettings helpers |
399 | | |
400 | | void WritePropInt(const aiScene* scene, FBX::Node& p, const std::string& key, int defaultValue) |
401 | 504 | { |
402 | 504 | int value; |
403 | 504 | if (scene->mMetaData != nullptr && scene->mMetaData->Get(key, value)) { |
404 | 0 | p.AddP70int(key, value); |
405 | 504 | } else { |
406 | 504 | p.AddP70int(key, defaultValue); |
407 | 504 | } |
408 | 504 | } |
409 | | |
410 | | void WritePropDouble(const aiScene* scene, FBX::Node& p, const std::string& key, double defaultValue) |
411 | 168 | { |
412 | 168 | double value; |
413 | 168 | if (scene->mMetaData != nullptr && scene->mMetaData->Get(key, value)) { |
414 | 0 | p.AddP70double(key, value); |
415 | 168 | } else { |
416 | | // fallback lookup float instead |
417 | 168 | float floatValue; |
418 | 168 | if (scene->mMetaData != nullptr && scene->mMetaData->Get(key, floatValue)) { |
419 | 0 | p.AddP70double(key, (double)floatValue); |
420 | 168 | } else { |
421 | 168 | p.AddP70double(key, defaultValue); |
422 | 168 | } |
423 | 168 | } |
424 | 168 | } |
425 | | |
426 | | void WritePropEnum(const aiScene* scene, FBX::Node& p, const std::string& key, int defaultValue) |
427 | 168 | { |
428 | 168 | int value; |
429 | 168 | if (scene->mMetaData != nullptr && scene->mMetaData->Get(key, value)) { |
430 | 0 | p.AddP70enum(key, value); |
431 | 168 | } else { |
432 | 168 | p.AddP70enum(key, defaultValue); |
433 | 168 | } |
434 | 168 | } |
435 | | |
436 | | void WritePropColor(const aiScene* scene, FBX::Node& p, const std::string& key, const aiVector3D& defaultValue) |
437 | 56 | { |
438 | 56 | aiVector3D value; |
439 | 56 | if (scene->mMetaData != nullptr && scene->mMetaData->Get(key, value)) { |
440 | | // ai_real can be float or double, cast to avoid warnings |
441 | 0 | p.AddP70color(key, (double)value.x, (double)value.y, (double)value.z); |
442 | 56 | } else { |
443 | 56 | p.AddP70color(key, (double)defaultValue.x, (double)defaultValue.y, (double)defaultValue.z); |
444 | 56 | } |
445 | 56 | } |
446 | | |
447 | | void WritePropString(const aiScene* scene, FBX::Node& p, const std::string& key, const std::string& defaultValue) |
448 | 56 | { |
449 | 56 | aiString value; // MetaData doesn't hold std::string |
450 | 56 | if (scene->mMetaData != nullptr && scene->mMetaData->Get(key, value)) { |
451 | 0 | p.AddP70string(key, value.C_Str()); |
452 | 56 | } else { |
453 | 56 | p.AddP70string(key, defaultValue); |
454 | 56 | } |
455 | 56 | } |
456 | | |
457 | 56 | void FBXExporter::WriteGlobalSettings () { |
458 | 56 | FBX::Node gs("GlobalSettings"); |
459 | 56 | gs.AddChild("Version", int32_t(1000)); |
460 | | |
461 | 56 | FBX::Node p("Properties70"); |
462 | 56 | WritePropInt(mScene, p, "UpAxis", 1); |
463 | 56 | WritePropInt(mScene, p, "UpAxisSign", 1); |
464 | 56 | WritePropInt(mScene, p, "FrontAxis", 2); |
465 | 56 | WritePropInt(mScene, p, "FrontAxisSign", 1); |
466 | 56 | WritePropInt(mScene, p, "CoordAxis", 0); |
467 | 56 | WritePropInt(mScene, p, "CoordAxisSign", 1); |
468 | 56 | WritePropInt(mScene, p, "OriginalUpAxis", 1); |
469 | 56 | WritePropInt(mScene, p, "OriginalUpAxisSign", 1); |
470 | 56 | WritePropDouble(mScene, p, "UnitScaleFactor", 1.0); |
471 | 56 | WritePropDouble(mScene, p, "OriginalUnitScaleFactor", 1.0); |
472 | 56 | WritePropColor(mScene, p, "AmbientColor", aiVector3D((ai_real)0.0, (ai_real)0.0, (ai_real)0.0)); |
473 | 56 | WritePropString(mScene, p,"DefaultCamera", "Producer Perspective"); |
474 | 56 | WritePropEnum(mScene, p, "TimeMode", 11); |
475 | 56 | WritePropEnum(mScene, p, "TimeProtocol", 2); |
476 | 56 | WritePropEnum(mScene, p, "SnapOnFrameMode", 0); |
477 | 56 | p.AddP70time("TimeSpanStart", 0); // TODO: animation support |
478 | 56 | p.AddP70time("TimeSpanStop", FBX::SECOND); // TODO: animation support |
479 | 56 | WritePropDouble(mScene, p, "CustomFrameRate", -1.0); |
480 | 56 | p.AddP70("TimeMarker", "Compound", "", ""); // not sure what this is |
481 | 56 | WritePropInt(mScene, p, "CurrentTimeMarker", -1); |
482 | 56 | gs.AddChild(p); |
483 | | |
484 | 56 | gs.Dump(outfile, binary, 0); |
485 | 56 | } |
486 | | |
487 | 56 | void FBXExporter::WriteDocuments() { |
488 | 56 | if (!binary) { |
489 | 0 | WriteAsciiSectionHeader("Documents Description"); |
490 | 0 | } |
491 | | |
492 | | // not sure what the use of multiple documents would be, |
493 | | // or whether any end-application supports it |
494 | 56 | FBX::Node docs("Documents"); |
495 | 56 | docs.AddChild("Count", int32_t(1)); |
496 | 56 | FBX::Node doc("Document"); |
497 | | |
498 | | // generate uid |
499 | 56 | int64_t uid = generate_uid(); |
500 | 56 | doc.AddProperties(uid, "", "Scene"); |
501 | 56 | FBX::Node p("Properties70"); |
502 | 56 | p.AddP70("SourceObject", "object", "", ""); // what is this even for? |
503 | 56 | p.AddP70string("ActiveAnimStackName", ""); // should do this properly? |
504 | 56 | doc.AddChild(p); |
505 | | |
506 | | // UID for root node in scene hierarchy. |
507 | | // always set to 0 in the case of a single document. |
508 | | // not sure what happens if more than one document exists, |
509 | | // but that won't matter to us as we're exporting a single scene. |
510 | 56 | doc.AddChild("RootNode", int64_t(0)); |
511 | | |
512 | 56 | docs.AddChild(doc); |
513 | 56 | docs.Dump(outfile, binary, 0); |
514 | 56 | } |
515 | | |
516 | 56 | void FBXExporter::WriteReferences() { |
517 | 56 | if (!binary) { |
518 | 0 | WriteAsciiSectionHeader("Document References"); |
519 | 0 | } |
520 | | // always empty for now. |
521 | | // not really sure what this is for. |
522 | 56 | FBX::Node n("References"); |
523 | 56 | n.force_has_children = true; |
524 | 56 | n.Dump(outfile, binary, 0); |
525 | 56 | } |
526 | | |
527 | | |
528 | | // --------------------------------------------------------------- |
529 | | // some internal helper functions used for writing the definitions |
530 | | // (before any actual data is written) |
531 | | // --------------------------------------------------------------- |
532 | 15.1k | size_t count_nodes(const aiNode* n, const aiNode* root) { |
533 | 15.1k | size_t count; |
534 | 15.1k | if (n == root) { |
535 | 56 | count = n->mNumMeshes; // (not counting root node) |
536 | 15.1k | } else if (n->mNumMeshes > 1) { |
537 | 282 | count = n->mNumMeshes + 1; |
538 | 14.8k | } else { |
539 | 14.8k | count = 1; |
540 | 14.8k | } |
541 | 30.2k | for (size_t i = 0; i < n->mNumChildren; ++i) { |
542 | 15.1k | count += count_nodes(n->mChildren[i], root); |
543 | 15.1k | } |
544 | 15.1k | return count; |
545 | 15.1k | } |
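A worked example of this bookkeeping (hypothetical scene): a root with no meshes and two children, one carrying 3 meshes and one carrying a single mesh, yields 0 + (3 + 1) + 1 = 5 Model nodes, since a multi-mesh node is split into one parent Model plus one Model per mesh, while the root itself never becomes a Model node.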
546 | | |
547 | 46 | static bool has_phong_mat(const aiScene* scene) { |
548 | | // just search for any material with a shininess exponent |
549 | 99 | for (size_t i = 0; i < scene->mNumMaterials; ++i) { |
550 | 53 | aiMaterial* mat = scene->mMaterials[i]; |
551 | 53 | float shininess = 0; |
552 | 53 | mat->Get(AI_MATKEY_SHININESS, shininess); |
553 | 53 | if (shininess > 0) { |
554 | 0 | return true; |
555 | 0 | } |
556 | 53 | } |
557 | 46 | return false; |
558 | 46 | } |
559 | | |
560 | 56 | static size_t count_images(const aiScene* scene) { |
561 | 56 | std::unordered_set<std::string> images; |
562 | 56 | aiString texpath; |
563 | 109 | for (size_t i = 0; i < scene->mNumMaterials; ++i) { |
564 | 53 | aiMaterial *mat = scene->mMaterials[i]; |
565 | 954 | for (size_t tt = aiTextureType_DIFFUSE; tt < aiTextureType_UNKNOWN; ++tt) { |
566 | 901 | const aiTextureType textype = static_cast<aiTextureType>(tt); |
567 | 901 | const size_t texcount = mat->GetTextureCount(textype); |
568 | 911 | for (unsigned int j = 0; j < texcount; ++j) { |
569 | 10 | mat->GetTexture(textype, j, &texpath); |
570 | 10 | images.insert(std::string(texpath.C_Str())); |
571 | 10 | } |
572 | 901 | } |
573 | 53 | } |
574 | | |
575 | 56 | return images.size(); |
576 | 56 | } |
577 | | |
578 | 56 | static size_t count_textures(const aiScene* scene) { |
579 | 56 | size_t count = 0; |
580 | 109 | for (size_t i = 0; i < scene->mNumMaterials; ++i) { |
581 | 53 | aiMaterial* mat = scene->mMaterials[i]; |
582 | 53 | for ( |
583 | 53 | size_t tt = aiTextureType_DIFFUSE; |
584 | 954 | tt < aiTextureType_UNKNOWN; |
585 | 901 | ++tt |
586 | 901 | ){ |
587 | | // TODO: handle layered textures |
588 | 901 | if (mat->GetTextureCount(static_cast<aiTextureType>(tt)) > 0) { |
589 | 10 | count += 1; |
590 | 10 | } |
591 | 901 | } |
592 | 53 | } |
593 | 56 | return count; |
594 | 56 | } |
595 | | |
596 | 56 | static size_t count_deformers(const aiScene* scene) { |
597 | 56 | size_t count = 0; |
598 | 1.90k | for (size_t i = 0; i < scene->mNumMeshes; ++i) { |
599 | 1.85k | const size_t n = scene->mMeshes[i]->mNumBones; |
600 | 1.85k | if (n) { |
601 | | // 1 main deformer, 1 subdeformer per bone |
602 | 6 | count += n + 1; |
603 | 6 | } |
604 | 1.85k | } |
605 | 56 | return count; |
606 | 56 | } |
607 | | |
608 | 56 | void FBXExporter::WriteDefinitions () { |
609 | | // basically this is just bookkeeping: |
610 | | // determining how many of each type of object there are |
611 | | // and specifying the base properties to use when otherwise unspecified. |
612 | | |
613 | | // ascii section header |
614 | 56 | if (!binary) { |
615 | 0 | WriteAsciiSectionHeader("Object definitions"); |
616 | 0 | } |
617 | | |
618 | | // we need to count the objects |
619 | 56 | int32_t count; |
620 | 56 | int32_t total_count = 0; |
621 | | |
622 | | // and store them |
623 | 56 | std::vector<FBX::Node> object_nodes; |
624 | 56 | FBX::Node n, pt, p; |
625 | | |
626 | | // GlobalSettings |
627 | | // this seems to always be here in Maya exports |
628 | 56 | n = FBX::Node("ObjectType", "GlobalSettings"); |
629 | 56 | count = 1; |
630 | 56 | n.AddChild("Count", count); |
631 | 56 | object_nodes.push_back(n); |
632 | 56 | total_count += count; |
633 | | |
634 | | // AnimationStack / FbxAnimStack |
635 | | // this seems to always be here in Maya exports, |
636 | | // but no harm seems to come of leaving it out. |
637 | 56 | count = mScene->mNumAnimations; |
638 | 56 | if (count) { |
639 | 4 | n = FBX::Node("ObjectType", "AnimationStack"); |
640 | 4 | n.AddChild("Count", count); |
641 | 4 | pt = FBX::Node("PropertyTemplate", "FbxAnimStack"); |
642 | 4 | p = FBX::Node("Properties70"); |
643 | 4 | p.AddP70string("Description", ""); |
644 | 4 | p.AddP70time("LocalStart", 0); |
645 | 4 | p.AddP70time("LocalStop", 0); |
646 | 4 | p.AddP70time("ReferenceStart", 0); |
647 | 4 | p.AddP70time("ReferenceStop", 0); |
648 | 4 | pt.AddChild(p); |
649 | 4 | n.AddChild(pt); |
650 | 4 | object_nodes.push_back(n); |
651 | 4 | total_count += count; |
652 | 4 | } |
653 | | |
654 | | // AnimationLayer / FbxAnimLayer |
655 | | // this seems to always be here in Maya exports, |
656 | | // but no harm seems to come of leaving it out. |
657 | | // Assimp doesn't support animation layers, |
658 | | // so there will be one per aiAnimation |
659 | 56 | count = mScene->mNumAnimations; |
660 | 56 | if (count) { |
661 | 4 | n = FBX::Node("ObjectType", "AnimationLayer"); |
662 | 4 | n.AddChild("Count", count); |
663 | 4 | pt = FBX::Node("PropertyTemplate", "FBXAnimLayer"); |
664 | 4 | p = FBX::Node("Properties70"); |
665 | 4 | p.AddP70("Weight", "Number", "", "A", double(100)); |
666 | 4 | p.AddP70bool("Mute", false); |
667 | 4 | p.AddP70bool("Solo", false); |
668 | 4 | p.AddP70bool("Lock", false); |
669 | 4 | p.AddP70color("Color", 0.8, 0.8, 0.8); |
670 | 4 | p.AddP70("BlendMode", "enum", "", "", int32_t(0)); |
671 | 4 | p.AddP70("RotationAccumulationMode", "enum", "", "", int32_t(0)); |
672 | 4 | p.AddP70("ScaleAccumulationMode", "enum", "", "", int32_t(0)); |
673 | 4 | p.AddP70("BlendModeBypass", "ULongLong", "", "", int64_t(0)); |
674 | 4 | pt.AddChild(p); |
675 | 4 | n.AddChild(pt); |
676 | 4 | object_nodes.push_back(n); |
677 | 4 | total_count += count; |
678 | 4 | } |
679 | | |
680 | | // NodeAttribute |
681 | | // this is completely absurd. |
682 | | // there can only be one "NodeAttribute" template, |
683 | | // but FbxSkeleton, FbxCamera, FbxLight all are "NodeAttributes". |
684 | | // so if only one exists we should set the template for that, |
685 | | // otherwise... we just pick one :/. |
686 | | // the others have to set all their properties every instance, |
687 | | // because there's no template. |
688 | 56 | count = 1; // TODO: select properly |
689 | 56 | if (count) { |
690 | | // FbxSkeleton |
691 | 56 | n = FBX::Node("ObjectType", "NodeAttribute"); |
692 | 56 | n.AddChild("Count", count); |
693 | 56 | pt = FBX::Node("PropertyTemplate", "FbxSkeleton"); |
694 | 56 | p = FBX::Node("Properties70"); |
695 | 56 | p.AddP70color("Color", 0.8, 0.8, 0.8); |
696 | 56 | p.AddP70double("Size", 33.333333333333); |
697 | 56 | p.AddP70("LimbLength", "double", "Number", "H", double(1)); |
698 | | // note: not sure what the "H" flag is for - hidden? |
699 | 56 | pt.AddChild(p); |
700 | 56 | n.AddChild(pt); |
701 | 56 | object_nodes.push_back(n); |
702 | 56 | total_count += count; |
703 | 56 | } |
704 | | |
705 | | // Model / FbxNode |
706 | | // <~~ node hierarchy |
707 | 56 | count = int32_t(count_nodes(mScene->mRootNode, mScene->mRootNode)); |
708 | 56 | if (count) { |
709 | 56 | n = FBX::Node("ObjectType", "Model"); |
710 | 56 | n.AddChild("Count", count); |
711 | 56 | pt = FBX::Node("PropertyTemplate", "FbxNode"); |
712 | 56 | p = FBX::Node("Properties70"); |
713 | 56 | p.AddP70enum("QuaternionInterpolate", 0); |
714 | 56 | p.AddP70vector("RotationOffset", 0.0, 0.0, 0.0); |
715 | 56 | p.AddP70vector("RotationPivot", 0.0, 0.0, 0.0); |
716 | 56 | p.AddP70vector("ScalingOffset", 0.0, 0.0, 0.0); |
717 | 56 | p.AddP70vector("ScalingPivot", 0.0, 0.0, 0.0); |
718 | 56 | p.AddP70bool("TranslationActive", false); |
719 | 56 | p.AddP70vector("TranslationMin", 0.0, 0.0, 0.0); |
720 | 56 | p.AddP70vector("TranslationMax", 0.0, 0.0, 0.0); |
721 | 56 | p.AddP70bool("TranslationMinX", false); |
722 | 56 | p.AddP70bool("TranslationMinY", false); |
723 | 56 | p.AddP70bool("TranslationMinZ", false); |
724 | 56 | p.AddP70bool("TranslationMaxX", false); |
725 | 56 | p.AddP70bool("TranslationMaxY", false); |
726 | 56 | p.AddP70bool("TranslationMaxZ", false); |
727 | 56 | p.AddP70enum("RotationOrder", 0); |
728 | 56 | p.AddP70bool("RotationSpaceForLimitOnly", false); |
729 | 56 | p.AddP70double("RotationStiffnessX", 0.0); |
730 | 56 | p.AddP70double("RotationStiffnessY", 0.0); |
731 | 56 | p.AddP70double("RotationStiffnessZ", 0.0); |
732 | 56 | p.AddP70double("AxisLen", 10.0); |
733 | 56 | p.AddP70vector("PreRotation", 0.0, 0.0, 0.0); |
734 | 56 | p.AddP70vector("PostRotation", 0.0, 0.0, 0.0); |
735 | 56 | p.AddP70bool("RotationActive", false); |
736 | 56 | p.AddP70vector("RotationMin", 0.0, 0.0, 0.0); |
737 | 56 | p.AddP70vector("RotationMax", 0.0, 0.0, 0.0); |
738 | 56 | p.AddP70bool("RotationMinX", false); |
739 | 56 | p.AddP70bool("RotationMinY", false); |
740 | 56 | p.AddP70bool("RotationMinZ", false); |
741 | 56 | p.AddP70bool("RotationMaxX", false); |
742 | 56 | p.AddP70bool("RotationMaxY", false); |
743 | 56 | p.AddP70bool("RotationMaxZ", false); |
744 | 56 | p.AddP70enum("InheritType", 0); |
745 | 56 | p.AddP70bool("ScalingActive", false); |
746 | 56 | p.AddP70vector("ScalingMin", 0.0, 0.0, 0.0); |
747 | 56 | p.AddP70vector("ScalingMax", 1.0, 1.0, 1.0); |
748 | 56 | p.AddP70bool("ScalingMinX", false); |
749 | 56 | p.AddP70bool("ScalingMinY", false); |
750 | 56 | p.AddP70bool("ScalingMinZ", false); |
751 | 56 | p.AddP70bool("ScalingMaxX", false); |
752 | 56 | p.AddP70bool("ScalingMaxY", false); |
753 | 56 | p.AddP70bool("ScalingMaxZ", false); |
754 | 56 | p.AddP70vector("GeometricTranslation", 0.0, 0.0, 0.0); |
755 | 56 | p.AddP70vector("GeometricRotation", 0.0, 0.0, 0.0); |
756 | 56 | p.AddP70vector("GeometricScaling", 1.0, 1.0, 1.0); |
757 | 56 | p.AddP70double("MinDampRangeX", 0.0); |
758 | 56 | p.AddP70double("MinDampRangeY", 0.0); |
759 | 56 | p.AddP70double("MinDampRangeZ", 0.0); |
760 | 56 | p.AddP70double("MaxDampRangeX", 0.0); |
761 | 56 | p.AddP70double("MaxDampRangeY", 0.0); |
762 | 56 | p.AddP70double("MaxDampRangeZ", 0.0); |
763 | 56 | p.AddP70double("MinDampStrengthX", 0.0); |
764 | 56 | p.AddP70double("MinDampStrengthY", 0.0); |
765 | 56 | p.AddP70double("MinDampStrengthZ", 0.0); |
766 | 56 | p.AddP70double("MaxDampStrengthX", 0.0); |
767 | 56 | p.AddP70double("MaxDampStrengthY", 0.0); |
768 | 56 | p.AddP70double("MaxDampStrengthZ", 0.0); |
769 | 56 | p.AddP70double("PreferedAngleX", 0.0); |
770 | 56 | p.AddP70double("PreferedAngleY", 0.0); |
771 | 56 | p.AddP70double("PreferedAngleZ", 0.0); |
772 | 56 | p.AddP70("LookAtProperty", "object", "", ""); |
773 | 56 | p.AddP70("UpVectorProperty", "object", "", ""); |
774 | 56 | p.AddP70bool("Show", true); |
775 | 56 | p.AddP70bool("NegativePercentShapeSupport", true); |
776 | 56 | p.AddP70int("DefaultAttributeIndex", -1); |
777 | 56 | p.AddP70bool("Freeze", false); |
778 | 56 | p.AddP70bool("LODBox", false); |
779 | 56 | p.AddP70( |
780 | 56 | "Lcl Translation", "Lcl Translation", "", "A", |
781 | 56 | double(0), double(0), double(0) |
782 | 56 | ); |
783 | 56 | p.AddP70( |
784 | 56 | "Lcl Rotation", "Lcl Rotation", "", "A", |
785 | 56 | double(0), double(0), double(0) |
786 | 56 | ); |
787 | 56 | p.AddP70( |
788 | 56 | "Lcl Scaling", "Lcl Scaling", "", "A", |
789 | 56 | double(1), double(1), double(1) |
790 | 56 | ); |
791 | 56 | p.AddP70("Visibility", "Visibility", "", "A", double(1)); |
792 | 56 | p.AddP70( |
793 | 56 | "Visibility Inheritance", "Visibility Inheritance", "", "", |
794 | 56 | int32_t(1) |
795 | 56 | ); |
796 | 56 | pt.AddChild(p); |
797 | 56 | n.AddChild(pt); |
798 | 56 | object_nodes.push_back(n); |
799 | 56 | total_count += count; |
800 | 56 | } |
801 | | |
802 | | // Geometry / FbxMesh |
803 | | // <~~ aiMesh |
804 | 56 | count = mScene->mNumMeshes; |
805 | | |
806 | | // Blendshapes are considered Geometry |
807 | 56 | int32_t bsDeformerCount=0; |
808 | 1.90k | for (size_t mi = 0; mi < mScene->mNumMeshes; ++mi) { |
809 | 1.85k | aiMesh* m = mScene->mMeshes[mi]; |
810 | 1.85k | if (m->mNumAnimMeshes > 0) { |
811 | 0 | count+=m->mNumAnimMeshes; |
812 | 0 | bsDeformerCount+=m->mNumAnimMeshes; // One deformer per blendshape |
813 | 0 | bsDeformerCount++; // Plus one master blendshape deformer |
814 | 0 | } |
815 | 1.85k | } |
816 | | |
817 | 56 | if (count) { |
818 | 46 | n = FBX::Node("ObjectType", "Geometry"); |
819 | 46 | n.AddChild("Count", count); |
820 | 46 | pt = FBX::Node("PropertyTemplate", "FbxMesh"); |
821 | 46 | p = FBX::Node("Properties70"); |
822 | 46 | p.AddP70color("Color", 0, 0, 0); |
823 | 46 | p.AddP70vector("BBoxMin", 0, 0, 0); |
824 | 46 | p.AddP70vector("BBoxMax", 0, 0, 0); |
825 | 46 | p.AddP70bool("Primary Visibility", true); |
826 | 46 | p.AddP70bool("Casts Shadows", true); |
827 | 46 | p.AddP70bool("Receive Shadows", true); |
828 | 46 | pt.AddChild(p); |
829 | 46 | n.AddChild(pt); |
830 | 46 | object_nodes.push_back(n); |
831 | 46 | total_count += count; |
832 | 46 | } |
833 | | |
834 | | // Material / FbxSurfacePhong, FbxSurfaceLambert, FbxSurfaceMaterial |
835 | | // <~~ aiMaterial |
836 | | // basically if there's any phong material this is defined as phong, |
837 | | // and otherwise lambert. |
838 | | // More complex materials cause a bare-bones FbxSurfaceMaterial definition |
839 | | // and are treated specially, as they're not really supported by FBX. |
840 | | // TODO: support Maya's Stingray PBS material |
841 | 56 | count = mScene->mNumMaterials; |
842 | 56 | if (count) { |
843 | 46 | bool has_phong = has_phong_mat(mScene); |
844 | 46 | n = FBX::Node("ObjectType", "Material"); |
845 | 46 | n.AddChild("Count", count); |
846 | 46 | pt = FBX::Node("PropertyTemplate"); |
847 | 46 | if (has_phong) { |
848 | 0 | pt.AddProperty("FbxSurfacePhong"); |
849 | 46 | } else { |
850 | 46 | pt.AddProperty("FbxSurfaceLambert"); |
851 | 46 | } |
852 | 46 | p = FBX::Node("Properties70"); |
853 | 46 | if (has_phong) { |
854 | 0 | p.AddP70string("ShadingModel", "Phong"); |
855 | 46 | } else { |
856 | 46 | p.AddP70string("ShadingModel", "Lambert"); |
857 | 46 | } |
858 | 46 | p.AddP70bool("MultiLayer", false); |
859 | 46 | p.AddP70colorA("EmissiveColor", 0.0, 0.0, 0.0); |
860 | 46 | p.AddP70numberA("EmissiveFactor", 1.0); |
861 | 46 | p.AddP70colorA("AmbientColor", 0.2, 0.2, 0.2); |
862 | 46 | p.AddP70numberA("AmbientFactor", 1.0); |
863 | 46 | p.AddP70colorA("DiffuseColor", 0.8, 0.8, 0.8); |
864 | 46 | p.AddP70numberA("DiffuseFactor", 1.0); |
865 | 46 | p.AddP70vector("Bump", 0.0, 0.0, 0.0); |
866 | 46 | p.AddP70vector("NormalMap", 0.0, 0.0, 0.0); |
867 | 46 | p.AddP70double("BumpFactor", 1.0); |
868 | 46 | p.AddP70colorA("TransparentColor", 0.0, 0.0, 0.0); |
869 | 46 | p.AddP70numberA("TransparencyFactor", 0.0); |
870 | 46 | p.AddP70color("DisplacementColor", 0.0, 0.0, 0.0); |
871 | 46 | p.AddP70double("DisplacementFactor", 1.0); |
872 | 46 | p.AddP70color("VectorDisplacementColor", 0.0, 0.0, 0.0); |
873 | 46 | p.AddP70double("VectorDisplacementFactor", 1.0); |
874 | 46 | if (has_phong) { |
875 | 0 | p.AddP70colorA("SpecularColor", 0.2, 0.2, 0.2); |
876 | 0 | p.AddP70numberA("SpecularFactor", 1.0); |
877 | 0 | p.AddP70numberA("ShininessExponent", 20.0); |
878 | 0 | p.AddP70colorA("ReflectionColor", 0.0, 0.0, 0.0); |
879 | 0 | p.AddP70numberA("ReflectionFactor", 1.0); |
880 | 0 | } |
881 | 46 | pt.AddChild(p); |
882 | 46 | n.AddChild(pt); |
883 | 46 | object_nodes.push_back(n); |
884 | 46 | total_count += count; |
885 | 46 | } |
886 | | |
887 | | // Video / FbxVideo |
888 | | // one for each image file. |
889 | 56 | count = int32_t(count_images(mScene)); |
890 | 56 | if (count) { |
891 | 6 | n = FBX::Node("ObjectType", "Video"); |
892 | 6 | n.AddChild("Count", count); |
893 | 6 | pt = FBX::Node("PropertyTemplate", "FbxVideo"); |
894 | 6 | p = FBX::Node("Properties70"); |
895 | 6 | p.AddP70bool("ImageSequence", false); |
896 | 6 | p.AddP70int("ImageSequenceOffset", 0); |
897 | 6 | p.AddP70double("FrameRate", 0.0); |
898 | 6 | p.AddP70int("LastFrame", 0); |
899 | 6 | p.AddP70int("Width", 0); |
900 | 6 | p.AddP70int("Height", 0); |
901 | 6 | p.AddP70("Path", "KString", "XRefUrl", "", ""); |
902 | 6 | p.AddP70int("StartFrame", 0); |
903 | 6 | p.AddP70int("StopFrame", 0); |
904 | 6 | p.AddP70double("PlaySpeed", 0.0); |
905 | 6 | p.AddP70time("Offset", 0); |
906 | 6 | p.AddP70enum("InterlaceMode", 0); |
907 | 6 | p.AddP70bool("FreeRunning", false); |
908 | 6 | p.AddP70bool("Loop", false); |
909 | 6 | p.AddP70enum("AccessMode", 0); |
910 | 6 | pt.AddChild(p); |
911 | 6 | n.AddChild(pt); |
912 | 6 | object_nodes.push_back(n); |
913 | 6 | total_count += count; |
914 | 6 | } |
915 | | |
916 | | // Texture / FbxFileTexture |
917 | | // <~~ aiTexture |
918 | 56 | count = int32_t(count_textures(mScene)); |
919 | 56 | if (count) { |
920 | 6 | n = FBX::Node("ObjectType", "Texture"); |
921 | 6 | n.AddChild("Count", count); |
922 | 6 | pt = FBX::Node("PropertyTemplate", "FbxFileTexture"); |
923 | 6 | p = FBX::Node("Properties70"); |
924 | 6 | p.AddP70enum("TextureTypeUse", 0); |
925 | 6 | p.AddP70numberA("Texture alpha", 1.0); |
926 | 6 | p.AddP70enum("CurrentMappingType", 0); |
927 | 6 | p.AddP70enum("WrapModeU", 0); |
928 | 6 | p.AddP70enum("WrapModeV", 0); |
929 | 6 | p.AddP70bool("UVSwap", false); |
930 | 6 | p.AddP70bool("PremultiplyAlpha", true); |
931 | 6 | p.AddP70vectorA("Translation", 0.0, 0.0, 0.0); |
932 | 6 | p.AddP70vectorA("Rotation", 0.0, 0.0, 0.0); |
933 | 6 | p.AddP70vectorA("Scaling", 1.0, 1.0, 1.0); |
934 | 6 | p.AddP70vector("TextureRotationPivot", 0.0, 0.0, 0.0); |
935 | 6 | p.AddP70vector("TextureScalingPivot", 0.0, 0.0, 0.0); |
936 | 6 | p.AddP70enum("CurrentTextureBlendMode", 1); |
937 | 6 | p.AddP70string("UVSet", "default"); |
938 | 6 | p.AddP70bool("UseMaterial", false); |
939 | 6 | p.AddP70bool("UseMipMap", false); |
940 | 6 | pt.AddChild(p); |
941 | 6 | n.AddChild(pt); |
942 | 6 | object_nodes.push_back(n); |
943 | 6 | total_count += count; |
944 | 6 | } |
945 | | |
946 | | // AnimationCurveNode / FbxAnimCurveNode |
947 | 56 | count = mScene->mNumAnimations * 3; |
948 | 56 | if (count) { |
949 | 4 | n = FBX::Node("ObjectType", "AnimationCurveNode"); |
950 | 4 | n.AddChild("Count", count); |
951 | 4 | pt = FBX::Node("PropertyTemplate", "FbxAnimCurveNode"); |
952 | 4 | p = FBX::Node("Properties70"); |
953 | 4 | p.AddP70("d", "Compound", "", ""); |
954 | 4 | pt.AddChild(p); |
955 | 4 | n.AddChild(pt); |
956 | 4 | object_nodes.push_back(n); |
957 | 4 | total_count += count; |
958 | 4 | } |
959 | | |
960 | | // AnimationCurve / FbxAnimCurve |
961 | 56 | count = mScene->mNumAnimations * 9; |
962 | 56 | if (count) { |
963 | 4 | n = FBX::Node("ObjectType", "AnimationCurve"); |
964 | 4 | n.AddChild("Count", count); |
965 | 4 | object_nodes.push_back(n); |
966 | 4 | total_count += count; |
967 | 4 | } |
968 | | |
969 | | // Pose |
970 | 56 | count = 0; |
971 | 1.90k | for (size_t i = 0; i < mScene->mNumMeshes; ++i) { |
972 | 1.85k | aiMesh* mesh = mScene->mMeshes[i]; |
973 | 1.85k | if (mesh->HasBones()) { ++count; } |
974 | 1.85k | } |
975 | 56 | if (count) { |
976 | 3 | n = FBX::Node("ObjectType", "Pose"); |
977 | 3 | n.AddChild("Count", count); |
978 | 3 | object_nodes.push_back(n); |
979 | 3 | total_count += count; |
980 | 3 | } |
981 | | |
982 | | // Deformer |
983 | 56 | count = int32_t(count_deformers(mScene))+bsDeformerCount; |
984 | 56 | if (count) { |
985 | 3 | n = FBX::Node("ObjectType", "Deformer"); |
986 | 3 | n.AddChild("Count", count); |
987 | 3 | object_nodes.push_back(n); |
988 | 3 | total_count += count; |
989 | 3 | } |
990 | | |
991 | | // (template) |
992 | 56 | count = 0; |
993 | 56 | if (count) { |
994 | 0 | n = FBX::Node("ObjectType", ""); |
995 | 0 | n.AddChild("Count", count); |
996 | 0 | pt = FBX::Node("PropertyTemplate", ""); |
997 | 0 | p = FBX::Node("Properties70"); |
998 | 0 | pt.AddChild(p); |
999 | 0 | n.AddChild(pt); |
1000 | 0 | object_nodes.push_back(n); |
1001 | 0 | total_count += count; |
1002 | 0 | } |
1003 | | |
1004 | | // now write it all |
1005 | 56 | FBX::Node defs("Definitions"); |
1006 | 56 | defs.AddChild("Version", int32_t(100)); |
1007 | 56 | defs.AddChild("Count", int32_t(total_count)); |
1008 | 294 | for (auto &on : object_nodes) { |
1009 | 294 | defs.AddChild(on); |
1010 | 294 | } |
1011 | 56 | defs.Dump(outfile, binary, 0); |
1012 | 56 | } |
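For orientation, one of the entries assembled above serialises (ASCII flavour, a trimmed sketch rather than verbatim output) to roughly:

    Definitions:  {
        Version: 100
        Count: <total_count>
        ObjectType: "Material" {
            Count: <mNumMaterials>
            PropertyTemplate: "FbxSurfaceLambert" {
                Properties70:  {
                    P: "ShadingModel", "KString", "", "", "Lambert"
                    P: "DiffuseColor", "Color", "", "A", 0.8, 0.8, 0.8
                    ...
                }
            }
        }
    }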
1013 | | |
1014 | | |
1015 | | // ------------------------------------------------------------------- |
1016 | | // some internal helper functions used for writing the objects section |
1017 | | // (which holds the actual data) |
1018 | | // ------------------------------------------------------------------- |
1019 | 6 | static aiNode* get_node_for_mesh(unsigned int meshIndex, aiNode* node) { |
1020 | 9 | for (size_t i = 0; i < node->mNumMeshes; ++i) { |
1021 | 9 | if (node->mMeshes[i] == meshIndex) { |
1022 | 6 | return node; |
1023 | 6 | } |
1024 | 9 | } |
1025 | 0 | for (size_t i = 0; i < node->mNumChildren; ++i) { |
1026 | 0 | aiNode* ret = get_node_for_mesh(meshIndex, node->mChildren[i]); |
1027 | 0 | if (ret) { return ret; } |
1028 | 0 | } |
1029 | 0 | return nullptr; |
1030 | 0 | } |
1031 | | |
1032 | 7.53k | aiMatrix4x4 get_world_transform(const aiNode* node, const aiScene* scene) { |
1033 | 7.53k | std::vector<const aiNode*> node_chain; |
1034 | 26.3k | while (node != scene->mRootNode && node != nullptr) { |
1035 | 18.8k | node_chain.push_back(node); |
1036 | 18.8k | node = node->mParent; |
1037 | 18.8k | } |
1038 | 7.53k | aiMatrix4x4 transform; |
1039 | 26.3k | for (auto n = node_chain.rbegin(); n != node_chain.rend(); ++n) { |
1040 | 18.8k | transform *= (*n)->mTransformation; |
1041 | 18.8k | } |
1042 | 7.53k | return transform; |
1043 | 7.53k | } |
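So for a hierarchy root -> A -> B, a call with node B returns A.mTransformation * B.mTransformation: parent transforms accumulate to the left of child transforms, and the root node's own transformation is deliberately excluded.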
1044 | | |
1045 | 9.45k | inline int64_t to_ktime(double ticks, const aiAnimation* anim) { |
1046 | 9.45k | if (FP_ZERO == std::fpclassify(anim->mTicksPerSecond)) { |
1047 | 0 | return static_cast<int64_t>(ticks * FBX::SECOND); |
1048 | 0 | } |
1049 | | |
1050 | | // Defensive: handle zero or near-zero mTicksPerSecond |
1051 | 9.45k | double tps = anim->mTicksPerSecond; |
1052 | 9.45k | double timeVal; |
1053 | 9.45k | if (FP_ZERO == std::fpclassify(tps)) { |
1054 | 0 | timeVal = ticks; |
1055 | 9.45k | } else { |
1056 | 9.45k | timeVal = ticks / tps; |
1057 | 9.45k | } |
1058 | | |
1059 | | // Clamp to prevent overflow |
1060 | 9.45k | const double kMax = static_cast<double>(INT64_MAX) / static_cast<double>(FBX::SECOND); |
1061 | 9.45k | const double kMin = static_cast<double>(INT64_MIN) / static_cast<double>(FBX::SECOND); |
1062 | | |
1063 | 9.45k | if (timeVal > kMax) { |
1064 | 0 | return INT64_MAX; |
1065 | 0 | } |
1066 | 9.45k | if (timeVal < kMin) { |
1067 | 1 | return INT64_MIN; |
1068 | 1 | } |
1069 | 9.45k | return static_cast<int64_t>((ticks / anim->mTicksPerSecond) * FBX::SECOND); |
1070 | 9.45k | } |
1071 | | |
1072 | 0 | inline int64_t to_ktime(double time) { |
1073 | 0 | // Clamp to prevent overflow |
1074 | 0 | const double kMax = static_cast<double>(INT64_MAX) / static_cast<double>(FBX::SECOND); |
1075 | 0 | const double kMin = static_cast<double>(INT64_MIN) / static_cast<double>(FBX::SECOND); |
1076 | 0 | |
1077 | 0 | if (time > kMax) { |
1078 | 0 | return INT64_MAX; |
1079 | 0 | } |
1080 | 0 | if (time < kMin) { |
1081 | 0 | return INT64_MIN; |
1082 | 0 | } |
1083 | 0 | return static_cast<int64_t>(time * FBX::SECOND); |
1084 | 0 | } |
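Worked example (assuming FBX::SECOND is the usual 46,186,158,000 KTime ticks per second defined in FBXCommon.h): a key at ticks = 12.0 in an animation with mTicksPerSecond = 24.0 converts to (12.0 / 24.0) * 46,186,158,000 = 23,093,079,000 KTime ticks, i.e. half a second.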
1085 | | |
1086 | 56 | void FBXExporter::WriteObjects () { |
1087 | 56 | if (!binary) { |
1088 | 0 | WriteAsciiSectionHeader("Object properties"); |
1089 | 0 | } |
1090 | | // numbers should match those given in definitions! make sure to check |
1091 | 56 | StreamWriterLE outstream(outfile); |
1092 | 56 | FBX::Node object_node("Objects"); |
1093 | 56 | int indent = 0; |
1094 | 56 | object_node.Begin(outstream, binary, indent); |
1095 | 56 | object_node.EndProperties(outstream, binary, indent); |
1096 | 56 | object_node.BeginChildren(outstream, binary, indent); |
1097 | | |
1098 | 56 | bool bJoinIdenticalVertices = mProperties->GetPropertyBool("bJoinIdenticalVertices", true); |
1099 | | // save vertex_indices as it is needed later |
1100 | 56 | std::vector<std::vector<int32_t>> vVertexIndice(mScene->mNumMeshes); |
1101 | 56 | std::vector<uint32_t> uniq_v_before_mi; |
1102 | | |
1103 | 56 | const auto bTransparencyFactorReferencedToOpacity = mProperties->GetPropertyBool(AI_CONFIG_EXPORT_FBX_TRANSPARENCY_FACTOR_REFER_TO_OPACITY, false); |
1104 | | |
1105 | | // geometry (aiMesh) |
1106 | 56 | mesh_uids.clear(); |
1107 | 56 | indent = 1; |
1108 | 15.1k | std::function<void(const aiNode*)> visit_node_geo = [&](const aiNode *node) { |
1109 | 15.1k | if (node->mNumMeshes == 0) { |
1110 | 25.3k | for (uint32_t ni = 0; ni < node->mNumChildren; ni++) { |
1111 | 11.3k | visit_node_geo(node->mChildren[ni]); |
1112 | 11.3k | } |
1113 | 13.9k | return; |
1114 | 13.9k | } |
1115 | | |
1116 | | // start the node record |
1117 | 1.18k | FBX::Node n("Geometry"); |
1118 | 1.18k | int64_t uid = generate_uid(); |
1119 | 1.18k | mesh_uids[node] = uid; |
1120 | 1.18k | n.AddProperty(uid); |
1121 | 1.18k | n.AddProperty(FBX::SEPARATOR + "Geometry"); |
1122 | 1.18k | n.AddProperty("Mesh"); |
1123 | 1.18k | n.Begin(outstream, binary, indent); |
1124 | 1.18k | n.DumpProperties(outstream, binary, indent); |
1125 | 1.18k | n.EndProperties(outstream, binary, indent); |
1126 | 1.18k | n.BeginChildren(outstream, binary, indent); |
1127 | | |
1128 | | // output vertex data - each vertex should be unique (probably) |
1129 | 1.18k | std::vector<double> flattened_vertices; |
1130 | | // index of original vertex in vertex data vector |
1131 | 1.18k | std::vector<int32_t> vertex_indices; |
1132 | | |
1133 | 1.18k | std::vector<double> normal_data; |
1134 | 1.18k | std::vector<double> color_data; |
1135 | | |
1136 | 1.18k | std::vector<int32_t> polygon_data; |
1137 | | |
1138 | 1.18k | std::vector<std::vector<double>> uv_data; |
1139 | 1.18k | std::vector<std::vector<int32_t>> uv_indices; |
1140 | | |
1141 | 1.18k | indent = 2; |
1142 | | |
1143 | 3.03k | for (uint32_t n_mi = 0; n_mi < node->mNumMeshes; n_mi++) { |
1144 | 1.85k | const auto mi = node->mMeshes[n_mi]; |
1145 | 1.85k | const aiMesh *m = mScene->mMeshes[mi]; |
1146 | | |
1147 | 1.85k | size_t v_offset = vertex_indices.size(); |
1148 | 1.85k | size_t uniq_v_before = flattened_vertices.size() / 3; |
1149 | | |
1150 | | // map of vertex value to its index in the data vector |
1151 | 1.85k | std::map<aiVector3D,size_t> index_by_vertex_value; |
1152 | 1.85k | if (bJoinIdenticalVertices) { |
1153 | 0 | int32_t index = 0; |
1154 | 0 | for (size_t vi = 0; vi < m->mNumVertices; ++vi) { |
1155 | 0 | aiVector3D vtx = m->mVertices[vi]; |
1156 | 0 | auto elem = index_by_vertex_value.find(vtx); |
1157 | 0 | if (elem == index_by_vertex_value.end()) { |
1158 | 0 | vertex_indices.push_back(index); |
1159 | 0 | index_by_vertex_value[vtx] = index; |
1160 | 0 | flattened_vertices.insert(flattened_vertices.end(), { vtx.x, vtx.y, vtx.z }); |
1161 | 0 | ++index; |
1162 | 0 | } else { |
1163 | 0 | vertex_indices.push_back(int32_t(elem->second)); |
1164 | 0 | } |
1165 | 0 | } |
1166 | 1.85k | } else { // do not join vertex, respect the export flag |
1167 | 1.85k | vertex_indices.resize(v_offset + m->mNumVertices); |
1168 | 1.85k | std::iota(vertex_indices.begin() + v_offset, vertex_indices.end(), 0); |
1169 | 13.9k | for(unsigned int v = 0; v < m->mNumVertices; ++ v) { |
1170 | 12.1k | aiVector3D vtx = m->mVertices[v]; |
1171 | 12.1k | flattened_vertices.insert(flattened_vertices.end(), {vtx.x, vtx.y, vtx.z}); |
1172 | 12.1k | } |
1173 | 1.85k | } |
1174 | 1.85k | vVertexIndice[mi].insert( |
1175 | | // TODO test whether this can be end or not |
1176 | 1.85k | vVertexIndice[mi].end(), |
1177 | 1.85k | vertex_indices.begin() + v_offset, |
1178 | 1.85k | vertex_indices.end() |
1179 | 1.85k | ); |
1180 | | |
1181 | | // here could be edges but they're insane. |
1182 | | // it's optional anyway, so let's ignore it. |
1183 | | |
1184 | | // output polygon data as a flattened array of vertex indices. |
1185 | | // the last vertex index of each polygon is stored as its bitwise complement (-index - 1); e.g. a triangle 0,1,2 is written 0, 1, -3 |
1186 | 92.6k | for (size_t fi = 0; fi < m->mNumFaces; fi++) { |
1187 | 90.8k | const aiFace &f = m->mFaces[fi]; |
1188 | 90.8k | if (f.mNumIndices == 0) continue; |
1189 | 90.8k | size_t pvi = 0; |
1190 | 206k | for (; pvi < f.mNumIndices - 1; pvi++) { |
1191 | 115k | polygon_data.push_back( |
1192 | 115k | static_cast<int32_t>(uniq_v_before + vertex_indices[v_offset + f.mIndices[pvi]]) |
1193 | 115k | ); |
1194 | 115k | } |
1195 | 90.8k | polygon_data.push_back( |
1196 | 90.8k | static_cast<int32_t>(-1 ^ (uniq_v_before + vertex_indices[v_offset+f.mIndices[pvi]])) |
1197 | 90.8k | ); |
1198 | 90.8k | } |
1199 | | |
1200 | 1.85k | uniq_v_before_mi.push_back(static_cast<uint32_t>(uniq_v_before)); |
1201 | | |
1202 | 1.85k | if (m->HasNormals()) { |
1203 | 560 | normal_data.reserve(3 * polygon_data.size()); |
1204 | 87.0k | for (size_t fi = 0; fi < m->mNumFaces; fi++) { |
1205 | 86.4k | const aiFace & f = m->mFaces[fi]; |
1206 | 285k | for (size_t pvi = 0; pvi < f.mNumIndices; pvi++) { |
1207 | 199k | const aiVector3D &curN = m->mNormals[f.mIndices[pvi]]; |
1208 | 199k | normal_data.insert(normal_data.end(), { curN.x, curN.y, curN.z }); |
1209 | 199k | } |
1210 | 86.4k | } |
1211 | 560 | } |
1212 | | |
1213 | 1.85k | const int32_t colorChannelIndex = 0; |
1214 | 1.85k | if (m->HasVertexColors(colorChannelIndex)) { |
1215 | 827 | color_data.reserve(4 * polygon_data.size()); |
1216 | 46.9k | for (size_t fi = 0; fi < m->mNumFaces; fi++) { |
1217 | 46.1k | const aiFace &f = m->mFaces[fi]; |
1218 | 180k | for (size_t pvi = 0; pvi < f.mNumIndices; pvi++) { |
1219 | 134k | const aiColor4D &c = m->mColors[colorChannelIndex][f.mIndices[pvi]]; |
1220 | 134k | color_data.insert(color_data.end(), { c.r, c.g, c.b, c.a }); |
1221 | 134k | } |
1222 | 46.1k | } |
1223 | 827 | } |
1224 | | |
1225 | 1.85k | const auto num_uv = static_cast<size_t>(m->GetNumUVChannels()); |
1226 | 1.85k | uv_indices.resize(std::max(num_uv, uv_indices.size())); |
1227 | 1.85k | uv_data.resize(std::max(num_uv, uv_data.size())); |
1228 | 1.85k | std::map<aiVector3D, int32_t> index_by_uv; |
1229 | | |
1230 | | // uvs, if any |
1231 | 1.86k | for (size_t uvi = 0; uvi < m->GetNumUVChannels(); uvi++) { |
1232 | 10 | const auto nc = m->mNumUVComponents[uvi]; |
1233 | 10 | if (nc > 2) { |
1234 | | // FBX only supports 2-channel UV maps... |
1235 | | // or at least i'm not sure how to indicate a different number |
1236 | 0 | std::stringstream err; |
1237 | 0 | err << "Only 2-channel UV maps supported by FBX,"; |
1238 | 0 | err << " but mesh " << mi; |
1239 | 0 | if (m->mName.length) { |
1240 | 0 | err << " (" << m->mName.C_Str() << ")"; |
1241 | 0 | } |
1242 | 0 | err << " UV map " << uvi; |
1243 | 0 | err << " has " << m->mNumUVComponents[uvi]; |
1244 | 0 | err << " components! Data will be preserved,"; |
1245 | 0 | err << " but may be incorrectly interpreted on load."; |
1246 | 0 | ASSIMP_LOG_WARN(err.str()); |
1247 | 0 | } |
1248 | | |
1249 | 10 | int32_t index = static_cast<int32_t>(uv_data[uvi].size()) / nc; |
1250 | 55 | for (size_t fi = 0; fi < m->mNumFaces; fi++) { |
1251 | 45 | const aiFace &f = m->mFaces[fi]; |
1252 | 114 | for (size_t pvi = 0; pvi < f.mNumIndices; pvi++) { |
1253 | 69 | const aiVector3D &curUv = m->mTextureCoords[uvi][f.mIndices[pvi]]; |
1254 | 69 | auto elem = index_by_uv.find(curUv); |
1255 | 69 | if (elem == index_by_uv.end()) { |
1256 | 17 | index_by_uv[curUv] = index; |
1257 | 17 | uv_indices[uvi].push_back(index); |
1258 | 51 | for (uint32_t x = 0; x < nc; ++x) { |
1259 | 34 | uv_data[uvi].push_back(curUv[x]); |
1260 | 34 | } |
1261 | 17 | ++index; |
1262 | 52 | } else { |
1263 | 52 | uv_indices[uvi].push_back(elem->second); |
1264 | 52 | } |
1265 | 69 | } |
1266 | 45 | } |
1267 | 10 | } |
1268 | 1.85k | } |
1269 | | |
1270 | | |
1271 | 1.18k | FBX::Node::WritePropertyNode("Vertices", flattened_vertices, outstream, binary, indent); |
1272 | 1.18k | FBX::Node::WritePropertyNode("PolygonVertexIndex", polygon_data, outstream, binary, indent); |
1273 | 1.18k | FBX::Node::WritePropertyNode("GeometryVersion", int32_t(124), outstream, binary, indent); |
1274 | | |
1275 | 1.18k | if (!normal_data.empty()) { |
1276 | 235 | FBX::Node normals("LayerElementNormal", int32_t(0)); |
1277 | 235 | normals.Begin(outstream, binary, indent); |
1278 | 235 | normals.DumpProperties(outstream, binary, indent); |
1279 | 235 | normals.EndProperties(outstream, binary, indent); |
1280 | 235 | normals.BeginChildren(outstream, binary, indent); |
1281 | 235 | indent = 3; |
1282 | 235 | FBX::Node::WritePropertyNode("Version", int32_t(101), outstream, binary, indent); |
1283 | 235 | FBX::Node::WritePropertyNode("Name", "", outstream, binary, indent); |
1284 | 235 | FBX::Node::WritePropertyNode("MappingInformationType", "ByPolygonVertex", outstream, binary, indent); |
1285 | 235 | FBX::Node::WritePropertyNode("ReferenceInformationType", "Direct", outstream, binary, indent); |
1286 | 235 | FBX::Node::WritePropertyNode("Normals", normal_data, outstream, binary, indent); |
1287 | | // note: version 102 has a NormalsW also... not sure what it is, |
1288 | | // so stick with version 101 for now. |
1289 | 235 | indent = 2; |
1290 | 235 | normals.End(outstream, binary, indent, true); |
1291 | 235 | } |
1292 | | |
1293 | 1.18k | if (!color_data.empty()) { |
1294 | 546 | const auto colorChannelIndex = 0; |
1295 | 546 | FBX::Node vertexcolors("LayerElementColor", int32_t(colorChannelIndex)); |
1296 | 546 | vertexcolors.Begin(outstream, binary, indent); |
1297 | 546 | vertexcolors.DumpProperties(outstream, binary, indent); |
1298 | 546 | vertexcolors.EndProperties(outstream, binary, indent); |
1299 | 546 | vertexcolors.BeginChildren(outstream, binary, indent); |
1300 | 546 | indent = 3; |
1301 | 546 | FBX::Node::WritePropertyNode("Version", int32_t(101), outstream, binary, indent); |
1302 | 546 | char layerName[8]; |
1303 | 546 | snprintf(layerName, sizeof(layerName), "COLOR_%d", colorChannelIndex); |
1304 | 546 | FBX::Node::WritePropertyNode("Name", (const char *)layerName, outstream, binary, indent); |
1305 | 546 | FBX::Node::WritePropertyNode("MappingInformationType", "ByPolygonVertex", outstream, binary, indent); |
1306 | 546 | FBX::Node::WritePropertyNode("ReferenceInformationType", "Direct", outstream, binary, indent); |
1307 | 546 | FBX::Node::WritePropertyNode("Colors", color_data, outstream, binary, indent); |
1308 | 546 | indent = 2; |
1309 | 546 | vertexcolors.End(outstream, binary, indent, true); |
1310 | 546 | } |
1311 | | |
1312 | 1.18k | for (uint32_t uvi = 0; uvi < uv_data.size(); uvi++) { |
1313 | 7 | FBX::Node uv("LayerElementUV", int32_t(uvi)); |
1314 | 7 | uv.Begin(outstream, binary, indent); |
1315 | 7 | uv.DumpProperties(outstream, binary, indent); |
1316 | 7 | uv.EndProperties(outstream, binary, indent); |
1317 | 7 | uv.BeginChildren(outstream, binary, indent); |
1318 | 7 | indent = 3; |
1319 | 7 | FBX::Node::WritePropertyNode("Version", int32_t(101), outstream, binary, indent); |
1320 | 7 | FBX::Node::WritePropertyNode("Name", "", outstream, binary, indent); |
1321 | 7 | FBX::Node::WritePropertyNode("MappingInformationType", "ByPolygonVertex", outstream, binary, indent); |
1322 | 7 | FBX::Node::WritePropertyNode("ReferenceInformationType", "IndexToDirect", outstream, binary, indent); |
1323 | 7 | FBX::Node::WritePropertyNode("UV", uv_data[uvi], outstream, binary, indent); |
1324 | 7 | FBX::Node::WritePropertyNode("UVIndex", uv_indices[uvi], outstream, binary, indent); |
1325 | 7 | indent = 2; |
1326 | 7 | uv.End(outstream, binary, indent, true); |
1327 | 7 | } |
1328 | | |
1329 | | |
1330 |      | // When a node has multiple meshes, we instead assign materials by polygon so the
1331 |      | // correct material is applied to each face. Previously, this LayerElementMaterial
1332 |      | // always contained a single 0, since it assumed one material per node for all meshes.
1333 | 1.18k | FBX::Node mat("LayerElementMaterial", int32_t(0)); |
1334 | 1.18k | mat.AddChild("Version", int32_t(101)); |
1335 | 1.18k | mat.AddChild("Name", ""); |
1336 | 1.18k | if (node->mNumMeshes == 1) { |
1337 | 890 | mat.AddChild("MappingInformationType", "AllSame"); |
1338 | 890 | mat.AddChild("ReferenceInformationType", "IndexToDirect"); |
1339 | 890 | std::vector<int32_t> mat_indices = {0}; |
1340 | 890 | mat.AddChild("Materials", mat_indices); |
1341 | 890 | } else { |
1342 | 290 | mat.AddChild("MappingInformationType", "ByPolygon"); |
1343 | 290 | mat.AddChild("ReferenceInformationType", "IndexToDirect"); |
1344 | 290 | std::vector<int32_t> mat_indices; |
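     |       | // one material index per face, which should match the order in which the
     |       | // polygons were emitted above (per mesh, in node->mMeshes order).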
1345 | 1.25k | for (uint32_t n_mi = 0; n_mi < node->mNumMeshes; n_mi++) { |
1346 | 961 | const auto mi = node->mMeshes[n_mi]; |
1347 | 961 | const auto *const m = mScene->mMeshes[mi]; |
1348 | 76.7k | for (size_t fi = 0; fi < m->mNumFaces; fi++) { |
1349 | 75.7k | mat_indices.push_back(n_mi); |
1350 | 75.7k | } |
1351 | 961 | } |
1352 | 290 | mat.AddChild("Materials", mat_indices); |
1353 | 290 | } |
1354 | 1.18k | mat.Dump(outstream, binary, indent); |
1355 | | |
1356 | | // finally we have the layer specifications, |
1357 | | // which select the normals / UV set / etc to use. |
1358 | | // TODO: handle multiple uv sets correctly? |
1359 | 1.18k | FBX::Node layer("Layer", int32_t(0)); |
1360 | 1.18k | layer.AddChild("Version", int32_t(100)); |
1361 | 1.18k | FBX::Node le; |
1362 | | |
1363 | 1.18k | if (!normal_data.empty()) { |
1364 | 235 | le = FBX::Node("LayerElement"); |
1365 | 235 | le.AddChild("Type", "LayerElementNormal"); |
1366 | 235 | le.AddChild("TypedIndex", int32_t(0)); |
1367 | 235 | layer.AddChild(le); |
1368 | 235 | } |
1369 | | |
1370 | 1.18k | if (!color_data.empty()) { |
1371 | 546 | le = FBX::Node("LayerElement"); |
1372 | 546 | le.AddChild("Type", "LayerElementColor"); |
1373 | 546 | le.AddChild("TypedIndex", int32_t(0)); |
1374 | 546 | layer.AddChild(le); |
1375 | 546 | } |
1376 | | |
1377 | 1.18k | le = FBX::Node("LayerElement"); |
1378 | 1.18k | le.AddChild("Type", "LayerElementMaterial"); |
1379 | 1.18k | le.AddChild("TypedIndex", int32_t(0)); |
1380 | 1.18k | layer.AddChild(le); |
1381 | 1.18k | le = FBX::Node("LayerElement"); |
1382 | 1.18k | le.AddChild("Type", "LayerElementUV"); |
1383 | 1.18k | le.AddChild("TypedIndex", int32_t(0)); |
1384 | 1.18k | layer.AddChild(le); |
1385 | 1.18k | layer.Dump(outstream, binary, indent); |
1386 | | |
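     |       | // any additional UV sets get a Layer of their own, each referencing the
     |       | // matching LayerElementUV by index.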
1387 | 1.18k | for(unsigned int lr = 1; lr < uv_data.size(); ++ lr) { |
1388 | 0 | FBX::Node layerExtra("Layer", int32_t(lr)); |
1389 | 0 | layerExtra.AddChild("Version", int32_t(100)); |
1390 | 0 | FBX::Node leExtra("LayerElement"); |
1391 | 0 | leExtra.AddChild("Type", "LayerElementUV"); |
1392 | 0 | leExtra.AddChild("TypedIndex", int32_t(lr)); |
1393 | 0 | layerExtra.AddChild(leExtra); |
1394 | 0 | layerExtra.Dump(outstream, binary, indent); |
1395 | 0 | } |
1396 | | // finish the node record |
1397 | 1.18k | indent = 1; |
1398 | 1.18k | n.End(outstream, binary, indent, true); |
1399 | | |
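     |       | // recurse into the children so their meshes are exported as well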
1400 | 4.93k | for (uint32_t ni = 0; ni < node->mNumChildren; ni++) { |
1401 | 3.75k | visit_node_geo(node->mChildren[ni]); |
1402 | 3.75k | } |
1403 | 1.18k | return; |
1404 | 15.1k | }; |
1405 | | |
1406 | 56 | visit_node_geo(mScene->mRootNode); |
1407 | | |
1408 | | |
1409 | | // aiMaterial |
1410 | 56 | material_uids.clear(); |
1411 | 109 | for (size_t i = 0; i < mScene->mNumMaterials; ++i) { |
1412 | | // it's all about this material |
1413 | 53 | aiMaterial* m = mScene->mMaterials[i]; |
1414 | | |
1415 | | // these are used to receive material data |
1416 | 53 | ai_real f; aiColor3D c; |
1417 | | |
1418 | | // start the node record |
1419 | 53 | FBX::Node n("Material"); |
1420 | | |
1421 | 53 | int64_t uid = generate_uid(); |
1422 | 53 | material_uids.push_back(uid); |
1423 | 53 | n.AddProperty(uid); |
1424 | | |
1425 | 53 | aiString name; |
1426 | 53 | m->Get(AI_MATKEY_NAME, name); |
1427 | 53 | n.AddProperty(name.C_Str() + FBX::SEPARATOR + "Material"); |
1428 | | |
1429 | 53 | n.AddProperty(""); |
1430 | | |
1431 | 53 | n.AddChild("Version", int32_t(102)); |
1432 | 53 | f = 0; |
1433 | 53 | m->Get(AI_MATKEY_SHININESS, f); |
1434 | 53 | bool phong = (f > 0); |
1435 | 53 | if (phong) { |
1436 | 0 | n.AddChild("ShadingModel", "phong"); |
1437 | 53 | } else { |
1438 | 53 | n.AddChild("ShadingModel", "lambert"); |
1439 | 53 | } |
1440 | 53 | n.AddChild("MultiLayer", int32_t(0)); |
1441 | | |
1442 | 53 | FBX::Node p("Properties70"); |
1443 | | |
1444 | | // materials exported using the FBX SDK have two sets of fields. |
1445 | | // there are the properties specified in the PropertyTemplate, |
1446 |      | // which are those supported by the modern FBX SDK,
1447 | | // and an extra set of properties with simpler names. |
1448 | | // The extra properties are a legacy material system from pre-2009. |
1449 | | // |
1450 | | // In the modern system, each property has "color" and "factor". |
1451 | | // Generally the interpretation of these seems to be |
1452 | | // that the colour is multiplied by the factor before use, |
1453 | | // but this is not always clear-cut. |
1454 | | // |
1455 | | // Usually assimp only stores the colour, |
1456 | | // so we can just leave the factors at the default "1.0". |
1457 | | |
1458 | | // first we can export the "standard" properties |
1459 | 53 | if (m->Get(AI_MATKEY_COLOR_AMBIENT, c) == aiReturn_SUCCESS) { |
1460 | 49 | p.AddP70colorA("AmbientColor", c.r, c.g, c.b); |
1461 | | //p.AddP70numberA("AmbientFactor", 1.0); |
1462 | 49 | } |
1463 | 53 | if (m->Get(AI_MATKEY_COLOR_DIFFUSE, c) == aiReturn_SUCCESS) { |
1464 | 49 | p.AddP70colorA("DiffuseColor", c.r, c.g, c.b); |
1465 | | //p.AddP70numberA("DiffuseFactor", 1.0); |
1466 | 49 | } |
1467 | 53 | if (m->Get(AI_MATKEY_COLOR_TRANSPARENT, c) == aiReturn_SUCCESS) { |
1468 | | // "TransparentColor" / "TransparencyFactor"... |
1469 | | // thanks FBX, for your insightful interpretation of consistency |
1470 | 39 | p.AddP70colorA("TransparentColor", c.r, c.g, c.b); |
1471 | | |
1472 | 39 | if (!bTransparencyFactorReferencedToOpacity) { |
1473 | | // TransparencyFactor defaults to 0.0, so set it to 1.0. |
1474 | | // note: Maya always sets this to 1.0, |
1475 | | // so we can't use it sensibly as "Opacity". |
1476 |      | // Instead we rely on the legacy "Opacity" value, below.
1477 | | // Blender also relies on "Opacity" not "TransparencyFactor", |
1478 | | // probably for a similar reason. |
1479 | 39 | p.AddP70numberA("TransparencyFactor", 1.0); |
1480 | 39 | } |
1481 | 39 | } |
1482 | 53 | if (bTransparencyFactorReferencedToOpacity) { |
1483 | 0 | if (m->Get(AI_MATKEY_OPACITY, f) == aiReturn_SUCCESS) { |
1484 | 0 | p.AddP70numberA("TransparencyFactor", 1.0 - f); |
1485 | 0 | } |
1486 | 0 | } |
1487 | 53 | if (m->Get(AI_MATKEY_COLOR_REFLECTIVE, c) == aiReturn_SUCCESS) { |
1488 | 0 | p.AddP70colorA("ReflectionColor", c.r, c.g, c.b); |
1489 | 0 | } |
1490 | 53 | if (m->Get(AI_MATKEY_REFLECTIVITY, f) == aiReturn_SUCCESS) { |
1491 | 0 | p.AddP70numberA("ReflectionFactor", f); |
1492 | 0 | } |
1493 | 53 | if (phong) { |
1494 | 0 | if (m->Get(AI_MATKEY_COLOR_SPECULAR, c) == aiReturn_SUCCESS) { |
1495 | 0 | p.AddP70colorA("SpecularColor", c.r, c.g, c.b); |
1496 | 0 | } |
1497 | 0 | if (m->Get(AI_MATKEY_SHININESS_STRENGTH, f) == aiReturn_SUCCESS) { |
1498 | 0 | p.AddP70numberA("ShininessFactor", f); |
1499 | 0 | } |
1500 | 0 | if (m->Get(AI_MATKEY_SHININESS, f) == aiReturn_SUCCESS) { |
1501 | 0 | p.AddP70numberA("ShininessExponent", f); |
1502 | 0 | } |
1503 | 0 | if (m->Get(AI_MATKEY_REFLECTIVITY, f) == aiReturn_SUCCESS) { |
1504 | 0 | p.AddP70numberA("ReflectionFactor", f); |
1505 | 0 | } |
1506 | 0 | } |
1507 | | |
1508 | | // Now the legacy system. |
1509 | | // For safety let's include it. |
1510 |      | // these values don't exist in the property template,
1511 |      | // and are usually ignored completely when loading.
1512 | | // One notable exception is the "Opacity" property, |
1513 | | // which Blender uses as (1.0 - alpha). |
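     |       | // each legacy value is seeded with its FBX default and then overridden
     |       | // if assimp provides the corresponding material key.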
1514 | 53 | c.r = 0.0f; c.g = 0.0f; c.b = 0.0f; |
1515 | 53 | m->Get(AI_MATKEY_COLOR_EMISSIVE, c); |
1516 | 53 | p.AddP70vector("Emissive", c.r, c.g, c.b); |
1517 | 53 | c.r = 0.2f; c.g = 0.2f; c.b = 0.2f; |
1518 | 53 | m->Get(AI_MATKEY_COLOR_AMBIENT, c); |
1519 | 53 | p.AddP70vector("Ambient", c.r, c.g, c.b); |
1520 | 53 | c.r = 0.8f; c.g = 0.8f; c.b = 0.8f; |
1521 | 53 | m->Get(AI_MATKEY_COLOR_DIFFUSE, c); |
1522 | 53 | p.AddP70vector("Diffuse", c.r, c.g, c.b); |
1523 | | // The FBX SDK determines "Opacity" from transparency colour (RGB) |
1524 | | // and factor (F) as: O = (1.0 - F * ((R + G + B) / 3)). |
1525 | | // However we actually have an opacity value, |
1526 | | // so we should take it from AI_MATKEY_OPACITY if possible. |
1527 | | // It might make more sense to use TransparencyFactor, |
1528 | | // but Blender actually loads "Opacity" correctly, so let's use it. |
1529 | 53 | f = 1.0f; |
1530 | 53 | if (m->Get(AI_MATKEY_COLOR_TRANSPARENT, c) == aiReturn_SUCCESS) { |
1531 | 39 | f = 1.0f - ((c.r + c.g + c.b) / 3.0f); |
1532 | 39 | } |
1533 | 53 | m->Get(AI_MATKEY_OPACITY, f); |
1534 | 53 | p.AddP70double("Opacity", f); |
1535 | 53 | if (phong) { |
1536 | | // specular color is multiplied by shininess_strength |
1537 | 0 | c.r = 0.2f; c.g = 0.2f; c.b = 0.2f; |
1538 | 0 | m->Get(AI_MATKEY_COLOR_SPECULAR, c); |
1539 | 0 | f = 1.0f; |
1540 | 0 | m->Get(AI_MATKEY_SHININESS_STRENGTH, f); |
1541 | 0 | p.AddP70vector("Specular", f*c.r, f*c.g, f*c.b); |
1542 | 0 | f = 20.0f; |
1543 | 0 | m->Get(AI_MATKEY_SHININESS, f); |
1544 | 0 | p.AddP70double("Shininess", f); |
1545 | | // Legacy "Reflectivity" is F*F*((R+G+B)/3), |
1546 | | // where F is the proportion of light reflected (AKA reflectivity), |
1547 | | // and RGB is the reflective colour of the material. |
1548 | | // No idea why, but we might as well set it the same way. |
1549 | 0 | f = 0.0f; |
1550 | 0 | m->Get(AI_MATKEY_REFLECTIVITY, f); |
1551 | 0 | c.r = 1.0f, c.g = 1.0f, c.b = 1.0f; |
1552 | 0 | m->Get(AI_MATKEY_COLOR_REFLECTIVE, c); |
1553 | 0 | p.AddP70double("Reflectivity", f*f*((c.r+c.g+c.b)/3.0)); |
1554 | 0 | } |
1555 | | |
1556 | 53 | n.AddChild(p); |
1557 | | |
1558 | 53 | n.Dump(outstream, binary, indent); |
1559 | 53 | } |
1560 | | |
1561 | | // we need to look up all the images we're using, |
1562 | | // so we can generate uids, and eliminate duplicates. |
1563 | 56 | std::map<std::string, int64_t> uid_by_image; |
1564 | 109 | for (size_t i = 0; i < mScene->mNumMaterials; ++i) { |
1565 | 53 | aiString texpath; |
1566 | 53 | aiMaterial* mat = mScene->mMaterials[i]; |
1567 | 53 | for ( |
1568 | 53 | size_t tt = aiTextureType_DIFFUSE; |
1569 | 954 | tt < aiTextureType_UNKNOWN; |
1570 | 901 | ++tt |
1571 | 901 | ){ |
1572 | 901 | const aiTextureType textype = static_cast<aiTextureType>(tt); |
1573 | 901 | const size_t texcount = mat->GetTextureCount(textype); |
1574 | 911 | for (size_t j = 0; j < texcount; ++j) { |
1575 | 10 | mat->GetTexture(textype, (unsigned int)j, &texpath); |
1576 | 10 | const std::string texstring = texpath.C_Str(); |
1577 | 10 | auto elem = uid_by_image.find(texstring); |
1578 | 10 | if (elem == uid_by_image.end()) { |
1579 | 8 | uid_by_image[texstring] = generate_uid(); |
1580 | 8 | } |
1581 | 10 | } |
1582 | 901 | } |
1583 | 53 | } |
1584 | | |
1585 | | // FbxVideo - stores images used by textures. |
1586 | 56 | for (const auto &it : uid_by_image) { |
1587 | 8 | FBX::Node n("Video"); |
1588 | 8 | const int64_t& uid = it.second; |
1589 | 8 | const std::string name = ""; // TODO: ... name??? |
1590 | 8 | n.AddProperties(uid, name + FBX::SEPARATOR + "Video", "Clip"); |
1591 | 8 | n.AddChild("Type", "Clip"); |
1592 | 8 | FBX::Node p("Properties70"); |
1593 | | // TODO: get full path... relative path... etc... ugh... |
1594 | | // for now just use the same path for everything, |
1595 | | // and hopefully one of them will work out. |
1596 | 8 | std::string path = it.first; |
1597 | | // try get embedded texture |
1598 | 8 | const aiTexture* embedded_texture = mScene->GetEmbeddedTexture(it.first.c_str()); |
1599 | 8 | if (embedded_texture != nullptr) { |
1600 | | // change the path (use original filename, if available. If name is empty, concatenate texture index with file extension) |
1601 | 0 | std::stringstream newPath; |
1602 | 0 | if (embedded_texture->mFilename.length > 0) { |
1603 | 0 | newPath << embedded_texture->mFilename.C_Str(); |
1604 | 0 | } else if (embedded_texture->achFormatHint[0]) { |
1605 | 0 | int texture_index = std::stoi(path.substr(1, path.size() - 1)); |
1606 | 0 | newPath << texture_index << "." << embedded_texture->achFormatHint; |
1607 | 0 | } |
1608 | 0 | path = newPath.str(); |
1609 | | // embed the texture |
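     |       | // for compressed embedded textures mHeight is 0 and mWidth already holds
     |       | // the data size in bytes, hence the max(mHeight, 1u).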
1610 | 0 | size_t texture_size = static_cast<size_t>(embedded_texture->mWidth * std::max(embedded_texture->mHeight, 1u)); |
1611 | 0 | if (binary) { |
1612 | | // embed texture as binary data |
1613 | 0 | std::vector<uint8_t> tex_data; |
1614 | 0 | tex_data.resize(texture_size); |
1615 | 0 | memcpy(&tex_data[0], (char*)embedded_texture->pcData, texture_size); |
1616 | 0 | n.AddChild("Content", tex_data); |
1617 | 0 | } else { |
1618 | | // embed texture in base64 encoding |
1619 | 0 | std::string encoded_texture = FBX::Util::EncodeBase64((char*)embedded_texture->pcData, texture_size); |
1620 | 0 | n.AddChild("Content", encoded_texture); |
1621 | 0 | } |
1622 | 0 | } |
1623 | 8 | p.AddP70("Path", "KString", "XRefUrl", "", path); |
1624 | 8 | n.AddChild(p); |
1625 | 8 | n.AddChild("UseMipMap", int32_t(0)); |
1626 | 8 | n.AddChild("Filename", path); |
1627 | 8 | n.AddChild("RelativeFilename", path); |
1628 | 8 | n.Dump(outstream, binary, indent); |
1629 | 8 | } |
1630 | | |
1631 | | // Textures |
1632 | | // referenced by material_index/texture_type pairs. |
1633 | 56 | std::map<std::pair<size_t,size_t>,int64_t> texture_uids; |
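     |       | // maps each assimp texture type to the FBX material property its texture
     |       | // connects to (via the "OP" connection created below).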
1634 | 56 | const std::map<aiTextureType,std::string> prop_name_by_tt = { |
1635 | 56 | {aiTextureType_DIFFUSE, "DiffuseColor"}, |
1636 | 56 | {aiTextureType_SPECULAR, "SpecularColor"}, |
1637 | 56 | {aiTextureType_AMBIENT, "AmbientColor"}, |
1638 | 56 | {aiTextureType_EMISSIVE, "EmissiveColor"}, |
1639 | 56 | {aiTextureType_HEIGHT, "Bump"}, |
1640 | 56 | {aiTextureType_NORMALS, "NormalMap"}, |
1641 | 56 | {aiTextureType_SHININESS, "ShininessExponent"}, |
1642 | 56 | {aiTextureType_OPACITY, "TransparentColor"}, |
1643 | 56 | {aiTextureType_DISPLACEMENT, "DisplacementColor"}, |
1644 | | //{aiTextureType_LIGHTMAP, "???"}, |
1645 | 56 | {aiTextureType_REFLECTION, "ReflectionColor"} |
1646 | | //{aiTextureType_UNKNOWN, ""} |
1647 | 56 | }; |
1648 | 109 | for (size_t i = 0; i < mScene->mNumMaterials; ++i) { |
1649 | | // textures are attached to materials |
1650 | 53 | aiMaterial* mat = mScene->mMaterials[i]; |
1651 | 53 | int64_t material_uid = material_uids[i]; |
1652 | | |
1653 | 53 | for ( |
1654 | 53 | size_t j = aiTextureType_DIFFUSE; |
1655 | 954 | j < aiTextureType_UNKNOWN; |
1656 | 901 | ++j |
1657 | 901 | ) { |
1658 | 901 | const aiTextureType tt = static_cast<aiTextureType>(j); |
1659 | 901 | size_t n = mat->GetTextureCount(tt); |
1660 | | |
1661 | 901 | if (n < 1) { // no texture of this type |
1662 | 891 | continue; |
1663 | 891 | } |
1664 | | |
1665 | 10 | if (n > 1) { |
1666 | | // TODO: multilayer textures |
1667 | 0 | std::stringstream err; |
1668 | 0 | err << "Multilayer textures not supported (for now),"; |
1669 | 0 | err << " skipping texture type " << j; |
1670 | 0 | err << " of material " << i; |
1671 | 0 | ASSIMP_LOG_WARN(err.str()); |
1672 | 0 | } |
1673 | | |
1674 | | // get image path for this (single-image) texture |
1675 | 10 | aiString tpath; |
1676 | 10 | if (mat->GetTexture(tt, 0, &tpath) != aiReturn_SUCCESS) { |
1677 | 0 | std::stringstream err; |
1678 | 0 | err << "Failed to get texture 0 for texture of type " << tt; |
1679 | 0 | err << " on material " << i; |
1680 | 0 | err << ", however GetTextureCount returned 1."; |
1681 | 0 | throw DeadlyExportError(err.str()); |
1682 | 0 | } |
1683 | 10 | const std::string texture_path(tpath.C_Str()); |
1684 | | |
1685 | | // get connected image uid |
1686 | 10 | auto elem = uid_by_image.find(texture_path); |
1687 | 10 | if (elem == uid_by_image.end()) { |
1688 | | // this should never happen |
1689 | 0 | std::stringstream err; |
1690 | 0 | err << "Failed to find video element for texture with path"; |
1691 | 0 | err << " \"" << texture_path << "\""; |
1692 | 0 | err << ", type " << j << ", material " << i; |
1693 | 0 | throw DeadlyExportError(err.str()); |
1694 | 0 | } |
1695 | 10 | const int64_t image_uid = elem->second; |
1696 | | |
1697 | | // get the name of the material property to connect to |
1698 | 10 | auto elem2 = prop_name_by_tt.find(tt); |
1699 | 10 | if (elem2 == prop_name_by_tt.end()) { |
1700 | | // don't know how to handle this type of texture, |
1701 | | // so skip it. |
1702 | 1 | std::stringstream err; |
1703 | 1 | err << "Not sure how to handle texture of type " << j; |
1704 | 1 | err << " on material " << i; |
1705 | 1 | err << ", skipping..."; |
1706 | 1 | ASSIMP_LOG_WARN(err.str()); |
1707 | 1 | continue; |
1708 | 1 | } |
1709 | 9 | const std::string& prop_name = elem2->second; |
1710 | | |
1711 | | // generate a uid for this texture |
1712 | 9 | const int64_t texture_uid = generate_uid(); |
1713 | | |
1714 | | // link the texture to the material |
1715 | 9 | connections.emplace_back( |
1716 | 9 | "C", "OP", texture_uid, material_uid, prop_name |
1717 | 9 | ); |
1718 | | |
1719 | | // link the image data to the texture |
1720 | 9 | connections.emplace_back("C", "OO", image_uid, texture_uid); |
1721 | | |
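     |       | // fetch the UV transform for this texture; note that this always queries the
     |       | // diffuse channel's transform, regardless of the current texture type.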
1722 | 9 | aiUVTransform trafo; |
1723 | 9 | unsigned int max = sizeof(aiUVTransform); |
1724 | 9 | aiGetMaterialFloatArray(mat, AI_MATKEY_UVTRANSFORM(aiTextureType_DIFFUSE, 0), (ai_real *)&trafo, &max); |
1725 | | |
1726 | | // now write the actual texture node |
1727 | 9 | FBX::Node tnode("Texture"); |
1728 | | // TODO: some way to determine texture name? |
1729 | 9 | const std::string texture_name = "" + FBX::SEPARATOR + "Texture"; |
1730 | 9 | tnode.AddProperties(texture_uid, texture_name, ""); |
1731 | | // there really doesn't seem to be a better type than this: |
1732 | 9 | tnode.AddChild("Type", "TextureVideoClip"); |
1733 | 9 | tnode.AddChild("Version", int32_t(202)); |
1734 | 9 | tnode.AddChild("TextureName", texture_name); |
1735 | 9 | FBX::Node p("Properties70"); |
1736 | 9 | p.AddP70vectorA("Translation", trafo.mTranslation[0], trafo.mTranslation[1], 0.0); |
1737 | 9 | p.AddP70vectorA("Rotation", 0, 0, trafo.mRotation); |
1738 | 9 | p.AddP70vectorA("Scaling", trafo.mScaling[0], trafo.mScaling[1], 0.0); |
1739 | 9 | p.AddP70enum("CurrentTextureBlendMode", 0); // TODO: verify |
1740 | | //p.AddP70string("UVSet", ""); // TODO: how should this work? |
1741 | 9 | p.AddP70bool("UseMaterial", true); |
1742 | 9 | tnode.AddChild(p); |
1743 | | // can't easily determine which texture path will be correct, |
1744 | | // so just store what we have in every field. |
1745 | | // these being incorrect is a common problem with FBX anyway. |
1746 | 9 | tnode.AddChild("FileName", texture_path); |
1747 | 9 | tnode.AddChild("RelativeFilename", texture_path); |
1748 | 9 | tnode.AddChild("ModelUVTranslation", double(0.0), double(0.0)); |
1749 | 9 | tnode.AddChild("ModelUVScaling", double(1.0), double(1.0)); |
1750 | 9 | tnode.AddChild("Texture_Alpha_Source", "None"); |
1751 | 9 | tnode.AddChild( |
1752 | 9 | "Cropping", int32_t(0), int32_t(0), int32_t(0), int32_t(0) |
1753 | 9 | ); |
1754 | 9 | tnode.Dump(outstream, binary, indent); |
1755 | 9 | } |
1756 | 53 | } |
1757 | | |
1758 | | // Blendshapes, if any |
1759 | 1.90k | for (size_t mi = 0; mi < mScene->mNumMeshes; ++mi) { |
1760 | 1.85k | const aiMesh* m = mScene->mMeshes[mi]; |
1761 | 1.85k | if (m->mNumAnimMeshes == 0) { |
1762 | 1.85k | continue; |
1763 | 1.85k | } |
1764 | | // make a deformer for this mesh |
1765 | 0 | int64_t deformer_uid = generate_uid(); |
1766 | 0 | FBX::Node dnode("Deformer"); |
1767 | 0 | dnode.AddProperties(deformer_uid, m->mName.data + FBX::SEPARATOR + "Blendshapes", "BlendShape"); |
1768 | 0 | dnode.AddChild("Version", int32_t(101)); |
1769 | 0 | dnode.Dump(outstream, binary, indent); |
1770 | | // connect it |
1771 | 0 | const auto node = get_node_for_mesh((unsigned int)mi, mScene->mRootNode); |
1772 | 0 | connections.emplace_back("C", "OO", deformer_uid, mesh_uids[node]); |
1773 | 0 | std::vector<int32_t> vertex_indices = vVertexIndice[mi]; |
1774 | |
|
1775 | 0 | for (unsigned int am = 0; am < m->mNumAnimMeshes; ++am) { |
1776 | 0 | aiAnimMesh *pAnimMesh = m->mAnimMeshes[am]; |
1777 | 0 | std::string blendshape_name = pAnimMesh->mName.data; |
1778 | | |
1779 | | // start the node record |
1780 | 0 | FBX::Node bsnode("Geometry"); |
1781 | 0 | int64_t blendshape_uid = generate_uid(); |
1782 | 0 | blendshape_uids.push_back(blendshape_uid); |
1783 | 0 | bsnode.AddProperty(blendshape_uid); |
1784 | 0 | bsnode.AddProperty(blendshape_name + FBX::SEPARATOR + "Geometry"); |
1785 | 0 | bsnode.AddProperty("Shape"); |
1786 | 0 | bsnode.AddChild("Version", int32_t(100)); |
1787 | 0 | bsnode.Begin(outstream, binary, indent); |
1788 | 0 | bsnode.DumpProperties(outstream, binary, indent); |
1789 | 0 | bsnode.EndProperties(outstream, binary, indent); |
1790 | 0 | bsnode.BeginChildren(outstream, binary, indent); |
1791 | 0 | indent++; |
1792 | 0 | if (pAnimMesh->HasPositions()) { |
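     |       | // FBX blendshapes store per-vertex deltas relative to the base mesh,
     |       | // not absolute positions.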
1793 | 0 | std::vector<int32_t>shape_indices; |
1794 | 0 | std::vector<float>pPositionDiff; |
1795 | 0 | std::vector<float>pNormalDiff; |
1796 |      |
1797 | 0 | for (unsigned int vt = 0; vt < vertex_indices.size(); ++vt) { |
1798 | 0 | aiVector3D pDiff = (pAnimMesh->mVertices[vertex_indices[vt]] - m->mVertices[vertex_indices[vt]]); |
1799 | 0 | shape_indices.push_back(vertex_indices[vt]); |
1800 | 0 | pPositionDiff.push_back(pDiff[0]); |
1801 | 0 | pPositionDiff.push_back(pDiff[1]); |
1802 | 0 | pPositionDiff.push_back(pDiff[2]); |
1803 |      |
1804 | 0 | if (pAnimMesh->HasNormals()) { |
1805 | 0 | aiVector3D nDiff = (pAnimMesh->mNormals[vertex_indices[vt]] - m->mNormals[vertex_indices[vt]]); |
1806 | 0 | pNormalDiff.push_back(nDiff[0]); |
1807 | 0 | pNormalDiff.push_back(nDiff[1]); |
1808 | 0 | pNormalDiff.push_back(nDiff[2]); |
1809 | 0 | } else { |
1810 | 0 | pNormalDiff.push_back(0.0); |
1811 | 0 | pNormalDiff.push_back(0.0); |
1812 | 0 | pNormalDiff.push_back(0.0); |
1813 | 0 | } |
1814 | 0 | } |
1815 |      |
1816 | 0 | FBX::Node::WritePropertyNode( |
1817 | 0 | "Indexes", shape_indices, outstream, binary, indent |
1818 | 0 | ); |
1819 |      |
1820 | 0 | FBX::Node::WritePropertyNode( |
1821 | 0 | "Vertices", pPositionDiff, outstream, binary, indent |
1822 | 0 | ); |
1823 |      |
1824 | 0 | if (pNormalDiff.size()>0) { |
1825 | 0 | FBX::Node::WritePropertyNode( |
1826 | 0 | "Normals", pNormalDiff, outstream, binary, indent |
1827 | 0 | ); |
1828 | 0 | } |
1829 | 0 | } |
1830 | 0 | indent--; |
1831 | 0 | bsnode.End(outstream, binary, indent, true); |
1832 | | |
1833 | | // Add blendshape Channel Deformer |
1834 | 0 | FBX::Node sdnode("Deformer"); |
1835 | 0 | const int64_t blendchannel_uid = generate_uid(); |
1836 | 0 | sdnode.AddProperties( |
1837 | 0 | blendchannel_uid, blendshape_name + FBX::SEPARATOR + "SubDeformer", "BlendShapeChannel" |
1838 | 0 | ); |
1839 | 0 | sdnode.AddChild("Version", int32_t(100)); |
1840 | 0 | sdnode.AddChild("DeformPercent", float(0.0)); |
1841 | 0 | FBX::Node p("Properties70"); |
1842 | 0 | p.AddP70numberA("DeformPercent", 0.0); |
1843 | 0 | sdnode.AddChild(p); |
1844 | | // TODO: Normally just one weight per channel, adding stub for later development |
1845 | 0 | std::vector<double>fFullWeights; |
1846 | 0 | fFullWeights.push_back(100.); |
1847 | 0 | sdnode.AddChild("FullWeights", fFullWeights); |
1848 | 0 | sdnode.Dump(outstream, binary, indent); |
1849 |      |
1850 | 0 | connections.emplace_back("C", "OO", blendchannel_uid, deformer_uid); |
1851 | 0 | connections.emplace_back("C", "OO", blendshape_uid, blendchannel_uid); |
1852 | 0 | } |
1853 | 0 | } |
1854 | | |
1855 | | // bones. |
1856 | | // |
1857 | | // output structure: |
1858 | | // subset of node hierarchy that are "skeleton", |
1859 | | // i.e. do not have meshes but only bones. |
1860 | | // but.. i'm not sure how anyone could guarantee that... |
1861 | | // |
1862 | | // input... |
1863 | | // well, for each mesh it has "bones", |
1864 | | // and the bone names correspond to nodes. |
1865 | | // of course we also need the parent nodes, |
1866 | | // as they give some of the transform........ |
1867 | | // |
1868 | | // well. we can assume a sane input, i suppose. |
1869 | | // |
1870 | | // so input is the bone node hierarchy, |
1871 | | // with an extra thing for the transformation of the MESH in BONE space. |
1872 | | // |
1873 | | // output is a set of bone nodes, |
1874 | | // a "bindpose" which indicates the default local transform of all bones, |
1875 | | // and a set of "deformers". |
1876 | | // each deformer is parented to a mesh geometry, |
1877 | | // and has one or more "subdeformer"s as children. |
1878 | | // each subdeformer has one bone node as a child, |
1879 | | // and represents the influence of that bone on the grandparent mesh. |
1880 | | // the subdeformer has a list of indices, and weights, |
1881 | | // with indices specifying vertex indices, |
1882 | | // and weights specifying the corresponding influence of this bone. |
1883 | | // it also has Transform and TransformLink elements, |
1884 | | // specifying the transform of the MESH in BONE space, |
1885 | | // and the transformation of the BONE in WORLD space, |
1886 | | // likely in the bindpose. |
1887 | | // |
1888 | | // the input bone structure is different but similar, |
1889 | | // storing the number of weights for this bone, |
1890 | | // and an array of (vertex index, weight) pairs. |
1891 | | // |
1892 | | // one sticky point is that the number of vertices may not match, |
1893 | | // because assimp splits vertices by normal, uv, etc. |
1894 | | |
1895 | | |
1896 | | // first we should mark the skeleton for each mesh. |
1897 | | // the skeleton must include not only the aiBones, |
1898 | | // but also all their parent nodes. |
1899 | | // anything that affects the position of any bone node must be included. |
1900 | | |
1901 | | // note that we want to preserve input order as much as possible here. |
1902 |      | // previously, sorting by name led to consistent output across systems, but was not
1903 | | // suitable for downstream consumption by some applications. |
1904 | 56 | std::vector<std::vector<const aiNode*>> skeleton_by_mesh(mScene->mNumMeshes); |
1905 | | // at the same time we can build a list of all the skeleton nodes, |
1906 | | // which will be used later to mark them as type "limbNode". |
1907 | 56 | std::unordered_set<const aiNode*> limbnodes; |
1908 | | |
1909 | | //actual bone nodes in fbx, without parenting-up |
1910 | 56 | std::vector<std::string> allBoneNames; |
1911 | 1.90k | for(unsigned int m = 0; m < mScene->mNumMeshes; ++ m) { |
1912 | 1.85k | aiMesh* pMesh = mScene->mMeshes[m]; |
1913 | 5.60k | for(unsigned int b = 0; b < pMesh->mNumBones; ++ b) |
1914 | 3.75k | allBoneNames.push_back(pMesh->mBones[b]->mName.data); |
1915 | 1.85k | } |
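     |       | // a default-constructed aiMatrix4x4 is the identity; it is used below to
     |       | // skip parent nodes whose transform has no effect.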
1916 | 56 | aiMatrix4x4 mxTransIdentity; |
1917 | | |
1918 | | // and a map of nodes by bone name, as finding them is annoying. |
1919 | 56 | std::map<std::string,aiNode*> node_by_bone; |
1920 | 1.90k | for (size_t mi = 0; mi < mScene->mNumMeshes; ++mi) { |
1921 | 1.85k | const aiMesh* m = mScene->mMeshes[mi]; |
1922 | 1.85k | std::vector<const aiNode*> skeleton; |
1923 | 5.60k | for (size_t bi =0; bi < m->mNumBones; ++bi) { |
1924 | 3.75k | const aiBone* b = m->mBones[bi]; |
1925 | 3.75k | const std::string name(b->mName.C_Str()); |
1926 | 3.75k | auto elem = node_by_bone.find(name); |
1927 | 3.75k | aiNode* n; |
1928 | 3.75k | if (elem != node_by_bone.end()) { |
1929 | 0 | n = elem->second; |
1930 | 3.75k | } else { |
1931 | 3.75k | n = mScene->mRootNode->FindNode(b->mName); |
1932 | 3.75k | if (!n) { |
1933 | | // this should never happen |
1934 | 0 | std::stringstream err; |
1935 | 0 | err << "Failed to find node for bone: \"" << name << "\""; |
1936 | 0 | throw DeadlyExportError(err.str()); |
1937 | 0 | } |
1938 | 3.75k | node_by_bone[name] = n; |
1939 | 3.75k | limbnodes.insert(n); |
1940 | 3.75k | } |
1941 | 3.75k | skeleton.push_back(n); |
1942 | | // mark all parent nodes as skeleton as well, |
1943 | | // up until we find the root node, |
1944 | | // or else the node containing the mesh, |
1945 | | // or else the parent of a node containing the mesh. |
1946 | 3.75k | for ( |
1947 | 3.75k | const aiNode* parent = n->mParent; |
1948 | 7.50k | parent && parent != mScene->mRootNode; |
1949 | 3.75k | parent = parent->mParent |
1950 | 3.75k | ) { |
1951 | | // if we've already done this node we can skip it all |
1952 | 3.75k | if (std::find(skeleton.begin(), skeleton.end(), parent) != skeleton.end()) { |
1953 | 0 | break; |
1954 | 0 | } |
1955 | | // ignore fbx transform nodes as these will be collapsed later |
1956 | | // TODO: cache this by aiNode* |
1957 | 3.75k | const std::string node_name(parent->mName.C_Str()); |
1958 | 3.75k | if (node_name.find(MAGIC_NODE_TAG) != std::string::npos) { |
1959 | 0 | continue; |
1960 | 0 | } |
1961 | | //not a bone in scene && no effect in transform |
1962 | 3.75k | if (std::find(allBoneNames.begin(), allBoneNames.end(), node_name) == allBoneNames.end() |
1963 | 3.75k | && parent->mTransformation == mxTransIdentity) { |
1964 | 3.74k | continue; |
1965 | 3.74k | } |
1966 | | // otherwise check if this is the root of the skeleton |
1967 | 3 | bool end = false; |
1968 | | // is the mesh part of this node? |
1969 | 3 | for (size_t i = 0; i < parent->mNumMeshes && !end; ++i) { |
1970 | 0 | end |= parent->mMeshes[i] == mi; |
1971 | 0 | } |
1972 | | // is the mesh in one of the children of this node? |
1973 | 6 | for (size_t j = 0; j < parent->mNumChildren && !end; ++j) { |
1974 | 3 | aiNode* child = parent->mChildren[j]; |
1975 | 3 | for (size_t i = 0; i < child->mNumMeshes && !end; ++i) { |
1976 | 0 | end |= child->mMeshes[i] == mi; |
1977 | 0 | } |
1978 | 3 | } |
1979 | | |
1980 | | // if it was the skeleton root we can finish here |
1981 | 3 | if (end) { break; } |
1982 | 3 | } |
1983 | 3.75k | } |
1984 | 1.85k | skeleton_by_mesh[mi] = skeleton; |
1985 | 1.85k | } |
1986 | | |
1987 | | // we'll need the uids for the bone nodes, so generate them now |
1988 | 1.90k | for (size_t i = 0; i < mScene->mNumMeshes; ++i) { |
1989 | 1.85k | auto &s = skeleton_by_mesh[i]; |
1990 | 3.75k | for (const aiNode* n : s) { |
1991 | 3.75k | if (node_uids.find(n) == node_uids.end()) { |
1992 | 3.75k | node_uids[n] = generate_uid(); |
1993 | 3.75k | } |
1994 | 3.75k | } |
1995 | 1.85k | } |
1996 | | |
1997 | | // now, for each aiMesh, we need to export a deformer, |
1998 | | // and for each aiBone a subdeformer, |
1999 | | // which should have all the skinning info. |
2000 | | // these will need to be connected properly to the mesh, |
2001 | | // and we can do that all now. |
2002 | 1.90k | for (size_t mi = 0; mi < mScene->mNumMeshes; ++mi) { |
2003 | 1.85k | const aiMesh* m = mScene->mMeshes[mi]; |
2004 | 1.85k | if (!m->HasBones()) { |
2005 | 1.84k | continue; |
2006 | 1.84k | } |
2007 | | |
2008 | 6 | const aiNode *mesh_node = get_node_for_mesh((uint32_t)mi, mScene->mRootNode); |
2009 | | // make a deformer for this mesh |
2010 | 6 | int64_t deformer_uid = generate_uid(); |
2011 | 6 | FBX::Node dnode("Deformer"); |
2012 | 6 | dnode.AddProperties(deformer_uid, FBX::SEPARATOR + "Deformer", "Skin"); |
2013 | 6 | dnode.AddChild("Version", int32_t(101)); |
2014 | | // "acuracy"... this is not a typo.... |
2015 | 6 | dnode.AddChild("Link_DeformAcuracy", double(50)); |
2016 | 6 | dnode.AddChild("SkinningType", "Linear"); // TODO: other modes? |
2017 | 6 | dnode.Dump(outstream, binary, indent); |
2018 | | |
2019 | | // connect it |
2020 | 6 | connections.emplace_back("C", "OO", deformer_uid, mesh_uids[mesh_node]); |
2021 | | |
2022 | | // TODO, FIXME: this won't work if anything is not in the bind pose. |
2023 | | // for now if such a situation is detected, we throw an exception. |
2024 | 6 | std::set<const aiBone*> not_in_bind_pose; |
2025 | 6 | std::set<const aiNode*> no_offset_matrix; |
2026 | | |
2027 | | // first get this mesh's position in world space, |
2028 | | // as we'll need it for each subdeformer. |
2029 | | // |
2030 | | // ...of course taking the position of the MESH doesn't make sense, |
2031 | | // as it can be instanced to many nodes. |
2032 | | // All we can do is assume no instancing, |
2033 | | // and take the first node we find that contains the mesh. |
2034 | 6 | aiMatrix4x4 mesh_xform = get_world_transform(mesh_node, mScene); |
2035 | | |
2036 | | // now make a subdeformer for each bone in the skeleton |
2037 | 6 | const auto & skeleton= skeleton_by_mesh[mi]; |
2038 | 3.75k | for (const aiNode* bone_node : skeleton) { |
2039 | | // if there's a bone for this node, find it |
2040 | 3.75k | const aiBone* b = nullptr; |
2041 | 2.80M | for (size_t bi = 0; bi < m->mNumBones; ++bi) { |
2042 | | // TODO: this probably should index by something else |
2043 | 2.80M | const std::string name(m->mBones[bi]->mName.C_Str()); |
2044 | 2.80M | if (node_by_bone[name] == bone_node) { |
2045 | 3.75k | b = m->mBones[bi]; |
2046 | 3.75k | break; |
2047 | 3.75k | } |
2048 | 2.80M | } |
2049 | 3.75k | if (!b) { |
2050 | 0 | no_offset_matrix.insert(bone_node); |
2051 | 0 | } |
2052 | | |
2053 | | // start the subdeformer node |
2054 | 3.75k | const int64_t subdeformer_uid = generate_uid(); |
2055 | 3.75k | FBX::Node sdnode("Deformer"); |
2056 | 3.75k | sdnode.AddProperties( |
2057 | 3.75k | subdeformer_uid, FBX::SEPARATOR + "SubDeformer", "Cluster" |
2058 | 3.75k | ); |
2059 | 3.75k | sdnode.AddChild("Version", int32_t(100)); |
2060 | 3.75k | sdnode.AddChild("UserData", "", ""); |
2061 | | |
2062 | | // add indices and weights, if any |
2063 | 3.75k | if (b) { |
2064 | 3.75k | std::set<int32_t> setWeightedVertex; |
2065 | 3.75k | std::vector<int32_t> subdef_indices; |
2066 | 3.75k | std::vector<double> subdef_weights; |
2067 | 3.75k | int32_t last_index = -1; |
2068 | 33.7k | for (size_t wi = 0; wi < b->mNumWeights; ++wi) { |
2069 | 29.9k | if (b->mWeights[wi].mVertexId >= vVertexIndice[mi].size()) { |
2070 | 29.9k | ASSIMP_LOG_ERROR("UNREAL: Skipping vertex index to prevent buffer overflow."); |
2071 | 29.9k | continue; |
2072 | 29.9k | } |
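     |       | // map the assimp vertex id to its deduplicated export index; uniq_v_before_mi
     |       | // presumably offsets into the combined vertex buffer when several meshes share a node.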
2073 | 42 | int32_t vi = vVertexIndice[mi][b->mWeights[wi].mVertexId] |
2074 | 42 | + uniq_v_before_mi[mi]; |
2075 | 42 | bool bIsWeightedAlready = (setWeightedVertex.find(vi) != setWeightedVertex.end()); |
2076 | 42 | if (vi == last_index || bIsWeightedAlready) { |
2077 | | // only for vertices we exported to fbx |
2078 | | // TODO, FIXME: this assumes identically-located vertices |
2079 | | // will always deform in the same way. |
2080 | | // as assimp doesn't store a separate list of "positions", |
2081 | | // there's not much that can be done about this |
2082 | | // other than assuming that identical position means |
2083 | | // identical vertex. |
2084 | 0 | continue; |
2085 | 0 | } |
2086 | 42 | setWeightedVertex.insert(vi); |
2087 | 42 | subdef_indices.push_back(vi); |
2088 | 42 | subdef_weights.push_back(b->mWeights[wi].mWeight); |
2089 | 42 | last_index = vi; |
2090 | 42 | } |
2091 | | // yes, "indexes" |
2092 | 3.75k | sdnode.AddChild("Indexes", subdef_indices); |
2093 | 3.75k | sdnode.AddChild("Weights", subdef_weights); |
2094 | 3.75k | } |
2095 | | |
2096 | | // transform is the transform of the mesh, but in bone space. |
2097 | | // if the skeleton is in the bind pose, |
2098 | | // we can take the inverse of the world-space bone transform |
2099 | | // and multiply by the world-space transform of the mesh. |
2100 | 3.75k | aiMatrix4x4 bone_xform = get_world_transform(bone_node, mScene); |
2101 | 3.75k | aiMatrix4x4 inverse_bone_xform = bone_xform; |
2102 | 3.75k | inverse_bone_xform.Inverse(); |
2103 | 3.75k | aiMatrix4x4 tr = inverse_bone_xform * mesh_xform; |
2104 | | |
2105 | 3.75k | sdnode.AddChild("Transform", tr); |
2106 | | |
2107 | | |
2108 | 3.75k | sdnode.AddChild("TransformLink", bone_xform); |
2109 | | // note: this means we ALWAYS rely on the mesh node transform |
2110 | | // being unchanged from the time the skeleton was bound. |
2111 | | // there's not really any way around this at the moment. |
2112 | | |
2113 | | // done |
2114 | 3.75k | sdnode.Dump(outstream, binary, indent); |
2115 | | |
2116 | | // lastly, connect to the parent deformer |
2117 | 3.75k | connections.emplace_back( |
2118 | 3.75k | "C", "OO", subdeformer_uid, deformer_uid |
2119 | 3.75k | ); |
2120 | | |
2121 | | // we also need to connect the limb node to the subdeformer. |
2122 | 3.75k | connections.emplace_back( |
2123 | 3.75k | "C", "OO", node_uids[bone_node], subdeformer_uid |
2124 | 3.75k | ); |
2125 | 3.75k | } |
2126 | | |
2127 | | // if we cannot create a valid FBX file, simply die. |
2128 | | // this will both prevent unnecessary bug reports, |
2129 | | // and tell the user what they can do to fix the situation |
2130 | | // (i.e. export their model in the bind pose). |
2131 | 6 | if (no_offset_matrix.size() && not_in_bind_pose.size()) { |
2132 | 0 | std::stringstream err; |
2133 | 0 | err << "Not enough information to construct bind pose"; |
2134 | 0 | err << " for mesh " << mi << "!"; |
2135 | 0 | err << " Transform matrix for bone \""; |
2136 | 0 | err << (*not_in_bind_pose.begin())->mName.C_Str() << "\""; |
2137 | 0 | if (not_in_bind_pose.size() > 1) { |
2138 | 0 | err << " (and " << not_in_bind_pose.size() - 1 << " more)"; |
2139 | 0 | } |
2140 | 0 | err << " does not match mOffsetMatrix,"; |
2141 | 0 | err << " and node \""; |
2142 | 0 | err << (*no_offset_matrix.begin())->mName.C_Str() << "\""; |
2143 | 0 | if (no_offset_matrix.size() > 1) { |
2144 | 0 | err << " (and " << no_offset_matrix.size() - 1 << " more)"; |
2145 | 0 | } |
2146 | 0 | err << " has no offset matrix to rely on."; |
2147 | 0 | err << " Please ensure bones are in the bind pose to export."; |
2148 | 0 | throw DeadlyExportError(err.str()); |
2149 | 0 | } |
2150 | | |
2151 | 6 | } |
2152 | | |
2153 | | // BindPose |
2154 | | // |
2155 | | // This is a legacy system, which should be unnecessary. |
2156 | | // |
2157 | | // Somehow including it slows file loading by the official FBX SDK, |
2158 |      | // and as the SDK can reconstruct it from the deformers anyway,
2159 | | // this is not currently included. |
2160 | | // |
2161 | | // The code is kept here in case it's useful in the future, |
2162 | | // but it's pretty much a hack anyway, |
2163 | | // as assimp doesn't store bindpose information for full skeletons. |
2164 | | // |
2165 | | /*for (size_t mi = 0; mi < mScene->mNumMeshes; ++mi) { |
2166 | | aiMesh* mesh = mScene->mMeshes[mi]; |
2167 | | if (! mesh->HasBones()) { continue; } |
2168 | | int64_t bindpose_uid = generate_uid(); |
2169 | | FBX::Node bpnode("Pose"); |
2170 | | bpnode.AddProperty(bindpose_uid); |
2171 | | // note: this uid is never linked or connected to anything. |
2172 | | bpnode.AddProperty(FBX::SEPARATOR + "Pose"); // blank name |
2173 | | bpnode.AddProperty("BindPose"); |
2174 | | |
2175 | | bpnode.AddChild("Type", "BindPose"); |
2176 | | bpnode.AddChild("Version", int32_t(100)); |
2177 | | |
2178 | | aiNode* mesh_node = get_node_for_mesh(mi, mScene->mRootNode); |
2179 | | |
2180 | | // next get the whole skeleton for this mesh. |
2181 | | // we need it all to define the bindpose section. |
2182 | | // the FBX SDK will complain if it's missing, |
2183 | | // and also if parents of used bones don't have a subdeformer. |
2184 | | // order shouldn't matter. |
2185 | | std::set<aiNode*> skeleton; |
2186 | | for (size_t bi = 0; bi < mesh->mNumBones; ++bi) { |
2187 | | // bone node should have already been indexed |
2188 | | const aiBone* b = mesh->mBones[bi]; |
2189 | | const std::string bone_name(b->mName.C_Str()); |
2190 | | aiNode* parent = node_by_bone[bone_name]; |
2191 | | // insert all nodes down to the root or mesh node |
2192 | | while ( |
2193 | | parent |
2194 | | && parent != mScene->mRootNode |
2195 | | && parent != mesh_node |
2196 | | ) { |
2197 | | skeleton.insert(parent); |
2198 | | parent = parent->mParent; |
2199 | | } |
2200 | | } |
2201 | | |
2202 | | // number of pose nodes. includes one for the mesh itself. |
2203 | | bpnode.AddChild("NbPoseNodes", int32_t(1 + skeleton.size())); |
2204 | | |
2205 | | // the first pose node is always the mesh itself |
2206 | | FBX::Node pose("PoseNode"); |
2207 | | pose.AddChild("Node", mesh_uids[mi]); |
2208 | | aiMatrix4x4 mesh_node_xform = get_world_transform(mesh_node, mScene); |
2209 | | pose.AddChild("Matrix", mesh_node_xform); |
2210 | | bpnode.AddChild(pose); |
2211 | | |
2212 | | for (aiNode* bonenode : skeleton) { |
2213 | | // does this node have a uid yet? |
2214 | | int64_t node_uid; |
2215 | | auto node_uid_iter = node_uids.find(bonenode); |
2216 | | if (node_uid_iter != node_uids.end()) { |
2217 | | node_uid = node_uid_iter->second; |
2218 | | } else { |
2219 | | node_uid = generate_uid(); |
2220 | | node_uids[bonenode] = node_uid; |
2221 | | } |
2222 | | |
2223 | | // make a pose thingy |
2224 | | pose = FBX::Node("PoseNode"); |
2225 | | pose.AddChild("Node", node_uid); |
2226 | | aiMatrix4x4 node_xform = get_world_transform(bonenode, mScene); |
2227 | | pose.AddChild("Matrix", node_xform); |
2228 | | bpnode.AddChild(pose); |
2229 | | } |
2230 | | |
2231 | | // now write it |
2232 | | bpnode.Dump(outstream, binary, indent); |
2233 | | }*/ |
2234 | | |
2235 | | // lights |
2236 | 56 | indent = 1; |
2237 | 56 | lights_uids.clear(); |
2238 | 56 | for (size_t li = 0; li < mScene->mNumLights; ++li) { |
2239 | 0 | aiLight* l = mScene->mLights[li]; |
2240 |      |
2241 | 0 | int64_t uid = generate_uid(); |
2242 | 0 | const std::string lightNodeAttributeName = l->mName.C_Str() + FBX::SEPARATOR + "NodeAttribute"; |
2243 |      |
2244 | 0 | FBX::Node lna("NodeAttribute"); |
2245 | 0 | lna.AddProperties(uid, lightNodeAttributeName, "Light"); |
2246 | 0 | FBX::Node lnap("Properties70"); |
2247 | | |
2248 | | // Light color. |
2249 | 0 | lnap.AddP70colorA("Color", l->mColorDiffuse.r, l->mColorDiffuse.g, l->mColorDiffuse.b); |
2250 | | |
2251 |      | // TODO Assimp's light description is quite concise and does not handle light intensity.
2252 |      | // Default the value to 1000W.
2253 | 0 | lnap.AddP70numberA("Intensity", 1000); |
2254 | | |
2255 | | // FBXLight::EType conversion |
2256 | 0 | switch (l->mType) { |
2257 | 0 | case aiLightSource_POINT: |
2258 | 0 | lnap.AddP70enum("LightType", 0); |
2259 | 0 | break; |
2260 | 0 | case aiLightSource_DIRECTIONAL: |
2261 | 0 | lnap.AddP70enum("LightType", 1); |
2262 | 0 | break; |
2263 | 0 | case aiLightSource_SPOT: |
2264 | 0 | lnap.AddP70enum("LightType", 2); |
2265 | 0 | lnap.AddP70numberA("InnerAngle", AI_RAD_TO_DEG(l->mAngleInnerCone)); |
2266 | 0 | lnap.AddP70numberA("OuterAngle", AI_RAD_TO_DEG(l->mAngleOuterCone)); |
2267 | 0 | break; |
2268 |      | // TODO Assimp does not handle 'area' or 'volume' lights, but FBX does.
2269 | | /*case aiLightSource_AREA: |
2270 | | lnap.AddP70enum("LightType", 3); |
2271 | | lnap.AddP70enum("AreaLightShape", 0); // 0=Rectangle, 1=Sphere |
2272 | | break; |
2273 | | case aiLightSource_VOLUME: |
2274 | | lnap.AddP70enum("LightType", 4); |
2275 | | break;*/ |
2276 | 0 | default: |
2277 | 0 | break; |
2278 | 0 | } |
2279 | | |
2280 |      | // Did not understand how to configure the decay, so attenuation is disabled.
2281 | 0 | lnap.AddP70enum("DecayType", 0); |
2282 | | |
2283 | | // Dump to FBX stream |
2284 | 0 | lna.AddChild(lnap); |
2285 | 0 | lna.AddChild("TypeFlags", FBX::FBXExportProperty("Light")); |
2286 | 0 | lna.AddChild("GeometryVersion", FBX::FBXExportProperty(int32_t(124))); |
2287 | 0 | lna.Dump(outstream, binary, indent); |
2288 | | |
2289 | | // Store name and uid (will be used later when parsing scene nodes) |
2290 | 0 | lights_uids[l->mName.C_Str()] = uid; |
2291 | 0 | } |
2292 | | |
2293 | | // TODO: cameras |
2294 | | |
2295 | | // write nodes (i.e. model hierarchy) |
2296 | | // start at root node |
2297 | 56 | WriteModelNodes( |
2298 | 56 | outstream, mScene->mRootNode, 0, limbnodes |
2299 | 56 | ); |
2300 | | |
2301 | | // animations |
2302 | | // |
2303 | | // in FBX there are: |
2304 | | // * AnimationStack - corresponds to an aiAnimation |
2305 | | // * AnimationLayer - a combinable animation component |
2306 | | // * AnimationCurveNode - links the property to be animated |
2307 | | // * AnimationCurve - defines animation data for a single property value |
2308 | | // |
2309 | | // the CurveNode also provides the default value for a property, |
2310 | | // such as the X, Y, Z coordinates for animatable translation. |
2311 | | // |
2312 | | // the Curve only specifies values for one component of the property, |
2313 | | // so there will be a separate AnimationCurve for X, Y, and Z. |
2314 | | // |
2315 | | // Assimp has: |
2316 | | // * aiAnimation - basically corresponds to an AnimationStack |
2317 | | // * aiNodeAnim - defines all animation for one aiNode |
2318 | | // * aiVectorKey/aiQuatKey - define the keyframe data for T/R/S |
2319 | | // |
2320 | | // assimp has no equivalent for AnimationLayer, |
2321 | | // and these are flattened on FBX import. |
2322 | | // we can assume there will be one per AnimationStack. |
2323 | | // |
2324 | | // the aiNodeAnim contains all animation data for a single aiNode, |
2325 | | // which will correspond to three AnimationCurveNode's: |
2326 | | // one each for translation, rotation and scale. |
2327 | | // The data for each of these will be put in 9 AnimationCurve's, |
2328 | | // T.X, T.Y, T.Z, R.X, R.Y, R.Z, etc. |
2329 | | |
2330 | | // AnimationStack / aiAnimation |
2331 | 56 | std::vector<int64_t> animation_stack_uids(mScene->mNumAnimations); |
2332 | 1.94k | for (size_t ai = 0; ai < mScene->mNumAnimations; ++ai) { |
2333 | 1.89k | int64_t animstack_uid = generate_uid(); |
2334 | 1.89k | animation_stack_uids[ai] = animstack_uid; |
2335 | 1.89k | const aiAnimation* anim = mScene->mAnimations[ai]; |
2336 | | |
2337 | 1.89k | FBX::Node asnode("AnimationStack"); |
2338 | 1.89k | std::string name = anim->mName.C_Str() + FBX::SEPARATOR + "AnimStack"; |
2339 | 1.89k | asnode.AddProperties(animstack_uid, name, ""); |
2340 | 1.89k | FBX::Node p("Properties70"); |
2341 | 1.89k | p.AddP70time("LocalStart", 0); // assimp doesn't store this |
2342 | 1.89k | p.AddP70time("LocalStop", to_ktime(anim->mDuration, anim)); |
2343 | 1.89k | p.AddP70time("ReferenceStart", 0); |
2344 | 1.89k | p.AddP70time("ReferenceStop", to_ktime(anim->mDuration, anim)); |
2345 | 1.89k | asnode.AddChild(p); |
2346 | | |
2347 | | // this node absurdly always pretends it has children |
2348 | | // (in this case it does, but just in case...) |
2349 | 1.89k | asnode.force_has_children = true; |
2350 | 1.89k | asnode.Dump(outstream, binary, indent); |
2351 | | |
2352 | | // note: animation stacks are not connected to anything |
2353 | 1.89k | } |
2354 | | |
2355 | | // AnimationLayer - one per aiAnimation |
2356 | 56 | std::vector<int64_t> animation_layer_uids(mScene->mNumAnimations); |
2357 | 1.94k | for (size_t ai = 0; ai < mScene->mNumAnimations; ++ai) { |
2358 | 1.89k | int64_t animlayer_uid = generate_uid(); |
2359 | 1.89k | animation_layer_uids[ai] = animlayer_uid; |
2360 | 1.89k | FBX::Node alnode("AnimationLayer"); |
2361 | 1.89k | alnode.AddProperties(animlayer_uid, FBX::SEPARATOR + "AnimLayer", ""); |
2362 | | |
2363 | | // this node absurdly always pretends it has children |
2364 | 1.89k | alnode.force_has_children = true; |
2365 | 1.89k | alnode.Dump(outstream, binary, indent); |
2366 | | |
2367 | | // connect to the relevant animstack |
2368 | 1.89k | connections.emplace_back( |
2369 | 1.89k | "C", "OO", animlayer_uid, animation_stack_uids[ai] |
2370 | 1.89k | ); |
2371 | 1.89k | } |
2372 | | |
2373 | | // AnimCurveNode - three per aiNodeAnim |
2374 | 56 | std::vector<std::vector<std::array<int64_t,3>>> curve_node_uids; |
2375 | 1.94k | for (size_t ai = 0; ai < mScene->mNumAnimations; ++ai) { |
2376 | 1.89k | const aiAnimation* anim = mScene->mAnimations[ai]; |
2377 | 1.89k | const int64_t layer_uid = animation_layer_uids[ai]; |
2378 | 1.89k | std::vector<std::array<int64_t,3>> nodeanim_uids; |
2379 | 3.78k | for (size_t nai = 0; nai < anim->mNumChannels; ++nai) { |
2380 | 1.89k | const aiNodeAnim* na = anim->mChannels[nai]; |
2381 | | // get the corresponding aiNode |
2382 | 1.89k | const aiNode* node = mScene->mRootNode->FindNode(na->mNodeName); |
2383 | | // and its transform |
2384 | 1.89k | const aiMatrix4x4 node_xfm = get_world_transform(node, mScene); |
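     |       | // the decomposed T/R/S of the node's world transform become the default
     |       | // values written into each AnimationCurveNode below.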
2385 | 1.89k | aiVector3D T, R, S; |
2386 | 1.89k | node_xfm.Decompose(S, R, T); |
2387 | | |
2388 | | // AnimationCurveNode uids |
2389 | 1.89k | std::array<int64_t,3> ids; |
2390 | 1.89k | ids[0] = generate_uid(); // T |
2391 | 1.89k | ids[1] = generate_uid(); // R |
2392 | 1.89k | ids[2] = generate_uid(); // S |
2393 | | |
2394 | | // translation |
2395 | 1.89k | WriteAnimationCurveNode(outstream, |
2396 | 1.89k | ids[0], "T", T, "Lcl Translation", |
2397 | 1.89k | layer_uid, node_uids[node] |
2398 | 1.89k | ); |
2399 | | |
2400 | | // rotation |
2401 | 1.89k | WriteAnimationCurveNode(outstream, |
2402 | 1.89k | ids[1], "R", R, "Lcl Rotation", |
2403 | 1.89k | layer_uid, node_uids[node] |
2404 | 1.89k | ); |
2405 | | |
2406 | | // scale |
2407 | 1.89k | WriteAnimationCurveNode(outstream, |
2408 | 1.89k | ids[2], "S", S, "Lcl Scale", |
2409 | 1.89k | layer_uid, node_uids[node] |
2410 | 1.89k | ); |
2411 | | |
2412 | | // store the uids for later use |
2413 | 1.89k | nodeanim_uids.push_back(ids); |
2414 | 1.89k | } |
2415 | 1.89k | curve_node_uids.push_back(nodeanim_uids); |
2416 | 1.89k | } |
2417 | | |
2418 | | // AnimCurve - defines actual keyframe data. |
2419 | | // there's a separate curve for every component of every vector, |
2420 | | // for example a transform curvenode will have separate X/Y/Z AnimCurve's |
2421 | 1.94k | for (size_t ai = 0; ai < mScene->mNumAnimations; ++ai) { |
2422 | 1.89k | const aiAnimation* anim = mScene->mAnimations[ai]; |
2423 | 3.78k | for (size_t nai = 0; nai < anim->mNumChannels; ++nai) { |
2424 | 1.89k | const aiNodeAnim* na = anim->mChannels[nai]; |
2425 | | // get the corresponding aiNode |
2426 | 1.89k | const aiNode* node = mScene->mRootNode->FindNode(na->mNodeName); |
2427 | | // and its transform |
2428 | 1.89k | const aiMatrix4x4 node_xfm = get_world_transform(node, mScene); |
2429 | 1.89k | aiVector3D T, R, S; |
2430 | 1.89k | node_xfm.Decompose(S, R, T); |
2431 | 1.89k | const std::array<int64_t,3>& ids = curve_node_uids[ai][nai]; |
2432 | | |
2433 | 1.89k | std::vector<int64_t> times; |
2434 | 1.89k | std::vector<float> xval, yval, zval; |
2435 | | |
2436 | | // position/translation |
2437 | 3.78k | for (size_t ki = 0; ki < na->mNumPositionKeys; ++ki) { |
2438 | 1.89k | const aiVectorKey& k = na->mPositionKeys[ki]; |
2439 | 1.89k | times.push_back(to_ktime(k.mTime, anim)); |
2440 | 1.89k | xval.push_back(k.mValue.x); |
2441 | 1.89k | yval.push_back(k.mValue.y); |
2442 | 1.89k | zval.push_back(k.mValue.z); |
2443 | 1.89k | } |
2444 | | // one curve each for X, Y, Z |
2445 | 1.89k | WriteAnimationCurve(outstream, T.x, times, xval, ids[0], "d|X"); |
2446 | 1.89k | WriteAnimationCurve(outstream, T.y, times, yval, ids[0], "d|Y"); |
2447 | 1.89k | WriteAnimationCurve(outstream, T.z, times, zval, ids[0], "d|Z"); |
2448 | | |
2449 | | // rotation |
2450 | 1.89k | times.clear(); xval.clear(); yval.clear(); zval.clear(); |
2451 | 3.78k | for (size_t ki = 0; ki < na->mNumRotationKeys; ++ki) { |
2452 | 1.89k | const aiQuatKey& k = na->mRotationKeys[ki]; |
2453 | 1.89k | times.push_back(to_ktime(k.mTime, anim)); |
2454 | | // TODO: aiQuaternion method to convert to Euler... |
2455 | 1.89k | aiMatrix4x4 m(k.mValue.GetMatrix()); |
2456 | 1.89k | aiVector3D qs, qr, qt; |
2457 | 1.89k | m.Decompose(qs, qr, qt); |
2458 | 1.89k | qr = AI_RAD_TO_DEG(qr); |
2459 | 1.89k | xval.push_back(qr.x); |
2460 | 1.89k | yval.push_back(qr.y); |
2461 | 1.89k | zval.push_back(qr.z); |
2462 | 1.89k | } |
2463 | 1.89k | WriteAnimationCurve(outstream, R.x, times, xval, ids[1], "d|X"); |
2464 | 1.89k | WriteAnimationCurve(outstream, R.y, times, yval, ids[1], "d|Y"); |
2465 | 1.89k | WriteAnimationCurve(outstream, R.z, times, zval, ids[1], "d|Z"); |
2466 | | |
2467 | | // scaling/scale |
2468 | 1.89k | times.clear(); xval.clear(); yval.clear(); zval.clear(); |
2469 | 3.78k | for (size_t ki = 0; ki < na->mNumScalingKeys; ++ki) { |
2470 | 1.89k | const aiVectorKey& k = na->mScalingKeys[ki]; |
2471 | 1.89k | times.push_back(to_ktime(k.mTime, anim)); |
2472 | 1.89k | xval.push_back(k.mValue.x); |
2473 | 1.89k | yval.push_back(k.mValue.y); |
2474 | 1.89k | zval.push_back(k.mValue.z); |
2475 | 1.89k | } |
2476 | 1.89k | WriteAnimationCurve(outstream, S.x, times, xval, ids[2], "d|X"); |
2477 | 1.89k | WriteAnimationCurve(outstream, S.y, times, yval, ids[2], "d|Y"); |
2478 | 1.89k | WriteAnimationCurve(outstream, S.z, times, zval, ids[2], "d|Z"); |
2479 | 1.89k | } |
2480 | 1.89k | } |
2481 | | |
2482 | 56 | indent = 0; |
2483 | 56 | object_node.End(outstream, binary, indent, true); |
2484 | 56 | } |
2485 | | |
2486 | | // convenience map of magic node name strings to FBX properties, |
2487 | | // including the expected type of transform. |
2488 | | const std::map<std::string,std::pair<std::string,char>> transform_types = { |
2489 | | {"Translation", {"Lcl Translation", 't'}}, |
2490 | | {"RotationOffset", {"RotationOffset", 't'}}, |
2491 | | {"RotationPivot", {"RotationPivot", 't'}}, |
2492 | | {"PreRotation", {"PreRotation", 'r'}}, |
2493 | | {"Rotation", {"Lcl Rotation", 'r'}}, |
2494 | | {"PostRotation", {"PostRotation", 'r'}}, |
2495 | | {"RotationPivotInverse", {"RotationPivotInverse", 'i'}}, |
2496 | | {"ScalingOffset", {"ScalingOffset", 't'}}, |
2497 | | {"ScalingPivot", {"ScalingPivot", 't'}}, |
2498 | | {"Scaling", {"Lcl Scaling", 's'}}, |
2499 | | {"ScalingPivotInverse", {"ScalingPivotInverse", 'i'}}, |
2500 | | {"GeometricScaling", {"GeometricScaling", 's'}}, |
2501 | | {"GeometricRotation", {"GeometricRotation", 'r'}}, |
2502 | | {"GeometricTranslation", {"GeometricTranslation", 't'}}, |
2503 | | {"GeometricTranslationInverse", {"GeometricTranslationInverse", 'i'}}, |
2504 | | {"GeometricRotationInverse", {"GeometricRotationInverse", 'i'}}, |
2505 | | {"GeometricScalingInverse", {"GeometricScalingInverse", 'i'}} |
2506 | | }; |
2507 | | |
2508 |      | // add aiNode metadata entries to the FBX node as Properties70 properties
2509 | 15.1k | void add_meta(FBX::Node& fbx_node, const aiNode* node){ |
2510 | 15.1k | if(node->mMetaData == nullptr) return; |
2511 | 0 | aiMetadata* meta = node->mMetaData; |
2512 | 0 | for (unsigned int i = 0; i < meta->mNumProperties; ++i) { |
2513 | 0 | aiString key = meta->mKeys[i]; |
2514 | 0 | aiMetadataEntry* entry = &meta->mValues[i]; |
2515 | 0 | switch (entry->mType) { |
2516 | 0 | case AI_BOOL:{ |
2517 | 0 | bool val = *static_cast<bool *>(entry->mData); |
2518 | 0 | fbx_node.AddP70bool(key.C_Str(), val); |
2519 | 0 | break; |
2520 | 0 | } |
2521 | 0 | case AI_INT32:{ |
2522 | 0 | int32_t val = *static_cast<int32_t *>(entry->mData); |
2523 | 0 | fbx_node.AddP70int(key.C_Str(), val); |
2524 | 0 | break; |
2525 | 0 | } |
2526 | 0 | case AI_UINT64:{ |
2527 | | //use string to add uint64 |
2528 | 0 | uint64_t val = *static_cast<uint64_t *>(entry->mData); |
2529 | 0 | fbx_node.AddP70string(key.C_Str(), std::to_string(val).c_str()); |
2530 | 0 | break; |
2531 | 0 | } |
2532 | 0 | case AI_FLOAT:{ |
2533 | 0 | float val = *static_cast<float *>(entry->mData); |
2534 | 0 | fbx_node.AddP70double(key.C_Str(), val); |
2535 | 0 | break; |
2536 | 0 | } |
2537 | 0 | case AI_DOUBLE:{ |
2538 | 0 | double val = *static_cast<double *>(entry->mData); |
2539 | 0 | fbx_node.AddP70double(key.C_Str(), val); |
2540 | 0 | break; |
2541 | 0 | } |
2542 | 0 | case AI_AISTRING:{ |
2543 | 0 | aiString val = *static_cast<aiString *>(entry->mData); |
2544 | 0 | fbx_node.AddP70string(key.C_Str(), val.C_Str()); |
2545 | 0 | break; |
2546 | 0 | } |
2547 | 0 | case AI_AIMETADATA: { |
2548 | | //ignore |
2549 | 0 | break; |
2550 | 0 | } |
2551 | 0 | default: |
2552 | 0 | break; |
2553 | 0 | } |
2554 | |
2555 | 0 | }
2556 | |
2557 | 0 | }
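As a usage sketch, this is the kind of data add_meta() picks up: an aiMetadata block attached to a node before export (the key names are arbitrary examples, and the node would normally be owned by the scene graph):

    aiNode *node = new aiNode("annotated_node");
    node->mMetaData = aiMetadata::Alloc(2);
    node->mMetaData->Set(0u, "IsVisible", true);                 // emitted via AddP70bool
    node->mMetaData->Set(1u, "Source", aiString("my_pipeline")); // emitted via AddP70string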
2558 | | |
2559 | | // write a single model node to the stream |
2560 | | void FBXExporter::WriteModelNode( |
2561 | | StreamWriterLE& outstream, |
2562 | | bool, |
2563 | | const aiNode* node, |
2564 | | int64_t node_uid, |
2565 | | const std::string& type, |
2566 | | const std::vector<std::pair<std::string,aiVector3D>>& transform_chain, |
2567 | | TransformInheritance inherit_type |
2568 | 15.1k | ){ |
2569 | 15.1k | const aiVector3D zero = {0, 0, 0}; |
2570 | 15.1k | const aiVector3D one = {1, 1, 1}; |
2571 | 15.1k | FBX::Node m("Model"); |
2572 | 15.1k | std::string name = node->mName.C_Str() + FBX::SEPARATOR + "Model"; |
2573 | 15.1k | m.AddProperties(node_uid, std::move(name), type); |
2574 | 15.1k | m.AddChild("Version", int32_t(232)); |
2575 | 15.1k | FBX::Node p("Properties70"); |
2576 | 15.1k | p.AddP70bool("RotationActive", true); |
2577 | 15.1k | p.AddP70int("DefaultAttributeIndex", 0); |
2578 | 15.1k | p.AddP70enum("InheritType", inherit_type); |
2579 | 15.1k | if (transform_chain.empty()) { |
2580 | | // decompose 4x4 transform matrix into TRS |
2581 | 15.1k | aiVector3D t, r, s; |
2582 | 15.1k | node->mTransformation.Decompose(s, r, t); |
2583 | 15.1k | if (t != zero) { |
2584 | 5 | p.AddP70( |
2585 | 5 | "Lcl Translation", "Lcl Translation", "", "A", |
2586 | 5 | double(t.x), double(t.y), double(t.z) |
2587 | 5 | ); |
2588 | 5 | } |
2589 | 15.1k | if (r != zero) { |
2590 | 3 | r = AI_RAD_TO_DEG(r); |
2591 | 3 | p.AddP70( |
2592 | 3 | "Lcl Rotation", "Lcl Rotation", "", "A", |
2593 | 3 | double(r.x), double(r.y), double(r.z) |
2594 | 3 | ); |
2595 | 3 | } |
2596 | 15.1k | if (s != one) { |
2597 | 3 | p.AddP70( |
2598 | 3 | "Lcl Scaling", "Lcl Scaling", "", "A", |
2599 | 3 | double(s.x), double(s.y), double(s.z) |
2600 | 3 | ); |
2601 | 3 | } |
2602 | 15.1k | } else { |
2603 | | // apply the transformation chain. |
2604 | | // these transformation elements are created when importing FBX, |
2605 | | // which has a complex transformation hierarchy for each node. |
2606 | | // as such we can bake the hierarchy back into the node on export. |
2607 | 0 | for (auto &item : transform_chain) { |
2608 | 0 | auto elem = transform_types.find(item.first); |
2609 | 0 | if (elem == transform_types.end()) { |
2610 | | // then this is a bug |
2611 | 0 | std::stringstream err; |
2612 | 0 | err << "unrecognized FBX transformation type: "; |
2613 | 0 | err << item.first; |
2614 | 0 | throw DeadlyExportError(err.str()); |
2615 | 0 | } |
2616 | 0 | const std::string &cur_name = elem->second.first; |
2617 | 0 | const aiVector3D &v = item.second; |
2618 | 0 | if (cur_name.compare(0, 4, "Lcl ") == 0) { |
2619 | | // special handling for animatable properties |
2620 | 0 | p.AddP70( cur_name, cur_name, "", "A", double(v.x), double(v.y), double(v.z) ); |
2621 | 0 | } else { |
2622 | 0 | p.AddP70vector(cur_name, v.x, v.y, v.z); |
2623 | 0 | } |
2624 | 0 | } |
2625 | 0 | } |
2626 | 15.1k | add_meta(p, node); |
2627 | 15.1k | m.AddChild(p); |
2628 | | |
2629 | | // not sure what these are for, |
2630 | | // but they seem to be omnipresent |
2631 | 15.1k | m.AddChild("Shading", FBXExportProperty(true)); |
2632 | 15.1k | m.AddChild("Culling", FBXExportProperty("CullingOff")); |
2633 | | |
2634 | 15.1k | m.Dump(outstream, binary, 1); |
2635 | 15.1k | } |
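In the no-chain branch above, only the decomposed components that differ from the identity produce a Properties70 entry; a small sketch of that decomposition using assimp's matrix helpers:

    aiMatrix4x4 trafo;
    aiMatrix4x4::Translation(aiVector3D(1.f, 2.f, 3.f), trafo);
    aiVector3D s, r, t;
    trafo.Decompose(s, r, t);  // t = (1,2,3), r = (0,0,0), s = (1,1,1)
    // only "Lcl Translation" would be written here; a non-zero rotation is
    // converted from radians to degrees before it is emitted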
2636 | | |
2637 | | // wrapper for WriteModelNodes to create and pass a blank transform chain |
2638 | | void FBXExporter::WriteModelNodes( |
2639 | | StreamWriterLE& s, |
2640 | | const aiNode* node, |
2641 | | int64_t parent_uid, |
2642 | | const std::unordered_set<const aiNode*>& limbnodes |
2643 | 15.1k | ) { |
2644 | 15.1k | std::vector<std::pair<std::string,aiVector3D>> chain; |
2645 | 15.1k | WriteModelNodes(s, node, parent_uid, limbnodes, chain); |
2646 | 15.1k | } |
2647 | | |
2648 | | void FBXExporter::WriteModelNodes( |
2649 | | StreamWriterLE& outstream, |
2650 | | const aiNode* node, |
2651 | | int64_t parent_uid, |
2652 | | const std::unordered_set<const aiNode*>& limbnodes, |
2653 | | std::vector<std::pair<std::string,aiVector3D>>& transform_chain |
2654 | 15.1k | ) { |
2655 | | // first collapse any expanded transformation chains created by FBX import. |
2656 | 15.1k | std::string node_name(node->mName.C_Str()); |
2657 | 15.1k | if (node_name.find(MAGIC_NODE_TAG) != std::string::npos) { |
2658 | 0 | auto pos = node_name.find(MAGIC_NODE_TAG) + MAGIC_NODE_TAG.size() + 1; |
2659 | 0 | std::string type_name = node_name.substr(pos); |
2660 | 0 | auto elem = transform_types.find(type_name); |
2661 | 0 | if (elem == transform_types.end()) { |
2662 | | // then this is a bug and should be fixed |
2663 | 0 | std::stringstream err; |
2664 | 0 | err << "unrecognized FBX transformation node"; |
2665 | 0 | err << " of type " << type_name << " in node " << node_name; |
2666 | 0 | throw DeadlyExportError(err.str()); |
2667 | 0 | } |
2668 | 0 | aiVector3D t, r, s; |
2669 | 0 | node->mTransformation.Decompose(s, r, t); |
2670 | 0 | switch (elem->second.second) { |
2671 | 0 | case 'i': // inverse |
2672 | | // we don't need to worry about the inverse matrices |
2673 | 0 | break; |
2674 | 0 | case 't': // translation |
2675 | 0 | transform_chain.emplace_back(elem->first, t); |
2676 | 0 | break; |
2677 | 0 | case 'r': // rotation |
2678 | 0 | transform_chain.emplace_back(elem->first, AI_RAD_TO_DEG(r)); |
2679 | 0 | break; |
2680 | 0 | case 's': // scale |
2681 | 0 | transform_chain.emplace_back(elem->first, s); |
2682 | 0 | break; |
2683 | 0 | default: |
2684 | | // this should never happen |
2685 | 0 | std::stringstream err; |
2686 | 0 | err << "unrecognized FBX transformation type code: "; |
2687 | 0 | err << elem->second.second; |
2688 | 0 | throw DeadlyExportError(err.str()); |
2689 | 0 | } |
2690 | | // now continue on to any child nodes |
2691 | 0 | for (unsigned i = 0; i < node->mNumChildren; ++i) { |
2692 | 0 | WriteModelNodes( |
2693 | 0 | outstream, |
2694 | 0 | node->mChildren[i], |
2695 | 0 | parent_uid, |
2696 | 0 | limbnodes, |
2697 | 0 | transform_chain |
2698 | 0 | ); |
2699 | 0 | } |
2700 | 0 | return; |
2701 | 0 | } |
2702 | | |
2703 | 15.1k | int64_t node_uid = 0; |
2704 | | // generate uid and connect to parent, if not the root node, |
2705 | 15.1k | if (node != mScene->mRootNode) { |
2706 | 15.1k | auto elem = node_uids.find(node); |
2707 | 15.1k | if (elem != node_uids.end()) { |
2708 | 3.75k | node_uid = elem->second; |
2709 | 11.3k | } else { |
2710 | 11.3k | node_uid = generate_uid(); |
2711 | 11.3k | node_uids[node] = node_uid; |
2712 | 11.3k | } |
2713 | 15.1k | connections.emplace_back("C", "OO", node_uid, parent_uid); |
2714 | 15.1k | } |
2715 | | |
2716 | | // what type of node is this? |
2717 | 15.1k | if (node == mScene->mRootNode) { |
2718 | | // handled later |
2719 | 15.1k | } else if (node->mNumMeshes == 1) { |
2720 | | // connect to child mesh, which should have been written previously |
2721 | | // TODO double check this line |
2722 | 886 | connections.emplace_back("C", "OO", mesh_uids[node], node_uid); |
2723 | | // also connect to the material for the child mesh |
2724 | 886 | connections.emplace_back( |
2725 | 886 | "C", "OO", |
2726 | 886 | material_uids[mScene->mMeshes[node->mMeshes[0]]->mMaterialIndex], |
2727 | 886 | node_uid |
2728 | 886 | ); |
2729 | | // write model node |
2730 | 886 | WriteModelNode( |
2731 | 886 | outstream, binary, node, node_uid, "Mesh", transform_chain |
2732 | 886 | ); |
2733 | 14.2k | } else if (limbnodes.count(node)) { |
2734 | 3.75k | WriteModelNode( |
2735 | 3.75k | outstream, binary, node, node_uid, "LimbNode", transform_chain |
2736 | 3.75k | ); |
2737 | | // we also need to write a nodeattribute to mark it as a skeleton |
2738 | 3.75k | int64_t node_attribute_uid = generate_uid(); |
2739 | 3.75k | FBX::Node na("NodeAttribute"); |
2740 | 3.75k | na.AddProperties( |
2741 | 3.75k | node_attribute_uid, FBX::SEPARATOR + "NodeAttribute", "LimbNode" |
2742 | 3.75k | ); |
2743 | 3.75k | na.AddChild("TypeFlags", FBXExportProperty("Skeleton")); |
2744 | 3.75k | na.Dump(outstream, binary, 1); |
2745 | | // and connect them |
2746 | 3.75k | connections.emplace_back("C", "OO", node_attribute_uid, node_uid); |
2747 | 10.4k | } else if (node->mNumMeshes >= 1) { |
2748 | 282 | connections.emplace_back("C", "OO", mesh_uids[node], node_uid); |
2749 | 1.22k | for (size_t i = 0; i < node->mNumMeshes; i++) { |
2750 | 945 | connections.emplace_back( |
2751 | 945 | "C", "OO", |
2752 | 945 | material_uids[mScene->mMeshes[node->mMeshes[i]]->mMaterialIndex], |
2753 | 945 | node_uid |
2754 | 945 | ); |
2755 | 945 | } |
2756 | 282 | WriteModelNode(outstream, binary, node, node_uid, "Mesh", transform_chain); |
2757 | 10.1k | } else { |
2758 | 10.1k | const auto& lightIt = lights_uids.find(node->mName.C_Str()); |
2759 | 10.1k | if(lightIt != lights_uids.end()) { |
2760 | | // Node has a light connected to it. |
2761 | 0 | WriteModelNode( |
2762 | 0 | outstream, binary, node, node_uid, "Light", transform_chain |
2763 | 0 | ); |
2764 | 0 | connections.emplace_back("C", "OO", lightIt->second, node_uid); |
2765 | 10.1k | } else { |
2766 | | // generate a null node so we can add children to it |
2767 | 10.1k | WriteModelNode( |
2768 | 10.1k | outstream, binary, node, node_uid, "Null", transform_chain |
2769 | 10.1k | ); |
2770 | 10.1k | } |
2771 | 10.1k | } |
2772 | | |
2773 | 15.1k | if (node == mScene->mRootNode && node->mNumMeshes > 0) { |
2774 | 12 | int64_t new_node_uid = generate_uid(); |
2775 | 12 | connections.emplace_back("C", "OO", new_node_uid, node_uid); |
2776 | 12 | connections.emplace_back("C", "OO", mesh_uids[node], new_node_uid); |
2777 | 32 | for (size_t i = 0; i < node->mNumMeshes; ++i) { |
2778 | 20 | connections.emplace_back( |
2779 | 20 | "C", "OO", |
2780 | 20 | material_uids[mScene->mMeshes[node->mMeshes[i]]->mMaterialIndex], |
2781 | 20 | new_node_uid |
2782 | 20 | ); |
2783 | 20 | } |
2784 | 12 | aiNode new_node; |
2785 | 12 | new_node.mName = mScene->mMeshes[0]->mName; |
2786 | 12 | WriteModelNode(outstream, binary, &new_node, new_node_uid, "Mesh", {}); |
2787 | 12 | } |
2788 | | |
2789 | | // now recurse into children |
2790 | 30.2k | for (size_t i = 0; i < node->mNumChildren; ++i) { |
2791 | 15.1k | WriteModelNodes( |
2792 | 15.1k | outstream, node->mChildren[i], node_uid, limbnodes |
2793 | 15.1k | ); |
2794 | 15.1k | } |
2795 | 15.1k | } |
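Taken together, the magic-node branch folds an importer-generated pivot chain back into its real child; a worked example with illustrative node names:

    // "Cube" + MAGIC_NODE_TAG + "_Translation"      (matrix holds only a translation)
    //   `-- "Cube" + MAGIC_NODE_TAG + "_Rotation"   (matrix holds only a rotation)
    //         `-- "Cube"                            (the real model node)
    //
    // while descending, {"Translation", t} and {"Rotation", r in degrees} are pushed
    // onto transform_chain; "Cube" is then written once, and those entries become its
    // "Lcl Translation" / "Lcl Rotation" P70 values via WriteModelNode().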
2796 | | |
2797 | | void FBXExporter::WriteAnimationCurveNode( |
2798 | | StreamWriterLE &outstream, |
2799 | | int64_t uid, |
2800 | | const std::string &name, // "T", "R", or "S" |
2801 | | aiVector3D default_value, |
2802 | | const std::string &property_name, // "Lcl Translation" etc |
2803 | | int64_t layer_uid, |
2804 | 5.67k | int64_t node_uid) { |
2805 | 5.67k | FBX::Node n("AnimationCurveNode"); |
2806 | 5.67k | n.AddProperties(uid, name + FBX::SEPARATOR + "AnimCurveNode", ""); |
2807 | 5.67k | FBX::Node p("Properties70"); |
2808 | 5.67k | p.AddP70numberA("d|X", default_value.x); |
2809 | 5.67k | p.AddP70numberA("d|Y", default_value.y); |
2810 | 5.67k | p.AddP70numberA("d|Z", default_value.z); |
2811 | 5.67k | n.AddChild(p); |
2812 | 5.67k | n.Dump(outstream, binary, 1); |
2813 | | // connect to layer |
2814 | 5.67k | this->connections.emplace_back("C", "OO", uid, layer_uid); |
2815 | | // connect to bone |
2816 | 5.67k | this->connections.emplace_back("C", "OP", uid, node_uid, property_name); |
2817 | 5.67k | } |
2818 | | |
2819 | | void FBXExporter::WriteAnimationCurve( |
2820 | | StreamWriterLE& outstream, |
2821 | | double default_value, |
2822 | | const std::vector<int64_t>& times, |
2823 | | const std::vector<float>& values, |
2824 | | int64_t curvenode_uid, |
2825 | | const std::string& property_link // "d|X", "d|Y", etc |
2826 | 17.0k | ) { |
2827 | 17.0k | FBX::Node n("AnimationCurve"); |
2828 | 17.0k | int64_t curve_uid = generate_uid(); |
2829 | 17.0k | n.AddProperties(curve_uid, FBX::SEPARATOR + "AnimCurve", ""); |
2830 | 17.0k | n.AddChild("Default", default_value); |
2831 | 17.0k | n.AddChild("KeyVer", int32_t(4009)); |
2832 | 17.0k | n.AddChild("KeyTime", times); |
2833 | 17.0k | n.AddChild("KeyValueFloat", values); |
2834 | | // TODO: keyattr flags and data (STUB for now) |
2835 | 17.0k | n.AddChild("KeyAttrFlags", std::vector<int32_t>{0}); |
2836 | 17.0k | n.AddChild("KeyAttrDataFloat", std::vector<float>{0,0,0,0}); |
2837 | 17.0k | n.AddChild( |
2838 | 17.0k | "KeyAttrRefCount", |
2839 | 17.0k | std::vector<int32_t>{static_cast<int32_t>(times.size())} |
2840 | 17.0k | ); |
2841 | 17.0k | n.Dump(outstream, binary, 1); |
2842 | 17.0k | this->connections.emplace_back( |
2843 | 17.0k | "C", "OP", curve_uid, curvenode_uid, property_link |
2844 | 17.0k | ); |
2845 | 17.0k | } |
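Each animated property gets one AnimationCurveNode plus one AnimationCurve per axis, tied together by the connections emitted above; a minimal sketch of the pairing as used by the animation-writing code earlier in this file (layer_uid, bone_uid and the key vectors are assumed to be in scope, and the default values are simplified):

    int64_t r_curvenode = generate_uid();
    WriteAnimationCurveNode(outstream, r_curvenode, "R", aiVector3D(0, 0, 0),
                            "Lcl Rotation", layer_uid, bone_uid);
    WriteAnimationCurve(outstream, 0.0, times, xval, r_curvenode, "d|X");
    WriteAnimationCurve(outstream, 0.0, times, yval, r_curvenode, "d|Y");
    WriteAnimationCurve(outstream, 0.0, times, zval, r_curvenode, "d|Z");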
2846 | | |
2847 | | |
2848 | | void FBXExporter::WriteConnections () |
2849 | 56 | { |
2850 | | // we should have completed the connection graph already, |
2851 | | // so basically just dump it here |
2852 | 56 | if (!binary) { |
2853 | 0 | WriteAsciiSectionHeader("Object connections"); |
2854 | 0 | } |
2855 | | // TODO: comments with names in the ascii version |
2856 | 56 | FBX::Node conn("Connections"); |
2857 | 56 | StreamWriterLE outstream(outfile); |
2858 | 56 | conn.Begin(outstream, binary, 0); |
2859 | 56 | conn.BeginChildren(outstream, binary, 0); |
2860 | 59.6k | for (auto &n : connections) { |
2861 | 59.6k | n.Dump(outstream, binary, 1); |
2862 | 59.6k | } |
2863 | 56 | conn.End(outstream, binary, 0, !connections.empty()); |
2864 | 56 | connections.clear(); |
2865 | 56 | } |
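The records dumped here come in the two forms used throughout this file; a short sketch of what each encodes (uids illustrative):

    // object -> object: child_uid becomes a child of parent_uid
    connections.emplace_back("C", "OO", child_uid, parent_uid);
    // object -> property: child_uid drives the named property of parent_uid,
    // e.g. an AnimationCurve feeding a curve node's "d|X" channel
    connections.emplace_back("C", "OP", curve_uid, curvenode_uid, "d|X");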
2866 | | |
2867 | | #endif // ASSIMP_BUILD_NO_FBX_EXPORTER |
2868 | | #endif // ASSIMP_BUILD_NO_EXPORT |
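For reference, a minimal driver that exercises this exporter through assimp's public API; the input path is a placeholder, and this assumes the registered FBX export format ids "fbx" (binary) and "fbxa" (ASCII):

    #include <assimp/Importer.hpp>
    #include <assimp/Exporter.hpp>
    #include <assimp/postprocess.h>
    #include <assimp/scene.h>

    int main() {
        Assimp::Importer importer;
        const aiScene *scene = importer.ReadFile("model.glb",
                aiProcess_Triangulate | aiProcess_GenSmoothNormals);
        if (scene == nullptr) {
            return 1;
        }
        Assimp::Exporter exporter;
        // "fbx" writes the binary form handled above; "fbxa" selects the ASCII writer
        return exporter.Export(scene, "fbx", "model.fbx") == AI_SUCCESS ? 0 : 1;
    }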