GitHub Repository: bevyengine/bevy
Path: blob/main/crates/bevy_gltf/src/loader/mod.rs
1
mod extensions;
2
mod gltf_ext;
3
4
use alloc::sync::Arc;
5
use std::{
6
io::Error,
7
path::{Path, PathBuf},
8
sync::Mutex,
9
};
10
11
#[cfg(feature = "bevy_animation")]
12
use bevy_animation::{prelude::*, AnimationTarget, AnimationTargetId};
13
use bevy_asset::{
14
io::Reader, AssetLoadError, AssetLoader, Handle, LoadContext, ReadAssetBytesError,
15
RenderAssetUsages,
16
};
17
use bevy_camera::{
18
primitives::Aabb, visibility::Visibility, Camera, Camera3d, OrthographicProjection,
19
PerspectiveProjection, Projection, ScalingMode,
20
};
21
use bevy_color::{Color, LinearRgba};
22
use bevy_ecs::{
23
entity::{Entity, EntityHashMap},
24
hierarchy::ChildSpawner,
25
name::Name,
26
world::World,
27
};
28
use bevy_image::{
29
CompressedImageFormats, Image, ImageLoaderSettings, ImageSampler, ImageSamplerDescriptor,
30
ImageType, TextureError,
31
};
32
use bevy_light::{DirectionalLight, PointLight, SpotLight};
33
use bevy_math::{Mat4, Vec3};
34
use bevy_mesh::{
35
morph::{MeshMorphWeights, MorphAttributes, MorphTargetImage, MorphWeights},
36
skinning::{SkinnedMesh, SkinnedMeshInverseBindposes},
37
Indices, Mesh, Mesh3d, MeshVertexAttribute, PrimitiveTopology,
38
};
39
#[cfg(feature = "pbr_transmission_textures")]
40
use bevy_pbr::UvChannel;
41
use bevy_pbr::{MeshMaterial3d, StandardMaterial, MAX_JOINTS};
42
use bevy_platform::collections::{HashMap, HashSet};
43
use bevy_render::render_resource::Face;
44
use bevy_scene::Scene;
45
#[cfg(not(target_arch = "wasm32"))]
46
use bevy_tasks::IoTaskPool;
47
use bevy_transform::components::Transform;
48
49
use gltf::{
50
accessor::Iter,
51
image::Source,
52
mesh::{util::ReadIndices, Mode},
53
Document, Material, Node, Semantic,
54
};
55
56
use serde::{Deserialize, Serialize};
57
#[cfg(feature = "bevy_animation")]
58
use smallvec::SmallVec;
59
60
use thiserror::Error;
61
use tracing::{error, info_span, warn};
62
63
use crate::{
64
vertex_attributes::convert_attribute, Gltf, GltfAssetLabel, GltfExtras, GltfMaterialExtras,
65
GltfMaterialName, GltfMeshExtras, GltfMeshName, GltfNode, GltfSceneExtras, GltfSkin,
66
};
67
68
#[cfg(feature = "bevy_animation")]
69
use self::gltf_ext::scene::collect_path;
70
use self::{
71
extensions::{AnisotropyExtension, ClearcoatExtension, SpecularExtension},
72
gltf_ext::{
73
check_for_cycles, get_linear_textures,
74
material::{
75
alpha_mode, material_label, needs_tangents, uv_channel,
76
warn_on_differing_texture_transforms,
77
},
78
mesh::{primitive_name, primitive_topology},
79
scene::{node_name, node_transform},
80
texture::{texture_handle, texture_sampler, texture_transform_to_affine2},
81
},
82
};
83
use crate::convert_coordinates::ConvertCoordinates as _;
84
85
/// An error that occurs when loading a glTF file.
86
#[derive(Error, Debug)]
87
pub enum GltfError {
88
/// Unsupported primitive mode.
89
#[error("unsupported primitive mode")]
90
UnsupportedPrimitive {
91
/// The primitive mode.
92
mode: Mode,
93
},
94
/// Invalid glTF file.
95
#[error("invalid glTF file: {0}")]
96
Gltf(#[from] gltf::Error),
97
/// Binary blob is missing.
98
#[error("binary blob is missing")]
99
MissingBlob,
100
/// Decoding the base64 mesh data failed.
101
#[error("failed to decode base64 mesh data")]
102
Base64Decode(#[from] base64::DecodeError),
103
/// Unsupported buffer format.
104
#[error("unsupported buffer format")]
105
BufferFormatUnsupported,
106
/// Invalid image mime type.
107
#[error("invalid image mime type: {0}")]
108
#[from(ignore)]
109
InvalidImageMimeType(String),
110
/// Error when loading a texture. Might be due to a disabled image file format feature.
111
#[error("You may need to add the feature for the file format: {0}")]
112
ImageError(#[from] TextureError),
113
/// Failed to read bytes from an asset path.
114
#[error("failed to read bytes from an asset path: {0}")]
115
ReadAssetBytesError(#[from] ReadAssetBytesError),
116
/// Failed to load asset from an asset path.
117
#[error("failed to load asset from an asset path: {0}")]
118
AssetLoadError(#[from] AssetLoadError),
119
/// Missing sampler for an animation.
120
#[error("Missing sampler for animation {0}")]
121
#[from(ignore)]
122
MissingAnimationSampler(usize),
123
/// Failed to generate tangents.
124
#[error("failed to generate tangents: {0}")]
125
GenerateTangentsError(#[from] bevy_mesh::GenerateTangentsError),
126
/// Failed to generate morph targets.
127
#[error("failed to generate morph targets: {0}")]
128
MorphTarget(#[from] bevy_mesh::morph::MorphBuildError),
129
/// Circular children in Nodes
130
#[error("GLTF model must be a tree, found cycle instead at node indices: {0:?}")]
131
#[from(ignore)]
132
CircularChildren(String),
133
/// Failed to load a file.
134
#[error("failed to load file: {0}")]
135
Io(#[from] Error),
136
}
137
138
/// Loads glTF files with all of their data as their corresponding bevy representations.
139
pub struct GltfLoader {
140
/// List of compressed image formats handled by the loader.
141
pub supported_compressed_formats: CompressedImageFormats,
142
/// Custom vertex attributes that will be recognized when loading a glTF file.
143
///
144
/// Keys must be the attribute names as found in the glTF data, which must start with an underscore.
145
/// See [this section of the glTF specification](https://registry.khronos.org/glTF/specs/2.0/glTF-2.0.html#meshes-overview)
146
/// for additional details on custom attributes.
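///
/// A minimal sketch of building such a map (the attribute name, numeric id, and the
/// `VertexFormat` import path below are illustrative assumptions, not part of this crate):
/// ```ignore
/// use bevy_mesh::{MeshVertexAttribute, VertexFormat};
/// use bevy_platform::collections::HashMap;
///
/// let mut custom_vertex_attributes: HashMap<Box<str>, MeshVertexAttribute> = HashMap::default();
/// // glTF custom attribute names must start with an underscore.
/// custom_vertex_attributes.insert(
///     "_BARYCENTRIC".into(),
///     MeshVertexAttribute::new("Barycentric", 2137464976, VertexFormat::Float32x3),
/// );
/// ```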
147
pub custom_vertex_attributes: HashMap<Box<str>, MeshVertexAttribute>,
148
/// Arc to default [`ImageSamplerDescriptor`].
149
pub default_sampler: Arc<Mutex<ImageSamplerDescriptor>>,
150
/// How to convert glTF coordinates on import. Assuming glTF cameras, glTF lights, and glTF meshes had global identity transforms,
151
/// their Bevy [`Transform::forward`](bevy_transform::components::Transform::forward) will be pointing in the following global directions:
152
/// - When set to `false`
153
/// - glTF cameras and glTF lights: global -Z,
154
/// - glTF models: global +Z.
155
/// - When set to `true`
156
/// - glTF cameras and glTF lights: global +Z,
157
/// - glTF models: global -Z.
158
///
159
/// The default is `false`.
160
pub default_use_model_forward_direction: bool,
161
}
162
163
/// Specifies optional settings for processing gltfs at load time. By default, all recognized contents of
164
/// the gltf will be loaded.
165
///
166
/// # Example
167
///
168
/// To load a gltf but exclude the cameras, replace a call to `asset_server.load("my.gltf")` with
169
/// ```no_run
170
/// # use bevy_asset::{AssetServer, Handle};
171
/// # use bevy_gltf::*;
172
/// # let asset_server: AssetServer = panic!();
173
/// let gltf_handle: Handle<Gltf> = asset_server.load_with_settings(
174
/// "my.gltf",
175
/// |s: &mut GltfLoaderSettings| {
176
/// s.load_cameras = false;
177
/// }
178
/// );
179
/// ```
180
#[derive(Serialize, Deserialize)]
181
pub struct GltfLoaderSettings {
182
/// If empty, the gltf mesh nodes will be skipped.
183
///
184
/// Otherwise, nodes will be loaded and retained in RAM/VRAM according to the active flags.
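///
/// For example, to keep mesh data only for rendering (dropping the main-world copy), one
/// possible configuration is:
/// ```no_run
/// # use bevy_asset::{AssetServer, Handle, RenderAssetUsages};
/// # use bevy_gltf::*;
/// # let asset_server: AssetServer = panic!();
/// let gltf_handle: Handle<Gltf> = asset_server.load_with_settings(
///     "my.gltf",
///     |s: &mut GltfLoaderSettings| {
///         s.load_meshes = RenderAssetUsages::RENDER_WORLD;
///     }
/// );
/// ```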
185
pub load_meshes: RenderAssetUsages,
186
/// If empty, the gltf materials will be skipped.
187
///
188
/// Otherwise, materials will be loaded and retained in RAM/VRAM according to the active flags.
189
pub load_materials: RenderAssetUsages,
190
/// If true, the loader will spawn cameras for gltf camera nodes.
191
pub load_cameras: bool,
192
/// If true, the loader will spawn lights for gltf light nodes.
193
pub load_lights: bool,
194
/// If true, the raw glTF document will be retained in the loaded [`Gltf`] asset's `source` field.
195
pub include_source: bool,
196
/// Overrides the default sampler. Data from the glTF sampler node is applied on top of it.
197
///
198
/// If None, uses the global default which is stored in the [`DefaultGltfImageSampler`](crate::DefaultGltfImageSampler) resource.
199
pub default_sampler: Option<ImageSamplerDescriptor>,
200
/// If true, the loader will ignore sampler data from gltf and use the default sampler.
201
pub override_sampler: bool,
202
/// _CAUTION: This is an experimental feature with [known issues](https://github.com/bevyengine/bevy/issues/20621). Behavior may change in future versions._
203
///
204
/// How to convert glTF coordinates on import. Assuming glTF cameras, glTF lights, and glTF meshes had global identity transforms,
205
/// their Bevy [`Transform::forward`](bevy_transform::components::Transform::forward) will be pointing in the following global directions:
206
/// - When set to `false`
207
/// - glTF cameras and glTF lights: global -Z,
208
/// - glTF models: global +Z.
209
/// - When set to `true`
210
/// - glTF cameras and glTF lights: global +Z,
211
/// - glTF models: global -Z.
212
///
213
/// If `None`, uses the global default set by [`GltfPlugin::use_model_forward_direction`](crate::GltfPlugin::use_model_forward_direction).
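///
/// For example, to opt in to the converted forward direction for a single load (a sketch
/// mirroring the example on [`GltfLoaderSettings`]):
/// ```no_run
/// # use bevy_asset::{AssetServer, Handle};
/// # use bevy_gltf::*;
/// # let asset_server: AssetServer = panic!();
/// let gltf_handle: Handle<Gltf> = asset_server.load_with_settings(
///     "my.gltf",
///     |s: &mut GltfLoaderSettings| {
///         s.use_model_forward_direction = Some(true);
///     }
/// );
/// ```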
214
pub use_model_forward_direction: Option<bool>,
215
}
216
217
impl Default for GltfLoaderSettings {
218
fn default() -> Self {
219
Self {
220
load_meshes: RenderAssetUsages::default(),
221
load_materials: RenderAssetUsages::default(),
222
load_cameras: true,
223
load_lights: true,
224
include_source: false,
225
default_sampler: None,
226
override_sampler: false,
227
use_model_forward_direction: None,
228
}
229
}
230
}
231
232
impl GltfLoader {
233
/// Loads an entire glTF file.
234
pub async fn load_gltf<'a, 'b, 'c>(
235
loader: &GltfLoader,
236
bytes: &'a [u8],
237
load_context: &'b mut LoadContext<'c>,
238
settings: &'b GltfLoaderSettings,
239
) -> Result<Gltf, GltfError> {
240
let gltf = gltf::Gltf::from_slice(bytes)?;
241
242
let file_name = load_context
243
.asset_path()
244
.path()
245
.to_str()
246
.ok_or(GltfError::Gltf(gltf::Error::Io(Error::new(
247
std::io::ErrorKind::InvalidInput,
248
"Gltf file name invalid",
249
))))?
250
.to_string();
251
let buffer_data = load_buffers(&gltf, load_context).await?;
252
253
let linear_textures = get_linear_textures(&gltf.document);
254
255
#[cfg(feature = "bevy_animation")]
256
let paths = {
257
let mut paths = HashMap::<usize, (usize, Vec<Name>)>::default();
258
for scene in gltf.scenes() {
259
for node in scene.nodes() {
260
let root_index = node.index();
261
collect_path(&node, &[], &mut paths, root_index, &mut HashSet::default());
262
}
263
}
264
paths
265
};
266
267
let convert_coordinates = match settings.use_model_forward_direction {
268
Some(convert_coordinates) => convert_coordinates,
269
None => loader.default_use_model_forward_direction,
270
};
271
272
#[cfg(feature = "bevy_animation")]
273
let (animations, named_animations, animation_roots) = {
274
use bevy_animation::{
275
animated_field, animation_curves::*, gltf_curves::*, VariableCurve,
276
};
277
use bevy_math::{
278
curve::{ConstantCurve, Interval, UnevenSampleAutoCurve},
279
Quat, Vec4,
280
};
281
use gltf::animation::util::ReadOutputs;
282
let mut animations = vec![];
283
let mut named_animations = <HashMap<_, _>>::default();
284
let mut animation_roots = <HashSet<_>>::default();
285
for animation in gltf.animations() {
286
let mut animation_clip = AnimationClip::default();
287
for channel in animation.channels() {
288
let node = channel.target().node();
289
let interpolation = channel.sampler().interpolation();
290
let reader = channel.reader(|buffer| Some(&buffer_data[buffer.index()]));
291
let keyframe_timestamps: Vec<f32> = if let Some(inputs) = reader.read_inputs() {
292
match inputs {
293
Iter::Standard(times) => times.collect(),
294
Iter::Sparse(_) => {
295
warn!("Sparse accessor not supported for animation sampler input");
296
continue;
297
}
298
}
299
} else {
300
warn!("Animations without a sampler input are not supported");
301
return Err(GltfError::MissingAnimationSampler(animation.index()));
302
};
303
304
if keyframe_timestamps.is_empty() {
305
warn!("Tried to load animation with no keyframe timestamps");
306
continue;
307
}
308
309
let maybe_curve: Option<VariableCurve> = if let Some(outputs) =
310
reader.read_outputs()
311
{
312
match outputs {
313
ReadOutputs::Translations(tr) => {
314
let translation_property = animated_field!(Transform::translation);
315
let translations: Vec<Vec3> = tr
316
.map(Vec3::from)
317
.map(|verts| {
318
if convert_coordinates {
319
Vec3::convert_coordinates(verts)
320
} else {
321
verts
322
}
323
})
324
.collect();
325
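// A single keyframe has no interval to interpolate over, so it is treated as a constant
// value that holds for all time.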
if keyframe_timestamps.len() == 1 {
326
Some(VariableCurve::new(AnimatableCurve::new(
327
translation_property,
328
ConstantCurve::new(Interval::EVERYWHERE, translations[0]),
329
)))
330
} else {
331
match interpolation {
332
gltf::animation::Interpolation::Linear => {
333
UnevenSampleAutoCurve::new(
334
keyframe_timestamps.into_iter().zip(translations),
335
)
336
.ok()
337
.map(
338
|curve| {
339
VariableCurve::new(AnimatableCurve::new(
340
translation_property,
341
curve,
342
))
343
},
344
)
345
}
346
gltf::animation::Interpolation::Step => {
347
SteppedKeyframeCurve::new(
348
keyframe_timestamps.into_iter().zip(translations),
349
)
350
.ok()
351
.map(
352
|curve| {
353
VariableCurve::new(AnimatableCurve::new(
354
translation_property,
355
curve,
356
))
357
},
358
)
359
}
360
gltf::animation::Interpolation::CubicSpline => {
361
CubicKeyframeCurve::new(
362
keyframe_timestamps,
363
translations,
364
)
365
.ok()
366
.map(
367
|curve| {
368
VariableCurve::new(AnimatableCurve::new(
369
translation_property,
370
curve,
371
))
372
},
373
)
374
}
375
}
376
}
377
}
378
ReadOutputs::Rotations(rots) => {
379
let rotation_property = animated_field!(Transform::rotation);
380
let rotations: Vec<Quat> = rots
381
.into_f32()
382
.map(Quat::from_array)
383
.map(|quat| {
384
if convert_coordinates {
385
Quat::convert_coordinates(quat)
386
} else {
387
quat
388
}
389
})
390
.collect();
391
if keyframe_timestamps.len() == 1 {
392
Some(VariableCurve::new(AnimatableCurve::new(
393
rotation_property,
394
ConstantCurve::new(Interval::EVERYWHERE, rotations[0]),
395
)))
396
} else {
397
match interpolation {
398
gltf::animation::Interpolation::Linear => {
399
UnevenSampleAutoCurve::new(
400
keyframe_timestamps.into_iter().zip(rotations),
401
)
402
.ok()
403
.map(
404
|curve| {
405
VariableCurve::new(AnimatableCurve::new(
406
rotation_property,
407
curve,
408
))
409
},
410
)
411
}
412
gltf::animation::Interpolation::Step => {
413
SteppedKeyframeCurve::new(
414
keyframe_timestamps.into_iter().zip(rotations),
415
)
416
.ok()
417
.map(
418
|curve| {
419
VariableCurve::new(AnimatableCurve::new(
420
rotation_property,
421
curve,
422
))
423
},
424
)
425
}
426
gltf::animation::Interpolation::CubicSpline => {
427
CubicRotationCurve::new(
428
keyframe_timestamps,
429
rotations.into_iter().map(Vec4::from),
430
)
431
.ok()
432
.map(
433
|curve| {
434
VariableCurve::new(AnimatableCurve::new(
435
rotation_property,
436
curve,
437
))
438
},
439
)
440
}
441
}
442
}
443
}
444
ReadOutputs::Scales(scale) => {
445
let scale_property = animated_field!(Transform::scale);
446
let scales: Vec<Vec3> = scale.map(Vec3::from).collect();
447
if keyframe_timestamps.len() == 1 {
448
Some(VariableCurve::new(AnimatableCurve::new(
449
scale_property,
450
ConstantCurve::new(Interval::EVERYWHERE, scales[0]),
451
)))
452
} else {
453
match interpolation {
454
gltf::animation::Interpolation::Linear => {
455
UnevenSampleAutoCurve::new(
456
keyframe_timestamps.into_iter().zip(scales),
457
)
458
.ok()
459
.map(
460
|curve| {
461
VariableCurve::new(AnimatableCurve::new(
462
scale_property,
463
curve,
464
))
465
},
466
)
467
}
468
gltf::animation::Interpolation::Step => {
469
SteppedKeyframeCurve::new(
470
keyframe_timestamps.into_iter().zip(scales),
471
)
472
.ok()
473
.map(
474
|curve| {
475
VariableCurve::new(AnimatableCurve::new(
476
scale_property,
477
curve,
478
))
479
},
480
)
481
}
482
gltf::animation::Interpolation::CubicSpline => {
483
CubicKeyframeCurve::new(keyframe_timestamps, scales)
484
.ok()
485
.map(|curve| {
486
VariableCurve::new(AnimatableCurve::new(
487
scale_property,
488
curve,
489
))
490
})
491
}
492
}
493
}
494
}
495
ReadOutputs::MorphTargetWeights(weights) => {
496
let weights: Vec<f32> = weights.into_f32().collect();
497
if keyframe_timestamps.len() == 1 {
498
#[expect(
499
clippy::unnecessary_map_on_constructor,
500
reason = "While the mapping is unnecessary, it is much more readable at this level of indentation. Additionally, mapping makes it more consistent with the other branches."
501
)]
502
Some(ConstantCurve::new(Interval::EVERYWHERE, weights))
503
.map(WeightsCurve)
504
.map(VariableCurve::new)
505
} else {
506
match interpolation {
507
gltf::animation::Interpolation::Linear => {
508
WideLinearKeyframeCurve::new(
509
keyframe_timestamps,
510
weights,
511
)
512
.ok()
513
.map(WeightsCurve)
514
.map(VariableCurve::new)
515
}
516
gltf::animation::Interpolation::Step => {
517
WideSteppedKeyframeCurve::new(
518
keyframe_timestamps,
519
weights,
520
)
521
.ok()
522
.map(WeightsCurve)
523
.map(VariableCurve::new)
524
}
525
gltf::animation::Interpolation::CubicSpline => {
526
WideCubicKeyframeCurve::new(
527
keyframe_timestamps,
528
weights,
529
)
530
.ok()
531
.map(WeightsCurve)
532
.map(VariableCurve::new)
533
}
534
}
535
}
536
}
537
}
538
} else {
539
warn!("Animations without a sampler output are not supported");
540
return Err(GltfError::MissingAnimationSampler(animation.index()));
541
};
542
543
let Some(curve) = maybe_curve else {
544
warn!(
545
"Invalid keyframe data for node {}; curve could not be constructed",
546
node.index()
547
);
548
continue;
549
};
550
551
if let Some((root_index, path)) = paths.get(&node.index()) {
552
animation_roots.insert(*root_index);
553
animation_clip.add_variable_curve_to_target(
554
AnimationTargetId::from_names(path.iter()),
555
curve,
556
);
557
} else {
558
warn!(
559
"Animation ignored for node {}: part of its hierarchy is missing a name",
560
node.index()
561
);
562
}
563
}
564
let handle = load_context.add_labeled_asset(
565
GltfAssetLabel::Animation(animation.index()).to_string(),
566
animation_clip,
567
);
568
if let Some(name) = animation.name() {
569
named_animations.insert(name.into(), handle.clone());
570
}
571
animations.push(handle);
572
}
573
(animations, named_animations, animation_roots)
574
};
575
576
let default_sampler = match settings.default_sampler.as_ref() {
577
Some(sampler) => sampler,
578
None => &loader.default_sampler.lock().unwrap().clone(),
579
};
580
// We collect handles to ensure loaded images from paths are not unloaded before they are used elsewhere
581
// in the loader. This prevents "reloads", but it also prevents dropping the is_srgb context on reload.
582
//
583
// In theory we could store a mapping between texture.index() and handle to use
584
// later in the loader when looking up handles for materials. However this would mean
585
// that the material's load context would no longer track those images as dependencies.
586
let mut _texture_handles = Vec::new();
587
if gltf.textures().len() == 1 || cfg!(target_arch = "wasm32") {
588
for texture in gltf.textures() {
589
let parent_path = load_context.path().parent().unwrap();
590
let image = load_image(
591
texture,
592
&buffer_data,
593
&linear_textures,
594
parent_path,
595
loader.supported_compressed_formats,
596
default_sampler,
597
settings,
598
)
599
.await?;
600
image.process_loaded_texture(load_context, &mut _texture_handles);
601
}
602
} else {
603
#[cfg(not(target_arch = "wasm32"))]
604
IoTaskPool::get()
605
.scope(|scope| {
606
gltf.textures().for_each(|gltf_texture| {
607
let parent_path = load_context.path().parent().unwrap();
608
let linear_textures = &linear_textures;
609
let buffer_data = &buffer_data;
610
scope.spawn(async move {
611
load_image(
612
gltf_texture,
613
buffer_data,
614
linear_textures,
615
parent_path,
616
loader.supported_compressed_formats,
617
default_sampler,
618
settings,
619
)
620
.await
621
});
622
});
623
})
624
.into_iter()
625
.for_each(|result| match result {
626
Ok(image) => {
627
image.process_loaded_texture(load_context, &mut _texture_handles);
628
}
629
Err(err) => {
630
warn!("Error loading glTF texture: {}", err);
631
}
632
});
633
}
634
635
let mut materials = vec![];
636
let mut named_materials = <HashMap<_, _>>::default();
637
// Only include materials in the output if they're set to be retained in the MAIN_WORLD and/or RENDER_WORLD by the load_materials flag
638
if !settings.load_materials.is_empty() {
639
// NOTE: materials must be loaded after textures because image load() calls will happen before load_with_settings, preventing is_srgb from being set properly
640
for material in gltf.materials() {
641
let handle = load_material(&material, load_context, &gltf.document, false);
642
if let Some(name) = material.name() {
643
named_materials.insert(name.into(), handle.clone());
644
}
645
materials.push(handle);
646
}
647
}
648
let mut meshes = vec![];
649
let mut named_meshes = <HashMap<_, _>>::default();
650
let mut meshes_on_skinned_nodes = <HashSet<_>>::default();
651
let mut meshes_on_non_skinned_nodes = <HashSet<_>>::default();
652
for gltf_node in gltf.nodes() {
653
if gltf_node.skin().is_some() {
654
if let Some(mesh) = gltf_node.mesh() {
655
meshes_on_skinned_nodes.insert(mesh.index());
656
}
657
} else if let Some(mesh) = gltf_node.mesh() {
658
meshes_on_non_skinned_nodes.insert(mesh.index());
659
}
660
}
661
for gltf_mesh in gltf.meshes() {
662
let mut primitives = vec![];
663
for primitive in gltf_mesh.primitives() {
664
let primitive_label = GltfAssetLabel::Primitive {
665
mesh: gltf_mesh.index(),
666
primitive: primitive.index(),
667
};
668
let primitive_topology = primitive_topology(primitive.mode())?;
669
670
let mut mesh = Mesh::new(primitive_topology, settings.load_meshes);
671
672
// Read vertex attributes
673
for (semantic, accessor) in primitive.attributes() {
674
if [Semantic::Joints(0), Semantic::Weights(0)].contains(&semantic) {
675
if !meshes_on_skinned_nodes.contains(&gltf_mesh.index()) {
676
warn!(
677
"Ignoring attribute {:?} for skinned mesh {} used on non skinned nodes (NODE_SKINNED_MESH_WITHOUT_SKIN)",
678
semantic,
679
primitive_label
680
);
681
continue;
682
} else if meshes_on_non_skinned_nodes.contains(&gltf_mesh.index()) {
683
error!("Skinned mesh {} used on both skinned and non skin nodes, this is likely to cause an error (NODE_SKINNED_MESH_WITHOUT_SKIN)", primitive_label);
684
}
685
}
686
match convert_attribute(
687
semantic,
688
accessor,
689
&buffer_data,
690
&loader.custom_vertex_attributes,
691
convert_coordinates,
692
) {
693
Ok((attribute, values)) => mesh.insert_attribute(attribute, values),
694
Err(err) => warn!("{}", err),
695
}
696
}
697
698
// Read vertex indices
699
let reader =
700
primitive.reader(|buffer| Some(buffer_data[buffer.index()].as_slice()));
701
if let Some(indices) = reader.read_indices() {
702
mesh.insert_indices(match indices {
703
ReadIndices::U8(is) => Indices::U16(is.map(|x| x as u16).collect()),
704
ReadIndices::U16(is) => Indices::U16(is.collect()),
705
ReadIndices::U32(is) => Indices::U32(is.collect()),
706
});
707
};
708
709
{
710
let morph_target_reader = reader.read_morph_targets();
711
if morph_target_reader.len() != 0 {
712
let morph_targets_label = GltfAssetLabel::MorphTarget {
713
mesh: gltf_mesh.index(),
714
primitive: primitive.index(),
715
};
716
let morph_target_image = MorphTargetImage::new(
717
morph_target_reader.map(|i| PrimitiveMorphAttributesIter {
718
convert_coordinates,
719
positions: i.0,
720
normals: i.1,
721
tangents: i.2,
722
}),
723
mesh.count_vertices(),
724
RenderAssetUsages::default(),
725
)?;
726
let handle = load_context.add_labeled_asset(
727
morph_targets_label.to_string(),
728
morph_target_image.0,
729
);
730
731
mesh.set_morph_targets(handle);
732
let extras = gltf_mesh.extras().as_ref();
733
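// Morph target names are conventionally stored in the mesh extras (e.g. Blender exports
// `{"targetNames": ["Smile", "Frown"]}`); the exact key is defined by `MorphTargetNames`
// and the JSON shown here is only an illustrative assumption.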
if let Some(names) = extras.and_then(|extras| {
734
serde_json::from_str::<MorphTargetNames>(extras.get()).ok()
735
}) {
736
mesh.set_morph_target_names(names.target_names);
737
}
738
}
739
}
740
741
if mesh.attribute(Mesh::ATTRIBUTE_NORMAL).is_none()
742
&& matches!(mesh.primitive_topology(), PrimitiveTopology::TriangleList)
743
{
744
tracing::debug!(
745
"Automatically calculating missing vertex normals for geometry."
746
);
747
let vertex_count_before = mesh.count_vertices();
748
mesh.duplicate_vertices();
749
mesh.compute_flat_normals();
750
let vertex_count_after = mesh.count_vertices();
751
if vertex_count_before != vertex_count_after {
752
tracing::debug!("Missing vertex normals in indexed geometry, computing them as flat. Vertex count increased from {} to {}", vertex_count_before, vertex_count_after);
753
} else {
754
tracing::debug!(
755
"Missing vertex normals in indexed geometry, computing them as flat."
756
);
757
}
758
}
759
760
if !mesh.contains_attribute(Mesh::ATTRIBUTE_TANGENT)
761
&& mesh.contains_attribute(Mesh::ATTRIBUTE_NORMAL)
762
&& needs_tangents(&primitive.material())
763
{
764
tracing::debug!(
765
"Missing vertex tangents for {}, computing them using the mikktspace algorithm. Consider using a tool such as Blender to pre-compute the tangents.", file_name
766
);
767
768
let generate_tangents_span = info_span!("generate_tangents", name = file_name);
769
770
generate_tangents_span.in_scope(|| {
771
if let Err(err) = mesh.generate_tangents() {
772
warn!(
773
"Failed to generate vertex tangents using the mikktspace algorithm: {}",
774
err
775
);
776
}
777
});
778
}
779
780
let mesh_handle = load_context.add_labeled_asset(primitive_label.to_string(), mesh);
781
primitives.push(super::GltfPrimitive::new(
782
&gltf_mesh,
783
&primitive,
784
mesh_handle,
785
primitive
786
.material()
787
.index()
788
.and_then(|i| materials.get(i).cloned()),
789
primitive.extras().as_deref().map(GltfExtras::from),
790
primitive
791
.material()
792
.extras()
793
.as_deref()
794
.map(GltfExtras::from),
795
));
796
}
797
798
let mesh = super::GltfMesh::new(
799
&gltf_mesh,
800
primitives,
801
gltf_mesh.extras().as_deref().map(GltfExtras::from),
802
);
803
804
let handle = load_context.add_labeled_asset(mesh.asset_label().to_string(), mesh);
805
if let Some(name) = gltf_mesh.name() {
806
named_meshes.insert(name.into(), handle.clone());
807
}
808
meshes.push(handle);
809
}
810
811
let skinned_mesh_inverse_bindposes: Vec<_> = gltf
812
.skins()
813
.map(|gltf_skin| {
814
let reader = gltf_skin.reader(|buffer| Some(&buffer_data[buffer.index()]));
815
let local_to_bone_bind_matrices: Vec<Mat4> = reader
816
.read_inverse_bind_matrices()
817
.map(|mats| {
818
mats.map(|mat| Mat4::from_cols_array_2d(&mat))
819
.map(|mat| {
820
if convert_coordinates {
821
mat.convert_coordinates()
822
} else {
823
mat
824
}
825
})
826
.collect()
827
})
828
.unwrap_or_else(|| {
829
core::iter::repeat_n(Mat4::IDENTITY, gltf_skin.joints().len()).collect()
830
});
831
832
load_context.add_labeled_asset(
833
GltfAssetLabel::InverseBindMatrices(gltf_skin.index()).to_string(),
834
SkinnedMeshInverseBindposes::from(local_to_bone_bind_matrices),
835
)
836
})
837
.collect();
838
839
let mut nodes = HashMap::<usize, Handle<GltfNode>>::default();
840
let mut named_nodes = <HashMap<_, _>>::default();
841
let mut skins = <HashMap<_, _>>::default();
842
let mut named_skins = <HashMap<_, _>>::default();
843
844
// First, create the node handles.
845
for node in gltf.nodes() {
846
let label = GltfAssetLabel::Node(node.index());
847
let label_handle = load_context.get_label_handle(label.to_string());
848
nodes.insert(node.index(), label_handle);
849
}
850
851
// Then check for cycles.
852
check_for_cycles(&gltf)?;
853
854
// Now populate the nodes.
855
for node in gltf.nodes() {
856
let skin = node.skin().map(|skin| {
857
skins
858
.entry(skin.index())
859
.or_insert_with(|| {
860
let joints: Vec<_> = skin
861
.joints()
862
.map(|joint| nodes.get(&joint.index()).unwrap().clone())
863
.collect();
864
865
if joints.len() > MAX_JOINTS {
866
warn!(
867
"The glTF skin {} has {} joints, but the maximum supported is {}",
868
skin.name()
869
.map(ToString::to_string)
870
.unwrap_or_else(|| skin.index().to_string()),
871
joints.len(),
872
MAX_JOINTS
873
);
874
}
875
876
let gltf_skin = GltfSkin::new(
877
&skin,
878
joints,
879
skinned_mesh_inverse_bindposes[skin.index()].clone(),
880
skin.extras().as_deref().map(GltfExtras::from),
881
);
882
883
let handle = load_context
884
.add_labeled_asset(gltf_skin.asset_label().to_string(), gltf_skin);
885
886
if let Some(name) = skin.name() {
887
named_skins.insert(name.into(), handle.clone());
888
}
889
890
handle
891
})
892
.clone()
893
});
894
895
let children = node
896
.children()
897
.map(|child| nodes.get(&child.index()).unwrap().clone())
898
.collect();
899
900
let mesh = node
901
.mesh()
902
.map(|mesh| mesh.index())
903
.and_then(|i| meshes.get(i).cloned());
904
905
let gltf_node = GltfNode::new(
906
&node,
907
children,
908
mesh,
909
node_transform(&node, convert_coordinates),
910
skin,
911
node.extras().as_deref().map(GltfExtras::from),
912
);
913
914
#[cfg(feature = "bevy_animation")]
915
let gltf_node = gltf_node.with_animation_root(animation_roots.contains(&node.index()));
916
917
let handle =
918
load_context.add_labeled_asset(gltf_node.asset_label().to_string(), gltf_node);
919
nodes.insert(node.index(), handle.clone());
920
if let Some(name) = node.name() {
921
named_nodes.insert(name.into(), handle);
922
}
923
}
924
925
let mut nodes_to_sort = nodes.into_iter().collect::<Vec<_>>();
926
nodes_to_sort.sort_by_key(|(i, _)| *i);
927
let nodes = nodes_to_sort
928
.into_iter()
929
.map(|(_, resolved)| resolved)
930
.collect();
931
932
let mut scenes = vec![];
933
let mut named_scenes = <HashMap<_, _>>::default();
934
let mut active_camera_found = false;
935
for scene in gltf.scenes() {
936
let mut err = None;
937
let mut world = World::default();
938
let mut node_index_to_entity_map = <HashMap<_, _>>::default();
939
let mut entity_to_skin_index_map = EntityHashMap::default();
940
let mut scene_load_context = load_context.begin_labeled_asset();
941
942
let world_root_id = world
943
.spawn((Transform::default(), Visibility::default()))
944
.with_children(|parent| {
945
for node in scene.nodes() {
946
let result = load_node(
947
&node,
948
parent,
949
load_context,
950
&mut scene_load_context,
951
settings,
952
&mut node_index_to_entity_map,
953
&mut entity_to_skin_index_map,
954
&mut active_camera_found,
955
&Transform::default(),
956
#[cfg(feature = "bevy_animation")]
957
&animation_roots,
958
#[cfg(feature = "bevy_animation")]
959
None,
960
&gltf.document,
961
convert_coordinates,
962
);
963
if result.is_err() {
964
err = Some(result);
965
return;
966
}
967
}
968
})
969
.id();
970
971
if let Some(extras) = scene.extras().as_ref() {
972
world.entity_mut(world_root_id).insert(GltfSceneExtras {
973
value: extras.get().to_string(),
974
});
975
}
976
977
if let Some(Err(err)) = err {
978
return Err(err);
979
}
980
981
#[cfg(feature = "bevy_animation")]
982
{
983
// for each node root in a scene, check if it's the root of an animation
984
// if it is, add the AnimationPlayer component
985
for node in scene.nodes() {
986
if animation_roots.contains(&node.index()) {
987
world
988
.entity_mut(*node_index_to_entity_map.get(&node.index()).unwrap())
989
.insert(AnimationPlayer::default());
990
}
991
}
992
}
993
994
for (&entity, &skin_index) in &entity_to_skin_index_map {
995
let mut entity = world.entity_mut(entity);
996
let skin = gltf.skins().nth(skin_index).unwrap();
997
let joint_entities: Vec<_> = skin
998
.joints()
999
.map(|node| node_index_to_entity_map[&node.index()])
1000
.collect();
1001
1002
entity.insert(SkinnedMesh {
1003
inverse_bindposes: skinned_mesh_inverse_bindposes[skin_index].clone(),
1004
joints: joint_entities,
1005
});
1006
}
1007
let loaded_scene = scene_load_context.finish(Scene::new(world));
1008
let scene_handle = load_context.add_loaded_labeled_asset(
1009
GltfAssetLabel::Scene(scene.index()).to_string(),
1010
loaded_scene,
1011
);
1012
1013
if let Some(name) = scene.name() {
1014
named_scenes.insert(name.into(), scene_handle.clone());
1015
}
1016
scenes.push(scene_handle);
1017
}
1018
1019
Ok(Gltf {
1020
default_scene: gltf
1021
.default_scene()
1022
.and_then(|scene| scenes.get(scene.index()))
1023
.cloned(),
1024
scenes,
1025
named_scenes,
1026
meshes,
1027
named_meshes,
1028
skins: skins.into_values().collect(),
1029
named_skins,
1030
materials,
1031
named_materials,
1032
nodes,
1033
named_nodes,
1034
#[cfg(feature = "bevy_animation")]
1035
animations,
1036
#[cfg(feature = "bevy_animation")]
1037
named_animations,
1038
source: if settings.include_source {
1039
Some(gltf)
1040
} else {
1041
None
1042
},
1043
})
1044
}
1045
}
1046
1047
impl AssetLoader for GltfLoader {
1048
type Asset = Gltf;
1049
type Settings = GltfLoaderSettings;
1050
type Error = GltfError;
1051
async fn load(
1052
&self,
1053
reader: &mut dyn Reader,
1054
settings: &GltfLoaderSettings,
1055
load_context: &mut LoadContext<'_>,
1056
) -> Result<Gltf, Self::Error> {
1057
let mut bytes = Vec::new();
1058
reader.read_to_end(&mut bytes).await?;
1059
1060
Self::load_gltf(self, &bytes, load_context, settings).await
1061
}
1062
1063
fn extensions(&self) -> &[&str] {
1064
&["gltf", "glb"]
1065
}
1066
}
1067
1068
/// Loads a glTF texture as a bevy [`Image`] and returns it together with its label.
1069
async fn load_image<'a, 'b>(
1070
gltf_texture: gltf::Texture<'a>,
1071
buffer_data: &[Vec<u8>],
1072
linear_textures: &HashSet<usize>,
1073
parent_path: &'b Path,
1074
supported_compressed_formats: CompressedImageFormats,
1075
default_sampler: &ImageSamplerDescriptor,
1076
settings: &GltfLoaderSettings,
1077
) -> Result<ImageOrPath, GltfError> {
1078
let is_srgb = !linear_textures.contains(&gltf_texture.index());
1079
let sampler_descriptor = if settings.override_sampler {
1080
default_sampler.clone()
1081
} else {
1082
texture_sampler(&gltf_texture, default_sampler)
1083
};
1084
1085
match gltf_texture.source().source() {
1086
Source::View { view, mime_type } => {
1087
let start = view.offset();
1088
let end = view.offset() + view.length();
1089
let buffer = &buffer_data[view.buffer().index()][start..end];
1090
let image = Image::from_buffer(
1091
buffer,
1092
ImageType::MimeType(mime_type),
1093
supported_compressed_formats,
1094
is_srgb,
1095
ImageSampler::Descriptor(sampler_descriptor),
1096
settings.load_materials,
1097
)?;
1098
Ok(ImageOrPath::Image {
1099
image,
1100
label: GltfAssetLabel::Texture(gltf_texture.index()),
1101
})
1102
}
1103
Source::Uri { uri, mime_type } => {
1104
let uri = percent_encoding::percent_decode_str(uri)
1105
.decode_utf8()
1106
.unwrap();
1107
let uri = uri.as_ref();
1108
if let Ok(data_uri) = DataUri::parse(uri) {
1109
let bytes = data_uri.decode()?;
1110
let image_type = ImageType::MimeType(data_uri.mime_type);
1111
Ok(ImageOrPath::Image {
1112
image: Image::from_buffer(
1113
&bytes,
1114
mime_type.map(ImageType::MimeType).unwrap_or(image_type),
1115
supported_compressed_formats,
1116
is_srgb,
1117
ImageSampler::Descriptor(sampler_descriptor),
1118
settings.load_materials,
1119
)?,
1120
label: GltfAssetLabel::Texture(gltf_texture.index()),
1121
})
1122
} else {
1123
let image_path = parent_path.join(uri);
1124
Ok(ImageOrPath::Path {
1125
path: image_path,
1126
is_srgb,
1127
sampler_descriptor,
1128
})
1129
}
1130
}
1131
}
1132
}
1133
1134
/// Loads a glTF material as a bevy [`StandardMaterial`] and returns it.
1135
fn load_material(
1136
material: &Material,
1137
load_context: &mut LoadContext,
1138
document: &Document,
1139
is_scale_inverted: bool,
1140
) -> Handle<StandardMaterial> {
1141
let material_label = material_label(material, is_scale_inverted);
1142
load_context
1143
.labeled_asset_scope::<_, ()>(material_label.to_string(), |load_context| {
1144
let pbr = material.pbr_metallic_roughness();
1145
1146
// TODO: handle missing label handle errors here?
1147
let color = pbr.base_color_factor();
1148
let base_color_channel = pbr
1149
.base_color_texture()
1150
.map(|info| uv_channel(material, "base color", info.tex_coord()))
1151
.unwrap_or_default();
1152
let base_color_texture = pbr
1153
.base_color_texture()
1154
.map(|info| texture_handle(&info.texture(), load_context));
1155
1156
let uv_transform = pbr
1157
.base_color_texture()
1158
.and_then(|info| info.texture_transform().map(texture_transform_to_affine2))
1159
.unwrap_or_default();
1160
1161
let normal_map_channel = material
1162
.normal_texture()
1163
.map(|info| uv_channel(material, "normal map", info.tex_coord()))
1164
.unwrap_or_default();
1165
let normal_map_texture: Option<Handle<Image>> =
1166
material.normal_texture().map(|normal_texture| {
1167
// TODO: handle normal_texture.scale
1168
texture_handle(&normal_texture.texture(), load_context)
1169
});
1170
1171
let metallic_roughness_channel = pbr
1172
.metallic_roughness_texture()
1173
.map(|info| uv_channel(material, "metallic/roughness", info.tex_coord()))
1174
.unwrap_or_default();
1175
let metallic_roughness_texture = pbr.metallic_roughness_texture().map(|info| {
1176
warn_on_differing_texture_transforms(
1177
material,
1178
&info,
1179
uv_transform,
1180
"metallic/roughness",
1181
);
1182
texture_handle(&info.texture(), load_context)
1183
});
1184
1185
let occlusion_channel = material
1186
.occlusion_texture()
1187
.map(|info| uv_channel(material, "occlusion", info.tex_coord()))
1188
.unwrap_or_default();
1189
let occlusion_texture = material.occlusion_texture().map(|occlusion_texture| {
1190
// TODO: handle occlusion_texture.strength() (a scalar multiplier for occlusion strength)
1191
texture_handle(&occlusion_texture.texture(), load_context)
1192
});
1193
1194
let emissive = material.emissive_factor();
1195
let emissive_channel = material
1196
.emissive_texture()
1197
.map(|info| uv_channel(material, "emissive", info.tex_coord()))
1198
.unwrap_or_default();
1199
let emissive_texture = material.emissive_texture().map(|info| {
1200
1201
warn_on_differing_texture_transforms(material, &info, uv_transform, "emissive");
1202
texture_handle(&info.texture(), load_context)
1203
});
1204
1205
#[cfg(feature = "pbr_transmission_textures")]
1206
let (
1207
specular_transmission,
1208
specular_transmission_channel,
1209
specular_transmission_texture,
1210
) = material
1211
.transmission()
1212
.map_or((0.0, UvChannel::Uv0, None), |transmission| {
1213
let specular_transmission_channel = transmission
1214
.transmission_texture()
1215
.map(|info| uv_channel(material, "specular/transmission", info.tex_coord()))
1216
.unwrap_or_default();
1217
let transmission_texture: Option<Handle<Image>> = transmission
1218
.transmission_texture()
1219
.map(|transmission_texture| {
1220
texture_handle(&transmission_texture.texture(), load_context)
1221
});
1222
1223
(
1224
transmission.transmission_factor(),
1225
specular_transmission_channel,
1226
transmission_texture,
1227
)
1228
});
1229
1230
#[cfg(not(feature = "pbr_transmission_textures"))]
1231
let specular_transmission = material
1232
.transmission()
1233
.map_or(0.0, |transmission| transmission.transmission_factor());
1234
1235
#[cfg(feature = "pbr_transmission_textures")]
1236
let (
1237
thickness,
1238
thickness_channel,
1239
thickness_texture,
1240
attenuation_distance,
1241
attenuation_color,
1242
) = material.volume().map_or(
1243
(0.0, UvChannel::Uv0, None, f32::INFINITY, [1.0, 1.0, 1.0]),
1244
|volume| {
1245
let thickness_channel = volume
1246
.thickness_texture()
1247
.map(|info| uv_channel(material, "thickness", info.tex_coord()))
1248
.unwrap_or_default();
1249
let thickness_texture: Option<Handle<Image>> =
1250
volume.thickness_texture().map(|thickness_texture| {
1251
texture_handle(&thickness_texture.texture(), load_context)
1252
});
1253
1254
(
1255
volume.thickness_factor(),
1256
thickness_channel,
1257
thickness_texture,
1258
volume.attenuation_distance(),
1259
volume.attenuation_color(),
1260
)
1261
},
1262
);
1263
1264
#[cfg(not(feature = "pbr_transmission_textures"))]
1265
let (thickness, attenuation_distance, attenuation_color) =
1266
material
1267
.volume()
1268
.map_or((0.0, f32::INFINITY, [1.0, 1.0, 1.0]), |volume| {
1269
(
1270
volume.thickness_factor(),
1271
volume.attenuation_distance(),
1272
volume.attenuation_color(),
1273
)
1274
});
1275
1276
let ior = material.ior().unwrap_or(1.5);
1277
1278
// Parse the `KHR_materials_clearcoat` extension data if necessary.
1279
let clearcoat =
1280
ClearcoatExtension::parse(load_context, document, material).unwrap_or_default();
1281
1282
// Parse the `KHR_materials_anisotropy` extension data if necessary.
1283
let anisotropy =
1284
AnisotropyExtension::parse(load_context, document, material).unwrap_or_default();
1285
1286
// Parse the `KHR_materials_specular` extension data if necessary.
1287
let specular =
1288
SpecularExtension::parse(load_context, document, material).unwrap_or_default();
1289
1290
// We need to operate in the Linear color space and be willing to exceed 1.0 in our channels
1291
let base_emissive = LinearRgba::rgb(emissive[0], emissive[1], emissive[2]);
1292
let emissive = base_emissive * material.emissive_strength().unwrap_or(1.0);
1293
1294
Ok(StandardMaterial {
1295
base_color: Color::linear_rgba(color[0], color[1], color[2], color[3]),
1296
base_color_channel,
1297
base_color_texture,
1298
perceptual_roughness: pbr.roughness_factor(),
1299
metallic: pbr.metallic_factor(),
1300
metallic_roughness_channel,
1301
metallic_roughness_texture,
1302
normal_map_channel,
1303
normal_map_texture,
1304
double_sided: material.double_sided(),
1305
cull_mode: if material.double_sided() {
1306
None
1307
} else if is_scale_inverted {
1308
Some(Face::Front)
1309
} else {
1310
Some(Face::Back)
1311
},
1312
occlusion_channel,
1313
occlusion_texture,
1314
emissive,
1315
emissive_channel,
1316
emissive_texture,
1317
specular_transmission,
1318
#[cfg(feature = "pbr_transmission_textures")]
1319
specular_transmission_channel,
1320
#[cfg(feature = "pbr_transmission_textures")]
1321
specular_transmission_texture,
1322
thickness,
1323
#[cfg(feature = "pbr_transmission_textures")]
1324
thickness_channel,
1325
#[cfg(feature = "pbr_transmission_textures")]
1326
thickness_texture,
1327
ior,
1328
attenuation_distance,
1329
attenuation_color: Color::linear_rgb(
1330
attenuation_color[0],
1331
attenuation_color[1],
1332
attenuation_color[2],
1333
),
1334
unlit: material.unlit(),
1335
alpha_mode: alpha_mode(material),
1336
uv_transform,
1337
clearcoat: clearcoat.clearcoat_factor.unwrap_or_default() as f32,
1338
clearcoat_perceptual_roughness: clearcoat
1339
.clearcoat_roughness_factor
1340
.unwrap_or_default() as f32,
1341
#[cfg(feature = "pbr_multi_layer_material_textures")]
1342
clearcoat_channel: clearcoat.clearcoat_channel,
1343
#[cfg(feature = "pbr_multi_layer_material_textures")]
1344
clearcoat_texture: clearcoat.clearcoat_texture,
1345
#[cfg(feature = "pbr_multi_layer_material_textures")]
1346
clearcoat_roughness_channel: clearcoat.clearcoat_roughness_channel,
1347
#[cfg(feature = "pbr_multi_layer_material_textures")]
1348
clearcoat_roughness_texture: clearcoat.clearcoat_roughness_texture,
1349
#[cfg(feature = "pbr_multi_layer_material_textures")]
1350
clearcoat_normal_channel: clearcoat.clearcoat_normal_channel,
1351
#[cfg(feature = "pbr_multi_layer_material_textures")]
1352
clearcoat_normal_texture: clearcoat.clearcoat_normal_texture,
1353
anisotropy_strength: anisotropy.anisotropy_strength.unwrap_or_default() as f32,
1354
anisotropy_rotation: anisotropy.anisotropy_rotation.unwrap_or_default() as f32,
1355
#[cfg(feature = "pbr_anisotropy_texture")]
1356
anisotropy_channel: anisotropy.anisotropy_channel,
1357
#[cfg(feature = "pbr_anisotropy_texture")]
1358
anisotropy_texture: anisotropy.anisotropy_texture,
1359
// From the `KHR_materials_specular` spec:
1360
// <https://github.com/KhronosGroup/glTF/tree/main/extensions/2.0/Khronos/KHR_materials_specular#materials-with-reflectance-parameter>
1361
reflectance: specular.specular_factor.unwrap_or(1.0) as f32 * 0.5,
1362
#[cfg(feature = "pbr_specular_textures")]
1363
specular_channel: specular.specular_channel,
1364
#[cfg(feature = "pbr_specular_textures")]
1365
specular_texture: specular.specular_texture,
1366
specular_tint: match specular.specular_color_factor {
1367
Some(color) => {
1368
Color::linear_rgb(color[0] as f32, color[1] as f32, color[2] as f32)
1369
}
1370
None => Color::WHITE,
1371
},
1372
#[cfg(feature = "pbr_specular_textures")]
1373
specular_tint_channel: specular.specular_color_channel,
1374
#[cfg(feature = "pbr_specular_textures")]
1375
specular_tint_texture: specular.specular_color_texture,
1376
..Default::default()
1377
})
1378
})
1379
.unwrap()
1380
}
1381
1382
/// Loads a glTF node.
1383
#[cfg_attr(
1384
not(target_arch = "wasm32"),
1385
expect(
1386
clippy::result_large_err,
1387
reason = "`GltfError` is only barely past the threshold for large errors."
1388
)
1389
)]
1390
fn load_node(
1391
gltf_node: &Node,
1392
child_spawner: &mut ChildSpawner,
1393
root_load_context: &LoadContext,
1394
load_context: &mut LoadContext,
1395
settings: &GltfLoaderSettings,
1396
node_index_to_entity_map: &mut HashMap<usize, Entity>,
1397
entity_to_skin_index_map: &mut EntityHashMap<usize>,
1398
active_camera_found: &mut bool,
1399
parent_transform: &Transform,
1400
#[cfg(feature = "bevy_animation")] animation_roots: &HashSet<usize>,
1401
#[cfg(feature = "bevy_animation")] mut animation_context: Option<AnimationContext>,
1402
document: &Document,
1403
convert_coordinates: bool,
1404
) -> Result<(), GltfError> {
1405
let mut gltf_error = None;
1406
let transform = node_transform(gltf_node, convert_coordinates);
1407
let world_transform = *parent_transform * transform;
1408
// according to https://registry.khronos.org/glTF/specs/2.0/glTF-2.0.html#instantiation,
1409
// if the determinant of the transform is negative we must invert the winding order of
1410
// triangles in meshes on the node.
1411
// instead we equivalently test if the global scale is inverted by checking if the number
1412
// of negative scale factors is odd. if so we will assign a copy of the material with face
1413
// culling inverted, rather than modifying the mesh data directly.
1414
let is_scale_inverted = world_transform.scale.is_negative_bitmask().count_ones() & 1 == 1;
1415
let mut node = child_spawner.spawn((transform, Visibility::default()));
1416
1417
let name = node_name(gltf_node);
1418
node.insert(name.clone());
1419
1420
#[cfg(feature = "bevy_animation")]
1421
if animation_context.is_none() && animation_roots.contains(&gltf_node.index()) {
1422
// This is an animation root. Make a new animation context.
1423
animation_context = Some(AnimationContext {
1424
root: node.id(),
1425
path: SmallVec::new(),
1426
});
1427
}
1428
1429
#[cfg(feature = "bevy_animation")]
1430
if let Some(ref mut animation_context) = animation_context {
1431
animation_context.path.push(name);
1432
1433
node.insert(AnimationTarget {
1434
id: AnimationTargetId::from_names(animation_context.path.iter()),
1435
player: animation_context.root,
1436
});
1437
}
1438
1439
if let Some(extras) = gltf_node.extras() {
1440
node.insert(GltfExtras {
1441
value: extras.get().to_string(),
1442
});
1443
}
1444
1445
// create camera node
1446
if settings.load_cameras
1447
&& let Some(camera) = gltf_node.camera()
1448
{
1449
let projection = match camera.projection() {
1450
gltf::camera::Projection::Orthographic(orthographic) => {
1451
let xmag = orthographic.xmag();
1452
let orthographic_projection = OrthographicProjection {
1453
near: orthographic.znear(),
1454
far: orthographic.zfar(),
1455
scaling_mode: ScalingMode::FixedHorizontal {
1456
viewport_width: xmag,
1457
},
1458
..OrthographicProjection::default_3d()
1459
};
1460
Projection::Orthographic(orthographic_projection)
1461
}
1462
gltf::camera::Projection::Perspective(perspective) => {
1463
let mut perspective_projection: PerspectiveProjection = PerspectiveProjection {
1464
fov: perspective.yfov(),
1465
near: perspective.znear(),
1466
..Default::default()
1467
};
1468
if let Some(zfar) = perspective.zfar() {
1469
perspective_projection.far = zfar;
1470
}
1471
if let Some(aspect_ratio) = perspective.aspect_ratio() {
1472
perspective_projection.aspect_ratio = aspect_ratio;
1473
}
1474
Projection::Perspective(perspective_projection)
1475
}
1476
};
1477
1478
node.insert((
1479
Camera3d::default(),
1480
projection,
1481
transform,
1482
Camera {
1483
is_active: !*active_camera_found,
1484
..Default::default()
1485
},
1486
));
1487
1488
*active_camera_found = true;
1489
}
1490
1491
// Map node index to entity
1492
node_index_to_entity_map.insert(gltf_node.index(), node.id());
1493
1494
let mut morph_weights = None;
1495
1496
node.with_children(|parent| {
1497
// Only include meshes in the output if they're set to be retained in the MAIN_WORLD and/or RENDER_WORLD by the load_meshes flag
1498
if !settings.load_meshes.is_empty()
1499
&& let Some(mesh) = gltf_node.mesh()
1500
{
1501
// append primitives
1502
for primitive in mesh.primitives() {
1503
let material = primitive.material();
1504
let material_label = material_label(&material, is_scale_inverted).to_string();
1505
1506
// This will make sure we load the default material now since it would not have been
1507
// added when iterating over all the gltf materials (since the default material is
1508
// not explicitly listed in the gltf).
1509
// It also ensures an inverted scale copy is instantiated if required.
1510
if !root_load_context.has_labeled_asset(&material_label)
1511
&& !load_context.has_labeled_asset(&material_label)
1512
{
1513
load_material(&material, load_context, document, is_scale_inverted);
1514
}
1515
1516
let primitive_label = GltfAssetLabel::Primitive {
1517
mesh: mesh.index(),
1518
primitive: primitive.index(),
1519
};
1520
let bounds = primitive.bounding_box();
1521
1522
let mut mesh_entity = parent.spawn((
1523
// TODO: handle missing label handle errors here?
1524
Mesh3d(load_context.get_label_handle(primitive_label.to_string())),
1525
MeshMaterial3d::<StandardMaterial>(
1526
load_context.get_label_handle(&material_label),
1527
),
1528
));
1529
1530
let target_count = primitive.morph_targets().len();
1531
if target_count != 0 {
1532
let weights = match mesh.weights() {
1533
Some(weights) => weights.to_vec(),
1534
None => vec![0.0; target_count],
1535
};
1536
1537
if morph_weights.is_none() {
1538
morph_weights = Some(weights.clone());
1539
}
1540
1541
// unwrap: the parent's call to `MeshMorphWeights::new`
1542
// means this code doesn't run if it returns an `Err`.
1543
// According to https://registry.khronos.org/glTF/specs/2.0/glTF-2.0.html#morph-targets
1544
// they should all have the same length.
1545
// > All morph target accessors MUST have the same count as
1546
// > the accessors of the original primitive.
1547
mesh_entity.insert(MeshMorphWeights::new(weights).unwrap());
1548
}
1549
1550
let mut bounds_min = Vec3::from_slice(&bounds.min);
1551
let mut bounds_max = Vec3::from_slice(&bounds.max);
1552
1553
if convert_coordinates {
1554
let converted_min = bounds_min.convert_coordinates();
1555
let converted_max = bounds_max.convert_coordinates();
1556
1557
bounds_min = converted_min.min(converted_max);
1558
bounds_max = converted_min.max(converted_max);
1559
}
1560
1561
mesh_entity.insert(Aabb::from_min_max(bounds_min, bounds_max));
1562
1563
if let Some(extras) = primitive.extras() {
1564
mesh_entity.insert(GltfExtras {
1565
value: extras.get().to_string(),
1566
});
1567
}
1568
1569
if let Some(extras) = mesh.extras() {
1570
mesh_entity.insert(GltfMeshExtras {
1571
value: extras.get().to_string(),
1572
});
1573
}
1574
1575
if let Some(extras) = material.extras() {
1576
mesh_entity.insert(GltfMaterialExtras {
1577
value: extras.get().to_string(),
1578
});
1579
}
1580
1581
if let Some(name) = mesh.name() {
1582
mesh_entity.insert(GltfMeshName(name.to_string()));
1583
}
1584
1585
if let Some(name) = material.name() {
1586
mesh_entity.insert(GltfMaterialName(name.to_string()));
1587
}
1588
1589
mesh_entity.insert(Name::new(primitive_name(&mesh, &material)));
1590
1591
// Mark for adding skinned mesh
1592
if let Some(skin) = gltf_node.skin() {
1593
entity_to_skin_index_map.insert(mesh_entity.id(), skin.index());
1594
}
1595
}
1596
}
1597
1598
if settings.load_lights
1599
&& let Some(light) = gltf_node.light()
1600
{
1601
match light.kind() {
1602
gltf::khr_lights_punctual::Kind::Directional => {
1603
let mut entity = parent.spawn(DirectionalLight {
1604
color: Color::srgb_from_array(light.color()),
1605
// NOTE: KHR_lights_punctual defines the intensity units for directional
1606
// lights in lux (lm/m^2) which is what we need.
1607
illuminance: light.intensity(),
1608
..Default::default()
1609
});
1610
if let Some(name) = light.name() {
1611
entity.insert(Name::new(name.to_string()));
1612
}
1613
if let Some(extras) = light.extras() {
1614
entity.insert(GltfExtras {
1615
value: extras.get().to_string(),
1616
});
1617
}
1618
}
1619
gltf::khr_lights_punctual::Kind::Point => {
1620
let mut entity = parent.spawn(PointLight {
1621
color: Color::srgb_from_array(light.color()),
1622
// NOTE: KHR_lights_punctual defines the intensity units for point lights in
1623
// candela (lm/sr) which is luminous intensity and we need luminous power.
1624
// For a point light, luminous power = 4 * pi * luminous intensity
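// e.g. a glTF point light with intensity 100 cd becomes 100 * 4π ≈ 1256.6 lm here.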
1625
intensity: light.intensity() * core::f32::consts::PI * 4.0,
1626
range: light.range().unwrap_or(20.0),
1627
radius: 0.0,
1628
..Default::default()
1629
});
1630
if let Some(name) = light.name() {
1631
entity.insert(Name::new(name.to_string()));
1632
}
1633
if let Some(extras) = light.extras() {
1634
entity.insert(GltfExtras {
1635
value: extras.get().to_string(),
1636
});
1637
}
1638
}
1639
gltf::khr_lights_punctual::Kind::Spot {
1640
inner_cone_angle,
1641
outer_cone_angle,
1642
} => {
1643
let mut entity = parent.spawn(SpotLight {
1644
color: Color::srgb_from_array(light.color()),
1645
// NOTE: KHR_lights_punctual defines the intensity units for spot lights in
1646
// candela (lm/sr) which is luminous intensity and we need luminous power.
1647
// For a spot light, we map luminous power = 4 * pi * luminous intensity
1648
intensity: light.intensity() * core::f32::consts::PI * 4.0,
1649
range: light.range().unwrap_or(20.0),
1650
radius: light.range().unwrap_or(0.0),
1651
inner_angle: inner_cone_angle,
1652
outer_angle: outer_cone_angle,
1653
..Default::default()
1654
});
1655
if let Some(name) = light.name() {
1656
entity.insert(Name::new(name.to_string()));
1657
}
1658
if let Some(extras) = light.extras() {
1659
entity.insert(GltfExtras {
1660
value: extras.get().to_string(),
1661
});
1662
}
1663
}
1664
}
1665
}
1666
1667
// append other nodes
1668
for child in gltf_node.children() {
1669
if let Err(err) = load_node(
1670
&child,
1671
parent,
1672
root_load_context,
1673
load_context,
1674
settings,
1675
node_index_to_entity_map,
1676
entity_to_skin_index_map,
1677
active_camera_found,
1678
&world_transform,
1679
#[cfg(feature = "bevy_animation")]
1680
animation_roots,
1681
#[cfg(feature = "bevy_animation")]
1682
animation_context.clone(),
1683
document,
1684
convert_coordinates,
1685
) {
1686
gltf_error = Some(err);
1687
return;
1688
}
1689
}
1690
});
1691
1692
// Only include meshes in the output if they're set to be retained in the MAIN_WORLD and/or RENDER_WORLD by the load_meshes flag
1693
if !settings.load_meshes.is_empty()
1694
&& let (Some(mesh), Some(weights)) = (gltf_node.mesh(), morph_weights)
1695
{
1696
let primitive_label = mesh.primitives().next().map(|p| GltfAssetLabel::Primitive {
1697
mesh: mesh.index(),
1698
primitive: p.index(),
1699
});
1700
let first_mesh =
1701
primitive_label.map(|label| load_context.get_label_handle(label.to_string()));
1702
node.insert(MorphWeights::new(weights, first_mesh)?);
1703
}
1704
1705
if let Some(err) = gltf_error {
1706
Err(err)
1707
} else {
1708
Ok(())
1709
}
1710
}
1711
1712
/// Loads the raw glTF buffer data for a specific glTF file.
async fn load_buffers(
    gltf: &gltf::Gltf,
    load_context: &mut LoadContext<'_>,
) -> Result<Vec<Vec<u8>>, GltfError> {
    const VALID_MIME_TYPES: &[&str] = &["application/octet-stream", "application/gltf-buffer"];

    let mut buffer_data = Vec::new();
    for buffer in gltf.buffers() {
        match buffer.source() {
            gltf::buffer::Source::Uri(uri) => {
                let uri = percent_encoding::percent_decode_str(uri)
                    .decode_utf8()
                    .unwrap();
                let uri = uri.as_ref();
                let buffer_bytes = match DataUri::parse(uri) {
                    Ok(data_uri) if VALID_MIME_TYPES.contains(&data_uri.mime_type) => {
                        data_uri.decode()?
                    }
                    Ok(_) => return Err(GltfError::BufferFormatUnsupported),
                    Err(()) => {
                        // TODO: Remove this and add dep
                        let buffer_path = load_context.path().parent().unwrap().join(uri);
                        load_context.read_asset_bytes(buffer_path).await?
                    }
                };
                buffer_data.push(buffer_bytes);
            }
            gltf::buffer::Source::Bin => {
                if let Some(blob) = gltf.blob.as_deref() {
                    buffer_data.push(blob.into());
                } else {
                    return Err(GltfError::MissingBlob);
                }
            }
        }
    }

    Ok(buffer_data)
}

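/// A parsed `data:` URI, split into its MIME type, `;base64` flag, and payload,
/// used here to decode glTF buffers that are embedded directly in the file.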
struct DataUri<'a> {
    pub mime_type: &'a str,
    pub base64: bool,
    pub data: &'a str,
}

impl<'a> DataUri<'a> {
    fn parse(uri: &'a str) -> Result<DataUri<'a>, ()> {
        let uri = uri.strip_prefix("data:").ok_or(())?;
        let (mime_type, data) = Self::split_once(uri, ',').ok_or(())?;

        let (mime_type, base64) = match mime_type.strip_suffix(";base64") {
            Some(mime_type) => (mime_type, true),
            None => (mime_type, false),
        };

        Ok(DataUri {
            mime_type,
            base64,
            data,
        })
    }

    fn decode(&self) -> Result<Vec<u8>, base64::DecodeError> {
        if self.base64 {
            base64::Engine::decode(&base64::engine::general_purpose::STANDARD, self.data)
        } else {
            Ok(self.data.as_bytes().to_owned())
        }
    }

    fn split_once(input: &str, delimiter: char) -> Option<(&str, &str)> {
        let mut iter = input.splitn(2, delimiter);
        Some((iter.next()?, iter.next()?))
    }
}

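/// A texture that was either decoded while parsing the glTF (`Image`) or deferred
/// to the asset server as a separate file to load later (`Path`).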
enum ImageOrPath {
    Image {
        image: Image,
        label: GltfAssetLabel,
    },
    Path {
        path: PathBuf,
        is_srgb: bool,
        sampler_descriptor: ImageSamplerDescriptor,
    },
}

impl ImageOrPath {
    // TODO: use the threaded impl on wasm once wasm thread pool doesn't deadlock on it
    // See https://github.com/bevyengine/bevy/issues/1924 for more details
    // The taskpool use is also avoided when there is only one texture for performance reasons and
    // to avoid https://github.com/bevyengine/bevy/pull/2725
    // PERF: could this be a Vec instead? Are gltf texture indices dense?
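    /// Registers an already-decoded `Image` as a labeled sub-asset, or starts an
    /// asset-server load for a `Path` texture, and pushes the resulting handle.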
    fn process_loaded_texture(
        self,
        load_context: &mut LoadContext,
        handles: &mut Vec<Handle<Image>>,
    ) {
        let handle = match self {
            ImageOrPath::Image { label, image } => {
                load_context.add_labeled_asset(label.to_string(), image)
            }
            ImageOrPath::Path {
                path,
                is_srgb,
                sampler_descriptor,
            } => load_context
                .loader()
                .with_settings(move |settings: &mut ImageLoaderSettings| {
                    settings.is_srgb = is_srgb;
                    settings.sampler = ImageSampler::Descriptor(sampler_descriptor.clone());
                })
                .load(path),
        };
        handles.push(handle);
    }
}

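/// Iterator over a morph target's displacement attributes, zipping the optional
/// position/normal/tangent accessors and substituting `Vec3::ZERO` for any that
/// are missing.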
struct PrimitiveMorphAttributesIter<'s> {
    convert_coordinates: bool,
    positions: Option<Iter<'s, [f32; 3]>>,
    normals: Option<Iter<'s, [f32; 3]>>,
    tangents: Option<Iter<'s, [f32; 3]>>,
}

impl<'s> Iterator for PrimitiveMorphAttributesIter<'s> {
    type Item = MorphAttributes;

    fn next(&mut self) -> Option<Self::Item> {
        let position = self.positions.as_mut().and_then(Iterator::next);
        let normal = self.normals.as_mut().and_then(Iterator::next);
        let tangent = self.tangents.as_mut().and_then(Iterator::next);
        if position.is_none() && normal.is_none() && tangent.is_none() {
            return None;
        }

        let mut attributes = MorphAttributes {
            position: position.map(Into::into).unwrap_or(Vec3::ZERO),
            normal: normal.map(Into::into).unwrap_or(Vec3::ZERO),
            tangent: tangent.map(Into::into).unwrap_or(Vec3::ZERO),
        };

        if self.convert_coordinates {
            attributes = MorphAttributes {
                position: attributes.position.convert_coordinates(),
                normal: attributes.normal.convert_coordinates(),
                tangent: attributes.tangent.convert_coordinates(),
            }
        }

        Some(attributes)
    }
}

/// A helper structure for `load_node` that contains information about the
/// nearest ancestor animation root.
#[cfg(feature = "bevy_animation")]
#[derive(Clone)]
struct AnimationContext {
    /// The nearest ancestor animation root.
    pub root: Entity,
    /// The path to the animation root. This is used for constructing the
    /// animation target UUIDs.
    pub path: SmallVec<[Name; 8]>,
}

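/// Deserialization helper for the conventional `targetNames` array stored in a
/// mesh's `extras`, which gives human-readable names to its morph targets.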
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct MorphTargetNames {
    pub target_names: Vec<String>,
}

#[cfg(test)]
mod test {
    use std::path::Path;

    use crate::{Gltf, GltfAssetLabel, GltfNode, GltfSkin};
    use bevy_app::{App, TaskPoolPlugin};
    use bevy_asset::{
        io::{
            memory::{Dir, MemoryAssetReader},
            AssetSource, AssetSourceId,
        },
        AssetApp, AssetPlugin, AssetServer, Assets, Handle, LoadState,
    };
    use bevy_ecs::{resource::Resource, world::World};
    use bevy_log::LogPlugin;
    use bevy_mesh::skinning::SkinnedMeshInverseBindposes;
    use bevy_render::mesh::MeshPlugin;
    use bevy_scene::ScenePlugin;

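    /// Builds a minimal headless [`App`] whose default asset source reads from the
    /// given in-memory [`Dir`], with just enough plugins to load glTF assets.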
    fn test_app(dir: Dir) -> App {
        let mut app = App::new();
        let reader = MemoryAssetReader { root: dir };
        app.register_asset_source(
            AssetSourceId::Default,
            AssetSource::build().with_reader(move || Box::new(reader.clone())),
        )
        .add_plugins((
            LogPlugin::default(),
            TaskPoolPlugin::default(),
            AssetPlugin::default(),
            ScenePlugin,
            MeshPlugin,
            crate::GltfPlugin::default(),
        ));

        app.finish();
        app.cleanup();

        app
    }

    const LARGE_ITERATION_COUNT: usize = 10000;

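    /// Repeatedly updates the app until `predicate` returns `Some`, panicking if it
    /// never does within `LARGE_ITERATION_COUNT` iterations.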
    fn run_app_until(app: &mut App, mut predicate: impl FnMut(&mut World) -> Option<()>) {
        for _ in 0..LARGE_ITERATION_COUNT {
            app.update();
            if predicate(app.world_mut()).is_some() {
                return;
            }
        }

        panic!("Ran out of loops to return `Some` from `predicate`");
    }

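    /// Writes `gltf` to `gltf_path` in an in-memory asset source, loads it, and runs
    /// the app until the asset has finished loading (panicking if loading fails).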
    fn load_gltf_into_app(gltf_path: &str, gltf: &str) -> App {
        #[expect(
            dead_code,
            reason = "This struct is used to keep the handle alive. As such, we have no need to handle the handle directly."
        )]
        #[derive(Resource)]
        struct GltfHandle(Handle<Gltf>);

        let dir = Dir::default();
        dir.insert_asset_text(Path::new(gltf_path), gltf);
        let mut app = test_app(dir);
        app.update();
        let asset_server = app.world().resource::<AssetServer>().clone();
        let handle: Handle<Gltf> = asset_server.load(gltf_path.to_string());
        let handle_id = handle.id();
        app.insert_resource(GltfHandle(handle));
        app.update();
        run_app_until(&mut app, |_world| {
            let load_state = asset_server.get_load_state(handle_id).unwrap();
            match load_state {
                LoadState::Loaded => Some(()),
                LoadState::Failed(err) => panic!("{err}"),
                _ => None,
            }
        });
        app
    }

    #[test]
    fn single_node() {
        let gltf_path = "test.gltf";
        let app = load_gltf_into_app(
            gltf_path,
            r#"
            {
                "asset": {
                    "version": "2.0"
                },
                "nodes": [
                    {
                        "name": "TestSingleNode"
                    }
                ],
                "scene": 0,
                "scenes": [{ "nodes": [0] }]
            }
            "#,
        );
        let asset_server = app.world().resource::<AssetServer>();
        let handle = asset_server.load(gltf_path);
        let gltf_root_assets = app.world().resource::<Assets<Gltf>>();
        let gltf_node_assets = app.world().resource::<Assets<GltfNode>>();
        let gltf_root = gltf_root_assets.get(&handle).unwrap();
        assert!(gltf_root.nodes.len() == 1, "Single node");
        assert!(
            gltf_root.named_nodes.contains_key("TestSingleNode"),
            "Named node is in named nodes"
        );
        let gltf_node = gltf_node_assets
            .get(gltf_root.named_nodes.get("TestSingleNode").unwrap())
            .unwrap();
        assert_eq!(gltf_node.name, "TestSingleNode", "Correct name");
        assert_eq!(gltf_node.index, 0, "Correct index");
        assert_eq!(gltf_node.children.len(), 0, "No children");
        assert_eq!(gltf_node.asset_label(), GltfAssetLabel::Node(0));
    }

    #[test]
    fn node_hierarchy_no_hierarchy() {
        let gltf_path = "test.gltf";
        let app = load_gltf_into_app(
            gltf_path,
            r#"
            {
                "asset": {
                    "version": "2.0"
                },
                "nodes": [
                    {
                        "name": "l1"
                    },
                    {
                        "name": "l2"
                    }
                ],
                "scene": 0,
                "scenes": [{ "nodes": [0] }]
            }
            "#,
        );
        let asset_server = app.world().resource::<AssetServer>();
        let handle = asset_server.load(gltf_path);
        let gltf_root_assets = app.world().resource::<Assets<Gltf>>();
        let gltf_node_assets = app.world().resource::<Assets<GltfNode>>();
        let gltf_root = gltf_root_assets.get(&handle).unwrap();
        let result = gltf_root
            .nodes
            .iter()
            .map(|h| gltf_node_assets.get(h).unwrap())
            .collect::<Vec<_>>();
        assert_eq!(result.len(), 2);
        assert_eq!(result[0].name, "l1");
        assert_eq!(result[0].children.len(), 0);
        assert_eq!(result[1].name, "l2");
        assert_eq!(result[1].children.len(), 0);
    }

    #[test]
    fn node_hierarchy_simple_hierarchy() {
        let gltf_path = "test.gltf";
        let app = load_gltf_into_app(
            gltf_path,
            r#"
            {
                "asset": {
                    "version": "2.0"
                },
                "nodes": [
                    {
                        "name": "l1",
                        "children": [1]
                    },
                    {
                        "name": "l2"
                    }
                ],
                "scene": 0,
                "scenes": [{ "nodes": [0] }]
            }
            "#,
        );
        let asset_server = app.world().resource::<AssetServer>();
        let handle = asset_server.load(gltf_path);
        let gltf_root_assets = app.world().resource::<Assets<Gltf>>();
        let gltf_node_assets = app.world().resource::<Assets<GltfNode>>();
        let gltf_root = gltf_root_assets.get(&handle).unwrap();
        let result = gltf_root
            .nodes
            .iter()
            .map(|h| gltf_node_assets.get(h).unwrap())
            .collect::<Vec<_>>();
        assert_eq!(result.len(), 2);
        assert_eq!(result[0].name, "l1");
        assert_eq!(result[0].children.len(), 1);
        assert_eq!(result[1].name, "l2");
        assert_eq!(result[1].children.len(), 0);
    }

    #[test]
    fn node_hierarchy_hierarchy() {
        let gltf_path = "test.gltf";
        let app = load_gltf_into_app(
            gltf_path,
            r#"
            {
                "asset": {
                    "version": "2.0"
                },
                "nodes": [
                    {
                        "name": "l1",
                        "children": [1]
                    },
                    {
                        "name": "l2",
                        "children": [2]
                    },
                    {
                        "name": "l3",
                        "children": [3, 4, 5]
                    },
                    {
                        "name": "l4",
                        "children": [6]
                    },
                    {
                        "name": "l5"
                    },
                    {
                        "name": "l6"
                    },
                    {
                        "name": "l7"
                    }
                ],
                "scene": 0,
                "scenes": [{ "nodes": [0] }]
            }
            "#,
        );
        let asset_server = app.world().resource::<AssetServer>();
        let handle = asset_server.load(gltf_path);
        let gltf_root_assets = app.world().resource::<Assets<Gltf>>();
        let gltf_node_assets = app.world().resource::<Assets<GltfNode>>();
        let gltf_root = gltf_root_assets.get(&handle).unwrap();
        let result = gltf_root
            .nodes
            .iter()
            .map(|h| gltf_node_assets.get(h).unwrap())
            .collect::<Vec<_>>();
        assert_eq!(result.len(), 7);
        assert_eq!(result[0].name, "l1");
        assert_eq!(result[0].children.len(), 1);
        assert_eq!(result[1].name, "l2");
        assert_eq!(result[1].children.len(), 1);
        assert_eq!(result[2].name, "l3");
        assert_eq!(result[2].children.len(), 3);
        assert_eq!(result[3].name, "l4");
        assert_eq!(result[3].children.len(), 1);
        assert_eq!(result[4].name, "l5");
        assert_eq!(result[4].children.len(), 0);
        assert_eq!(result[5].name, "l6");
        assert_eq!(result[5].children.len(), 0);
        assert_eq!(result[6].name, "l7");
        assert_eq!(result[6].children.len(), 0);
    }

    #[test]
    fn node_hierarchy_cyclic() {
        let gltf_path = "test.gltf";
        let gltf_str = r#"
            {
                "asset": {
                    "version": "2.0"
                },
                "nodes": [
                    {
                        "name": "l1",
                        "children": [1]
                    },
                    {
                        "name": "l2",
                        "children": [0]
                    }
                ],
                "scene": 0,
                "scenes": [{ "nodes": [0] }]
            }
            "#;

        let dir = Dir::default();
        dir.insert_asset_text(Path::new(gltf_path), gltf_str);
        let mut app = test_app(dir);
        app.update();
        let asset_server = app.world().resource::<AssetServer>().clone();
        let handle: Handle<Gltf> = asset_server.load(gltf_path);
        let handle_id = handle.id();
        app.update();
        run_app_until(&mut app, |_world| {
            let load_state = asset_server.get_load_state(handle_id).unwrap();
            if load_state.is_failed() {
                Some(())
            } else {
                None
            }
        });
        let load_state = asset_server.get_load_state(handle_id).unwrap();
        assert!(load_state.is_failed());
    }

    #[test]
    fn node_hierarchy_missing_node() {
        let gltf_path = "test.gltf";
        let gltf_str = r#"
            {
                "asset": {
                    "version": "2.0"
                },
                "nodes": [
                    {
                        "name": "l1",
                        "children": [2]
                    },
                    {
                        "name": "l2"
                    }
                ],
                "scene": 0,
                "scenes": [{ "nodes": [0] }]
            }
            "#;

        let dir = Dir::default();
        dir.insert_asset_text(Path::new(gltf_path), gltf_str);
        let mut app = test_app(dir);
        app.update();
        let asset_server = app.world().resource::<AssetServer>().clone();
        let handle: Handle<Gltf> = asset_server.load(gltf_path);
        let handle_id = handle.id();
        app.update();
        run_app_until(&mut app, |_world| {
            let load_state = asset_server.get_load_state(handle_id).unwrap();
            if load_state.is_failed() {
                Some(())
            } else {
                None
            }
        });
        let load_state = asset_server.get_load_state(handle_id).unwrap();
        assert!(load_state.is_failed());
    }

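    /// The embedded base64 buffer is 128 bytes: two little-endian 4x4 `f32` matrices
    /// consumed as the skin's inverse bind matrices.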
    #[test]
    fn skin_node() {
        let gltf_path = "test.gltf";
        let app = load_gltf_into_app(
            gltf_path,
            r#"
            {
                "asset": {
                    "version": "2.0"
                },
                "nodes": [
                    {
                        "name": "skinned",
                        "skin": 0,
                        "children": [1, 2]
                    },
                    {
                        "name": "joint1"
                    },
                    {
                        "name": "joint2"
                    }
                ],
                "skins": [
                    {
                        "inverseBindMatrices": 0,
                        "joints": [1, 2]
                    }
                ],
                "buffers": [
                    {
                        "uri" : "data:application/gltf-buffer;base64,AACAPwAAAAAAAAAAAAAAAAAAAAAAAIA/AAAAAAAAAAAAAAAAAAAAAAAAgD8AAAAAAAAAAAAAAAAAAAAAAACAPwAAgD8AAAAAAAAAAAAAAAAAAAAAAACAPwAAAAAAAAAAAAAAAAAAAAAAAIA/AAAAAAAAAAAAAIC/AAAAAAAAgD8=",
                        "byteLength" : 128
                    }
                ],
                "bufferViews": [
                    {
                        "buffer": 0,
                        "byteLength": 128
                    }
                ],
                "accessors": [
                    {
                        "bufferView" : 0,
                        "componentType" : 5126,
                        "count" : 2,
                        "type" : "MAT4"
                    }
                ],
                "scene": 0,
                "scenes": [{ "nodes": [0] }]
            }
            "#,
        );
        let asset_server = app.world().resource::<AssetServer>();
        let handle = asset_server.load(gltf_path);
        let gltf_root_assets = app.world().resource::<Assets<Gltf>>();
        let gltf_node_assets = app.world().resource::<Assets<GltfNode>>();
        let gltf_skin_assets = app.world().resource::<Assets<GltfSkin>>();
        let gltf_inverse_bind_matrices = app
            .world()
            .resource::<Assets<SkinnedMeshInverseBindposes>>();
        let gltf_root = gltf_root_assets.get(&handle).unwrap();

        assert_eq!(gltf_root.skins.len(), 1);
        assert_eq!(gltf_root.nodes.len(), 3);

        let skin = gltf_skin_assets.get(&gltf_root.skins[0]).unwrap();
        assert_eq!(skin.joints.len(), 2);
        assert_eq!(skin.joints[0], gltf_root.nodes[1]);
        assert_eq!(skin.joints[1], gltf_root.nodes[2]);
        assert!(gltf_inverse_bind_matrices.contains(&skin.inverse_bind_matrices));

        let skinned_node = gltf_node_assets.get(&gltf_root.nodes[0]).unwrap();
        assert_eq!(skinned_node.name, "skinned");
        assert_eq!(skinned_node.children.len(), 2);
        assert_eq!(skinned_node.skin.as_ref(), Some(&gltf_root.skins[0]));
    }
}