Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
bevyengine
GitHub Repository: bevyengine/bevy
Path: blob/main/crates/bevy_render/macros/src/specializer.rs
9368 views
1
use bevy_macro_utils::{
2
fq_std::{FQDefault, FQResult},
3
get_struct_fields, require_named,
4
};
5
use proc_macro::TokenStream;
6
use proc_macro2::Span;
7
use quote::{format_ident, quote};
8
use syn::{
9
parse::{Parse, ParseStream},
10
parse_macro_input, parse_quote,
11
punctuated::Punctuated,
12
spanned::Spanned,
13
DeriveInput, Expr, Field, Ident, Index, Member, Meta, MetaList, Pat, Path, Token, Type,
14
WherePredicate,
15
};
16
17
// Attribute idents recognized by the `Specializer` derive.
// `#[specialize(..)]` on the struct selects the impl targets; the single
// ident `all` requests a blanket impl over every `Specializable` target.
const SPECIALIZE_ATTR_IDENT: &str = "specialize";
const SPECIALIZE_ALL_IDENT: &str = "all";

// `#[key(..)]` on a field overrides how that field's key is produced;
// `#[key(default)]` uses `Default::default()` instead of the composite key.
const KEY_ATTR_IDENT: &str = "key";
const KEY_DEFAULT_IDENT: &str = "default";
22
23
/// Which targets the derived `Specializer` impls should cover: a blanket impl
/// over all `Specializable` targets (`#[specialize(all)]`), or one impl per
/// listed path (`#[specialize(RenderPipeline, ...)]`).
enum SpecializeImplTargets {
    All,
    Specific(Vec<Path>),
}
27
28
impl Parse for SpecializeImplTargets {
29
fn parse(input: ParseStream) -> syn::Result<Self> {
30
let paths = input.parse_terminated(Path::parse, Token![,])?;
31
if paths
32
.first()
33
.is_some_and(|p| p.is_ident(SPECIALIZE_ALL_IDENT))
34
{
35
Ok(SpecializeImplTargets::All)
36
} else {
37
Ok(SpecializeImplTargets::Specific(paths.into_iter().collect()))
38
}
39
}
40
}
41
42
/// How a field's key is obtained inside the generated `specialize` body.
#[derive(Clone)]
enum Key {
    // The field consumes the entire `key` argument (it is the only field
    // contributing to the key).
    Whole,
    // `#[key(default)]`: the key is `Default::default()`.
    Default,
    // The field reads element `.N` of the composite key tuple.
    Index(Index),
    // `#[key(expr)]`: a user-supplied expression of the key type.
    Custom(Expr),
}
49
50
impl Key {
    /// The expression used for this field's key inside the generated
    /// `specialize` body (which receives the composite key as `key`).
    fn expr(&self) -> Expr {
        match self {
            // Sole key-carrying field: use the whole `key` argument.
            Key::Whole => parse_quote!(key),
            Key::Default => parse_quote!(#FQDefault::default()),
            Key::Index(index) => {
                // Tuple access `key.N` into the composite key.
                let member = Member::Unnamed(index.clone());
                parse_quote!(key.#member)
            }
            Key::Custom(expr) => expr.clone(),
        }
    }
}
63
64
// Diagnostic attached when the contents of `#[key(..)]` fail to parse.
const KEY_ERROR_MSG: &str = "Invalid key override. Must be either `default` or a valid Rust expression of the correct key type";
65
66
impl Parse for Key {
67
fn parse(input: ParseStream) -> syn::Result<Self> {
68
if let Ok(ident) = input.parse::<Ident>() {
69
if ident == KEY_DEFAULT_IDENT {
70
Ok(Key::Default)
71
} else {
72
Err(syn::Error::new_spanned(ident, KEY_ERROR_MSG))
73
}
74
} else {
75
input.parse::<Expr>().map(Key::Custom).map_err(|mut err| {
76
err.extend(syn::Error::new(err.span(), KEY_ERROR_MSG));
77
err
78
})
79
}
80
}
81
}
82
83
/// Per-field data collected from the derive input.
#[derive(Clone)]
struct FieldInfo {
    // The field's declared type.
    ty: Type,
    // How the field is accessed on `self` (named ident or tuple index).
    member: Member,
    // How the field's key is obtained (see [`Key`]).
    key: Key,
}
89
90
impl FieldInfo {
91
fn key_ty(&self, specialize_path: &Path, target_path: &Path) -> Option<Type> {
92
let ty = &self.ty;
93
matches!(self.key, Key::Whole | Key::Index(_))
94
.then_some(parse_quote!(<#ty as #specialize_path::Specializer<#target_path>>::Key))
95
}
96
97
fn key_ident(&self, ident: Ident) -> Option<Ident> {
98
matches!(self.key, Key::Whole | Key::Index(_)).then_some(ident)
99
}
100
101
fn specialize_expr(&self, specialize_path: &Path, target_path: &Path) -> Expr {
102
let FieldInfo {
103
ty, member, key, ..
104
} = &self;
105
let key_expr = key.expr();
106
parse_quote!(<#ty as #specialize_path::Specializer<#target_path>>::specialize(&self.#member, #key_expr, descriptor))
107
}
108
109
fn specialize_predicate(&self, specialize_path: &Path, target_path: &Path) -> WherePredicate {
110
let ty = &self.ty;
111
if matches!(&self.key, Key::Default) {
112
parse_quote!(#ty: #specialize_path::Specializer<#target_path, Key: #FQDefault>)
113
} else {
114
parse_quote!(#ty: #specialize_path::Specializer<#target_path>)
115
}
116
}
117
}
118
119
fn get_field_info(
120
fields: &Punctuated<Field, Token![,]>,
121
targets: &SpecializeImplTargets,
122
) -> syn::Result<Vec<FieldInfo>> {
123
let mut field_info: Vec<FieldInfo> = Vec::new();
124
let mut used_count = 0;
125
let mut single_index = 0;
126
for (index, field) in fields.iter().enumerate() {
127
let field_ty = field.ty.clone();
128
let field_member = field.ident.clone().map_or(
129
Member::Unnamed(Index {
130
index: index as u32,
131
span: field.span(),
132
}),
133
Member::Named,
134
);
135
let key_index = Index {
136
index: used_count,
137
span: field.span(),
138
};
139
140
let mut use_key_field = true;
141
let mut key = Key::Index(key_index);
142
for attr in &field.attrs {
143
match &attr.meta {
144
Meta::List(MetaList { path, tokens, .. }) if path.is_ident(&KEY_ATTR_IDENT) => {
145
let owned_tokens = tokens.clone().into();
146
let Ok(parsed_key) = syn::parse::<Key>(owned_tokens) else {
147
return Err(syn::Error::new(
148
attr.span(),
149
"Invalid key override attribute",
150
));
151
};
152
key = parsed_key;
153
if matches!(
154
(&key, &targets),
155
(Key::Custom(_), SpecializeImplTargets::All)
156
) {
157
return Err(syn::Error::new(
158
attr.span(),
159
"#[key(default)] is the only key override type allowed with #[specialize(all)]",
160
));
161
}
162
use_key_field = false;
163
}
164
_ => {}
165
}
166
}
167
168
if use_key_field {
169
used_count += 1;
170
single_index = index;
171
}
172
173
field_info.push(FieldInfo {
174
ty: field_ty,
175
member: field_member,
176
key,
177
});
178
}
179
180
if used_count == 1 {
181
field_info[single_index].key = Key::Whole;
182
}
183
184
Ok(field_info)
185
}
186
187
fn get_specialize_targets(
188
ast: &DeriveInput,
189
derive_name: &str,
190
) -> syn::Result<SpecializeImplTargets> {
191
let specialize_attr = ast.attrs.iter().find_map(|attr| {
192
if attr.path().is_ident(SPECIALIZE_ATTR_IDENT)
193
&& let Meta::List(meta_list) = &attr.meta
194
{
195
return Some(meta_list);
196
}
197
None
198
});
199
let Some(specialize_meta_list) = specialize_attr else {
200
return Err(syn::Error::new(
201
Span::call_site(),
202
format!("#[derive({derive_name})] must be accompanied by #[specialize(..targets)].\n Example usages: #[specialize(RenderPipeline)], #[specialize(all)]")
203
));
204
};
205
syn::parse::<SpecializeImplTargets>(specialize_meta_list.tokens.clone().into())
206
}
207
208
// Unwraps a `syn::Result`, or early-returns the error rendered as a
// compile-error `TokenStream` from the surrounding proc-macro function.
macro_rules! guard {
    ($expr: expr) => {
        match $expr {
            Ok(__val) => __val,
            Err(err) => return err.to_compile_error().into(),
        }
    };
}
216
217
pub fn impl_specializer(input: TokenStream) -> TokenStream {
218
let bevy_render_path: Path = crate::bevy_render_path();
219
let specialize_path = {
220
let mut path = bevy_render_path.clone();
221
path.segments.push(format_ident!("render_resource").into());
222
path
223
};
224
225
let ecs_path = crate::bevy_ecs_path();
226
227
let ast = parse_macro_input!(input as DeriveInput);
228
let targets = guard!(get_specialize_targets(&ast, "Specializer"));
229
let fields = guard!(get_struct_fields(&ast.data, "Specializer"));
230
let fields = guard!(require_named(fields));
231
let field_info = guard!(get_field_info(fields, &targets));
232
233
let key_idents: Vec<Option<Ident>> = field_info
234
.iter()
235
.enumerate()
236
.map(|(i, field_info)| field_info.key_ident(format_ident!("key{i}")))
237
.collect();
238
let key_tuple_idents: Vec<Ident> = key_idents.iter().flatten().cloned().collect();
239
let ignore_pat: Pat = parse_quote!(_);
240
let key_patterns: Vec<Pat> = key_idents
241
.iter()
242
.map(|key_ident| match key_ident {
243
Some(key_ident) => parse_quote!(#key_ident),
244
None => ignore_pat.clone(),
245
})
246
.collect();
247
248
match targets {
249
SpecializeImplTargets::All => impl_specialize_all(
250
&specialize_path,
251
&ecs_path,
252
&ast,
253
&field_info,
254
&key_patterns,
255
&key_tuple_idents,
256
),
257
SpecializeImplTargets::Specific(targets) => targets
258
.iter()
259
.map(|target| {
260
impl_specialize_specific(
261
&specialize_path,
262
&ecs_path,
263
&ast,
264
&field_info,
265
target,
266
&key_patterns,
267
&key_tuple_idents,
268
)
269
})
270
.collect(),
271
}
272
}
273
274
/// Generates the blanket `impl<T: Specializable, ...> Specializer<T>` for the
/// struct (the `#[specialize(all)]` form), bounding each field's type with
/// `Specializer<T>` in the where clause.
fn impl_specialize_all(
    specialize_path: &Path,
    ecs_path: &Path,
    ast: &DeriveInput,
    field_info: &[FieldInfo],
    key_patterns: &[Pat],
    key_tuple_idents: &[Ident],
) -> TokenStream {
    // The blanket target is a fresh generic parameter `T`.
    let target_path = Path::from(format_ident!("T"));
    let key_elems: Vec<Type> = field_info
        .iter()
        .filter_map(|field_info| field_info.key_ty(specialize_path, &target_path))
        .collect();
    let specialize_exprs: Vec<Expr> = field_info
        .iter()
        .map(|field_info| field_info.specialize_expr(specialize_path, &target_path))
        .collect();

    let struct_name = &ast.ident;
    // Prepend `T: Specializable` to a copy of the struct's own generics for
    // use on the impl.
    let mut generics = ast.generics.clone();
    generics.params.insert(
        0,
        parse_quote!(#target_path: #specialize_path::Specializable),
    );

    // Bound every field type in the where clause (skipped when there are no
    // fields, so an empty struct gets no spurious where clause).
    if !field_info.is_empty() {
        let where_clause = generics.make_where_clause();
        for field in field_info {
            where_clause
                .predicates
                .push(field.specialize_predicate(specialize_path, &target_path));
        }
    }

    // Type generics come from the ORIGINAL generics (no `T`); impl generics
    // and where clause come from the augmented copy (with `T`).
    let (_, type_generics, _) = ast.generics.split_for_impl();
    let (impl_generics, _, where_clause) = &generics.split_for_impl();

    TokenStream::from(quote! {
        impl #impl_generics #specialize_path::Specializer<#target_path> for #struct_name #type_generics #where_clause {
            type Key = (#(#key_elems),*);

            fn specialize(
                &self,
                key: Self::Key,
                descriptor: &mut <#target_path as #specialize_path::Specializable>::Descriptor
            ) -> #FQResult<#specialize_path::Canonical<Self::Key>, #ecs_path::error::BevyError> {
                #(let #key_patterns = #specialize_exprs?;)*
                #FQResult::Ok((#(#key_tuple_idents),*))
            }
        }
    })
}
326
327
fn impl_specialize_specific(
328
specialize_path: &Path,
329
ecs_path: &Path,
330
ast: &DeriveInput,
331
field_info: &[FieldInfo],
332
target_path: &Path,
333
key_patterns: &[Pat],
334
key_tuple_idents: &[Ident],
335
) -> TokenStream {
336
let key_elems: Vec<Type> = field_info
337
.iter()
338
.filter_map(|field_info| field_info.key_ty(specialize_path, target_path))
339
.collect();
340
let specialize_exprs: Vec<Expr> = field_info
341
.iter()
342
.map(|field_info| field_info.specialize_expr(specialize_path, target_path))
343
.collect();
344
345
let struct_name = &ast.ident;
346
let (impl_generics, type_generics, where_clause) = &ast.generics.split_for_impl();
347
348
TokenStream::from(quote! {
349
impl #impl_generics #specialize_path::Specializer<#target_path> for #struct_name #type_generics #where_clause {
350
type Key = (#(#key_elems),*);
351
352
fn specialize(
353
&self,
354
key: Self::Key,
355
descriptor: &mut <#target_path as #specialize_path::Specializable>::Descriptor
356
) -> #FQResult<#specialize_path::Canonical<Self::Key>, #ecs_path::error::BevyError> {
357
#(let #key_patterns = #specialize_exprs?;)*
358
#FQResult::Ok((#(#key_tuple_idents),*))
359
}
360
}
361
})
362
}
363
364
pub fn impl_specializer_key(input: TokenStream) -> TokenStream {
365
let bevy_render_path: Path = crate::bevy_render_path();
366
let specialize_path = {
367
let mut path = bevy_render_path.clone();
368
path.segments.push(format_ident!("render_resource").into());
369
path
370
};
371
372
let ast = parse_macro_input!(input as DeriveInput);
373
let ident = ast.ident;
374
TokenStream::from(quote!(
375
impl #specialize_path::SpecializerKey for #ident {
376
const IS_CANONICAL: bool = true;
377
type Canonical = Self;
378
}
379
))
380
}
381
382