GitHub Repository: bevyengine/bevy
Path: blob/main/crates/bevy_render/macros/src/specializer.rs
use bevy_macro_utils::{
    fq_std::{FQDefault, FQResult},
    get_struct_fields,
};
use proc_macro::TokenStream;
use proc_macro2::Span;
use quote::{format_ident, quote};
use syn::{
    parse::{Parse, ParseStream},
    parse_macro_input, parse_quote,
    punctuated::Punctuated,
    spanned::Spanned,
    DeriveInput, Expr, Field, Ident, Index, Member, Meta, MetaList, Pat, Path, Token, Type,
    WherePredicate,
};

const SPECIALIZE_ATTR_IDENT: &str = "specialize";
const SPECIALIZE_ALL_IDENT: &str = "all";

const KEY_ATTR_IDENT: &str = "key";
const KEY_DEFAULT_IDENT: &str = "default";

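/// Targets parsed from the `#[specialize(..)]` attribute: either `all`
/// specializable target types, or an explicit list of target type paths
/// (e.g. `#[specialize(RenderPipeline)]`).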
enum SpecializeImplTargets {
    All,
    Specific(Vec<Path>),
}

impl Parse for SpecializeImplTargets {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        let paths = input.parse_terminated(Path::parse, Token![,])?;
        if paths
            .first()
            .is_some_and(|p| p.is_ident(SPECIALIZE_ALL_IDENT))
        {
            Ok(SpecializeImplTargets::All)
        } else {
            Ok(SpecializeImplTargets::Specific(paths.into_iter().collect()))
        }
    }
}

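/// How a field obtains its key inside the generated `specialize` body: the
/// derived specializer's whole key (`Whole`), `Default::default()` (`Default`),
/// an index into the composite key tuple (`Index`), or a user-supplied
/// expression from a `#[key(..)]` override (`Custom`).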
#[derive(Clone)]
enum Key {
    Whole,
    Default,
    Index(Index),
    Custom(Expr),
}

impl Key {
    fn expr(&self) -> Expr {
        match self {
            Key::Whole => parse_quote!(key),
            Key::Default => parse_quote!(#FQDefault::default()),
            Key::Index(index) => {
                let member = Member::Unnamed(index.clone());
                parse_quote!(key.#member)
            }
            Key::Custom(expr) => expr.clone(),
        }
    }
}

const KEY_ERROR_MSG: &str = "Invalid key override. Must be either `default` or a valid Rust expression of the correct key type";

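// Illustrative sketch of the `#[key(..)]` overrides this parser accepts; the
// struct, field, and expression names below are hypothetical:
//
//     #[derive(Specializer)]
//     #[specialize(RenderPipeline)]
//     struct ExampleSpecializer {
//         #[key(default)]
//         msaa: MsaaSpecializer,        // key supplied by Default::default()
//         #[key(BlendState::REPLACE)]
//         blend: BlendSpecializer,      // key supplied by a custom expression
//         vertex: VertexSpecializer,    // keyed by the derived `Key` itself
//     }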
impl Parse for Key {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        if let Ok(ident) = input.parse::<Ident>() {
            if ident == KEY_DEFAULT_IDENT {
                Ok(Key::Default)
            } else {
                Err(syn::Error::new_spanned(ident, KEY_ERROR_MSG))
            }
        } else {
            input.parse::<Expr>().map(Key::Custom).map_err(|mut err| {
                err.extend(syn::Error::new(err.span(), KEY_ERROR_MSG));
                err
            })
        }
    }
}

#[derive(Clone)]
struct FieldInfo {
    ty: Type,
    member: Member,
    key: Key,
}

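// Per-field helpers for assembling the generated impl: the field's entry in the
// `Key` tuple type, the ident that binds its canonical key, the `specialize`
// call expression, and the `where` predicate it contributes.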
impl FieldInfo {
    fn key_ty(&self, specialize_path: &Path, target_path: &Path) -> Option<Type> {
        let ty = &self.ty;
        matches!(self.key, Key::Whole | Key::Index(_))
            .then_some(parse_quote!(<#ty as #specialize_path::Specializer<#target_path>>::Key))
    }

    fn key_ident(&self, ident: Ident) -> Option<Ident> {
        matches!(self.key, Key::Whole | Key::Index(_)).then_some(ident)
    }

    fn specialize_expr(&self, specialize_path: &Path, target_path: &Path) -> Expr {
        let FieldInfo {
            ty, member, key, ..
        } = &self;
        let key_expr = key.expr();
        parse_quote!(<#ty as #specialize_path::Specializer<#target_path>>::specialize(&self.#member, #key_expr, descriptor))
    }

    fn specialize_predicate(&self, specialize_path: &Path, target_path: &Path) -> WherePredicate {
        let ty = &self.ty;
        if matches!(&self.key, Key::Default) {
            parse_quote!(#ty: #specialize_path::Specializer<#target_path, Key: #FQDefault>)
        } else {
            parse_quote!(#ty: #specialize_path::Specializer<#target_path>)
        }
    }
}

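/// Collects per-field info, applying any `#[key(..)]` overrides. Fields without
/// an override are keyed by their position in the composite key tuple; if
/// exactly one field ends up keyed that way, it receives the whole key
/// (`Key::Whole`) rather than a tuple index.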
fn get_field_info(
    fields: &Punctuated<Field, Token![,]>,
    targets: &SpecializeImplTargets,
) -> syn::Result<Vec<FieldInfo>> {
    let mut field_info: Vec<FieldInfo> = Vec::new();
    let mut used_count = 0;
    let mut single_index = 0;
    for (index, field) in fields.iter().enumerate() {
        let field_ty = field.ty.clone();
        let field_member = field.ident.clone().map_or(
            Member::Unnamed(Index {
                index: index as u32,
                span: field.span(),
            }),
            Member::Named,
        );
        let key_index = Index {
            index: used_count,
            span: field.span(),
        };

        let mut use_key_field = true;
        let mut key = Key::Index(key_index);
        for attr in &field.attrs {
            match &attr.meta {
                Meta::List(MetaList { path, tokens, .. }) if path.is_ident(&KEY_ATTR_IDENT) => {
                    let owned_tokens = tokens.clone().into();
                    let Ok(parsed_key) = syn::parse::<Key>(owned_tokens) else {
                        return Err(syn::Error::new(
                            attr.span(),
                            "Invalid key override attribute",
                        ));
                    };
                    key = parsed_key;
                    if matches!(
                        (&key, &targets),
                        (Key::Custom(_), SpecializeImplTargets::All)
                    ) {
                        return Err(syn::Error::new(
                            attr.span(),
                            "#[key(default)] is the only key override type allowed with #[specialize(all)]",
                        ));
                    }
                    use_key_field = false;
                }
                _ => {}
            }
        }

        if use_key_field {
            used_count += 1;
            single_index = index;
        }

        field_info.push(FieldInfo {
            ty: field_ty,
            member: field_member,
            key,
        });
    }

    if used_count == 1 {
        field_info[single_index].key = Key::Whole;
    }

    Ok(field_info)
}

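/// Finds the `#[specialize(..)]` attribute on the derive input and parses its
/// contents into [`SpecializeImplTargets`], erroring if the attribute is absent.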
fn get_specialize_targets(
    ast: &DeriveInput,
    derive_name: &str,
) -> syn::Result<SpecializeImplTargets> {
    let specialize_attr = ast.attrs.iter().find_map(|attr| {
        if attr.path().is_ident(SPECIALIZE_ATTR_IDENT)
            && let Meta::List(meta_list) = &attr.meta
        {
            return Some(meta_list);
        }
        None
    });
    let Some(specialize_meta_list) = specialize_attr else {
        return Err(syn::Error::new(
            Span::call_site(),
            format!("#[derive({derive_name})] must be accompanied by #[specialize(..targets)].\n Example usages: #[specialize(RenderPipeline)], #[specialize(all)]")
        ));
    };
    syn::parse::<SpecializeImplTargets>(specialize_meta_list.tokens.clone().into())
}

macro_rules! guard {
    ($expr: expr) => {
        match $expr {
            Ok(__val) => __val,
            Err(err) => return err.to_compile_error().into(),
        }
    };
}

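// Entry point for `#[derive(Specializer)]`. A rough sketch of the expansion for
// a hypothetical two-field input (all names below are invented), with paths
// shown unqualified for brevity; the real output qualifies them through the
// resolved `bevy_render` path and its `render_resource` module:
//
//     #[derive(Specializer)]
//     #[specialize(RenderPipeline)]
//     struct A {
//         x: X,
//         y: Y,
//     }
//
// expands to approximately:
//
//     impl Specializer<RenderPipeline> for A {
//         type Key = (
//             <X as Specializer<RenderPipeline>>::Key,
//             <Y as Specializer<RenderPipeline>>::Key,
//         );
//
//         fn specialize(
//             &self,
//             key: Self::Key,
//             descriptor: &mut <RenderPipeline as Specializable>::Descriptor,
//         ) -> Result<Canonical<Self::Key>, BevyError> {
//             let key0 = <X as Specializer<RenderPipeline>>::specialize(&self.x, key.0, descriptor)?;
//             let key1 = <Y as Specializer<RenderPipeline>>::specialize(&self.y, key.1, descriptor)?;
//             Ok((key0, key1))
//         }
//     }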
pub fn impl_specializer(input: TokenStream) -> TokenStream {
    let bevy_render_path: Path = crate::bevy_render_path();
    let specialize_path = {
        let mut path = bevy_render_path.clone();
        path.segments.push(format_ident!("render_resource").into());
        path
    };

    let ecs_path = crate::bevy_ecs_path();

    let ast = parse_macro_input!(input as DeriveInput);
    let targets = guard!(get_specialize_targets(&ast, "Specializer"));
    let fields = guard!(get_struct_fields(&ast.data, "Specializer"));
    let field_info = guard!(get_field_info(fields, &targets));

    let key_idents: Vec<Option<Ident>> = field_info
        .iter()
        .enumerate()
        .map(|(i, field_info)| field_info.key_ident(format_ident!("key{i}")))
        .collect();
    let key_tuple_idents: Vec<Ident> = key_idents.iter().flatten().cloned().collect();
    let ignore_pat: Pat = parse_quote!(_);
    let key_patterns: Vec<Pat> = key_idents
        .iter()
        .map(|key_ident| match key_ident {
            Some(key_ident) => parse_quote!(#key_ident),
            None => ignore_pat.clone(),
        })
        .collect();

    match targets {
        SpecializeImplTargets::All => impl_specialize_all(
            &specialize_path,
            &ecs_path,
            &ast,
            &field_info,
            &key_patterns,
            &key_tuple_idents,
        ),
        SpecializeImplTargets::Specific(targets) => targets
            .iter()
            .map(|target| {
                impl_specialize_specific(
                    &specialize_path,
                    &ecs_path,
                    &ast,
                    &field_info,
                    target,
                    &key_patterns,
                    &key_tuple_idents,
                )
            })
            .collect(),
    }
}

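// `#[specialize(all)]`: emit a single impl generic over every
// `T: Specializable`, adding one `where` predicate per field requiring
// `FieldTy: Specializer<T>` (with an extra `Key: Default` bound for
// `#[key(default)]` fields).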
fn impl_specialize_all(
    specialize_path: &Path,
    ecs_path: &Path,
    ast: &DeriveInput,
    field_info: &[FieldInfo],
    key_patterns: &[Pat],
    key_tuple_idents: &[Ident],
) -> TokenStream {
    let target_path = Path::from(format_ident!("T"));
    let key_elems: Vec<Type> = field_info
        .iter()
        .filter_map(|field_info| field_info.key_ty(specialize_path, &target_path))
        .collect();
    let specialize_exprs: Vec<Expr> = field_info
        .iter()
        .map(|field_info| field_info.specialize_expr(specialize_path, &target_path))
        .collect();

    let struct_name = &ast.ident;
    let mut generics = ast.generics.clone();
    generics.params.insert(
        0,
        parse_quote!(#target_path: #specialize_path::Specializable),
    );

    if !field_info.is_empty() {
        let where_clause = generics.make_where_clause();
        for field in field_info {
            where_clause
                .predicates
                .push(field.specialize_predicate(specialize_path, &target_path));
        }
    }

    let (_, type_generics, _) = ast.generics.split_for_impl();
    let (impl_generics, _, where_clause) = &generics.split_for_impl();

    TokenStream::from(quote! {
        impl #impl_generics #specialize_path::Specializer<#target_path> for #struct_name #type_generics #where_clause {
            type Key = (#(#key_elems),*);

            fn specialize(
                &self,
                key: Self::Key,
                descriptor: &mut <#target_path as #specialize_path::Specializable>::Descriptor
            ) -> #FQResult<#specialize_path::Canonical<Self::Key>, #ecs_path::error::BevyError> {
                #(let #key_patterns = #specialize_exprs?;)*
                #FQResult::Ok((#(#key_tuple_idents),*))
            }
        }
    })
}

fn impl_specialize_specific(
327
specialize_path: &Path,
328
ecs_path: &Path,
329
ast: &DeriveInput,
330
field_info: &[FieldInfo],
331
target_path: &Path,
332
key_patterns: &[Pat],
333
key_tuple_idents: &[Ident],
334
) -> TokenStream {
335
let key_elems: Vec<Type> = field_info
336
.iter()
337
.filter_map(|field_info| field_info.key_ty(specialize_path, target_path))
338
.collect();
339
let specialize_exprs: Vec<Expr> = field_info
340
.iter()
341
.map(|field_info| field_info.specialize_expr(specialize_path, target_path))
342
.collect();
343
344
let struct_name = &ast.ident;
345
let (impl_generics, type_generics, where_clause) = &ast.generics.split_for_impl();
346
347
TokenStream::from(quote! {
348
impl #impl_generics #specialize_path::Specializer<#target_path> for #struct_name #type_generics #where_clause {
349
type Key = (#(#key_elems),*);
350
351
fn specialize(
352
&self,
353
key: Self::Key,
354
descriptor: &mut <#target_path as #specialize_path::Specializable>::Descriptor
355
) -> #FQResult<#specialize_path::Canonical<Self::Key>, #ecs_path::error::BevyError> {
356
#(let #key_patterns = #specialize_exprs?;)*
357
#FQResult::Ok((#(#key_tuple_idents),*))
358
}
359
}
360
})
361
}
362
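// Generates a `SpecializerKey` impl marking the annotated type as already
// canonical. For a hypothetical input type `MyPipelineKey` (name invented), the
// emitted impl is roughly:
//
//     impl SpecializerKey for MyPipelineKey {
//         const IS_CANONICAL: bool = true;
//         type Canonical = Self;
//     }
//
// with `SpecializerKey` path-qualified through the resolved `bevy_render` path
// and its `render_resource` module.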
pub fn impl_specializer_key(input: TokenStream) -> TokenStream {
    let bevy_render_path: Path = crate::bevy_render_path();
    let specialize_path = {
        let mut path = bevy_render_path.clone();
        path.segments.push(format_ident!("render_resource").into());
        path
    };

    let ast = parse_macro_input!(input as DeriveInput);
    let ident = ast.ident;
    TokenStream::from(quote!(
        impl #specialize_path::SpecializerKey for #ident {
            const IS_CANONICAL: bool = true;
            type Canonical = Self;
        }
    ))
}