1use alloc::{
2 borrow::Cow::{self, Borrowed},
3 boxed::Box,
4 format,
5 string::{String, ToString as _},
6 sync::Arc,
7 vec,
8 vec::Vec,
9};
10use core::{
11 error::Error,
12 fmt,
13 future::ready,
14 ops::{Deref, Range},
15 pin::Pin,
16 ptr::NonNull,
17 slice,
18};
19
20use arrayvec::ArrayVec;
21use smallvec::SmallVec;
22use wgc::{
23 command::bundle_ffi::*, error::ContextErrorSource, pipeline::CreateShaderModuleError,
24 resource::BlasPrepareCompactResult,
25};
26use wgt::{
27 error::{ErrorType, WebGpuError},
28 WasmNotSendSync,
29};
30
31use crate::util::Mutex;
32use crate::{
33 api,
34 dispatch::{self, BlasCompactCallback, BufferMappedRangeInterface},
35 BindingResource, Blas, BufferBinding, BufferDescriptor, CompilationInfo, CompilationMessage,
36 CompilationMessageType, ErrorSource, Features, Label, LoadOp, MapMode, Operations,
37 ShaderSource, SurfaceTargetUnsafe, TextureDescriptor, Tlas,
38};
39
/// The `wgpu-core`-backed context implementation.
///
/// A thin, cheaply clonable wrapper around the shared [`wgc::global::Global`]
/// that owns all backend state; clones share the same global through the `Arc`.
#[derive(Clone)]
pub struct ContextWgpuCore(Arc<wgc::global::Global>);
42
impl Drop for ContextWgpuCore {
    fn drop(&mut self) {
        // Intentionally empty: the inner `Arc<Global>` tears everything down
        // when the last clone is dropped. NOTE(review): an explicit empty
        // `Drop` impl still changes drop-check/partial-move semantics for the
        // type — presumably deliberate; confirm before removing.
    }
}
48
49impl fmt::Debug for ContextWgpuCore {
50 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
51 f.debug_struct("ContextWgpuCore")
52 .field("type", &"Native")
53 .finish()
54 }
55}
56
impl ContextWgpuCore {
    /// Builds a context around a pre-existing HAL instance for backend `A`.
    ///
    /// # Safety
    ///
    /// `hal_instance` must satisfy the contract documented on
    /// `wgc::global::Global::from_hal_instance`.
    pub unsafe fn from_hal_instance<A: wgc::hal_api::HalApi>(hal_instance: A::Instance) -> Self {
        Self(unsafe {
            Arc::new(wgc::global::Global::from_hal_instance::<A>(
                "wgpu",
                hal_instance,
            ))
        })
    }

    /// Returns the underlying HAL instance for backend `A`, if any.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::instance_as_hal` for the caller contract.
    pub unsafe fn instance_as_hal<A: wgc::hal_api::HalApi>(&self) -> Option<&A::Instance> {
        unsafe { self.0.instance_as_hal::<A>() }
    }

    /// Wraps an already-constructed wgpu-core instance in a context.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::from_instance` for the caller contract.
    pub unsafe fn from_core_instance(core_instance: wgc::instance::Instance) -> Self {
        Self(unsafe { Arc::new(wgc::global::Global::from_instance(core_instance)) })
    }

    /// Lists the ids of all adapters available on the requested `backends`.
    #[cfg(wgpu_core)]
    pub fn enumerate_adapters(&self, backends: wgt::Backends) -> Vec<wgc::id::AdapterId> {
        self.0.enumerate_adapters(backends)
    }

    /// Registers an externally created HAL adapter and returns its new id.
    ///
    /// # Safety
    ///
    /// `hal_adapter` must be valid for this instance; see
    /// `wgc::global::Global::create_adapter_from_hal`.
    pub unsafe fn create_adapter_from_hal<A: wgc::hal_api::HalApi>(
        &self,
        hal_adapter: hal::ExposedAdapter<A>,
    ) -> wgc::id::AdapterId {
        unsafe { self.0.create_adapter_from_hal(hal_adapter.into(), None) }
    }

    /// Borrows the HAL adapter behind `adapter`, if it belongs to backend `A`.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::adapter_as_hal` for the caller contract.
    pub unsafe fn adapter_as_hal<A: wgc::hal_api::HalApi>(
        &self,
        adapter: &CoreAdapter,
    ) -> Option<impl Deref<Target = A::Adapter> + WasmNotSendSync> {
        unsafe { self.0.adapter_as_hal::<A>(adapter.id) }
    }

    /// Borrows the HAL buffer behind `buffer`, if it belongs to backend `A`.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::buffer_as_hal` for the caller contract.
    pub unsafe fn buffer_as_hal<A: wgc::hal_api::HalApi>(
        &self,
        buffer: &CoreBuffer,
    ) -> Option<impl Deref<Target = A::Buffer>> {
        unsafe { self.0.buffer_as_hal::<A>(buffer.id) }
    }

    /// Registers an externally opened HAL device (and its queue) and wraps the
    /// resulting ids in `CoreDevice`/`CoreQueue` handles that share one error sink.
    ///
    /// # Errors
    ///
    /// Propagates wgpu-core's device-creation error as `RequestDeviceError`.
    ///
    /// # Safety
    ///
    /// `hal_device` must have been opened from `adapter`; see
    /// `wgc::global::Global::create_device_from_hal`.
    pub unsafe fn create_device_from_hal<A: wgc::hal_api::HalApi>(
        &self,
        adapter: &CoreAdapter,
        hal_device: hal::OpenDevice<A>,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Result<(CoreDevice, CoreQueue), crate::RequestDeviceError> {
        // Tracing is currently unsupported; warn loudly instead of failing.
        if !matches!(desc.trace, wgt::Trace::Off) {
            log::error!(
                "
                Feature 'trace' has been removed temporarily; \
                see https://github.com/gfx-rs/wgpu/issues/5974. \
                The `trace` parameter will have no effect."
            );
        }

        let (device_id, queue_id) = unsafe {
            self.0.create_device_from_hal(
                adapter.id,
                hal_device.into(),
                &desc.map_label(|l| l.map(Borrowed)),
                None,
                None,
            )
        }?;
        // Device and queue deliberately share the same error sink so errors
        // from either surface through the same scope stack.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.clone(),
            id: queue_id,
            error_sink,
        };
        Ok((device, queue))
    }

    /// Registers an externally created HAL texture on `device` and returns a
    /// `CoreTexture` handle. Creation errors are routed to the device's error
    /// sink rather than returned; the handle stays usable as an error id.
    ///
    /// # Safety
    ///
    /// `hal_texture` must match `desc` and belong to `device`'s backend; see
    /// `wgc::global::Global::create_texture_from_hal`.
    pub unsafe fn create_texture_from_hal<A: wgc::hal_api::HalApi>(
        &self,
        hal_texture: A::Texture,
        device: &CoreDevice,
        desc: &TextureDescriptor<'_>,
    ) -> CoreTexture {
        let descriptor = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
        let (id, error) = unsafe {
            self.0
                .create_texture_from_hal(Box::new(hal_texture), device.id, &descriptor, None)
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_texture_from_hal",
            );
        }
        CoreTexture {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Registers an externally created HAL buffer on `device`; same error
    /// reporting scheme as [`Self::create_texture_from_hal`].
    ///
    /// # Safety
    ///
    /// `hal_buffer` must match `desc` and belong to `device`'s backend; see
    /// `wgc::global::Global::create_buffer_from_hal`.
    pub unsafe fn create_buffer_from_hal<A: wgc::hal_api::HalApi>(
        &self,
        hal_buffer: A::Buffer,
        device: &CoreDevice,
        desc: &BufferDescriptor<'_>,
    ) -> CoreBuffer {
        let (id, error) = unsafe {
            self.0.create_buffer_from_hal::<A>(
                hal_buffer,
                device.id,
                &desc.map_label(|l| l.map(Borrowed)),
                None,
            )
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_buffer_from_hal",
            );
        }
        CoreBuffer {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Borrows the HAL device behind `device`, if it belongs to backend `A`.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::device_as_hal` for the caller contract.
    pub unsafe fn device_as_hal<A: wgc::hal_api::HalApi>(
        &self,
        device: &CoreDevice,
    ) -> Option<impl Deref<Target = A::Device>> {
        unsafe { self.0.device_as_hal::<A>(device.id) }
    }

    /// Borrows the HAL surface behind `surface`, if it belongs to backend `A`.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::surface_as_hal` for the caller contract.
    pub unsafe fn surface_as_hal<A: wgc::hal_api::HalApi>(
        &self,
        surface: &CoreSurface,
    ) -> Option<impl Deref<Target = A::Surface>> {
        unsafe { self.0.surface_as_hal::<A>(surface.id) }
    }

    /// Borrows the HAL texture behind `texture`, if it belongs to backend `A`.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::texture_as_hal` for the caller contract.
    pub unsafe fn texture_as_hal<A: wgc::hal_api::HalApi>(
        &self,
        texture: &CoreTexture,
    ) -> Option<impl Deref<Target = A::Texture>> {
        unsafe { self.0.texture_as_hal::<A>(texture.id) }
    }

    /// Borrows the HAL texture view behind `texture_view`, if it belongs to
    /// backend `A`.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::texture_view_as_hal` for the caller contract.
    pub unsafe fn texture_view_as_hal<A: wgc::hal_api::HalApi>(
        &self,
        texture_view: &CoreTextureView,
    ) -> Option<impl Deref<Target = A::TextureView>> {
        unsafe { self.0.texture_view_as_hal::<A>(texture_view.id) }
    }

    /// Runs `hal_command_encoder_callback` with mutable access to the HAL
    /// command encoder (or `None` if the backend doesn't match), returning the
    /// callback's result.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::command_encoder_as_hal_mut` for the caller
    /// contract.
    pub unsafe fn command_encoder_as_hal_mut<
        A: wgc::hal_api::HalApi,
        F: FnOnce(Option<&mut A::CommandEncoder>) -> R,
        R,
    >(
        &self,
        command_encoder: &CoreCommandEncoder,
        hal_command_encoder_callback: F,
    ) -> R {
        unsafe {
            self.0.command_encoder_as_hal_mut::<A, F, R>(
                command_encoder.id,
                hal_command_encoder_callback,
            )
        }
    }

    /// Borrows the HAL acceleration structure behind `blas`, if it belongs to
    /// backend `A`.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::blas_as_hal` for the caller contract.
    pub unsafe fn blas_as_hal<A: wgc::hal_api::HalApi>(
        &self,
        blas: &CoreBlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.blas_as_hal::<A>(blas.id) }
    }

    /// Borrows the HAL acceleration structure behind `tlas`, if it belongs to
    /// backend `A`.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::tlas_as_hal` for the caller contract.
    pub unsafe fn tlas_as_hal<A: wgc::hal_api::HalApi>(
        &self,
        tlas: &CoreTlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.tlas_as_hal::<A>(tlas.id) }
    }

    /// Produces wgpu-core's resource/usage report for debugging and telemetry.
    pub fn generate_report(&self) -> wgc::global::GlobalReport {
        self.0.generate_report()
    }

    /// Slow path shared by every error-reporting helper: wraps `source` in a
    /// `ContextError` (carrying the calling function and resource label),
    /// classifies it per WebGPU error type, and pushes it into the sink.
    /// Marked `#[cold]`/`#[inline(never)]` to keep error handling off the hot path.
    #[cold]
    #[track_caller]
    #[inline(never)]
    fn handle_error_inner(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        error_type: ErrorType,
        source: ContextErrorSource,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let source: ErrorSource = Box::new(wgc::error::ContextError {
            fn_ident,
            source,
            label: label.unwrap_or_default().to_string(),
        });
        let mut sink = sink_mutex.lock();
        // Formatting the whole error chain is expensive; build the description
        // lazily so arms that don't carry one (OutOfMemory) skip the work.
        let description = || self.format_error(&*source);
        let error = match error_type {
            ErrorType::Internal => {
                let description = description();
                crate::Error::Internal {
                    source,
                    description,
                }
            }
            ErrorType::OutOfMemory => crate::Error::OutOfMemory { source },
            ErrorType::Validation => {
                let description = description();
                crate::Error::Validation {
                    source,
                    description,
                }
            }
            // Device loss is reported via the device-lost callback, not the
            // error-scope machinery.
            ErrorType::DeviceLost => return,
        };
        sink.handle_error(error);
    }

    /// Reports `source` (tagged with the given resource `label`) into the sink.
    #[inline]
    #[track_caller]
    fn handle_error(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), label, fn_ident)
    }

    /// Like [`Self::handle_error`] for operations with no associated label.
    #[inline]
    #[track_caller]
    fn handle_error_nolabel(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), None, fn_ident)
    }

    /// For errors that cannot be routed to a sink: formats the cause and panics.
    #[track_caller]
    #[cold]
    fn handle_error_fatal(
        &self,
        cause: impl Error + WasmNotSendSync + 'static,
        operation: &'static str,
    ) -> ! {
        panic!("Error in {operation}: {f}", f = self.format_error(&cause));
    }

    /// Renders `err` and its entire `source()` chain into an indented,
    /// human-readable report, flattening wgpu-core `MultiError`s into their
    /// individual entries.
    #[inline(never)]
    fn format_error(&self, err: &(dyn Error + 'static)) -> String {
        let mut output = String::new();
        let mut level = 1;

        // Depth-first walk; `level` controls the indentation of each cause.
        fn print_tree(output: &mut String, level: &mut usize, e: &(dyn Error + 'static)) {
            let mut print = |e: &(dyn Error + 'static)| {
                use core::fmt::Write;
                writeln!(output, "{}{}", " ".repeat(*level * 2), e).unwrap();

                if let Some(e) = e.source() {
                    *level += 1;
                    print_tree(output, level, e);
                    *level -= 1;
                }
            };
            if let Some(multi) = e.downcast_ref::<wgc::error::MultiError>() {
                for e in multi.errors() {
                    print(e);
                }
            } else {
                print(e);
            }
        }

        print_tree(&mut output, &mut level, err);

        format!("Validation Error\n\nCaused by:\n{output}")
    }

    /// Borrows the HAL queue behind `queue`, if it belongs to backend `A`.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::queue_as_hal` for the caller contract.
    pub unsafe fn queue_as_hal<A: wgc::hal_api::HalApi>(
        &self,
        queue: &CoreQueue,
    ) -> Option<impl Deref<Target = A::Queue> + WasmNotSendSync> {
        unsafe { self.0.queue_as_hal::<A>(queue.id) }
    }
}
373
374fn map_buffer_copy_view(view: crate::TexelCopyBufferInfo<'_>) -> wgc::command::TexelCopyBufferInfo {
375 wgc::command::TexelCopyBufferInfo {
376 buffer: view.buffer.inner.as_core().id,
377 layout: view.layout,
378 }
379}
380
381fn map_texture_copy_view(
382 view: crate::TexelCopyTextureInfo<'_>,
383) -> wgc::command::TexelCopyTextureInfo {
384 wgc::command::TexelCopyTextureInfo {
385 texture: view.texture.inner.as_core().id,
386 mip_level: view.mip_level,
387 origin: view.origin,
388 aspect: view.aspect,
389 }
390}
391
392#[cfg_attr(not(webgl), expect(unused))]
393fn map_texture_tagged_copy_view(
394 view: crate::CopyExternalImageDestInfo<&api::Texture>,
395) -> wgc::command::CopyExternalImageDestInfo {
396 wgc::command::CopyExternalImageDestInfo {
397 texture: view.texture.inner.as_core().id,
398 mip_level: view.mip_level,
399 origin: view.origin,
400 aspect: view.aspect,
401 color_space: view.color_space,
402 premultiplied_alpha: view.premultiplied_alpha,
403 }
404}
405
406fn map_load_op<V: Copy>(load: &LoadOp<V>) -> LoadOp<Option<V>> {
407 match load {
408 LoadOp::Clear(clear_value) => LoadOp::Clear(Some(*clear_value)),
409 LoadOp::Load => LoadOp::Load,
410 }
411}
412
413fn map_pass_channel<V: Copy>(ops: Option<&Operations<V>>) -> wgc::command::PassChannel<Option<V>> {
414 match ops {
415 Some(&Operations { load, store }) => wgc::command::PassChannel {
416 load_op: Some(map_load_op(&load)),
417 store_op: Some(store),
418 read_only: false,
419 },
420 None => wgc::command::PassChannel {
421 load_op: None,
422 store_op: None,
423 read_only: true,
424 },
425 }
426}
427
/// A wgpu-core surface id plus the bookkeeping the wgpu layer needs.
#[derive(Debug)]
pub struct CoreSurface {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SurfaceId,
    // The device this surface was last configured with, if any; set during
    // configuration and read when presenting/acquiring frames.
    configured_device: Mutex<Option<wgc::id::DeviceId>>,
    // Error sink of the configured device; `None` until configured.
    error_sink: Mutex<Option<ErrorSink>>,
}
439
// The structs below are the wgpu-core-backed handle types: each pairs a raw
// wgpu-core id with the shared context (and, where operations can fail
// asynchronously, the owning device's error sink).

/// Handle to a wgpu-core adapter.
#[derive(Debug)]
pub struct CoreAdapter {
    pub(crate) context: ContextWgpuCore,
    pub(crate) id: wgc::id::AdapterId,
}

/// Handle to a wgpu-core device, plus the features it was created with.
#[derive(Debug)]
pub struct CoreDevice {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::DeviceId,
    error_sink: ErrorSink,
    // Cached at creation; consulted for feature-gated fast paths
    // (e.g. binding arrays in `create_bind_group`).
    features: Features,
}

/// Handle to a wgpu-core buffer.
#[derive(Debug)]
pub struct CoreBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BufferId,
    error_sink: ErrorSink,
}

/// Handle to a wgpu-core shader module, with the compilation messages
/// captured at creation time.
#[derive(Debug)]
pub struct CoreShaderModule {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ShaderModuleId,
    compilation_info: CompilationInfo,
}

/// Handle to a wgpu-core bind group layout.
#[derive(Debug)]
pub struct CoreBindGroupLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupLayoutId,
}

/// Handle to a wgpu-core bind group.
#[derive(Debug)]
pub struct CoreBindGroup {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupId,
}

/// Handle to a wgpu-core texture.
#[derive(Debug)]
pub struct CoreTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureId,
    error_sink: ErrorSink,
}

/// Handle to a wgpu-core texture view.
#[derive(Debug)]
pub struct CoreTextureView {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureViewId,
}

/// Handle to a wgpu-core sampler.
#[derive(Debug)]
pub struct CoreSampler {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SamplerId,
}

/// Handle to a wgpu-core query set.
#[derive(Debug)]
pub struct CoreQuerySet {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QuerySetId,
}

/// Handle to a wgpu-core pipeline layout.
#[derive(Debug)]
pub struct CorePipelineLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineLayoutId,
}

/// Handle to a wgpu-core pipeline cache.
#[derive(Debug)]
pub struct CorePipelineCache {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineCacheId,
}

/// Handle to a finished wgpu-core command buffer.
#[derive(Debug)]
pub struct CoreCommandBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandBufferId,
}

/// An open wgpu-core render bundle encoder; owns the encoder state directly
/// rather than an id.
#[derive(Debug)]
pub struct CoreRenderBundleEncoder {
    pub(crate) context: ContextWgpuCore,
    encoder: wgc::command::RenderBundleEncoder,
    id: crate::cmp::Identifier,
}

/// Handle to a finished wgpu-core render bundle.
#[derive(Debug)]
pub struct CoreRenderBundle {
    id: wgc::id::RenderBundleId,
}

/// Handle to a wgpu-core queue; shares its error sink with the device.
#[derive(Debug)]
pub struct CoreQueue {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QueueId,
    error_sink: ErrorSink,
}

/// Handle to a wgpu-core compute pipeline.
#[derive(Debug)]
pub struct CoreComputePipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ComputePipelineId,
    error_sink: ErrorSink,
}

/// Handle to a wgpu-core render pipeline.
#[derive(Debug)]
pub struct CoreRenderPipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::RenderPipelineId,
    error_sink: ErrorSink,
}

/// An open wgpu-core compute pass; owns the pass state directly.
#[derive(Debug)]
pub struct CoreComputePass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::ComputePass,
    error_sink: ErrorSink,
    id: crate::cmp::Identifier,
}

/// An open wgpu-core render pass; owns the pass state directly.
#[derive(Debug)]
pub struct CoreRenderPass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::RenderPass,
    error_sink: ErrorSink,
    id: crate::cmp::Identifier,
}

/// Handle to a wgpu-core command encoder.
#[derive(Debug)]
pub struct CoreCommandEncoder {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandEncoderId,
    error_sink: ErrorSink,
    // Whether the encoder can still record commands (i.e. `finish` has not
    // been called yet).
    open: bool,
}

/// Handle to a wgpu-core bottom-level acceleration structure.
#[derive(Debug)]
pub struct CoreBlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BlasId,
    error_sink: ErrorSink,
}

/// Handle to a wgpu-core top-level acceleration structure.
#[derive(Debug)]
pub struct CoreTlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TlasId,
}

/// Per-frame data returned alongside a surface texture, used at present time.
#[derive(Debug)]
pub struct CoreSurfaceOutputDetail {
    context: ContextWgpuCore,
    surface_id: wgc::id::SurfaceId,
}
599
/// Shared, lockable handle to a device's error sink.
type ErrorSink = Arc<Mutex<ErrorSinkRaw>>;

/// One pushed error scope: remembers only the *first* error matching `filter`.
struct ErrorScope {
    error: Option<crate::Error>,
    filter: crate::ErrorFilter,
}

/// The sink proper: a stack of error scopes plus the optional handler invoked
/// for errors no scope captures.
struct ErrorSinkRaw {
    scopes: Vec<ErrorScope>,
    uncaptured_handler: Option<Box<dyn crate::UncapturedErrorHandler>>,
}
611
612impl ErrorSinkRaw {
613 fn new() -> ErrorSinkRaw {
614 ErrorSinkRaw {
615 scopes: Vec::new(),
616 uncaptured_handler: None,
617 }
618 }
619
620 #[track_caller]
621 fn handle_error(&mut self, err: crate::Error) {
622 let filter = match err {
623 crate::Error::OutOfMemory { .. } => crate::ErrorFilter::OutOfMemory,
624 crate::Error::Validation { .. } => crate::ErrorFilter::Validation,
625 crate::Error::Internal { .. } => crate::ErrorFilter::Internal,
626 };
627 match self
628 .scopes
629 .iter_mut()
630 .rev()
631 .find(|scope| scope.filter == filter)
632 {
633 Some(scope) => {
634 if scope.error.is_none() {
635 scope.error = Some(err);
636 }
637 }
638 None => {
639 if let Some(custom_handler) = self.uncaptured_handler.as_ref() {
640 (custom_handler)(err);
641 } else {
642 default_error_handler(err);
644 }
645 }
646 }
647 }
648}
649
650impl fmt::Debug for ErrorSinkRaw {
651 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
652 write!(f, "ErrorSink")
653 }
654}
655
/// Fallback for errors that escape every error scope when no custom handler
/// is registered: logs, then treats the error as fatal by panicking.
#[track_caller]
fn default_error_handler(err: crate::Error) {
    log::error!("Handling wgpu errors as fatal by default");
    panic!("wgpu error: {err}\n");
}
661
impl From<CreateShaderModuleError> for CompilationInfo {
    /// Converts a shader creation error into WebGPU-style compilation info.
    ///
    /// Front-end parse/validation errors carry their own message conversions;
    /// device errors and id-generation failures produce no messages; anything
    /// else is surfaced as a single error message with no source location.
    fn from(value: CreateShaderModuleError) -> Self {
        match value {
            #[cfg(feature = "wgsl")]
            CreateShaderModuleError::Parsing(v) => v.into(),
            #[cfg(feature = "glsl")]
            CreateShaderModuleError::ParsingGlsl(v) => v.into(),
            #[cfg(feature = "spirv")]
            CreateShaderModuleError::ParsingSpirV(v) => v.into(),
            CreateShaderModuleError::Validation(v) => v.into(),
            // Device errors and generation failures are not compilation
            // problems, so they contribute no messages.
            CreateShaderModuleError::Device(_) | CreateShaderModuleError::Generation => {
                CompilationInfo {
                    messages: Vec::new(),
                }
            }
            // Everything else: report the error's display text as one message.
            _ => CompilationInfo {
                messages: vec![CompilationMessage {
                    message: value.to_string(),
                    message_type: CompilationMessageType::Error,
                    location: None,
                }],
            },
        }
    }
}
690
/// Staging buffer handed out by `Queue::write_buffer_with`, pairing the
/// wgpu-core staging buffer id with its mapped memory.
#[derive(Debug)]
pub struct CoreQueueWriteBuffer {
    buffer_id: wgc::id::StagingBufferId,
    mapping: CoreBufferMappedRange,
}

/// A raw view of mapped buffer memory: base pointer plus length in bytes.
#[derive(Debug)]
pub struct CoreBufferMappedRange {
    ptr: NonNull<u8>,
    size: usize,
}
702
// SAFETY: NOTE(review): the raw `NonNull<u8>` is what blocks the auto-impls;
// this relies on wgpu-core keeping the mapped memory valid and safe to access
// from other threads for the mapping's lifetime — confirm against wgpu-core's
// buffer-mapping guarantees.
#[cfg(send_sync)]
unsafe impl Send for CoreBufferMappedRange {}
// SAFETY: NOTE(review): same reasoning as the `Send` impl directly above.
#[cfg(send_sync)]
unsafe impl Sync for CoreBufferMappedRange {}
707
impl Drop for CoreBufferMappedRange {
    fn drop(&mut self) {
        // Intentionally empty: the mapping's memory is owned and released by
        // wgpu-core (via buffer unmap / staging-buffer consumption), not here.
    }
}
714
// Identity-based equality/ordering/hashing for every handle type: the context
// compares by the address of its inner `Arc`, and each resource handle proxies
// to its wgpu-core id (or mapped pointer, for the mapped-range types).
crate::cmp::impl_eq_ord_hash_arc_address!(ContextWgpuCore => .0);
crate::cmp::impl_eq_ord_hash_proxy!(CoreAdapter => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreDevice => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueue => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreShaderModule => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroupLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroup => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTextureView => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSampler => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQuerySet => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineCache => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundleEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundle => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurface => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurfaceOutputDetail => .surface_id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueueWriteBuffer => .mapping.ptr);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBufferMappedRange => .ptr);
743
impl dispatch::InstanceInterface for ContextWgpuCore {
    /// Creates a fresh wgpu-core global from an instance descriptor.
    fn new(desc: &wgt::InstanceDescriptor) -> Self
    where
        Self: Sized,
    {
        Self(Arc::new(wgc::global::Global::new("wgpu", desc)))
    }

    /// Creates a surface from one of the raw, platform-specific targets.
    /// Each variant maps to the matching wgpu-core entry point; availability
    /// of the non-raw-handle variants is platform/backend gated.
    ///
    /// # Safety
    ///
    /// The raw handles/pointers in `target` must be valid for the lifetime of
    /// the surface, per the `SurfaceTargetUnsafe` contract.
    unsafe fn create_surface(
        &self,
        target: crate::api::SurfaceTargetUnsafe,
    ) -> Result<dispatch::DispatchSurface, crate::CreateSurfaceError> {
        let id = match target {
            SurfaceTargetUnsafe::RawHandle {
                raw_display_handle,
                raw_window_handle,
            } => unsafe {
                self.0
                    .instance_create_surface(raw_display_handle, raw_window_handle, None)
            },

            #[cfg(all(unix, not(target_vendor = "apple"), not(target_family = "wasm")))]
            SurfaceTargetUnsafe::Drm {
                fd,
                plane,
                connector_id,
                width,
                height,
                refresh_rate,
            } => unsafe {
                self.0.instance_create_surface_from_drm(
                    fd,
                    plane,
                    connector_id,
                    width,
                    height,
                    refresh_rate,
                    None,
                )
            },

            #[cfg(metal)]
            SurfaceTargetUnsafe::CoreAnimationLayer(layer) => unsafe {
                self.0.instance_create_surface_metal(layer, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::CompositionVisual(visual) => unsafe {
                self.0.instance_create_surface_from_visual(visual, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SurfaceHandle(surface_handle) => unsafe {
                self.0
                    .instance_create_surface_from_surface_handle(surface_handle, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SwapChainPanel(swap_chain_panel) => unsafe {
                self.0
                    .instance_create_surface_from_swap_chain_panel(swap_chain_panel, None)
            },
        }?;

        // Device association and error sink are filled in at configure time.
        Ok(CoreSurface {
            context: self.clone(),
            id,
            configured_device: Mutex::default(),
            error_sink: Mutex::default(),
        }
        .into())
    }

    /// Picks an adapter matching `options`. wgpu-core resolves this
    /// synchronously, so the result is returned as an already-ready future.
    fn request_adapter(
        &self,
        options: &crate::api::RequestAdapterOptions<'_, '_>,
    ) -> Pin<Box<dyn dispatch::RequestAdapterFuture>> {
        let id = self.0.request_adapter(
            &wgc::instance::RequestAdapterOptions {
                power_preference: options.power_preference,
                force_fallback_adapter: options.force_fallback_adapter,
                compatible_surface: options
                    .compatible_surface
                    .map(|surface| surface.inner.as_core().id),
            },
            wgt::Backends::all(),
            None,
        );
        let adapter = id.map(|id| {
            let core = CoreAdapter {
                context: self.clone(),
                id,
            };
            let generic: dispatch::DispatchAdapter = core.into();
            generic
        });
        Box::pin(ready(adapter))
    }

    /// Polls every device on this instance; returns whether all queues are
    /// empty. Poll failures are treated as fatal (panics).
    fn poll_all_devices(&self, force_wait: bool) -> bool {
        match self.0.poll_all_devices(force_wait) {
            Ok(all_queue_empty) => all_queue_empty,
            Err(err) => self.handle_error_fatal(err, "Instance::poll_all_devices"),
        }
    }

    /// Reports the WGSL language extensions implemented by the bundled naga
    /// front end, translated into the wgpu-facing flag set.
    #[cfg(feature = "wgsl")]
    fn wgsl_language_features(&self) -> crate::WgslLanguageFeatures {
        use wgc::naga::front::wgsl::ImplementedLanguageExtension;
        ImplementedLanguageExtension::all().iter().copied().fold(
            crate::WgslLanguageFeatures::empty(),
            |acc, wle| {
                acc | match wle {
                    ImplementedLanguageExtension::ReadOnlyAndReadWriteStorageTextures => {
                        crate::WgslLanguageFeatures::ReadOnlyAndReadWriteStorageTextures
                    }
                    ImplementedLanguageExtension::Packed4x8IntegerDotProduct => {
                        crate::WgslLanguageFeatures::Packed4x8IntegerDotProduct
                    }
                    ImplementedLanguageExtension::PointerCompositeAccess => {
                        crate::WgslLanguageFeatures::PointerCompositeAccess
                    }
                }
            },
        )
    }
}
871
impl dispatch::AdapterInterface for CoreAdapter {
    /// Opens a device (and its queue) on this adapter. wgpu-core resolves this
    /// synchronously, so the result is wrapped in an already-ready future.
    /// The device and queue share one error sink.
    fn request_device(
        &self,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Pin<Box<dyn dispatch::RequestDeviceFuture>> {
        // Tracing is currently unsupported; warn loudly instead of failing.
        if !matches!(desc.trace, wgt::Trace::Off) {
            log::error!(
                "
                Feature 'trace' has been removed temporarily; \
                see https://github.com/gfx-rs/wgpu/issues/5974. \
                The `trace` parameter will have no effect."
            );
        }

        let res = self.context.0.adapter_request_device(
            self.id,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
            None,
        );
        let (device_id, queue_id) = match res {
            Ok(ids) => ids,
            Err(err) => {
                return Box::pin(ready(Err(err.into())));
            }
        };
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.context.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.context.clone(),
            id: queue_id,
            error_sink,
        };
        Box::pin(ready(Ok((device.into(), queue.into()))))
    }

    /// Whether this adapter can present to the given surface.
    fn is_surface_supported(&self, surface: &dispatch::DispatchSurface) -> bool {
        let surface = surface.as_core();

        self.context
            .0
            .adapter_is_surface_supported(self.id, surface.id)
    }

    /// Features supported by this adapter.
    fn features(&self) -> crate::Features {
        self.context.0.adapter_features(self.id)
    }

    /// Best-case limits supported by this adapter.
    fn limits(&self) -> crate::Limits {
        self.context.0.adapter_limits(self.id)
    }

    /// Downlevel capability flags for this adapter.
    fn downlevel_capabilities(&self) -> crate::DownlevelCapabilities {
        self.context.0.adapter_downlevel_capabilities(self.id)
    }

    /// Descriptive information (name, backend, vendor, ...) for this adapter.
    fn get_info(&self) -> crate::AdapterInfo {
        self.context.0.adapter_get_info(self.id)
    }

    /// Per-format feature flags supported by this adapter.
    fn get_texture_format_features(
        &self,
        format: crate::TextureFormat,
    ) -> crate::TextureFormatFeatures {
        self.context
            .0
            .adapter_get_texture_format_features(self.id, format)
    }

    /// Current presentation timestamp from the adapter's clock domain.
    fn get_presentation_timestamp(&self) -> crate::PresentationTimestamp {
        self.context.0.adapter_get_presentation_timestamp(self.id)
    }
}
950
951impl Drop for CoreAdapter {
952 fn drop(&mut self) {
953 self.context.0.adapter_drop(self.id)
954 }
955}
956
957impl dispatch::DeviceInterface for CoreDevice {
958 fn features(&self) -> crate::Features {
959 self.context.0.device_features(self.id)
960 }
961
962 fn limits(&self) -> crate::Limits {
963 self.context.0.device_limits(self.id)
964 }
965
    /// Creates a shader module from any of the enabled shader source kinds,
    /// translating to the matching wgpu-core source representation. Creation
    /// errors go to the device's error sink and are also captured as
    /// `CompilationInfo` on the returned handle.
    #[cfg_attr(
        not(any(
            feature = "spirv",
            feature = "glsl",
            feature = "wgsl",
            feature = "naga-ir"
        )),
        expect(unused)
    )]
    fn create_shader_module(
        &self,
        desc: crate::ShaderModuleDescriptor<'_>,
        shader_bound_checks: wgt::ShaderRuntimeChecks,
    ) -> dispatch::DispatchShaderModule {
        let descriptor = wgc::pipeline::ShaderModuleDescriptor {
            label: desc.label.map(Borrowed),
            runtime_checks: shader_bound_checks,
        };
        let source = match desc.source {
            #[cfg(feature = "spirv")]
            ShaderSource::SpirV(ref spv) => {
                let options = naga::front::spv::Options {
                    // NOTE(review): coordinate space is left untouched and
                    // full capability checking is enabled — presumably because
                    // the SPIR-V input is caller-controlled; confirm.
                    adjust_coordinate_space: false,
                    strict_capabilities: true,
                    block_ctx_dump_prefix: None,
                };
                wgc::pipeline::ShaderModuleSource::SpirV(Borrowed(spv), options)
            }
            #[cfg(feature = "glsl")]
            ShaderSource::Glsl {
                ref shader,
                stage,
                defines,
            } => {
                let options = naga::front::glsl::Options {
                    stage,
                    defines: defines
                        .iter()
                        .map(|&(key, value)| (String::from(key), String::from(value)))
                        .collect(),
                };
                wgc::pipeline::ShaderModuleSource::Glsl(Borrowed(shader), options)
            }
            #[cfg(feature = "wgsl")]
            ShaderSource::Wgsl(ref code) => wgc::pipeline::ShaderModuleSource::Wgsl(Borrowed(code)),
            #[cfg(feature = "naga-ir")]
            ShaderSource::Naga(module) => wgc::pipeline::ShaderModuleSource::Naga(module),
            ShaderSource::Dummy(_) => panic!("found `ShaderSource::Dummy`"),
        };
        let (id, error) =
            self.context
                .0
                .device_create_shader_module(self.id, &descriptor, source, None);
        let compilation_info = match error {
            Some(cause) => {
                self.context.handle_error(
                    &self.error_sink,
                    cause.clone(),
                    desc.label,
                    "Device::create_shader_module",
                );
                CompilationInfo::from(cause)
            }
            None => CompilationInfo { messages: vec![] },
        };

        CoreShaderModule {
            context: self.context.clone(),
            id,
            compilation_info,
        }
        .into()
    }
1041
    /// Creates a shader module from a passthrough (backend-native) descriptor,
    /// bypassing naga translation. Errors go to the device's error sink and
    /// are captured as `CompilationInfo` on the returned handle.
    ///
    /// # Safety
    ///
    /// The passthrough source must be valid for the device's backend; see
    /// `wgc::global::Global::device_create_shader_module_passthrough`.
    unsafe fn create_shader_module_passthrough(
        &self,
        desc: &crate::ShaderModuleDescriptorPassthrough<'_>,
    ) -> dispatch::DispatchShaderModule {
        let desc = desc.map_label(|l| l.map(Cow::from));
        let (id, error) = unsafe {
            self.context
                .0
                .device_create_shader_module_passthrough(self.id, &desc, None)
        };

        let compilation_info = match error {
            Some(cause) => {
                self.context.handle_error(
                    &self.error_sink,
                    cause.clone(),
                    desc.label().as_deref(),
                    "Device::create_shader_module_passthrough",
                );
                CompilationInfo::from(cause)
            }
            None => CompilationInfo { messages: vec![] },
        };

        CoreShaderModule {
            context: self.context.clone(),
            id,
            compilation_info,
        }
        .into()
    }
1073
1074 fn create_bind_group_layout(
1075 &self,
1076 desc: &crate::BindGroupLayoutDescriptor<'_>,
1077 ) -> dispatch::DispatchBindGroupLayout {
1078 let descriptor = wgc::binding_model::BindGroupLayoutDescriptor {
1079 label: desc.label.map(Borrowed),
1080 entries: Borrowed(desc.entries),
1081 };
1082 let (id, error) =
1083 self.context
1084 .0
1085 .device_create_bind_group_layout(self.id, &descriptor, None);
1086 if let Some(cause) = error {
1087 self.context.handle_error(
1088 &self.error_sink,
1089 cause,
1090 desc.label,
1091 "Device::create_bind_group_layout",
1092 );
1093 }
1094 CoreBindGroupLayout {
1095 context: self.context.clone(),
1096 id,
1097 }
1098 .into()
1099 }
1100
    /// Creates a bind group, lowering every binding resource to wgpu-core ids.
    ///
    /// Arrayed resources need borrowed id slices that outlive the descriptor,
    /// so this runs two passes: pass 1 flattens all arrayed views/samplers/
    /// buffers into backing `Vec`s; pass 2 hands out non-overlapping subslices
    /// of those `Vec`s in entry order. Errors go to the device's error sink.
    fn create_bind_group(
        &self,
        desc: &crate::BindGroupDescriptor<'_>,
    ) -> dispatch::DispatchBindGroup {
        use wgc::binding_model as bm;

        // Pass 1a: collect arrayed texture views and samplers (feature-gated,
        // since arrays can only appear when the feature is enabled).
        let mut arrayed_texture_views = Vec::new();
        let mut arrayed_samplers = Vec::new();
        if self.features.contains(Features::TEXTURE_BINDING_ARRAY) {
            for entry in desc.entries.iter() {
                if let BindingResource::TextureViewArray(array) = entry.resource {
                    arrayed_texture_views.extend(array.iter().map(|view| view.inner.as_core().id));
                }
                if let BindingResource::SamplerArray(array) = entry.resource {
                    arrayed_samplers.extend(array.iter().map(|sampler| sampler.inner.as_core().id));
                }
            }
        }
        let mut remaining_arrayed_texture_views = &arrayed_texture_views[..];
        let mut remaining_arrayed_samplers = &arrayed_samplers[..];

        // Pass 1b: collect arrayed buffer bindings.
        let mut arrayed_buffer_bindings = Vec::new();
        if self.features.contains(Features::BUFFER_BINDING_ARRAY) {
            for entry in desc.entries.iter() {
                if let BindingResource::BufferArray(array) = entry.resource {
                    arrayed_buffer_bindings.extend(array.iter().map(|binding| bm::BufferBinding {
                        buffer: binding.buffer.inner.as_core().id,
                        offset: binding.offset,
                        size: binding.size,
                    }));
                }
            }
        }
        let mut remaining_arrayed_buffer_bindings = &arrayed_buffer_bindings[..];

        // Pass 2: build the wgpu-core entries, slicing each array's ids off the
        // front of the corresponding `remaining_*` slice (same iteration order
        // as pass 1, so the slices line up).
        let entries = desc
            .entries
            .iter()
            .map(|entry| bm::BindGroupEntry {
                binding: entry.binding,
                resource: match entry.resource {
                    BindingResource::Buffer(BufferBinding {
                        buffer,
                        offset,
                        size,
                    }) => bm::BindingResource::Buffer(bm::BufferBinding {
                        buffer: buffer.inner.as_core().id,
                        offset,
                        size,
                    }),
                    BindingResource::BufferArray(array) => {
                        let slice = &remaining_arrayed_buffer_bindings[..array.len()];
                        remaining_arrayed_buffer_bindings =
                            &remaining_arrayed_buffer_bindings[array.len()..];
                        bm::BindingResource::BufferArray(Borrowed(slice))
                    }
                    BindingResource::Sampler(sampler) => {
                        bm::BindingResource::Sampler(sampler.inner.as_core().id)
                    }
                    BindingResource::SamplerArray(array) => {
                        let slice = &remaining_arrayed_samplers[..array.len()];
                        remaining_arrayed_samplers = &remaining_arrayed_samplers[array.len()..];
                        bm::BindingResource::SamplerArray(Borrowed(slice))
                    }
                    BindingResource::TextureView(texture_view) => {
                        bm::BindingResource::TextureView(texture_view.inner.as_core().id)
                    }
                    BindingResource::TextureViewArray(array) => {
                        let slice = &remaining_arrayed_texture_views[..array.len()];
                        remaining_arrayed_texture_views =
                            &remaining_arrayed_texture_views[array.len()..];
                        bm::BindingResource::TextureViewArray(Borrowed(slice))
                    }
                    BindingResource::AccelerationStructure(acceleration_structure) => {
                        bm::BindingResource::AccelerationStructure(
                            acceleration_structure.inner.as_core().id,
                        )
                    }
                },
            })
            .collect::<Vec<_>>();
        let descriptor = bm::BindGroupDescriptor {
            label: desc.label.as_ref().map(|label| Borrowed(&label[..])),
            layout: desc.layout.inner.as_core().id,
            entries: Borrowed(&entries),
        };

        let (id, error) = self
            .context
            .0
            .device_create_bind_group(self.id, &descriptor, None);
        if let Some(cause) = error {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_bind_group",
            );
        }
        CoreBindGroup {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1208
1209 fn create_pipeline_layout(
1210 &self,
1211 desc: &crate::PipelineLayoutDescriptor<'_>,
1212 ) -> dispatch::DispatchPipelineLayout {
1213 assert!(
1216 desc.bind_group_layouts.len() <= wgc::MAX_BIND_GROUPS,
1217 "Bind group layout count {} exceeds device bind group limit {}",
1218 desc.bind_group_layouts.len(),
1219 wgc::MAX_BIND_GROUPS
1220 );
1221
1222 let temp_layouts = desc
1223 .bind_group_layouts
1224 .iter()
1225 .map(|bgl| bgl.inner.as_core().id)
1226 .collect::<ArrayVec<_, { wgc::MAX_BIND_GROUPS }>>();
1227 let descriptor = wgc::binding_model::PipelineLayoutDescriptor {
1228 label: desc.label.map(Borrowed),
1229 bind_group_layouts: Borrowed(&temp_layouts),
1230 push_constant_ranges: Borrowed(desc.push_constant_ranges),
1231 };
1232
1233 let (id, error) = self
1234 .context
1235 .0
1236 .device_create_pipeline_layout(self.id, &descriptor, None);
1237 if let Some(cause) = error {
1238 self.context.handle_error(
1239 &self.error_sink,
1240 cause,
1241 desc.label,
1242 "Device::create_pipeline_layout",
1243 );
1244 }
1245 CorePipelineLayout {
1246 context: self.context.clone(),
1247 id,
1248 }
1249 .into()
1250 }
1251
    /// Creates a render pipeline, translating the public descriptor into
    /// wgpu-core's form. Creation errors are routed to the error sink; an id
    /// is returned either way.
    fn create_render_pipeline(
        &self,
        desc: &crate::RenderPipelineDescriptor<'_>,
    ) -> dispatch::DispatchRenderPipeline {
        use wgc::pipeline as pipe;

        // Vertex buffer layouts, borrowing the caller's attribute slices.
        let vertex_buffers: ArrayVec<_, { wgc::MAX_VERTEX_BUFFERS }> = desc
            .vertex
            .buffers
            .iter()
            .map(|vbuf| pipe::VertexBufferLayout {
                array_stride: vbuf.array_stride,
                step_mode: vbuf.step_mode,
                attributes: Borrowed(vbuf.attributes),
            })
            .collect();

        // Pipeline-overridable constants; wgpu-core wants owned key strings.
        let vert_constants = desc
            .vertex
            .compilation_options
            .constants
            .iter()
            .map(|&(key, value)| (String::from(key), value))
            .collect();

        let descriptor = pipe::RenderPipelineDescriptor {
            label: desc.label.map(Borrowed),
            layout: desc.layout.map(|layout| layout.inner.as_core().id),
            vertex: pipe::VertexState {
                stage: pipe::ProgrammableStageDescriptor {
                    module: desc.vertex.module.inner.as_core().id,
                    entry_point: desc.vertex.entry_point.map(Borrowed),
                    constants: vert_constants,
                    zero_initialize_workgroup_memory: desc
                        .vertex
                        .compilation_options
                        .zero_initialize_workgroup_memory,
                },
                buffers: Borrowed(&vertex_buffers),
            },
            primitive: desc.primitive,
            depth_stencil: desc.depth_stencil.clone(),
            multisample: desc.multisample,
            // The fragment stage is optional (depth-only pipelines).
            fragment: desc.fragment.as_ref().map(|frag| {
                let frag_constants = frag
                    .compilation_options
                    .constants
                    .iter()
                    .map(|&(key, value)| (String::from(key), value))
                    .collect();
                pipe::FragmentState {
                    stage: pipe::ProgrammableStageDescriptor {
                        module: frag.module.inner.as_core().id,
                        entry_point: frag.entry_point.map(Borrowed),
                        constants: frag_constants,
                        zero_initialize_workgroup_memory: frag
                            .compilation_options
                            .zero_initialize_workgroup_memory,
                    },
                    targets: Borrowed(frag.targets),
                }
            }),
            multiview: desc.multiview,
            cache: desc.cache.map(|cache| cache.inner.as_core().id),
        };

        let (id, error) =
            self.context
                .0
                .device_create_render_pipeline(self.id, &descriptor, None, None);
        if let Some(cause) = error {
            // Internal errors indicate a shader-translation failure inside
            // wgpu itself, so log loudly and ask for a bug report.
            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
                log::error!("Shader translation error for stage {:?}: {}", stage, error);
                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
            }
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_render_pipeline",
            );
        }
        CoreRenderPipeline {
            context: self.context.clone(),
            id,
            error_sink: Arc::clone(&self.error_sink),
        }
        .into()
    }
1341
1342 fn create_compute_pipeline(
1343 &self,
1344 desc: &crate::ComputePipelineDescriptor<'_>,
1345 ) -> dispatch::DispatchComputePipeline {
1346 use wgc::pipeline as pipe;
1347
1348 let constants = desc
1349 .compilation_options
1350 .constants
1351 .iter()
1352 .map(|&(key, value)| (String::from(key), value))
1353 .collect();
1354
1355 let descriptor = pipe::ComputePipelineDescriptor {
1356 label: desc.label.map(Borrowed),
1357 layout: desc.layout.map(|pll| pll.inner.as_core().id),
1358 stage: pipe::ProgrammableStageDescriptor {
1359 module: desc.module.inner.as_core().id,
1360 entry_point: desc.entry_point.map(Borrowed),
1361 constants,
1362 zero_initialize_workgroup_memory: desc
1363 .compilation_options
1364 .zero_initialize_workgroup_memory,
1365 },
1366 cache: desc.cache.map(|cache| cache.inner.as_core().id),
1367 };
1368
1369 let (id, error) =
1370 self.context
1371 .0
1372 .device_create_compute_pipeline(self.id, &descriptor, None, None);
1373 if let Some(cause) = error {
1374 if let wgc::pipeline::CreateComputePipelineError::Internal(ref error) = cause {
1375 log::error!(
1376 "Shader translation error for stage {:?}: {}",
1377 wgt::ShaderStages::COMPUTE,
1378 error
1379 );
1380 log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1381 }
1382 self.context.handle_error(
1383 &self.error_sink,
1384 cause,
1385 desc.label,
1386 "Device::create_compute_pipeline",
1387 );
1388 }
1389 CoreComputePipeline {
1390 context: self.context.clone(),
1391 id,
1392 error_sink: Arc::clone(&self.error_sink),
1393 }
1394 .into()
1395 }
1396
    /// Creates a pipeline cache, optionally seeded with previously-retrieved
    /// cache `data`.
    ///
    /// # Safety
    /// Forwarded to `Global::device_create_pipeline_cache`; the caller must
    /// uphold its contract (notably that `data` came from a compatible cache).
    unsafe fn create_pipeline_cache(
        &self,
        desc: &crate::PipelineCacheDescriptor<'_>,
    ) -> dispatch::DispatchPipelineCache {
        use wgc::pipeline as pipe;

        let descriptor = pipe::PipelineCacheDescriptor {
            label: desc.label.map(Borrowed),
            data: desc.data.map(Borrowed),
            fallback: desc.fallback,
        };
        // SAFETY: contract forwarded directly to the caller (see above).
        let (id, error) = unsafe {
            self.context
                .0
                .device_create_pipeline_cache(self.id, &descriptor, None)
        };
        if let Some(cause) = error {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                // NOTE(review): label doesn't match the public method name
                // (`Device::create_pipeline_cache`) — consider aligning.
                "Device::device_create_pipeline_cache_init",
            );
        }
        CorePipelineCache {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1427
1428 fn create_buffer(&self, desc: &crate::BufferDescriptor<'_>) -> dispatch::DispatchBuffer {
1429 let (id, error) = self.context.0.device_create_buffer(
1430 self.id,
1431 &desc.map_label(|l| l.map(Borrowed)),
1432 None,
1433 );
1434 if let Some(cause) = error {
1435 self.context
1436 .handle_error(&self.error_sink, cause, desc.label, "Device::create_buffer");
1437 }
1438
1439 CoreBuffer {
1440 context: self.context.clone(),
1441 id,
1442 error_sink: Arc::clone(&self.error_sink),
1443 }
1444 .into()
1445 }
1446
1447 fn create_texture(&self, desc: &crate::TextureDescriptor<'_>) -> dispatch::DispatchTexture {
1448 let wgt_desc = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
1449 let (id, error) = self
1450 .context
1451 .0
1452 .device_create_texture(self.id, &wgt_desc, None);
1453 if let Some(cause) = error {
1454 self.context.handle_error(
1455 &self.error_sink,
1456 cause,
1457 desc.label,
1458 "Device::create_texture",
1459 );
1460 }
1461
1462 CoreTexture {
1463 context: self.context.clone(),
1464 id,
1465 error_sink: Arc::clone(&self.error_sink),
1466 }
1467 .into()
1468 }
1469
    /// Creates a bottom-level acceleration structure.
    ///
    /// Returns the handle value produced by wgpu-core (if any) alongside the
    /// wrapper; creation errors are routed to the error sink.
    fn create_blas(
        &self,
        desc: &crate::CreateBlasDescriptor<'_>,
        sizes: crate::BlasGeometrySizeDescriptors,
    ) -> (Option<u64>, dispatch::DispatchBlas) {
        let global = &self.context.0;
        let (id, handle, error) =
            global.device_create_blas(self.id, &desc.map_label(|l| l.map(Borrowed)), sizes, None);
        if let Some(cause) = error {
            self.context
                .handle_error(&self.error_sink, cause, desc.label, "Device::create_blas");
        }
        (
            handle,
            CoreBlas {
                context: self.context.clone(),
                id,
                error_sink: Arc::clone(&self.error_sink),
            }
            .into(),
        )
    }
1492
1493 fn create_tlas(&self, desc: &crate::CreateTlasDescriptor<'_>) -> dispatch::DispatchTlas {
1494 let global = &self.context.0;
1495 let (id, error) =
1496 global.device_create_tlas(self.id, &desc.map_label(|l| l.map(Borrowed)), None);
1497 if let Some(cause) = error {
1498 self.context
1499 .handle_error(&self.error_sink, cause, desc.label, "Device::create_tlas");
1500 }
1501 CoreTlas {
1502 context: self.context.clone(),
1503 id,
1504 }
1506 .into()
1507 }
1508
    /// Creates a sampler, translating the public descriptor field-by-field
    /// into wgpu-core's form; errors go to the error sink.
    fn create_sampler(&self, desc: &crate::SamplerDescriptor<'_>) -> dispatch::DispatchSampler {
        let descriptor = wgc::resource::SamplerDescriptor {
            label: desc.label.map(Borrowed),
            // The three per-axis address modes are packed into one array.
            address_modes: [
                desc.address_mode_u,
                desc.address_mode_v,
                desc.address_mode_w,
            ],
            mag_filter: desc.mag_filter,
            min_filter: desc.min_filter,
            mipmap_filter: desc.mipmap_filter,
            lod_min_clamp: desc.lod_min_clamp,
            lod_max_clamp: desc.lod_max_clamp,
            compare: desc.compare,
            anisotropy_clamp: desc.anisotropy_clamp,
            border_color: desc.border_color,
        };

        let (id, error) = self
            .context
            .0
            .device_create_sampler(self.id, &descriptor, None);
        if let Some(cause) = error {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_sampler",
            );
        }
        CoreSampler {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1545
1546 fn create_query_set(&self, desc: &crate::QuerySetDescriptor<'_>) -> dispatch::DispatchQuerySet {
1547 let (id, error) = self.context.0.device_create_query_set(
1548 self.id,
1549 &desc.map_label(|l| l.map(Borrowed)),
1550 None,
1551 );
1552 if let Some(cause) = error {
1553 self.context
1554 .handle_error_nolabel(&self.error_sink, cause, "Device::create_query_set");
1555 }
1556 CoreQuerySet {
1557 context: self.context.clone(),
1558 id,
1559 }
1560 .into()
1561 }
1562
1563 fn create_command_encoder(
1564 &self,
1565 desc: &crate::CommandEncoderDescriptor<'_>,
1566 ) -> dispatch::DispatchCommandEncoder {
1567 let (id, error) = self.context.0.device_create_command_encoder(
1568 self.id,
1569 &desc.map_label(|l| l.map(Borrowed)),
1570 None,
1571 );
1572 if let Some(cause) = error {
1573 self.context.handle_error(
1574 &self.error_sink,
1575 cause,
1576 desc.label,
1577 "Device::create_command_encoder",
1578 );
1579 }
1580
1581 CoreCommandEncoder {
1582 context: self.context.clone(),
1583 id,
1584 error_sink: Arc::clone(&self.error_sink),
1585 open: true,
1586 }
1587 .into()
1588 }
1589
    /// Creates a render bundle encoder.
    ///
    /// Unlike the other `create_*` methods, a descriptor error here panics
    /// instead of going through the error sink, because the bundle encoder is
    /// constructed client-side and there is no id to return on failure.
    fn create_render_bundle_encoder(
        &self,
        desc: &crate::RenderBundleEncoderDescriptor<'_>,
    ) -> dispatch::DispatchRenderBundleEncoder {
        let descriptor = wgc::command::RenderBundleEncoderDescriptor {
            label: desc.label.map(Borrowed),
            color_formats: Borrowed(desc.color_formats),
            depth_stencil: desc.depth_stencil,
            sample_count: desc.sample_count,
            multiview: desc.multiview,
        };
        let encoder = match wgc::command::RenderBundleEncoder::new(&descriptor, self.id, None) {
            Ok(encoder) => encoder,
            Err(e) => panic!("Error in Device::create_render_bundle_encoder: {e}"),
        };

        CoreRenderBundleEncoder {
            context: self.context.clone(),
            encoder,
            id: crate::cmp::Identifier::create(),
        }
        .into()
    }
1613
1614 fn set_device_lost_callback(&self, device_lost_callback: dispatch::BoxDeviceLostCallback) {
1615 self.context
1616 .0
1617 .device_set_device_lost_closure(self.id, device_lost_callback);
1618 }
1619
1620 fn on_uncaptured_error(&self, handler: Box<dyn crate::UncapturedErrorHandler>) {
1621 let mut error_sink = self.error_sink.lock();
1622 error_sink.uncaptured_handler = Some(handler);
1623 }
1624
1625 fn push_error_scope(&self, filter: crate::ErrorFilter) {
1626 let mut error_sink = self.error_sink.lock();
1627 error_sink.scopes.push(ErrorScope {
1628 error: None,
1629 filter,
1630 });
1631 }
1632
1633 fn pop_error_scope(&self) -> Pin<Box<dyn dispatch::PopErrorScopeFuture>> {
1634 let mut error_sink = self.error_sink.lock();
1635 let scope = error_sink.scopes.pop().unwrap();
1636 Box::pin(ready(scope.error))
1637 }
1638
    /// Starts a frame capture in an attached graphics debugger, if any.
    ///
    /// # Safety
    /// Forwarded to `Global::device_start_graphics_debugger_capture`; the
    /// caller must uphold its contract.
    unsafe fn start_graphics_debugger_capture(&self) {
        // SAFETY: contract forwarded directly to the caller (see above).
        unsafe {
            self.context
                .0
                .device_start_graphics_debugger_capture(self.id)
        };
    }

    /// Ends a capture begun by `start_graphics_debugger_capture`.
    ///
    /// # Safety
    /// Forwarded to `Global::device_stop_graphics_debugger_capture`; the
    /// caller must uphold its contract.
    unsafe fn stop_graphics_debugger_capture(&self) {
        // SAFETY: contract forwarded directly to the caller (see above).
        unsafe {
            self.context
                .0
                .device_stop_graphics_debugger_capture(self.id)
        };
    }
1654
    /// Polls the device. Errors that map to a public `PollError` are returned
    /// to the caller; anything else is escalated through `handle_error_fatal`.
    fn poll(&self, poll_type: wgt::PollType<u64>) -> Result<crate::PollStatus, crate::PollError> {
        match self.context.0.device_poll(self.id, poll_type) {
            Ok(status) => Ok(status),
            Err(err) => {
                // Recoverable poll failures surface directly to the caller.
                if let Some(poll_error) = err.to_poll_error() {
                    return Err(poll_error);
                }

                self.context.handle_error_fatal(err, "Device::poll")
            }
        }
    }
1667
    /// Snapshot of wgpu-core's internal counters for this device.
    fn get_internal_counters(&self) -> crate::InternalCounters {
        self.context.0.device_get_internal_counters(self.id)
    }

    /// Report from the underlying allocator, if one is available.
    fn generate_allocator_report(&self) -> Option<wgt::AllocatorReport> {
        self.context.0.device_generate_allocator_report(self.id)
    }

    /// Requests destruction of the device on the wgpu-core side.
    fn destroy(&self) {
        self.context.0.device_destroy(self.id);
    }
1679}
1680
impl Drop for CoreDevice {
    fn drop(&mut self) {
        // Release this handle's wgpu-core reference to the device.
        self.context.0.device_drop(self.id)
    }
}
1686
impl dispatch::QueueInterface for CoreQueue {
    /// Schedules a write of `data` into `buffer` at `offset`.
    /// Failures are reported through the queue's error sink, not returned.
    fn write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        data: &[u8],
    ) {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_write_buffer(self.id, buffer.id, offset, data)
        {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_buffer")
            }
        }
    }

    /// Allocates a staging buffer of `size` bytes and exposes its mapping.
    /// Returns `None` after reporting on failure. This (and the two methods
    /// below) back the public `write_buffer_with` path, hence the shared
    /// "Queue::write_buffer_with" error label.
    fn create_staging_buffer(
        &self,
        size: crate::BufferSize,
    ) -> Option<dispatch::DispatchQueueWriteBuffer> {
        match self
            .context
            .0
            .queue_create_staging_buffer(self.id, size, None)
        {
            Ok((buffer_id, ptr)) => Some(
                CoreQueueWriteBuffer {
                    buffer_id,
                    mapping: CoreBufferMappedRange {
                        ptr,
                        size: size.get() as usize,
                    },
                }
                .into(),
            ),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Pre-validates a `write_buffer_with`-style write; `None` means the
    /// write was rejected (and the error already reported).
    fn validate_write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: wgt::BufferAddress,
        size: wgt::BufferSize,
    ) -> Option<()> {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_validate_write_buffer(self.id, buffer.id, offset, size)
        {
            Ok(()) => Some(()),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Copies a previously-filled staging buffer into `buffer` at `offset`.
    fn write_staging_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        staging_buffer: &dispatch::DispatchQueueWriteBuffer,
    ) {
        let buffer = buffer.as_core();
        let staging_buffer = staging_buffer.as_core();

        match self.context.0.queue_write_staging_buffer(
            self.id,
            buffer.id,
            offset,
            staging_buffer.buffer_id,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
            }
        }
    }

    /// Schedules a write of `data` into a texture region.
    fn write_texture(
        &self,
        texture: crate::TexelCopyTextureInfo<'_>,
        data: &[u8],
        data_layout: crate::TexelCopyBufferLayout,
        size: crate::Extent3d,
    ) {
        match self.context.0.queue_write_texture(
            self.id,
            &map_texture_copy_view(texture),
            data,
            &data_layout,
            &size,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_texture")
            }
        }
    }

    /// Web-only: copies an external image source into a texture.
    /// Only actually implemented for the WebGL backend; on other web targets
    /// the arguments are intentionally unused.
    #[cfg(web)]
    #[cfg_attr(not(webgl), expect(unused_variables))]
    fn copy_external_image_to_texture(
        &self,
        source: &crate::CopyExternalImageSourceInfo,
        dest: crate::CopyExternalImageDestInfo<&crate::api::Texture>,
        size: crate::Extent3d,
    ) {
        #[cfg(webgl)]
        match self.context.0.queue_copy_external_image_to_texture(
            self.id,
            source,
            map_texture_tagged_copy_view(dest),
            size,
        ) {
            Ok(()) => (),
            Err(err) => self.context.handle_error_nolabel(
                &self.error_sink,
                err,
                "Queue::copy_external_image_to_texture",
            ),
        }
    }

    /// Submits the given command buffers, returning the submission index.
    /// On error the index reported alongside the error is still returned.
    fn submit(
        &self,
        command_buffers: &mut dyn Iterator<Item = dispatch::DispatchCommandBuffer>,
    ) -> u64 {
        let temp_command_buffers = command_buffers.collect::<SmallVec<[_; 4]>>();
        let command_buffer_ids = temp_command_buffers
            .iter()
            .map(|cmdbuf| cmdbuf.as_core().id)
            .collect::<SmallVec<[_; 4]>>();

        let index = match self.context.0.queue_submit(self.id, &command_buffer_ids) {
            Ok(index) => index,
            Err((index, err)) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::submit");
                index
            }
        };

        // Keep the command-buffer wrappers alive until after the submit call;
        // dropped only here, explicitly.
        drop(temp_command_buffers);

        index
    }

    /// Timestamp-query period, in nanoseconds per tick, for this queue.
    fn get_timestamp_period(&self) -> f32 {
        self.context.0.queue_get_timestamp_period(self.id)
    }

    /// Registers a callback invoked once currently-submitted work completes.
    fn on_submitted_work_done(&self, callback: dispatch::BoxSubmittedWorkDoneCallback) {
        self.context
            .0
            .queue_on_submitted_work_done(self.id, callback);
    }

    /// Compacts a prepared BLAS on this queue, returning the new handle value
    /// (if any) and the wrapper for the compacted BLAS.
    fn compact_blas(&self, blas: &dispatch::DispatchBlas) -> (Option<u64>, dispatch::DispatchBlas) {
        let (id, handle, error) =
            self.context
                .0
                .queue_compact_blas(self.id, blas.as_core().id, None);

        if let Some(cause) = error {
            self.context
                .handle_error_nolabel(&self.error_sink, cause, "Queue::compact_blas");
        }
        (
            handle,
            CoreBlas {
                context: self.context.clone(),
                id,
                error_sink: Arc::clone(&self.error_sink),
            }
            .into(),
        )
    }
}
1893
impl Drop for CoreQueue {
    fn drop(&mut self) {
        // Release this handle's wgpu-core reference to the queue.
        self.context.0.queue_drop(self.id)
    }
}
1899
impl dispatch::ShaderModuleInterface for CoreShaderModule {
    /// Compilation info is captured at creation time, so the future resolves
    /// immediately with a clone of it.
    fn get_compilation_info(&self) -> Pin<Box<dyn dispatch::ShaderCompilationInfoFuture>> {
        Box::pin(ready(self.compilation_info.clone()))
    }
}

impl Drop for CoreShaderModule {
    fn drop(&mut self) {
        // Release this handle's wgpu-core reference to the module.
        self.context.0.shader_module_drop(self.id)
    }
}
1911
// The interface trait adds no methods beyond the marker.
impl dispatch::BindGroupLayoutInterface for CoreBindGroupLayout {}

impl Drop for CoreBindGroupLayout {
    fn drop(&mut self) {
        // Release this handle's wgpu-core reference to the layout.
        self.context.0.bind_group_layout_drop(self.id)
    }
}
1919
// The interface trait adds no methods beyond the marker.
impl dispatch::BindGroupInterface for CoreBindGroup {}

impl Drop for CoreBindGroup {
    fn drop(&mut self) {
        // Release this handle's wgpu-core reference to the bind group.
        self.context.0.bind_group_drop(self.id)
    }
}
1927
// The interface trait adds no methods beyond the marker.
impl dispatch::TextureViewInterface for CoreTextureView {}

impl Drop for CoreTextureView {
    fn drop(&mut self) {
        // The drop result is intentionally discarded — destructors have no
        // way to surface an error.
        let _ = self.context.0.texture_view_drop(self.id);
    }
}
1936
// The interface trait adds no methods beyond the marker.
impl dispatch::SamplerInterface for CoreSampler {}

impl Drop for CoreSampler {
    fn drop(&mut self) {
        // Release this handle's wgpu-core reference to the sampler.
        self.context.0.sampler_drop(self.id)
    }
}
1944
impl dispatch::BufferInterface for CoreBuffer {
    /// Begins an asynchronous map of `range`; `callback` receives the result
    /// once the map resolves. Submission-time failures go to the error sink.
    fn map_async(
        &self,
        mode: crate::MapMode,
        range: Range<crate::BufferAddress>,
        callback: dispatch::BufferMapCallback,
    ) {
        let operation = wgc::resource::BufferMapOperation {
            host: match mode {
                MapMode::Read => wgc::device::HostMap::Read,
                MapMode::Write => wgc::device::HostMap::Write,
            },
            // Collapse wgpu-core's detailed error into the public opaque one.
            callback: Some(Box::new(|status| {
                let res = status.map_err(|_| crate::BufferAsyncError);
                callback(res);
            })),
        };

        match self.context.0.buffer_map_async(
            self.id,
            range.start,
            Some(range.end - range.start),
            operation,
        ) {
            Ok(_) => (),
            Err(cause) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::map_async")
            }
        }
    }

    /// Returns a pointer/length view of the mapped sub-range.
    /// Failure here goes through `handle_error_fatal` rather than the sink.
    fn get_mapped_range(
        &self,
        sub_range: Range<crate::BufferAddress>,
    ) -> dispatch::DispatchBufferMappedRange {
        let size = sub_range.end - sub_range.start;
        match self
            .context
            .0
            .buffer_get_mapped_range(self.id, sub_range.start, Some(size))
        {
            Ok((ptr, size)) => CoreBufferMappedRange {
                ptr,
                size: size as usize,
            }
            .into(),
            Err(err) => self
                .context
                .handle_error_fatal(err, "Buffer::get_mapped_range"),
        }
    }

    /// Unmaps the buffer; validation failures go to the error sink.
    fn unmap(&self) {
        match self.context.0.buffer_unmap(self.id) {
            Ok(()) => (),
            Err(cause) => {
                // NOTE(review): label is "Buffer::buffer_unmap" while the
                // public method is `Buffer::unmap` — consider aligning.
                self.context
                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::buffer_unmap")
            }
        }
    }

    /// Requests destruction of the buffer's backing storage.
    fn destroy(&self) {
        self.context.0.buffer_destroy(self.id);
    }
}
2012
impl Drop for CoreBuffer {
    fn drop(&mut self) {
        // Release this handle's wgpu-core reference to the buffer.
        self.context.0.buffer_drop(self.id)
    }
}
2018
2019impl dispatch::TextureInterface for CoreTexture {
2020 fn create_view(
2021 &self,
2022 desc: &crate::TextureViewDescriptor<'_>,
2023 ) -> dispatch::DispatchTextureView {
2024 let descriptor = wgc::resource::TextureViewDescriptor {
2025 label: desc.label.map(Borrowed),
2026 format: desc.format,
2027 dimension: desc.dimension,
2028 usage: desc.usage,
2029 range: wgt::ImageSubresourceRange {
2030 aspect: desc.aspect,
2031 base_mip_level: desc.base_mip_level,
2032 mip_level_count: desc.mip_level_count,
2033 base_array_layer: desc.base_array_layer,
2034 array_layer_count: desc.array_layer_count,
2035 },
2036 };
2037 let (id, error) = self
2038 .context
2039 .0
2040 .texture_create_view(self.id, &descriptor, None);
2041 if let Some(cause) = error {
2042 self.context
2043 .handle_error(&self.error_sink, cause, desc.label, "Texture::create_view");
2044 }
2045 CoreTextureView {
2046 context: self.context.clone(),
2047 id,
2048 }
2049 .into()
2050 }
2051
2052 fn destroy(&self) {
2053 self.context.0.texture_destroy(self.id);
2054 }
2055}
2056
impl Drop for CoreTexture {
    fn drop(&mut self) {
        // Release this handle's wgpu-core reference to the texture.
        self.context.0.texture_drop(self.id)
    }
}
2062
impl dispatch::BlasInterface for CoreBlas {
    /// Asynchronously prepares this BLAS for compaction; `callback` fires
    /// with the result. Setup failures go to the error sink instead.
    fn prepare_compact_async(&self, callback: BlasCompactCallback) {
        // Collapse the detailed core result into the public opaque error.
        let callback: Option<wgc::resource::BlasCompactCallback> =
            Some(Box::new(|status: BlasPrepareCompactResult| {
                let res = status.map_err(|_| crate::BlasAsyncError);
                callback(res);
            }));

        match self.context.0.blas_prepare_compact_async(self.id, callback) {
            Ok(_) => (),
            Err(cause) => self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "Blas::prepare_compact_async",
            ),
        }
    }

    /// Whether this BLAS is ready to be compacted.
    /// Reports and returns `false` if the query itself fails.
    fn ready_for_compaction(&self) -> bool {
        match self.context.0.ready_for_compaction(self.id) {
            Ok(ready) => ready,
            Err(cause) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    cause,
                    "Blas::ready_for_compaction",
                );
                false
            }
        }
    }
}
2096
impl Drop for CoreBlas {
    fn drop(&mut self) {
        // Release this handle's wgpu-core reference to the BLAS.
        self.context.0.blas_drop(self.id)
    }
}
2102
// The interface trait adds no methods beyond the marker.
impl dispatch::TlasInterface for CoreTlas {}

impl Drop for CoreTlas {
    fn drop(&mut self) {
        // Release this handle's wgpu-core reference to the TLAS.
        self.context.0.tlas_drop(self.id)
    }
}
2110
// The interface trait adds no methods beyond the marker.
impl dispatch::QuerySetInterface for CoreQuerySet {}

impl Drop for CoreQuerySet {
    fn drop(&mut self) {
        // Release this handle's wgpu-core reference to the query set.
        self.context.0.query_set_drop(self.id)
    }
}
2118
// The interface trait adds no methods beyond the marker.
impl dispatch::PipelineLayoutInterface for CorePipelineLayout {}

impl Drop for CorePipelineLayout {
    fn drop(&mut self) {
        // Release this handle's wgpu-core reference to the layout.
        self.context.0.pipeline_layout_drop(self.id)
    }
}
2126
2127impl dispatch::RenderPipelineInterface for CoreRenderPipeline {
2128 fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2129 let (id, error) = self
2130 .context
2131 .0
2132 .render_pipeline_get_bind_group_layout(self.id, index, None);
2133 if let Some(err) = error {
2134 self.context.handle_error_nolabel(
2135 &self.error_sink,
2136 err,
2137 "RenderPipeline::get_bind_group_layout",
2138 )
2139 }
2140 CoreBindGroupLayout {
2141 context: self.context.clone(),
2142 id,
2143 }
2144 .into()
2145 }
2146}
2147
impl Drop for CoreRenderPipeline {
    fn drop(&mut self) {
        // Release this handle's wgpu-core reference to the pipeline.
        self.context.0.render_pipeline_drop(self.id)
    }
}
2153
impl dispatch::ComputePipelineInterface for CoreComputePipeline {
    /// Fetches the bind group layout at `index` from this pipeline's layout.
    /// Failures are reported to the error sink; a handle is returned either way.
    fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
        let (id, error) = self
            .context
            .0
            .compute_pipeline_get_bind_group_layout(self.id, index, None);
        if let Some(err) = error {
            self.context.handle_error_nolabel(
                &self.error_sink,
                err,
                "ComputePipeline::get_bind_group_layout",
            )
        }
        CoreBindGroupLayout {
            context: self.context.clone(),
            id,
        }
        .into()
    }
}
2174
impl Drop for CoreComputePipeline {
    fn drop(&mut self) {
        // Release this handle's wgpu-core reference to the pipeline.
        self.context.0.compute_pipeline_drop(self.id)
    }
}
2180
impl dispatch::PipelineCacheInterface for CorePipelineCache {
    /// Serialized cache contents suitable for seeding a future cache, if the
    /// backend supports retrieval.
    fn get_data(&self) -> Option<Vec<u8>> {
        self.context.0.pipeline_cache_get_data(self.id)
    }
}

impl Drop for CorePipelineCache {
    fn drop(&mut self) {
        // Release this handle's wgpu-core reference to the cache.
        self.context.0.pipeline_cache_drop(self.id)
    }
}
2192
2193impl dispatch::CommandEncoderInterface for CoreCommandEncoder {
    /// Records a buffer-to-buffer copy; `copy_size` of `None` means
    /// "to the end of the source". Errors go to the encoder's error sink.
    fn copy_buffer_to_buffer(
        &self,
        source: &dispatch::DispatchBuffer,
        source_offset: crate::BufferAddress,
        destination: &dispatch::DispatchBuffer,
        destination_offset: crate::BufferAddress,
        copy_size: Option<crate::BufferAddress>,
    ) {
        let source = source.as_core();
        let destination = destination.as_core();

        if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_buffer(
            self.id,
            source.id,
            source_offset,
            destination.id,
            destination_offset,
            copy_size,
        ) {
            self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "CommandEncoder::copy_buffer_to_buffer",
            );
        }
    }
2220
    /// Records a buffer-to-texture copy. Errors go to the encoder's error sink.
    fn copy_buffer_to_texture(
        &self,
        source: crate::TexelCopyBufferInfo<'_>,
        destination: crate::TexelCopyTextureInfo<'_>,
        copy_size: crate::Extent3d,
    ) {
        if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_texture(
            self.id,
            &map_buffer_copy_view(source),
            &map_texture_copy_view(destination),
            &copy_size,
        ) {
            self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "CommandEncoder::copy_buffer_to_texture",
            );
        }
    }
2240
    /// Records a texture-to-buffer copy. Errors go to the encoder's error sink.
    fn copy_texture_to_buffer(
        &self,
        source: crate::TexelCopyTextureInfo<'_>,
        destination: crate::TexelCopyBufferInfo<'_>,
        copy_size: crate::Extent3d,
    ) {
        if let Err(cause) = self.context.0.command_encoder_copy_texture_to_buffer(
            self.id,
            &map_texture_copy_view(source),
            &map_buffer_copy_view(destination),
            &copy_size,
        ) {
            self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "CommandEncoder::copy_texture_to_buffer",
            );
        }
    }
2260
    /// Records a texture-to-texture copy. Errors go to the encoder's error sink.
    fn copy_texture_to_texture(
        &self,
        source: crate::TexelCopyTextureInfo<'_>,
        destination: crate::TexelCopyTextureInfo<'_>,
        copy_size: crate::Extent3d,
    ) {
        if let Err(cause) = self.context.0.command_encoder_copy_texture_to_texture(
            self.id,
            &map_texture_copy_view(source),
            &map_texture_copy_view(destination),
            &copy_size,
        ) {
            self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "CommandEncoder::copy_texture_to_texture",
            );
        }
    }
2280
    /// Opens a compute pass on this encoder. Errors are reported to the error
    /// sink; a pass object is returned either way.
    fn begin_compute_pass(
        &self,
        desc: &crate::ComputePassDescriptor<'_>,
    ) -> dispatch::DispatchComputePass {
        // Translate optional timestamp writes into wgpu-core's form.
        let timestamp_writes =
            desc.timestamp_writes
                .as_ref()
                .map(|tw| wgc::command::PassTimestampWrites {
                    query_set: tw.query_set.inner.as_core().id,
                    beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
                    end_of_pass_write_index: tw.end_of_pass_write_index,
                });

        let (pass, err) = self.context.0.command_encoder_begin_compute_pass(
            self.id,
            &wgc::command::ComputePassDescriptor {
                label: desc.label.map(Borrowed),
                timestamp_writes,
            },
        );

        if let Some(cause) = err {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "CommandEncoder::begin_compute_pass",
            );
        }

        CoreComputePass {
            context: self.context.clone(),
            pass,
            error_sink: self.error_sink.clone(),
            id: crate::cmp::Identifier::create(),
        }
        .into()
    }
2319
    /// Opens a render pass on this encoder, translating attachments and
    /// timestamp writes into wgpu-core's form. Errors are reported to the
    /// error sink; a pass object is returned either way.
    fn begin_render_pass(
        &self,
        desc: &crate::RenderPassDescriptor<'_>,
    ) -> dispatch::DispatchRenderPass {
        // Color attachments; each slot may be empty (`None`).
        let colors = desc
            .color_attachments
            .iter()
            .map(|ca| {
                ca.as_ref()
                    .map(|at| wgc::command::RenderPassColorAttachment {
                        view: at.view.inner.as_core().id,
                        depth_slice: at.depth_slice,
                        resolve_target: at.resolve_target.map(|view| view.inner.as_core().id),
                        load_op: at.ops.load,
                        store_op: at.ops.store,
                    })
            })
            .collect::<Vec<_>>();

        // Optional depth/stencil attachment with per-aspect load/store ops.
        let depth_stencil = desc.depth_stencil_attachment.as_ref().map(|dsa| {
            wgc::command::RenderPassDepthStencilAttachment {
                view: dsa.view.inner.as_core().id,
                depth: map_pass_channel(dsa.depth_ops.as_ref()),
                stencil: map_pass_channel(dsa.stencil_ops.as_ref()),
            }
        });

        // Optional timestamp writes for the whole pass.
        let timestamp_writes =
            desc.timestamp_writes
                .as_ref()
                .map(|tw| wgc::command::PassTimestampWrites {
                    query_set: tw.query_set.inner.as_core().id,
                    beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
                    end_of_pass_write_index: tw.end_of_pass_write_index,
                });

        let (pass, err) = self.context.0.command_encoder_begin_render_pass(
            self.id,
            &wgc::command::RenderPassDescriptor {
                label: desc.label.map(Borrowed),
                timestamp_writes: timestamp_writes.as_ref(),
                color_attachments: Borrowed(&colors),
                depth_stencil_attachment: depth_stencil.as_ref(),
                occlusion_query_set: desc.occlusion_query_set.map(|qs| qs.inner.as_core().id),
            },
        );

        if let Some(cause) = err {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "CommandEncoder::begin_render_pass",
            );
        }

        CoreRenderPass {
            context: self.context.clone(),
            pass,
            error_sink: self.error_sink.clone(),
            id: crate::cmp::Identifier::create(),
        }
        .into()
    }
2384
2385 fn finish(&mut self) -> dispatch::DispatchCommandBuffer {
2386 let descriptor = wgt::CommandBufferDescriptor::default();
2387 self.open = false; let (id, error) = self.context.0.command_encoder_finish(self.id, &descriptor);
2389 if let Some(cause) = error {
2390 self.context
2391 .handle_error_nolabel(&self.error_sink, cause, "a CommandEncoder");
2392 }
2393 CoreCommandBuffer {
2394 context: self.context.clone(),
2395 id,
2396 }
2397 .into()
2398 }
2399
2400 fn clear_texture(
2401 &self,
2402 texture: &dispatch::DispatchTexture,
2403 subresource_range: &crate::ImageSubresourceRange,
2404 ) {
2405 let texture = texture.as_core();
2406
2407 if let Err(cause) =
2408 self.context
2409 .0
2410 .command_encoder_clear_texture(self.id, texture.id, subresource_range)
2411 {
2412 self.context.handle_error_nolabel(
2413 &self.error_sink,
2414 cause,
2415 "CommandEncoder::clear_texture",
2416 );
2417 }
2418 }
2419
2420 fn clear_buffer(
2421 &self,
2422 buffer: &dispatch::DispatchBuffer,
2423 offset: crate::BufferAddress,
2424 size: Option<crate::BufferAddress>,
2425 ) {
2426 let buffer = buffer.as_core();
2427
2428 if let Err(cause) = self
2429 .context
2430 .0
2431 .command_encoder_clear_buffer(self.id, buffer.id, offset, size)
2432 {
2433 self.context.handle_error_nolabel(
2434 &self.error_sink,
2435 cause,
2436 "CommandEncoder::fill_buffer",
2437 );
2438 }
2439 }
2440
2441 fn insert_debug_marker(&self, label: &str) {
2442 if let Err(cause) = self
2443 .context
2444 .0
2445 .command_encoder_insert_debug_marker(self.id, label)
2446 {
2447 self.context.handle_error_nolabel(
2448 &self.error_sink,
2449 cause,
2450 "CommandEncoder::insert_debug_marker",
2451 );
2452 }
2453 }
2454
2455 fn push_debug_group(&self, label: &str) {
2456 if let Err(cause) = self
2457 .context
2458 .0
2459 .command_encoder_push_debug_group(self.id, label)
2460 {
2461 self.context.handle_error_nolabel(
2462 &self.error_sink,
2463 cause,
2464 "CommandEncoder::push_debug_group",
2465 );
2466 }
2467 }
2468
2469 fn pop_debug_group(&self) {
2470 if let Err(cause) = self.context.0.command_encoder_pop_debug_group(self.id) {
2471 self.context.handle_error_nolabel(
2472 &self.error_sink,
2473 cause,
2474 "CommandEncoder::pop_debug_group",
2475 );
2476 }
2477 }
2478
2479 fn write_timestamp(&self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2480 let query_set = query_set.as_core();
2481
2482 if let Err(cause) =
2483 self.context
2484 .0
2485 .command_encoder_write_timestamp(self.id, query_set.id, query_index)
2486 {
2487 self.context.handle_error_nolabel(
2488 &self.error_sink,
2489 cause,
2490 "CommandEncoder::write_timestamp",
2491 );
2492 }
2493 }
2494
2495 fn resolve_query_set(
2496 &self,
2497 query_set: &dispatch::DispatchQuerySet,
2498 first_query: u32,
2499 query_count: u32,
2500 destination: &dispatch::DispatchBuffer,
2501 destination_offset: crate::BufferAddress,
2502 ) {
2503 let query_set = query_set.as_core();
2504 let destination = destination.as_core();
2505
2506 if let Err(cause) = self.context.0.command_encoder_resolve_query_set(
2507 self.id,
2508 query_set.id,
2509 first_query,
2510 query_count,
2511 destination.id,
2512 destination_offset,
2513 ) {
2514 self.context.handle_error_nolabel(
2515 &self.error_sink,
2516 cause,
2517 "CommandEncoder::resolve_query_set",
2518 );
2519 }
2520 }
2521
2522 fn mark_acceleration_structures_built<'a>(
2523 &self,
2524 blas: &mut dyn Iterator<Item = &'a Blas>,
2525 tlas: &mut dyn Iterator<Item = &'a Tlas>,
2526 ) {
2527 let blas = blas
2528 .map(|b| b.inner.as_core().id)
2529 .collect::<SmallVec<[_; 4]>>();
2530 let tlas = tlas
2531 .map(|t| t.inner.as_core().id)
2532 .collect::<SmallVec<[_; 4]>>();
2533 if let Err(cause) = self
2534 .context
2535 .0
2536 .command_encoder_mark_acceleration_structures_built(self.id, &blas, &tlas)
2537 {
2538 self.context.handle_error_nolabel(
2539 &self.error_sink,
2540 cause,
2541 "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2542 );
2543 }
2544 }
2545
2546 fn build_acceleration_structures<'a>(
2547 &self,
2548 blas: &mut dyn Iterator<Item = &'a crate::BlasBuildEntry<'a>>,
2549 tlas: &mut dyn Iterator<Item = &'a crate::Tlas>,
2550 ) {
2551 let blas = blas.map(|e: &crate::BlasBuildEntry<'_>| {
2552 let geometries = match e.geometry {
2553 crate::BlasGeometries::TriangleGeometries(ref triangle_geometries) => {
2554 let iter = triangle_geometries.iter().map(|tg| {
2555 wgc::ray_tracing::BlasTriangleGeometry {
2556 vertex_buffer: tg.vertex_buffer.inner.as_core().id,
2557 index_buffer: tg.index_buffer.map(|buf| buf.inner.as_core().id),
2558 transform_buffer: tg.transform_buffer.map(|buf| buf.inner.as_core().id),
2559 size: tg.size,
2560 transform_buffer_offset: tg.transform_buffer_offset,
2561 first_vertex: tg.first_vertex,
2562 vertex_stride: tg.vertex_stride,
2563 first_index: tg.first_index,
2564 }
2565 });
2566 wgc::ray_tracing::BlasGeometries::TriangleGeometries(Box::new(iter))
2567 }
2568 };
2569 wgc::ray_tracing::BlasBuildEntry {
2570 blas_id: e.blas.inner.as_core().id,
2571 geometries,
2572 }
2573 });
2574
2575 let tlas = tlas.into_iter().map(|e| {
2576 let instances = e
2577 .instances
2578 .iter()
2579 .map(|instance: &Option<crate::TlasInstance>| {
2580 instance
2581 .as_ref()
2582 .map(|instance| wgc::ray_tracing::TlasInstance {
2583 blas_id: instance.blas.as_core().id,
2584 transform: &instance.transform,
2585 custom_data: instance.custom_data,
2586 mask: instance.mask,
2587 })
2588 });
2589 wgc::ray_tracing::TlasPackage {
2590 tlas_id: e.inner.as_core().id,
2591 instances: Box::new(instances),
2592 lowest_unmodified: e.lowest_unmodified,
2593 }
2594 });
2595
2596 if let Err(cause) = self
2597 .context
2598 .0
2599 .command_encoder_build_acceleration_structures(self.id, blas, tlas)
2600 {
2601 self.context.handle_error_nolabel(
2602 &self.error_sink,
2603 cause,
2604 "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2605 );
2606 }
2607 }
2608
2609 fn transition_resources<'a>(
2610 &mut self,
2611 buffer_transitions: &mut dyn Iterator<
2612 Item = wgt::BufferTransition<&'a dispatch::DispatchBuffer>,
2613 >,
2614 texture_transitions: &mut dyn Iterator<
2615 Item = wgt::TextureTransition<&'a dispatch::DispatchTexture>,
2616 >,
2617 ) {
2618 let result = self.context.0.command_encoder_transition_resources(
2619 self.id,
2620 buffer_transitions.map(|t| wgt::BufferTransition {
2621 buffer: t.buffer.as_core().id,
2622 state: t.state,
2623 }),
2624 texture_transitions.map(|t| wgt::TextureTransition {
2625 texture: t.texture.as_core().id,
2626 selector: t.selector.clone(),
2627 state: t.state,
2628 }),
2629 );
2630
2631 if let Err(cause) = result {
2632 self.context.handle_error_nolabel(
2633 &self.error_sink,
2634 cause,
2635 "CommandEncoder::transition_resources",
2636 );
2637 }
2638 }
2639}
2640
2641impl Drop for CoreCommandEncoder {
2642 fn drop(&mut self) {
2643 if self.open {
2644 self.context.0.command_encoder_drop(self.id)
2645 }
2646 }
2647}
2648
// Marker impl: command buffers expose no operations beyond creation and drop.
impl dispatch::CommandBufferInterface for CoreCommandBuffer {}
2650
2651impl Drop for CoreCommandBuffer {
2652 fn drop(&mut self) {
2653 self.context.0.command_buffer_drop(self.id)
2654 }
2655}
2656
2657impl dispatch::ComputePassInterface for CoreComputePass {
2658 fn set_pipeline(&mut self, pipeline: &dispatch::DispatchComputePipeline) {
2659 let pipeline = pipeline.as_core();
2660
2661 if let Err(cause) = self
2662 .context
2663 .0
2664 .compute_pass_set_pipeline(&mut self.pass, pipeline.id)
2665 {
2666 self.context.handle_error(
2667 &self.error_sink,
2668 cause,
2669 self.pass.label(),
2670 "ComputePass::set_pipeline",
2671 );
2672 }
2673 }
2674
2675 fn set_bind_group(
2676 &mut self,
2677 index: u32,
2678 bind_group: Option<&dispatch::DispatchBindGroup>,
2679 offsets: &[crate::DynamicOffset],
2680 ) {
2681 let bg = bind_group.map(|bg| bg.as_core().id);
2682
2683 if let Err(cause) =
2684 self.context
2685 .0
2686 .compute_pass_set_bind_group(&mut self.pass, index, bg, offsets)
2687 {
2688 self.context.handle_error(
2689 &self.error_sink,
2690 cause,
2691 self.pass.label(),
2692 "ComputePass::set_bind_group",
2693 );
2694 }
2695 }
2696
2697 fn set_push_constants(&mut self, offset: u32, data: &[u8]) {
2698 if let Err(cause) =
2699 self.context
2700 .0
2701 .compute_pass_set_push_constants(&mut self.pass, offset, data)
2702 {
2703 self.context.handle_error(
2704 &self.error_sink,
2705 cause,
2706 self.pass.label(),
2707 "ComputePass::set_push_constant",
2708 );
2709 }
2710 }
2711
2712 fn insert_debug_marker(&mut self, label: &str) {
2713 if let Err(cause) =
2714 self.context
2715 .0
2716 .compute_pass_insert_debug_marker(&mut self.pass, label, 0)
2717 {
2718 self.context.handle_error(
2719 &self.error_sink,
2720 cause,
2721 self.pass.label(),
2722 "ComputePass::insert_debug_marker",
2723 );
2724 }
2725 }
2726
2727 fn push_debug_group(&mut self, group_label: &str) {
2728 if let Err(cause) =
2729 self.context
2730 .0
2731 .compute_pass_push_debug_group(&mut self.pass, group_label, 0)
2732 {
2733 self.context.handle_error(
2734 &self.error_sink,
2735 cause,
2736 self.pass.label(),
2737 "ComputePass::push_debug_group",
2738 );
2739 }
2740 }
2741
2742 fn pop_debug_group(&mut self) {
2743 if let Err(cause) = self.context.0.compute_pass_pop_debug_group(&mut self.pass) {
2744 self.context.handle_error(
2745 &self.error_sink,
2746 cause,
2747 self.pass.label(),
2748 "ComputePass::pop_debug_group",
2749 );
2750 }
2751 }
2752
2753 fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2754 let query_set = query_set.as_core();
2755
2756 if let Err(cause) =
2757 self.context
2758 .0
2759 .compute_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
2760 {
2761 self.context.handle_error(
2762 &self.error_sink,
2763 cause,
2764 self.pass.label(),
2765 "ComputePass::write_timestamp",
2766 );
2767 }
2768 }
2769
2770 fn begin_pipeline_statistics_query(
2771 &mut self,
2772 query_set: &dispatch::DispatchQuerySet,
2773 query_index: u32,
2774 ) {
2775 let query_set = query_set.as_core();
2776
2777 if let Err(cause) = self.context.0.compute_pass_begin_pipeline_statistics_query(
2778 &mut self.pass,
2779 query_set.id,
2780 query_index,
2781 ) {
2782 self.context.handle_error(
2783 &self.error_sink,
2784 cause,
2785 self.pass.label(),
2786 "ComputePass::begin_pipeline_statistics_query",
2787 );
2788 }
2789 }
2790
2791 fn end_pipeline_statistics_query(&mut self) {
2792 if let Err(cause) = self
2793 .context
2794 .0
2795 .compute_pass_end_pipeline_statistics_query(&mut self.pass)
2796 {
2797 self.context.handle_error(
2798 &self.error_sink,
2799 cause,
2800 self.pass.label(),
2801 "ComputePass::end_pipeline_statistics_query",
2802 );
2803 }
2804 }
2805
2806 fn dispatch_workgroups(&mut self, x: u32, y: u32, z: u32) {
2807 if let Err(cause) = self
2808 .context
2809 .0
2810 .compute_pass_dispatch_workgroups(&mut self.pass, x, y, z)
2811 {
2812 self.context.handle_error(
2813 &self.error_sink,
2814 cause,
2815 self.pass.label(),
2816 "ComputePass::dispatch_workgroups",
2817 );
2818 }
2819 }
2820
2821 fn dispatch_workgroups_indirect(
2822 &mut self,
2823 indirect_buffer: &dispatch::DispatchBuffer,
2824 indirect_offset: crate::BufferAddress,
2825 ) {
2826 let indirect_buffer = indirect_buffer.as_core();
2827
2828 if let Err(cause) = self.context.0.compute_pass_dispatch_workgroups_indirect(
2829 &mut self.pass,
2830 indirect_buffer.id,
2831 indirect_offset,
2832 ) {
2833 self.context.handle_error(
2834 &self.error_sink,
2835 cause,
2836 self.pass.label(),
2837 "ComputePass::dispatch_workgroups_indirect",
2838 );
2839 }
2840 }
2841
2842 fn end(&mut self) {
2843 if let Err(cause) = self.context.0.compute_pass_end(&mut self.pass) {
2844 self.context.handle_error(
2845 &self.error_sink,
2846 cause,
2847 self.pass.label(),
2848 "ComputePass::end",
2849 );
2850 }
2851 }
2852}
2853
2854impl Drop for CoreComputePass {
2855 fn drop(&mut self) {
2856 dispatch::ComputePassInterface::end(self);
2857 }
2858}
2859
/// Recording interface for render passes.
///
/// Each method forwards one command to wgpu-core through the shared `Global`;
/// any recording error is sent to this pass's error sink, tagged with the
/// pass label and the name of the API call that failed.
impl dispatch::RenderPassInterface for CoreRenderPass {
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
        let pipeline = pipeline.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_set_pipeline(&mut self.pass, pipeline.id)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_pipeline",
            );
        }
    }

    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        // `None` unbinds the slot; core accepts an optional id directly.
        let bg = bind_group.map(|bg| bg.as_core().id);

        if let Err(cause) =
            self.context
                .0
                .render_pass_set_bind_group(&mut self.pass, index, bg, offsets)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_bind_group",
            );
        }
    }

    fn set_index_buffer(
        &mut self,
        buffer: &dispatch::DispatchBuffer,
        index_format: crate::IndexFormat,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_set_index_buffer(
            &mut self.pass,
            buffer.id,
            index_format,
            offset,
            size,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_index_buffer",
            );
        }
    }

    fn set_vertex_buffer(
        &mut self,
        slot: u32,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_set_vertex_buffer(
            &mut self.pass,
            slot,
            buffer.id,
            offset,
            size,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_vertex_buffer",
            );
        }
    }

    fn set_push_constants(&mut self, stages: crate::ShaderStages, offset: u32, data: &[u8]) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_set_push_constants(&mut self.pass, stages, offset, data)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_push_constants",
            );
        }
    }

    // --- Dynamic state -----------------------------------------------------

    fn set_blend_constant(&mut self, color: crate::Color) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_blend_constant(&mut self.pass, color)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_blend_constant",
            );
        }
    }

    fn set_scissor_rect(&mut self, x: u32, y: u32, width: u32, height: u32) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_set_scissor_rect(&mut self.pass, x, y, width, height)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_scissor_rect",
            );
        }
    }

    fn set_viewport(
        &mut self,
        x: f32,
        y: f32,
        width: f32,
        height: f32,
        min_depth: f32,
        max_depth: f32,
    ) {
        if let Err(cause) = self.context.0.render_pass_set_viewport(
            &mut self.pass,
            x,
            y,
            width,
            height,
            min_depth,
            max_depth,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_viewport",
            );
        }
    }

    fn set_stencil_reference(&mut self, reference: u32) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_stencil_reference(&mut self.pass, reference)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_stencil_reference",
            );
        }
    }

    // --- Draw calls --------------------------------------------------------
    // The public API uses half-open `Range`s; wgpu-core takes count + start,
    // hence the `end - start` conversions below.

    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
        if let Err(cause) = self.context.0.render_pass_draw(
            &mut self.pass,
            vertices.end - vertices.start,
            instances.end - instances.start,
            vertices.start,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw",
            );
        }
    }

    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
        if let Err(cause) = self.context.0.render_pass_draw_indexed(
            &mut self.pass,
            indices.end - indices.start,
            instances.end - instances.start,
            indices.start,
            base_vertex,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed",
            );
        }
    }

    fn draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indirect",
            );
        }
    }

    fn draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed_indirect",
            );
        }
    }

    fn multi_draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect",
            );
        }
    }

    fn multi_draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect",
            );
        }
    }

    fn multi_draw_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect_count(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count_buffer.id,
            count_buffer_offset,
            max_count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect_count",
            );
        }
    }

    fn multi_draw_indexed_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_multi_draw_indexed_indirect_count(
                &mut self.pass,
                indirect_buffer.id,
                indirect_offset,
                count_buffer.id,
                count_buffer_offset,
                max_count,
            )
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect_count",
            );
        }
    }

    // --- Debug markers -----------------------------------------------------
    // The trailing `0` is an extra color/metadata word taken by the core API;
    // NOTE(review): presumed unused by wgpu-core — confirm.

    fn insert_debug_marker(&mut self, label: &str) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_insert_debug_marker(&mut self.pass, label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::insert_debug_marker",
            );
        }
    }

    fn push_debug_group(&mut self, group_label: &str) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_push_debug_group(&mut self.pass, group_label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::push_debug_group",
            );
        }
    }

    fn pop_debug_group(&mut self) {
        if let Err(cause) = self.context.0.render_pass_pop_debug_group(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::pop_debug_group",
            );
        }
    }

    // --- Queries -----------------------------------------------------------

    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
        let query_set = query_set.as_core();

        if let Err(cause) =
            self.context
                .0
                .render_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::write_timestamp",
            );
        }
    }

    fn begin_occlusion_query(&mut self, query_index: u32) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_begin_occlusion_query(&mut self.pass, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_occlusion_query",
            );
        }
    }

    fn end_occlusion_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_occlusion_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_occlusion_query",
            );
        }
    }

    fn begin_pipeline_statistics_query(
        &mut self,
        query_set: &dispatch::DispatchQuerySet,
        query_index: u32,
    ) {
        let query_set = query_set.as_core();

        if let Err(cause) = self.context.0.render_pass_begin_pipeline_statistics_query(
            &mut self.pass,
            query_set.id,
            query_index,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_pipeline_statistics_query",
            );
        }
    }

    fn end_pipeline_statistics_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_pipeline_statistics_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_pipeline_statistics_query",
            );
        }
    }

    fn execute_bundles(
        &mut self,
        render_bundles: &mut dyn Iterator<Item = &dispatch::DispatchRenderBundle>,
    ) {
        // Inline storage for the common case of a few bundles per call.
        let temp_render_bundles = render_bundles
            .map(|rb| rb.as_core().id)
            .collect::<SmallVec<[_; 4]>>();
        if let Err(cause) = self
            .context
            .0
            .render_pass_execute_bundles(&mut self.pass, &temp_render_bundles)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::execute_bundles",
            );
        }
    }

    fn end(&mut self) {
        if let Err(cause) = self.context.0.render_pass_end(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end",
            );
        }
    }
}
3376
3377impl Drop for CoreRenderPass {
3378 fn drop(&mut self) {
3379 dispatch::RenderPassInterface::end(self);
3380 }
3381}
3382
/// Recording interface for render bundles.
///
/// Commands are recorded through wgc's bundle FFI helpers; unlike passes,
/// errors surface only when the bundle is finished.
impl dispatch::RenderBundleEncoderInterface for CoreRenderBundleEncoder {
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
        let pipeline = pipeline.as_core();

        wgpu_render_bundle_set_pipeline(&mut self.encoder, pipeline.id)
    }

    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        // `None` unbinds the slot.
        let bg = bind_group.map(|bg| bg.as_core().id);

        // SAFETY: the pointer and length are taken from the same live
        // `offsets` slice, which outlives this call.
        unsafe {
            wgpu_render_bundle_set_bind_group(
                &mut self.encoder,
                index,
                bg,
                offsets.as_ptr(),
                offsets.len(),
            )
        }
    }

    fn set_index_buffer(
        &mut self,
        buffer: &dispatch::DispatchBuffer,
        index_format: crate::IndexFormat,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        self.encoder
            .set_index_buffer(buffer.id, index_format, offset, size)
    }

    fn set_vertex_buffer(
        &mut self,
        slot: u32,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        wgpu_render_bundle_set_vertex_buffer(&mut self.encoder, slot, buffer.id, offset, size)
    }

    fn set_push_constants(&mut self, stages: crate::ShaderStages, offset: u32, data: &[u8]) {
        // SAFETY: length and pointer both come from the same live `data`
        // slice; the length conversion panics rather than truncating.
        unsafe {
            wgpu_render_bundle_set_push_constants(
                &mut self.encoder,
                stages,
                offset,
                data.len().try_into().unwrap(),
                data.as_ptr(),
            )
        }
    }

    // The public API uses half-open `Range`s; the FFI takes count + start.
    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
        wgpu_render_bundle_draw(
            &mut self.encoder,
            vertices.end - vertices.start,
            instances.end - instances.start,
            vertices.start,
            instances.start,
        )
    }

    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
        wgpu_render_bundle_draw_indexed(
            &mut self.encoder,
            indices.end - indices.start,
            instances.end - instances.start,
            indices.start,
            base_vertex,
            instances.start,
        )
    }

    fn draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        wgpu_render_bundle_draw_indirect(&mut self.encoder, indirect_buffer.id, indirect_offset)
    }

    fn draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        wgpu_render_bundle_draw_indexed_indirect(
            &mut self.encoder,
            indirect_buffer.id,
            indirect_offset,
        )
    }

    /// Finishes recording and produces the immutable render bundle.
    /// Finishing errors are fatal: there is no per-bundle error sink.
    fn finish(self, desc: &crate::RenderBundleDescriptor<'_>) -> dispatch::DispatchRenderBundle
    where
        Self: Sized,
    {
        let (id, error) = self.context.0.render_bundle_encoder_finish(
            self.encoder,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
        );
        if let Some(err) = error {
            self.context
                .handle_error_fatal(err, "RenderBundleEncoder::finish");
        }
        CoreRenderBundle { id }.into()
    }
}
3507
// Marker impl: render bundles expose no operations beyond creation and drop.
impl dispatch::RenderBundleInterface for CoreRenderBundle {}
3509
3510impl dispatch::SurfaceInterface for CoreSurface {
3511 fn get_capabilities(&self, adapter: &dispatch::DispatchAdapter) -> wgt::SurfaceCapabilities {
3512 let adapter = adapter.as_core();
3513
3514 self.context
3515 .0
3516 .surface_get_capabilities(self.id, adapter.id)
3517 .unwrap_or_default()
3518 }
3519
3520 fn configure(&self, device: &dispatch::DispatchDevice, config: &crate::SurfaceConfiguration) {
3521 let device = device.as_core();
3522
3523 let error = self.context.0.surface_configure(self.id, device.id, config);
3524 if let Some(e) = error {
3525 self.context
3526 .handle_error_nolabel(&device.error_sink, e, "Surface::configure");
3527 } else {
3528 *self.configured_device.lock() = Some(device.id);
3529 *self.error_sink.lock() = Some(device.error_sink.clone());
3530 }
3531 }
3532
3533 fn get_current_texture(
3534 &self,
3535 ) -> (
3536 Option<dispatch::DispatchTexture>,
3537 crate::SurfaceStatus,
3538 dispatch::DispatchSurfaceOutputDetail,
3539 ) {
3540 let output_detail = CoreSurfaceOutputDetail {
3541 context: self.context.clone(),
3542 surface_id: self.id,
3543 }
3544 .into();
3545
3546 match self.context.0.surface_get_current_texture(self.id, None) {
3547 Ok(wgc::present::SurfaceOutput {
3548 status,
3549 texture: texture_id,
3550 }) => {
3551 let data = texture_id
3552 .map(|id| CoreTexture {
3553 context: self.context.clone(),
3554 id,
3555 error_sink: Arc::new(Mutex::new(ErrorSinkRaw::new())),
3556 })
3557 .map(Into::into);
3558
3559 (data, status, output_detail)
3560 }
3561 Err(err) => {
3562 let error_sink = self.error_sink.lock();
3563 match error_sink.as_ref() {
3564 Some(error_sink) => {
3565 self.context.handle_error_nolabel(
3566 error_sink,
3567 err,
3568 "Surface::get_current_texture_view",
3569 );
3570 (None, crate::SurfaceStatus::Unknown, output_detail)
3571 }
3572 None => self
3573 .context
3574 .handle_error_fatal(err, "Surface::get_current_texture_view"),
3575 }
3576 }
3577 }
3578 }
3579}
3580
3581impl Drop for CoreSurface {
3582 fn drop(&mut self) {
3583 self.context.0.surface_drop(self.id)
3584 }
3585}
3586
3587impl dispatch::SurfaceOutputDetailInterface for CoreSurfaceOutputDetail {
3588 fn present(&self) {
3589 match self.context.0.surface_present(self.surface_id) {
3590 Ok(_status) => (),
3591 Err(err) => self.context.handle_error_fatal(err, "Surface::present"),
3592 }
3593 }
3594
3595 fn texture_discard(&self) {
3596 match self.context.0.surface_texture_discard(self.surface_id) {
3597 Ok(_status) => (),
3598 Err(err) => self
3599 .context
3600 .handle_error_fatal(err, "Surface::discard_texture"),
3601 }
3602 }
3603}
impl Drop for CoreSurfaceOutputDetail {
    fn drop(&mut self) {
        // Intentionally empty: presenting or discarding the acquired texture
        // happens explicitly via `present()` / `texture_discard()`, not on drop.
        // NOTE(review): confirm no implicit cleanup is required here.
    }
}
3611
3612impl dispatch::QueueWriteBufferInterface for CoreQueueWriteBuffer {
3613 fn slice(&self) -> &[u8] {
3614 panic!()
3615 }
3616
3617 #[inline]
3618 fn slice_mut(&mut self) -> &mut [u8] {
3619 self.mapping.slice_mut()
3620 }
3621}
impl Drop for CoreQueueWriteBuffer {
    fn drop(&mut self) {
        // Intentionally empty: the staging buffer and its mapping are released
        // by their own destructors.
        // NOTE(review): confirm no explicit unmap/free is needed here.
    }
}
3629
/// View over a mapped buffer region as a byte slice.
impl dispatch::BufferMappedRangeInterface for CoreBufferMappedRange {
    #[inline]
    fn slice(&self) -> &[u8] {
        // SAFETY: `ptr`/`size` are assumed to describe a live mapped region
        // valid for reads for the lifetime of `self` — an invariant upheld by
        // whoever constructed this `CoreBufferMappedRange`.
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.size) }
    }

    #[inline]
    fn slice_mut(&mut self) -> &mut [u8] {
        // SAFETY: as above, plus `&mut self` guarantees exclusive access,
        // so handing out a unique mutable slice cannot alias.
        unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.size) }
    }

    #[cfg(webgpu)]
    fn as_uint8array(&self) -> &js_sys::Uint8Array {
        // The wgpu-core backend never runs on the browser WebGPU path.
        panic!("Only available on WebGPU")
    }
}