// skia_safe/gpu/ganesh/direct_context.rs

use std::{
    fmt,
    ops::{Deref, DerefMut},
    ptr,
    time::Duration,
};

use crate::{
    gpu::{
        BackendFormat, BackendRenderTarget, BackendTexture, ContextOptions, FlushInfo,
        GpuStatsFlags, MutableTextureState, PurgeResourceOptions, RecordingContext,
        SemaphoresSubmitted, SubmitInfo, SyncCpu,
    },
    prelude::*,
    surfaces, Data, Image, Surface, TextureCompressionType,
};
use skia_bindings::{self as sb, GrDirectContext, GrDirectContext_DirectContextID, SkRefCntBase};

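/// Identifier of a [`DirectContext`], wrapping Skia's `GrDirectContext::DirectContextID`;
/// obtained via [`DirectContext::id()`].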
#[repr(C)]
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct DirectContextId {
    id: u32,
}

native_transmutable!(
    GrDirectContext_DirectContextID,
    DirectContextId,
    direct_context_id_layout
);

pub type DirectContext = RCHandle<GrDirectContext>;

impl NativeRefCountedBase for GrDirectContext {
    type Base = SkRefCntBase;
}

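// `GrDirectContext` derives from `GrRecordingContext` on the C++ side, so a `DirectContext`
// can be used wherever a `RecordingContext` is expected; the transmutes below rely on that
// relationship.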
impl Deref for DirectContext {
    type Target = RecordingContext;

    fn deref(&self) -> &Self::Target {
        unsafe { transmute_ref(self) }
    }
}

impl DerefMut for DirectContext {
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { transmute_ref_mut(self) }
    }
}

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct ResourceCacheLimits {
    pub max_resources: usize,
    pub max_resource_bytes: usize,
}

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct ResourceCacheUsage {
    pub resource_count: usize,
    pub resource_bytes: usize,
}

impl fmt::Debug for DirectContext {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("DirectContext")
            .field("base", self as &RecordingContext)
            .field("resource_cache_limit", &self.resource_cache_limit())
            .field("resource_cache_usage", &self.resource_cache_usage())
            .field(
                "resource_cache_purgeable_bytes",
                &self.resource_cache_purgeable_bytes(),
            )
            .field(
                "supports_distance_field_text",
                &self.supports_distance_field_text(),
            )
            .finish()
    }
}

impl DirectContext {
    // Removed from Skia
    #[cfg(feature = "gl")]
    #[deprecated(since = "0.74.0", note = "use gpu::direct_contexts::make_gl()")]
    pub fn new_gl<'a>(
        interface: impl Into<crate::gpu::gl::Interface>,
        options: impl Into<Option<&'a ContextOptions>>,
    ) -> Option<DirectContext> {
        crate::gpu::direct_contexts::make_gl(interface, options)
    }

    // Removed from Skia
    #[cfg(feature = "vulkan")]
    #[deprecated(since = "0.74.0", note = "use gpu::direct_contexts::make_vulkan()")]
    pub fn new_vulkan<'a>(
        backend_context: &crate::gpu::vk::BackendContext,
        options: impl Into<Option<&'a ContextOptions>>,
    ) -> Option<DirectContext> {
        crate::gpu::direct_contexts::make_vulkan(backend_context, options)
    }

    #[cfg(feature = "metal")]
    #[deprecated(since = "0.74.0", note = "use gpu::direct_contexts::make_metal()")]
    pub fn new_metal<'a>(
        backend_context: &crate::gpu::mtl::BackendContext,
        options: impl Into<Option<&'a ContextOptions>>,
    ) -> Option<DirectContext> {
        crate::gpu::direct_contexts::make_metal(backend_context, options)
    }

    #[cfg(feature = "d3d")]
    #[allow(clippy::missing_safety_doc)]
    pub unsafe fn new_d3d<'a>(
        backend_context: &crate::gpu::d3d::BackendContext,
        options: impl Into<Option<&'a ContextOptions>>,
    ) -> Option<DirectContext> {
        DirectContext::from_ptr(sb::C_GrDirectContext_MakeDirect3D(
            backend_context.native(),
            options.into().native_ptr_or_null(),
        ))
    }

    pub fn reset(&mut self, backend_state: Option<u32>) -> &mut Self {
        unsafe {
            self.native_mut()
                .resetContext(backend_state.unwrap_or(sb::kAll_GrBackendState))
        }
        self
    }

    pub fn reset_gl_texture_bindings(&mut self) -> &mut Self {
        unsafe { self.native_mut().resetGLTextureBindings() }
        self
    }

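    /// Abandons the backend 3D API context: this context and the GPU-backed resources created
    /// from it stop issuing backend API calls and become unusable for further rendering.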
    pub fn abandon(&mut self) -> &mut Self {
        unsafe {
            // self.native_mut().abandonContext()
            sb::GrDirectContext_abandonContext(self.native_mut() as *mut _ as _)
        }
        self
    }

    pub fn is_device_lost(&mut self) -> bool {
        unsafe { self.native_mut().isDeviceLost() }
    }

    // TODO: threadSafeProxy()

    pub fn oomed(&mut self) -> bool {
        unsafe { self.native_mut().oomed() }
    }

    pub fn release_resources_and_abandon(&mut self) -> &mut Self {
        unsafe {
            sb::GrDirectContext_releaseResourcesAndAbandonContext(self.native_mut() as *mut _ as _)
        }
        self
    }

    pub fn resource_cache_limit(&self) -> usize {
        unsafe { self.native().getResourceCacheLimit() }
    }

    pub fn resource_cache_usage(&self) -> ResourceCacheUsage {
        let mut resource_count = 0;
        let mut resource_bytes = 0;
        unsafe {
            self.native()
                .getResourceCacheUsage(&mut resource_count, &mut resource_bytes)
        }
        ResourceCacheUsage {
            resource_count: resource_count.try_into().unwrap(),
            resource_bytes,
        }
    }

    pub fn resource_cache_purgeable_bytes(&self) -> usize {
        unsafe { self.native().getResourceCachePurgeableBytes() }
    }

    pub fn set_resource_cache_limits(&mut self, limits: ResourceCacheLimits) {
        unsafe {
            self.native_mut().setResourceCacheLimits(
                limits.max_resources.try_into().unwrap(),
                limits.max_resource_bytes,
            )
        }
    }

    pub fn set_resource_cache_limit(&mut self, max_resource_bytes: usize) {
        unsafe { self.native_mut().setResourceCacheLimit(max_resource_bytes) }
    }

    pub fn free_gpu_resources(&mut self) -> &mut Self {
        unsafe { sb::GrDirectContext_freeGpuResources(self.native_mut() as *mut _ as _) }
        self
    }

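    /// Purges GPU resources that have not been used for at least `not_used` (or are otherwise
    /// marked purgeable), regardless of the cache budget. `opts` defaults to
    /// [`PurgeResourceOptions::AllResources`].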
    pub fn perform_deferred_cleanup(
        &mut self,
        not_used: Duration,
        opts: impl Into<Option<PurgeResourceOptions>>,
    ) -> &mut Self {
        unsafe {
            sb::C_GrDirectContext_performDeferredCleanup(
                self.native_mut(),
                not_used.as_millis().try_into().unwrap(),
                opts.into().unwrap_or(PurgeResourceOptions::AllResources),
            )
        }
        self
    }

    pub fn purge_unlocked_resource_bytes(
        &mut self,
        bytes_to_purge: usize,
        prefer_scratch_resources: bool,
    ) -> &mut Self {
        unsafe {
            self.native_mut()
                .purgeUnlockedResources(bytes_to_purge, prefer_scratch_resources)
        }
        self
    }

    pub fn purge_unlocked_resources(&mut self, opts: PurgeResourceOptions) -> &mut Self {
        unsafe { self.native_mut().purgeUnlockedResources1(opts) }
        self
    }

    pub fn supported_gpu_stats(&self) -> GpuStatsFlags {
        GpuStatsFlags::from_bits_truncate(unsafe { self.native().supportedGpuStats() })
    }

    // TODO: wait()

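    /// Flushes pending recorded work and submits it to the GPU in a single call.
    ///
    /// A minimal sketch of a typical draw-then-present step (the creation of `context` and the
    /// GPU-backed `surface` is assumed and not shown):
    ///
    /// ```ignore
    /// // context: &mut DirectContext, surface: &mut Surface created against this context
    /// surface.canvas().clear(skia_safe::Color::WHITE);
    /// context.flush_and_submit();
    /// ```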
    pub fn flush_and_submit(&mut self) -> &mut Self {
        unsafe { sb::C_GrDirectContext_flushAndSubmit(self.native_mut()) }
        self
    }

    pub fn flush_submit_and_sync_cpu(&mut self) -> &mut Self {
        self.flush(&FlushInfo::default());
        self.submit(SyncCpu::Yes);
        self
    }

    #[deprecated(since = "0.37.0", note = "Use flush()")]
    pub fn flush_with_info(&mut self, info: &FlushInfo) -> SemaphoresSubmitted {
        self.flush(info)
    }

    pub fn flush<'a>(&mut self, info: impl Into<Option<&'a FlushInfo>>) -> SemaphoresSubmitted {
        let n = self.native_mut();
        if let Some(info) = info.into() {
            unsafe { n.flush(info.native()) }
        } else {
            let info = FlushInfo::default();
            unsafe { n.flush(info.native()) }
        }
    }

    pub fn flush_image_with_info(
        &mut self,
        image: &Image,
        info: &FlushInfo,
    ) -> SemaphoresSubmitted {
        unsafe {
            sb::C_GrDirectContext_flushImageWithInfo(
                self.native_mut(),
                image.clone().into_ptr(),
                info.native(),
            )
        }
    }

    pub fn flush_image(&mut self, image: &Image) {
        unsafe { sb::C_GrDirectContext_flushImage(self.native_mut(), image.clone().into_ptr()) }
    }

    pub fn flush_and_submit_image(&mut self, image: &Image) {
        unsafe {
            sb::C_GrDirectContext_flushAndSubmitImage(self.native_mut(), image.clone().into_ptr())
        }
    }

    pub fn flush_surface_with_access(
        &mut self,
        surface: &mut Surface,
        access: surfaces::BackendSurfaceAccess,
        info: &FlushInfo,
    ) -> SemaphoresSubmitted {
        unsafe {
            self.native_mut()
                .flush3(surface.native_mut(), access, info.native())
        }
    }

    pub fn flush_surface_with_texture_state(
        &mut self,
        surface: &mut Surface,
        info: &FlushInfo,
        new_state: Option<&MutableTextureState>,
    ) -> SemaphoresSubmitted {
        unsafe {
            self.native_mut().flush4(
                surface.native_mut(),
                info.native(),
                new_state.native_ptr_or_null(),
            )
        }
    }

    pub fn flush_and_submit_surface(
        &mut self,
        surface: &mut Surface,
        sync_cpu: impl Into<Option<SyncCpu>>,
    ) {
        unsafe {
            self.native_mut()
                .flushAndSubmit1(surface.native_mut(), sync_cpu.into().unwrap_or(SyncCpu::No))
        }
    }

    pub fn flush_surface(&mut self, surface: &mut Surface) {
        unsafe { self.native_mut().flush5(surface.native_mut()) }
    }

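    /// Submits work recorded by previous `flush` calls to the GPU and returns whether the
    /// submission succeeded. Passing [`SyncCpu::Yes`] blocks until the GPU has finished the
    /// submitted work.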
    pub fn submit(&mut self, submit_info: impl Into<SubmitInfo>) -> bool {
        unsafe { self.native_mut().submit(&submit_info.into().into_native()) }
    }

    pub fn check_async_work_completion(&mut self) {
        unsafe { self.native_mut().checkAsyncWorkCompletion() }
    }

    // TODO: dumpMemoryStatistics()

    pub fn supports_distance_field_text(&self) -> bool {
        unsafe { self.native().supportsDistanceFieldText() }
    }
}

#[cfg(feature = "vulkan")]
impl DirectContext {
    pub fn can_detect_new_vk_pipeline_cache_data(&self) -> bool {
        unsafe { self.native().canDetectNewVkPipelineCacheData() }
    }

    pub fn has_new_vk_pipeline_cache_data(&self) -> bool {
        unsafe { self.native().hasNewVkPipelineCacheData() }
    }

    pub fn store_vk_pipeline_cache_data(&mut self) -> &mut Self {
        unsafe {
            self.native_mut().storeVkPipelineCacheData();
        }
        self
    }

    pub fn store_vk_pipeline_cache_data_with_max_size(&mut self, max_size: usize) -> &mut Self {
        unsafe {
            self.native_mut().storeVkPipelineCacheData1(max_size);
        }
        self
    }
}

impl DirectContext {
    // TODO: wrap createBackendTexture (several variants)
    //       introduced in m76, m77, and m79
    //       extended in m84 with finishedProc and finishedContext
    //       extended in m107 with label

    // TODO: wrap updateBackendTexture (several variants)
    //       introduced in m84

    pub fn compressed_backend_format(&self, compression: TextureCompressionType) -> BackendFormat {
        let mut backend_format = BackendFormat::new_invalid();
        unsafe {
            sb::C_GrDirectContext_compressedBackendFormat(
                self.native(),
                compression,
                backend_format.native_mut(),
            )
        };
        backend_format
    }

    // TODO: wrap createCompressedBackendTexture (several variants)
    //       introduced in m81
    //       extended in m84 with finishedProc and finishedContext

    // TODO: wrap updateCompressedBackendTexture (two variants)
    //       introduced in m86

    // TODO: add variant with GpuFinishedProc / GpuFinishedContext
    pub fn set_backend_texture_state(
        &mut self,
        backend_texture: &BackendTexture,
        state: &MutableTextureState,
    ) -> bool {
        self.set_backend_texture_state_and_return_previous(backend_texture, state)
            .is_some()
    }

    pub fn set_backend_texture_state_and_return_previous(
        &mut self,
        backend_texture: &BackendTexture,
        state: &MutableTextureState,
    ) -> Option<MutableTextureState> {
        let mut previous = MutableTextureState::default();
        unsafe {
            self.native_mut().setBackendTextureState(
                backend_texture.native(),
                state.native(),
                previous.native_mut(),
                None,
                ptr::null_mut(),
            )
        }
        .if_true_some(previous)
    }

    // TODO: add variant with GpuFinishedProc / GpuFinishedContext
    pub fn set_backend_render_target_state(
        &mut self,
        target: &BackendRenderTarget,
        state: &MutableTextureState,
    ) -> bool {
        self.set_backend_render_target_state_and_return_previous(target, state)
            .is_some()
    }

    pub fn set_backend_render_target_state_and_return_previous(
        &mut self,
        target: &BackendRenderTarget,
        state: &MutableTextureState,
    ) -> Option<MutableTextureState> {
        let mut previous = MutableTextureState::default();
        unsafe {
            self.native_mut().setBackendRenderTargetState(
                target.native(),
                state.native(),
                previous.native_mut(),
                None,
                ptr::null_mut(),
            )
        }
        .if_true_some(previous)
    }

    pub fn delete_backend_texture(&mut self, texture: &BackendTexture) {
        unsafe { self.native_mut().deleteBackendTexture(texture.native()) }
    }

    pub fn precompile_shader(&mut self, key: &Data, data: &Data) -> bool {
        unsafe {
            self.native_mut()
                .precompileShader(key.native(), data.native())
        }
    }

    pub fn id(&self) -> DirectContextId {
        let mut id = DirectContextId { id: 0 };
        unsafe { sb::C_GrDirectContext_directContextId(self.native(), id.native_mut()) }
        id
    }
}

#[cfg(test)]
mod tests {
    use super::DirectContext;
    use crate::gpu::{SubmitInfo, SyncCpu};

    #[allow(unused)]
    fn submit_invocation(direct_context: &mut DirectContext) {
        direct_context.submit(SyncCpu::Yes);
        direct_context.submit(None);
        direct_context.submit(Some(SyncCpu::Yes));
        direct_context.submit(SubmitInfo::default());
    }
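
    // A compile-only sketch mirroring `submit_invocation` above: the `flush` overloads accept
    // an optional `FlushInfo`.
    #[allow(unused)]
    fn flush_invocation(direct_context: &mut DirectContext) {
        direct_context.flush(None);
        direct_context.flush(&crate::gpu::FlushInfo::default());
        direct_context.flush_and_submit();
    }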
}