// skia_safe/gpu/ganesh/direct_context.rs

use std::{
    fmt,
    ops::{Deref, DerefMut},
    ptr,
    time::Duration,
};

use skia_bindings::{self as sb, GrDirectContext, GrDirectContext_DirectContextID, SkRefCntBase};

use crate::{
    gpu::{
        BackendFormat, BackendRenderTarget, BackendTexture, ContextOptions, FlushInfo,
        GpuStatsFlags, MutableTextureState, PurgeResourceOptions, RecordingContext,
        SemaphoresSubmitted, SubmitInfo, SyncCpu,
    },
    prelude::*,
    surfaces, Data, Image, Surface, TextureCompressionType,
};

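/// An identifier for a [`DirectContext`], mirroring the native
/// `GrDirectContext::DirectContextID`. Returned by [`DirectContext::id()`].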
#[repr(C)]
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct DirectContextId {
    id: u32,
}

native_transmutable!(
    GrDirectContext_DirectContextID,
    DirectContextId,
    direct_context_id_layout
);

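/// A reference-counted handle to Skia's `GrDirectContext`, the Ganesh GPU context
/// through which GPU-backed surfaces and resources are created, flushed, and
/// submitted.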
pub type DirectContext = RCHandle<GrDirectContext>;

impl NativeRefCountedBase for GrDirectContext {
    type Base = SkRefCntBase;
}

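// `GrDirectContext` is a subclass of `GrRecordingContext` on the native side, so a
// `DirectContext` handle can be reinterpreted as a `RecordingContext` handle.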
impl Deref for DirectContext {
    type Target = RecordingContext;

    fn deref(&self) -> &Self::Target {
        unsafe { transmute_ref(self) }
    }
}

impl DerefMut for DirectContext {
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { transmute_ref_mut(self) }
    }
}

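/// Limits applied to the GPU resource cache: a maximum resource count and a
/// maximum byte size. See [`DirectContext::set_resource_cache_limits()`].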
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct ResourceCacheLimits {
    pub max_resources: usize,
    pub max_resource_bytes: usize,
}

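/// A snapshot of GPU resource cache usage: the number of cached resources and the
/// bytes they occupy. See [`DirectContext::resource_cache_usage()`].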
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct ResourceCacheUsage {
    pub resource_count: usize,
    pub resource_bytes: usize,
}

impl fmt::Debug for DirectContext {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("DirectContext")
            .field("base", self as &RecordingContext)
            .field("resource_cache_limit", &self.resource_cache_limit())
            .field("resource_cache_usage", &self.resource_cache_usage())
            .field(
                "resource_cache_purgeable_bytes",
                &self.resource_cache_purgeable_bytes(),
            )
            .field(
                "supports_distance_field_text",
                &self.supports_distance_field_text(),
            )
            .finish()
    }
}

impl DirectContext {
    // Removed from Skia
    #[cfg(feature = "gl")]
    #[deprecated(since = "0.74.0", note = "use gpu::direct_contexts::make_gl()")]
    pub fn new_gl<'a>(
        interface: impl Into<crate::gpu::gl::Interface>,
        options: impl Into<Option<&'a ContextOptions>>,
    ) -> Option<DirectContext> {
        crate::gpu::direct_contexts::make_gl(interface, options)
    }

    // Removed from Skia
    #[cfg(feature = "vulkan")]
    #[deprecated(since = "0.74.0", note = "use gpu::direct_contexts::make_vulkan()")]
    pub fn new_vulkan<'a>(
        backend_context: &crate::gpu::vk::BackendContext,
        options: impl Into<Option<&'a ContextOptions>>,
    ) -> Option<DirectContext> {
        crate::gpu::direct_contexts::make_vulkan(backend_context, options)
    }

    #[cfg(feature = "metal")]
    #[deprecated(since = "0.74.0", note = "use gpu::direct_contexts::make_metal()")]
    pub fn new_metal<'a>(
        backend_context: &crate::gpu::mtl::BackendContext,
        options: impl Into<Option<&'a ContextOptions>>,
    ) -> Option<DirectContext> {
        crate::gpu::direct_contexts::make_metal(backend_context, options)
    }

    #[cfg(feature = "d3d")]
    #[allow(clippy::missing_safety_doc)]
    pub unsafe fn new_d3d<'a>(
        backend_context: &crate::gpu::d3d::BackendContext,
        options: impl Into<Option<&'a ContextOptions>>,
    ) -> Option<DirectContext> {
        DirectContext::from_ptr(sb::C_GrDirectContext_MakeDirect3D(
            backend_context.native(),
            options.into().native_ptr_or_null(),
        ))
    }

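    /// Informs the context that the state of the underlying backend API was
    /// modified outside of Skia and that its cached state should be
    /// resynchronized. `backend_state` is a backend-specific bitmask of the state
    /// that changed; `None` resets all state.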
    pub fn reset(&mut self, backend_state: Option<u32>) -> &mut Self {
        unsafe {
            self.native_mut()
                .resetContext(backend_state.unwrap_or(sb::kAll_GrBackendState))
        }
        self
    }

    pub fn reset_gl_texture_bindings(&mut self) -> &mut Self {
        unsafe { self.native_mut().resetGLTextureBindings() }
        self
    }

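    /// Abandons the context: the underlying backend API context is assumed to be
    /// lost or destroyed, so no further backend calls are made and GPU resources
    /// are not cleaned up. The context (and surfaces or images created from it)
    /// can no longer be used for GPU work.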
    pub fn abandon(&mut self) -> &mut Self {
        unsafe {
            // self.native_mut().abandonContext()
            sb::GrDirectContext_abandonContext(self.native_mut() as *mut _ as _)
        }
        self
    }

    pub fn is_device_lost(&mut self) -> bool {
        unsafe { self.native_mut().isDeviceLost() }
    }

    // TODO: threadSafeProxy()

    pub fn oomed(&mut self) -> bool {
        unsafe { self.native_mut().oomed() }
    }

    pub fn release_resources_and_abandon(&mut self) -> &mut Self {
        unsafe {
            sb::GrDirectContext_releaseResourcesAndAbandonContext(self.native_mut() as *mut _ as _)
        }
        self
    }

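    /// Returns the current byte limit of the GPU resource cache.
    ///
    /// Illustrative sketch of inspecting the cache (assumes `context` is a live
    /// `DirectContext`):
    ///
    /// ```ignore
    /// let limit = context.resource_cache_limit();
    /// let usage = context.resource_cache_usage();
    /// println!("GPU cache: {} of {} bytes in use", usage.resource_bytes, limit);
    /// ```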
    pub fn resource_cache_limit(&self) -> usize {
        unsafe { self.native().getResourceCacheLimit() }
    }

    pub fn resource_cache_usage(&self) -> ResourceCacheUsage {
        let mut resource_count = 0;
        let mut resource_bytes = 0;
        unsafe {
            self.native()
                .getResourceCacheUsage(&mut resource_count, &mut resource_bytes)
        }
        ResourceCacheUsage {
            resource_count: resource_count.try_into().unwrap(),
            resource_bytes,
        }
    }

    pub fn resource_cache_purgeable_bytes(&self) -> usize {
        unsafe { self.native().getResourceCachePurgeableBytes() }
    }

    pub fn set_resource_cache_limits(&mut self, limits: ResourceCacheLimits) {
        unsafe {
            self.native_mut().setResourceCacheLimits(
                limits.max_resources.try_into().unwrap(),
                limits.max_resource_bytes,
            )
        }
    }

    pub fn set_resource_cache_limit(&mut self, max_resource_bytes: usize) {
        unsafe { self.native_mut().setResourceCacheLimit(max_resource_bytes) }
    }

    pub fn free_gpu_resources(&mut self) -> &mut Self {
        unsafe { sb::GrDirectContext_freeGpuResources(self.native_mut() as *mut _ as _) }
        self
    }

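    /// Purges GPU resources that have not been used within the last `not_used`
    /// duration (or are otherwise marked for deletion). `opts` defaults to
    /// [`PurgeResourceOptions::AllResources`].
    ///
    /// Minimal sketch (assumes `context` is a live `DirectContext`):
    ///
    /// ```ignore
    /// use std::time::Duration;
    /// // Drop cached resources that have been idle for at least five seconds.
    /// context.perform_deferred_cleanup(Duration::from_secs(5), None);
    /// ```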
    pub fn perform_deferred_cleanup(
        &mut self,
        not_used: Duration,
        opts: impl Into<Option<PurgeResourceOptions>>,
    ) -> &mut Self {
        unsafe {
            sb::C_GrDirectContext_performDeferredCleanup(
                self.native_mut(),
                not_used.as_millis().try_into().unwrap(),
                opts.into().unwrap_or(PurgeResourceOptions::AllResources),
            )
        }
        self
    }

    pub fn purge_unlocked_resource_bytes(
        &mut self,
        bytes_to_purge: usize,
        prefer_scratch_resources: bool,
    ) -> &mut Self {
        unsafe {
            self.native_mut()
                .purgeUnlockedResources(bytes_to_purge, prefer_scratch_resources)
        }
        self
    }

    pub fn purge_unlocked_resources(&mut self, opts: PurgeResourceOptions) -> &mut Self {
        unsafe { self.native_mut().purgeUnlockedResources1(opts) }
        self
    }

    pub fn supported_gpu_stats(&self) -> GpuStatsFlags {
        GpuStatsFlags::from_bits_truncate(unsafe { self.native().supportedGpuStats() })
    }

    // TODO: wait()

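    /// Flushes all pending work to the backend API and submits it to the GPU,
    /// equivalent to calling `flush()` followed by `submit()`.
    ///
    /// Typical per-frame use, a sketch that assumes `surface` was created against
    /// this context:
    ///
    /// ```ignore
    /// // ... draw to surface.canvas() ...
    /// context.flush_and_submit();
    /// ```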
    pub fn flush_and_submit(&mut self) -> &mut Self {
        unsafe { sb::C_GrDirectContext_flushAndSubmit(self.native_mut()) }
        self
    }

    pub fn flush_submit_and_sync_cpu(&mut self) -> &mut Self {
        self.flush(&FlushInfo::default());
        self.submit(SyncCpu::Yes);
        self
    }

    #[deprecated(since = "0.37.0", note = "Use flush()")]
    pub fn flush_with_info(&mut self, info: &FlushInfo) -> SemaphoresSubmitted {
        self.flush(info)
    }

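    /// Flushes recorded work to the backend API without submitting it to the GPU;
    /// call `submit()` afterwards to actually execute it. Pass a [`FlushInfo`] to
    /// attach semaphores or a finished callback, or `None` for the defaults.
    ///
    /// A sketch of a plain flush-then-submit cycle:
    ///
    /// ```ignore
    /// context.flush(None);
    /// context.submit(None);
    /// ```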
    pub fn flush<'a>(&mut self, info: impl Into<Option<&'a FlushInfo>>) -> SemaphoresSubmitted {
        let n = self.native_mut();
        if let Some(info) = info.into() {
            unsafe { n.flush(info.native()) }
        } else {
            let info = FlushInfo::default();
            unsafe { n.flush(info.native()) }
        }
    }

    pub fn flush_image_with_info(
        &mut self,
        image: &Image,
        info: &FlushInfo,
    ) -> SemaphoresSubmitted {
        unsafe {
            sb::C_GrDirectContext_flushImageWithInfo(
                self.native_mut(),
                image.clone().into_ptr(),
                info.native(),
            )
        }
    }

    pub fn flush_image(&mut self, image: &Image) {
        unsafe { sb::C_GrDirectContext_flushImage(self.native_mut(), image.clone().into_ptr()) }
    }

    pub fn flush_and_submit_image(&mut self, image: &Image) {
        unsafe {
            sb::C_GrDirectContext_flushAndSubmitImage(self.native_mut(), image.clone().into_ptr())
        }
    }

    pub fn flush_surface_with_access(
        &mut self,
        surface: &mut Surface,
        access: surfaces::BackendSurfaceAccess,
        info: &FlushInfo,
    ) -> SemaphoresSubmitted {
        unsafe {
            self.native_mut()
                .flush3(surface.native_mut(), access, info.native())
        }
    }

    pub fn flush_surface_with_texture_state(
        &mut self,
        surface: &mut Surface,
        info: &FlushInfo,
        new_state: Option<&MutableTextureState>,
    ) -> SemaphoresSubmitted {
        unsafe {
            self.native_mut().flush4(
                surface.native_mut(),
                info.native(),
                new_state.native_ptr_or_null(),
            )
        }
    }

    pub fn flush_and_submit_surface(
        &mut self,
        surface: &mut Surface,
        sync_cpu: impl Into<Option<SyncCpu>>,
    ) {
        unsafe {
            self.native_mut()
                .flushAndSubmit1(surface.native_mut(), sync_cpu.into().unwrap_or(SyncCpu::No))
        }
    }

    pub fn flush_surface(&mut self, surface: &mut Surface) {
        unsafe { self.native_mut().flush5(surface.native_mut()) }
    }

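    /// Submits previously flushed work to the GPU; returns whether the submission
    /// succeeded. `SyncCpu::Yes` blocks the CPU until the GPU has finished the
    /// submitted work. See the `tests` module below for the accepted argument
    /// forms.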
    pub fn submit(&mut self, submit_info: impl Into<SubmitInfo>) -> bool {
        unsafe { self.native_mut().submit(&submit_info.into().into_native()) }
    }

    pub fn check_async_work_completion(&mut self) {
        unsafe { self.native_mut().checkAsyncWorkCompletion() }
    }

    // TODO: dumpMemoryStatistics()

    pub fn supports_distance_field_text(&self) -> bool {
        unsafe { self.native().supportsDistanceFieldText() }
    }

    #[cfg(feature = "vulkan")]
    pub fn store_vk_pipeline_cache_data(&mut self) -> &mut Self {
        unsafe {
            self.native_mut().storeVkPipelineCacheData();
        }
        self
    }

    // TODO: wrap createBackendTexture (several variants)
    //       introduced in m76, m77, and m79
    //       extended in m84 with finishedProc and finishedContext
    //       extended in m107 with label

    // TODO: wrap updateBackendTexture (several variants)
    //       introduced in m84

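    /// Returns the backend texture format this context uses for the given
    /// compressed texture type; the returned format may be invalid if the
    /// compression type is not supported by the backend.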
    pub fn compressed_backend_format(&self, compression: TextureCompressionType) -> BackendFormat {
        let mut backend_format = BackendFormat::new_invalid();
        unsafe {
            sb::C_GrDirectContext_compressedBackendFormat(
                self.native(),
                compression,
                backend_format.native_mut(),
            )
        };
        backend_format
    }

    // TODO: wrap createCompressedBackendTexture (several variants)
    //       introduced in m81
    //       extended in m84 with finishedProc and finishedContext

    // TODO: wrap updateCompressedBackendTexture (two variants)
    //       introduced in m86

    // TODO: add variant with GpuFinishedProc / GpuFinishedContext
    pub fn set_backend_texture_state(
        &mut self,
        backend_texture: &BackendTexture,
        state: &MutableTextureState,
    ) -> bool {
        self.set_backend_texture_state_and_return_previous(backend_texture, state)
            .is_some()
    }

    pub fn set_backend_texture_state_and_return_previous(
        &mut self,
        backend_texture: &BackendTexture,
        state: &MutableTextureState,
    ) -> Option<MutableTextureState> {
        let mut previous = MutableTextureState::default();
        unsafe {
            self.native_mut().setBackendTextureState(
                backend_texture.native(),
                state.native(),
                previous.native_mut(),
                None,
                ptr::null_mut(),
            )
        }
        .if_true_some(previous)
    }

    // TODO: add variant with GpuFinishedProc / GpuFinishedContext
    pub fn set_backend_render_target_state(
        &mut self,
        target: &BackendRenderTarget,
        state: &MutableTextureState,
    ) -> bool {
        self.set_backend_render_target_state_and_return_previous(target, state)
            .is_some()
    }

    pub fn set_backend_render_target_state_and_return_previous(
        &mut self,
        target: &BackendRenderTarget,
        state: &MutableTextureState,
    ) -> Option<MutableTextureState> {
        let mut previous = MutableTextureState::default();
        unsafe {
            self.native_mut().setBackendRenderTargetState(
                target.native(),
                state.native(),
                previous.native_mut(),
                None,
                ptr::null_mut(),
            )
        }
        .if_true_some(previous)
    }

    pub fn delete_backend_texture(&mut self, texture: &BackendTexture) {
        unsafe { self.native_mut().deleteBackendTexture(texture.native()) }
    }

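    /// Precompiles a shader from a key/data pair previously produced by Skia's
    /// persistent shader cache; returns `true` on success.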
    pub fn precompile_shader(&mut self, key: &Data, data: &Data) -> bool {
        unsafe {
            self.native_mut()
                .precompileShader(key.native(), data.native())
        }
    }

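    /// Returns an identifier for this context that is unique among direct
    /// contexts.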
    pub fn id(&self) -> DirectContextId {
        let mut id = DirectContextId { id: 0 };
        unsafe { sb::C_GrDirectContext_directContextId(self.native(), id.native_mut()) }
        id
    }
}

#[cfg(test)]
mod tests {
    use super::DirectContext;
    use crate::gpu::{SubmitInfo, SyncCpu};

    #[allow(unused)]
    fn submit_invocation(direct_context: &mut DirectContext) {
        direct_context.submit(SyncCpu::Yes);
        direct_context.submit(None);
        direct_context.submit(Some(SyncCpu::Yes));
        direct_context.submit(SubmitInfo::default());
    }
}