local_stats_alloc/lib.rs

// Copyright 2025 RisingWave Labs
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#![feature(allocator_api)]

//! A thin wrapper around any [`Allocator`] that keeps a running count of the
//! bytes currently allocated through it, so callers can report the memory
//! usage of data structures built on top of it.

use std::alloc::Allocator;
use std::ops::Deref;
use std::sync::atomic::AtomicUsize;
use std::sync::{Arc, atomic};

/// An [`Allocator`] adaptor that counts the bytes currently allocated through
/// the wrapped allocator `T`.
pub struct StatsAlloc<T> {
    /// Bytes handed out and not yet returned, measured by requested layout
    /// size rather than by what the backing allocator actually reserves.
    bytes_in_use: AtomicUsize,

    inner: T,
}

impl<T> StatsAlloc<T> {
    /// Wraps `inner`, starting the byte counter at zero.
    pub fn new(inner: T) -> Self {
        Self {
            bytes_in_use: AtomicUsize::new(0),
            inner,
        }
    }

    /// Returns the number of bytes currently allocated. The counter uses
    /// relaxed atomics, so this is a statistic, not a synchronized snapshot.
    pub fn bytes_in_use(&self) -> usize {
        self.bytes_in_use.load(atomic::Ordering::Relaxed)
    }

    /// Moves the allocator behind an [`Arc`] so that multiple owners (e.g.
    /// several collections) can allocate through one shared counter.
    pub fn shared(self) -> SharedStatsAlloc<T> {
        SharedStatsAlloc(Arc::new(self))
    }
}

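// Usage sketch, assuming a nightly toolchain (`allocator_api`) so that
// collections such as `Vec` accept a custom allocator:
//
//     let alloc = StatsAlloc::new(std::alloc::Global).shared();
//     let buf: Vec<u8, _> = Vec::with_capacity_in(1024, alloc.clone());
//     assert_eq!(alloc.bytes_in_use(), 1024);
//     drop(buf);
//     assert_eq!(alloc.bytes_in_use(), 0);
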
unsafe impl<T> Allocator for StatsAlloc<T>
where
    T: Allocator,
{
    #[inline(always)]
    fn allocate(
        &self,
        layout: std::alloc::Layout,
    ) -> Result<std::ptr::NonNull<[u8]>, std::alloc::AllocError> {
        // Count the requested size only after the inner allocation succeeds,
        // so a failed allocation cannot skew the statistic.
        let ptr = self.inner.allocate(layout)?;
        self.bytes_in_use
            .fetch_add(layout.size(), atomic::Ordering::Relaxed);
        Ok(ptr)
    }

    #[inline(always)]
    unsafe fn deallocate(&self, ptr: std::ptr::NonNull<u8>, layout: std::alloc::Layout) {
        self.bytes_in_use
            .fetch_sub(layout.size(), atomic::Ordering::Relaxed);
        // SAFETY: forwarded verbatim from our caller under the same contract.
        unsafe { self.inner.deallocate(ptr, layout) }
    }

    #[inline(always)]
    fn allocate_zeroed(
        &self,
        layout: std::alloc::Layout,
    ) -> Result<std::ptr::NonNull<[u8]>, std::alloc::AllocError> {
        let ptr = self.inner.allocate_zeroed(layout)?;
        self.bytes_in_use
            .fetch_add(layout.size(), atomic::Ordering::Relaxed);
        Ok(ptr)
    }

    #[inline(always)]
    unsafe fn grow(
        &self,
        ptr: std::ptr::NonNull<u8>,
        old_layout: std::alloc::Layout,
        new_layout: std::alloc::Layout,
    ) -> Result<std::ptr::NonNull<[u8]>, std::alloc::AllocError> {
        // SAFETY: forwarded verbatim from our caller under the same contract.
        let new_ptr = unsafe { self.inner.grow(ptr, old_layout, new_layout) }?;
        // Adjust by the delta between the layouts; on failure the old block
        // (and its accounted size) is still live, so the counter is untouched.
        self.bytes_in_use
            .fetch_add(new_layout.size(), atomic::Ordering::Relaxed);
        self.bytes_in_use
            .fetch_sub(old_layout.size(), atomic::Ordering::Relaxed);
        Ok(new_ptr)
    }

    #[inline(always)]
    unsafe fn grow_zeroed(
        &self,
        ptr: std::ptr::NonNull<u8>,
        old_layout: std::alloc::Layout,
        new_layout: std::alloc::Layout,
    ) -> Result<std::ptr::NonNull<[u8]>, std::alloc::AllocError> {
        // SAFETY: forwarded verbatim from our caller under the same contract.
        let new_ptr = unsafe { self.inner.grow_zeroed(ptr, old_layout, new_layout) }?;
        self.bytes_in_use
            .fetch_add(new_layout.size(), atomic::Ordering::Relaxed);
        self.bytes_in_use
            .fetch_sub(old_layout.size(), atomic::Ordering::Relaxed);
        Ok(new_ptr)
    }

    #[inline(always)]
    unsafe fn shrink(
        &self,
        ptr: std::ptr::NonNull<u8>,
        old_layout: std::alloc::Layout,
        new_layout: std::alloc::Layout,
    ) -> Result<std::ptr::NonNull<[u8]>, std::alloc::AllocError> {
        // SAFETY: forwarded verbatim from our caller under the same contract.
        let new_ptr = unsafe { self.inner.shrink(ptr, old_layout, new_layout) }?;
        self.bytes_in_use
            .fetch_add(new_layout.size(), atomic::Ordering::Relaxed);
        self.bytes_in_use
            .fetch_sub(old_layout.size(), atomic::Ordering::Relaxed);
        Ok(new_ptr)
    }
}

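// Worked example of the resize accounting (sizes are hypothetical): growing a
// 64-byte block to 256 bytes performs `fetch_add(256)` then `fetch_sub(64)`,
// a net +192 matching the new requested size. Because the two updates are
// separate relaxed operations, a concurrent `bytes_in_use()` reader may
// transiently observe an overshoot of up to the old size; the counter is a
// statistic, not a synchronization primitive.
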
/// A cloneable, reference-counted handle to a [`StatsAlloc`]; all clones
/// share the same byte counter.
pub struct SharedStatsAlloc<T>(Arc<StatsAlloc<T>>);

// Implemented by hand rather than derived so that cloning does not require
// `T: Clone`; only the `Arc` handle is cloned.
impl<T> Clone for SharedStatsAlloc<T> {
    fn clone(&self) -> Self {
        SharedStatsAlloc(self.0.clone())
    }
}

impl<T> Deref for SharedStatsAlloc<T> {
    type Target = StatsAlloc<T>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

// The shared handle implements `Allocator` by delegating every call to the
// inner `StatsAlloc`, so all clones update the same counter.
unsafe impl<T: Allocator> Allocator for SharedStatsAlloc<T> {
    fn allocate(
        &self,
        layout: std::alloc::Layout,
    ) -> Result<std::ptr::NonNull<[u8]>, std::alloc::AllocError> {
        self.0.allocate(layout)
    }

    unsafe fn deallocate(&self, ptr: std::ptr::NonNull<u8>, layout: std::alloc::Layout) {
        // SAFETY: forwarded verbatim from our caller under the same contract.
        unsafe { self.0.deallocate(ptr, layout) }
    }

    fn allocate_zeroed(
        &self,
        layout: std::alloc::Layout,
    ) -> Result<std::ptr::NonNull<[u8]>, std::alloc::AllocError> {
        self.0.allocate_zeroed(layout)
    }

    unsafe fn grow(
        &self,
        ptr: std::ptr::NonNull<u8>,
        old_layout: std::alloc::Layout,
        new_layout: std::alloc::Layout,
    ) -> Result<std::ptr::NonNull<[u8]>, std::alloc::AllocError> {
        // SAFETY: forwarded verbatim from our caller under the same contract.
        unsafe { self.0.grow(ptr, old_layout, new_layout) }
    }

    unsafe fn grow_zeroed(
        &self,
        ptr: std::ptr::NonNull<u8>,
        old_layout: std::alloc::Layout,
        new_layout: std::alloc::Layout,
    ) -> Result<std::ptr::NonNull<[u8]>, std::alloc::AllocError> {
        // SAFETY: forwarded verbatim from our caller under the same contract.
        unsafe { self.0.grow_zeroed(ptr, old_layout, new_layout) }
    }

    unsafe fn shrink(
        &self,
        ptr: std::ptr::NonNull<u8>,
        old_layout: std::alloc::Layout,
        new_layout: std::alloc::Layout,
    ) -> Result<std::ptr::NonNull<[u8]>, std::alloc::AllocError> {
        // SAFETY: forwarded verbatim from our caller under the same contract.
        unsafe { self.0.shrink(ptr, old_layout, new_layout) }
    }
}
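
// A minimal sanity test sketching the intended accounting behavior. It drives
// the `Allocator` API directly, so the expected counts are exact and do not
// depend on how collections size their requests.
#[cfg(test)]
mod tests {
    use std::alloc::{Global, Layout};

    use super::*;

    #[test]
    fn counter_tracks_allocate_and_deallocate() {
        let alloc = StatsAlloc::new(Global).shared();
        let layout = Layout::array::<u64>(8).unwrap();

        let ptr = alloc.allocate(layout).expect("allocation failed");
        assert_eq!(alloc.bytes_in_use(), layout.size());

        // SAFETY: `ptr` was just allocated from `alloc` with `layout`.
        unsafe { alloc.deallocate(ptr.cast(), layout) };
        assert_eq!(alloc.bytes_in_use(), 0);
    }
}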