// risingwave_common_estimate_size/lib.rs

#![feature(allocator_api)]
#![feature(btree_cursors)]
#![feature(btree_extract_if)]
pub mod collections;
use std::cmp::Reverse;
use std::marker::PhantomData;
use std::sync::atomic::{AtomicUsize, Ordering};
use bytes::Bytes;
use fixedbitset::FixedBitSet;
pub use risingwave_common_proc_macro::EstimateSize;
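/// A trait for estimating a value's in-memory footprint.
///
/// `estimated_heap_size` reports only the bytes owned on the heap;
/// `estimated_size` adds the shallow size of the value itself, so the two
/// differ by exactly `std::mem::size_of::<Self>()`.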
pub trait EstimateSize {
    fn estimated_heap_size(&self) -> usize;
    fn estimated_size(&self) -> usize
    where
        Self: Sized,
    {
        self.estimated_heap_size() + std::mem::size_of::<Self>()
    }
}
impl EstimateSize for FixedBitSet {
    fn estimated_heap_size(&self) -> usize {
        std::mem::size_of_val(self.as_slice())
    }
}
impl EstimateSize for String {
    fn estimated_heap_size(&self) -> usize {
        self.capacity()
    }
}
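// An `Option` adds no heap of its own; only the inner value's heap counts.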
impl<T: EstimateSize> EstimateSize for Option<T> {
    fn estimated_heap_size(&self) -> usize {
        if let Some(inner) = self {
            inner.estimated_heap_size()
        } else {
            0
        }
    }
}
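// `Bytes` is reference-counted: `len()` counts the viewed bytes, so clones
// sharing one buffer each report the same region.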
impl EstimateSize for Bytes {
    fn estimated_heap_size(&self) -> usize {
        self.len()
    }
}
impl EstimateSize for Box<str> {
    fn estimated_heap_size(&self) -> usize {
        self.len()
    }
}
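// Shallow estimate: counts the backing storage of arrays/objects but not the
// heap reachable from nested values, presumably trading accuracy for O(1)
// cost on the container variants.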
impl EstimateSize for serde_json::Value {
    fn estimated_heap_size(&self) -> usize {
        match self {
            Self::Null => 0,
            Self::Bool(_) => 0,
            Self::Number(_) => 0,
            Self::String(s) => s.estimated_heap_size(),
            Self::Array(v) => std::mem::size_of::<Self>() * v.capacity(),
            Self::Object(map) => std::mem::size_of::<Self>() * map.len(),
        }
    }
}
impl EstimateSize for jsonbb::Value {
    fn estimated_heap_size(&self) -> usize {
        self.capacity()
    }
}
impl EstimateSize for jsonbb::Builder {
    fn estimated_heap_size(&self) -> usize {
        self.capacity()
    }
}
impl<T1: EstimateSize, T2: EstimateSize> EstimateSize for (T1, T2) {
    fn estimated_heap_size(&self) -> usize {
        self.0.estimated_heap_size() + self.1.estimated_heap_size()
    }
}
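// Marks primitives as heap-free; they pick up `EstimateSize` through the
// blanket impl below.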
macro_rules! primitive_estimate_size_impl {
    ($($t:ty)*) => ($(
        impl ZeroHeapSize for $t {}
    )*)
}
primitive_estimate_size_impl! { () usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 bool }
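/// Marker trait for types whose values never own heap memory. Implementors
/// receive `EstimateSize` for free via the blanket impl, with a heap size
/// of zero.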
pub trait ZeroHeapSize {}
impl<T: ZeroHeapSize> EstimateSize for T {
    fn estimated_heap_size(&self) -> usize {
        0
    }
}
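// For heap-free element types the backing buffer is the only allocation,
// so `capacity * size_of::<T>()` is exact.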
impl<T: ZeroHeapSize> EstimateSize for Vec<T> {
    fn estimated_heap_size(&self) -> usize {
        std::mem::size_of::<T>() * self.capacity()
    }
}
impl<T: ZeroHeapSize> EstimateSize for Box<[T]> {
    fn estimated_heap_size(&self) -> usize {
        std::mem::size_of::<T>() * self.len()
    }
}
impl<T: EstimateSize> EstimateSize for Reverse<T> {
    fn estimated_heap_size(&self) -> usize {
        self.0.estimated_heap_size()
    }
}
impl<T: ZeroHeapSize, const LEN: usize> EstimateSize for [T; LEN] {
    fn estimated_heap_size(&self) -> usize {
        0
    }
}
impl ZeroHeapSize for rust_decimal::Decimal {}
impl ZeroHeapSize for ethnum::I256 {}
impl<T> ZeroHeapSize for PhantomData<T> {}
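/// A running total of estimated key/value sizes, e.g. for memory-tracked
/// caches. Backed by an `AtomicUsize` so shared references can update it;
/// all arithmetic saturates, since the counter is a statistic and must not
/// panic on transient under/overflow. Cloning snapshots the current value.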
#[derive(Default)]
pub struct KvSize(AtomicUsize);
impl Clone for KvSize {
    fn clone(&self) -> Self {
        Self(self.size().into())
    }
}
impl KvSize {
    pub fn new() -> Self {
        Self(0.into())
    }
    pub fn with_size(size: usize) -> Self {
        Self(size.into())
    }
    pub fn add<K: EstimateSize, V: EstimateSize>(&mut self, key: &K, val: &V) {
        self.add_size(key.estimated_size());
        self.add_size(val.estimated_size());
    }
    pub fn sub<K: EstimateSize, V: EstimateSize>(&mut self, key: &K, val: &V) {
        self.sub_size(key.estimated_size());
        self.sub_size(val.estimated_size());
    }
    pub fn add_val<V: EstimateSize>(&mut self, val: &V) -> usize {
        let size = val.estimated_size();
        self.add_size(size);
        size
    }
    pub fn sub_val<V: EstimateSize>(&mut self, val: &V) {
        self.sub_size(val.estimated_size());
    }
    pub fn add_size(&mut self, size: usize) {
        let this = self.0.get_mut();
        *this = this.saturating_add(size);
    }
    pub fn sub_size(&mut self, size: usize) {
        let this = self.0.get_mut();
        *this = this.saturating_sub(size);
    }
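    // Unlike the `&mut self` helpers above, this updates through a shared
    // reference. `Relaxed` ordering suffices because the counter is only a
    // statistic and synchronizes nothing else.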
    pub fn update_size_atomic(&self, from: usize, to: usize) {
        let _ = self.0.fetch_update(Ordering::Relaxed, Ordering::Relaxed, |this| {
            Some(this.saturating_add(to).saturating_sub(from))
        });
    }
    pub fn set(&mut self, size: usize) {
        self.0 = size.into();
    }
    pub fn size(&self) -> usize {
        self.0.load(Ordering::Relaxed)
    }
}
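// A minimal usage sketch (illustrative, not part of the original file):
// exercises the `String` impl and the `KvSize` bookkeeping helpers.
#[cfg(test)]
mod usage_sketch {
    use super::*;

    #[test]
    fn string_and_kv_size() {
        let s = String::from("hello");
        // The heap estimate of a `String` is its capacity; `estimated_size`
        // adds the shallow struct size on top.
        assert_eq!(s.estimated_heap_size(), s.capacity());
        assert_eq!(
            s.estimated_size(),
            s.capacity() + std::mem::size_of::<String>()
        );
        let mut kv = KvSize::new();
        let added = kv.add_val(&s);
        assert_eq!(kv.size(), added);
        kv.sub_val(&s);
        assert_eq!(kv.size(), 0);
    }

    #[test]
    fn saturating_sub_never_underflows() {
        let mut kv = KvSize::with_size(4);
        kv.sub_size(100);
        assert_eq!(kv.size(), 0);
    }
}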