// This module defines a common API for caching internal runtime state.
// The `thread_local` crate provides an extremely optimized version of this.
// However, if the perf-cache feature is disabled, then we drop the
// thread_local dependency and instead use a pretty naive caching mechanism
// with a mutex.
//
// Strictly speaking, the CachedGuard isn't necessary for the much more
// flexible thread_local API, but implementing thread_local's API doesn't
// seem possible in purely safe code.
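//
// As a rough usage sketch (not taken from this file; `ProgramCache` is a
// hypothetical caller-side type), the API is meant to be driven like this:
//
//     let cache: Cached<ProgramCache> = Cached::new();
//     let guard = cache.get_or(|| ProgramCache::new());
//     let state: &ProgramCache = guard.value();
//     drop(guard); // the mutex impl returns the value to its pool;
//                  // the thread_local impl keeps it in the thread local.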

pub use self::imp::{Cached, CachedGuard};

#[cfg(feature = "perf-cache")]
mod imp {
    use thread_local::CachedThreadLocal;

    #[derive(Debug)]
    pub struct Cached<T: Send>(CachedThreadLocal<T>);

    #[derive(Debug)]
    pub struct CachedGuard<'a, T: 'a>(&'a T);

    impl<T: Send> Cached<T> {
        pub fn new() -> Cached<T> {
            Cached(CachedThreadLocal::new())
        }

        pub fn get_or(&self, create: impl FnOnce() -> T) -> CachedGuard<T> {
            CachedGuard(self.0.get_or(|| Box::new(create())))
        }
    }

    impl<'a, T: Send> CachedGuard<'a, T> {
        pub fn value(&self) -> &T {
            self.0
        }
    }
}

#[cfg(not(feature = "perf-cache"))]
mod imp {
    use std::marker::PhantomData;
    use std::panic::UnwindSafe;
    use std::sync::Mutex;

    #[derive(Debug)]
    pub struct Cached<T: Send> {
        stack: Mutex<Vec<T>>,
        /// When perf-cache is enabled, the thread_local crate is used, and
        /// its CachedThreadLocal impls Send, Sync and UnwindSafe, but NOT
        /// RefUnwindSafe. However, a Mutex impls RefUnwindSafe. So in order
        /// to keep the APIs consistent regardless of whether perf-cache is
        /// enabled, we force this type to NOT impl RefUnwindSafe too.
        ///
        /// Ideally, we should always impl RefUnwindSafe, but it seems a little
        /// tricky to do that right now.
        ///
        /// See also: https://github.com/rust-lang/regex/issues/576
        _phantom: PhantomData<Box<dyn Send + Sync + UnwindSafe>>,
    }
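
    // A hedged illustration of the opt-out above (hypothetical helper, left
    // in a comment so it is not compiled): with `_phantom` in place, a bound
    // check like the following is rejected, matching the perf-cache build
    // where CachedThreadLocal is not RefUnwindSafe either:
    //
    //     fn assert_ref_unwind_safe<T: std::panic::RefUnwindSafe>() {}
    //     assert_ref_unwind_safe::<Cached<()>>(); // error: not RefUnwindSafe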

    #[derive(Debug)]
    pub struct CachedGuard<'a, T: 'a + Send> {
        cache: &'a Cached<T>,
        value: Option<T>,
    }

    impl<T: Send> Cached<T> {
        pub fn new() -> Cached<T> {
            Cached { stack: Mutex::new(vec![]), _phantom: PhantomData }
        }

        pub fn get_or(&self, create: impl FnOnce() -> T) -> CachedGuard<T> {
            let mut stack = self.stack.lock().unwrap();
            match stack.pop() {
                None => CachedGuard { cache: self, value: Some(create()) },
                Some(value) => CachedGuard { cache: self, value: Some(value) },
            }
        }

        fn put(&self, value: T) {
            let mut stack = self.stack.lock().unwrap();
            stack.push(value);
        }
    }

    impl<'a, T: Send> CachedGuard<'a, T> {
        pub fn value(&self) -> &T {
            self.value.as_ref().unwrap()
        }
    }

    impl<'a, T: Send> Drop for CachedGuard<'a, T> {
        fn drop(&mut self) {
            if let Some(value) = self.value.take() {
                self.cache.put(value);
            }
        }
    }
}
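
#[cfg(test)]
mod tests {
    // A minimal sketch, not part of the original module, of how the shared
    // `Cached`/`CachedGuard` API can be exercised under either backend: the
    // first `get_or` builds the value with the given closure, and once the
    // guard is dropped the state remains available to later lookups (in the
    // thread local with perf-cache enabled, or back on the mutex-guarded
    // stack without it).
    use super::Cached;

    #[test]
    fn get_or_creates_then_reuses() {
        let cache: Cached<Vec<u8>> = Cached::new();
        {
            let guard = cache.get_or(|| vec![1, 2, 3]);
            assert_eq!(guard.value(), &vec![1, 2, 3]);
        }
        // The first guard is gone; a second lookup still succeeds without the
        // caller having to know which backend produced the value.
        let guard = cache.get_or(|| vec![1, 2, 3]);
        assert_eq!(guard.value(), &vec![1, 2, 3]);
    }
}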