tiger_lib/
macros.rs

1//! [`MacroCache`] to cache macro expansions, and [`MacroMap`] to track [`Loc`] use across macro expansions.
2
3use std::hash::Hash;
4use std::num::NonZeroU32;
5use std::sync::{LazyLock, RwLock};
6
7use crate::helpers::{BiTigerHashMap, TigerHashMap};
8use crate::token::{Loc, Token};
9use crate::tooltipped::Tooltipped;
10
/// Cache key uniquely identifying one macro expansion: the call site together
/// with the exact arguments and validation context it was expanded under.
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
struct MacroKey {
    /// the loc of the call site
    loc: Loc,
    /// lexically sorted macro arguments, as (parameter name, argument text) pairs;
    /// sorted so that argument order does not affect cache hits
    args: Vec<(&'static str, &'static str)>,
    /// part of the key so expansions validated under different tooltip settings
    /// are cached separately
    tooltipped: Tooltipped,
    /// only for triggers
    negated: bool,
}
21
22impl MacroKey {
23    pub fn new(
24        mut loc: Loc,
25        args: &[(&'static str, Token)],
26        tooltipped: Tooltipped,
27        negated: bool,
28    ) -> Self {
29        loc.link_idx = None;
30        let mut args: Vec<_> = args.iter().map(|(parm, arg)| (*parm, arg.as_str())).collect();
31        args.sort_unstable();
32        Self { loc, args, tooltipped, negated }
33    }
34}
35
#[derive(Debug)]
/// A helper for scripted effects, triggers, and modifiers, all of which can
/// accept macro arguments and which need to be expanded for every macro call.
///
/// The cache helps avoid needless re-expansions for arguments that have already been validated.
pub struct MacroCache<T> {
    /// Maps a normalized [`MacroKey`] to the cached expansion result.
    /// Guarded by an `RwLock` so lookups can proceed in parallel.
    cache: RwLock<TigerHashMap<MacroKey, T>>,
}
44
45impl<T> MacroCache<T> {
46    pub fn perform<F: FnMut(&T)>(
47        &self,
48        key: &Token,
49        args: &[(&'static str, Token)],
50        tooltipped: Tooltipped,
51        negated: bool,
52        mut f: F,
53    ) -> bool {
54        let key = MacroKey::new(key.loc, args, tooltipped, negated);
55        if let Some(x) = self.cache.read().unwrap().get(&key) {
56            f(x);
57            true
58        } else {
59            false
60        }
61    }
62
63    pub fn insert(
64        &self,
65        key: &Token,
66        args: &[(&'static str, Token)],
67        tooltipped: Tooltipped,
68        negated: bool,
69        value: T,
70    ) {
71        let key = MacroKey::new(key.loc, args, tooltipped, negated);
72        self.cache.write().unwrap().insert(key, value);
73    }
74}
75
76impl<T> Default for MacroCache<T> {
77    fn default() -> Self {
78        MacroCache { cache: RwLock::new(TigerHashMap::default()) }
79    }
80}
81
/// Global macro map singleton, lazily initialized on first access.
pub(crate) static MACRO_MAP: LazyLock<MacroMap> = LazyLock::new(MacroMap::default);
84
/// Thread-safe wrapper around [`MacroMapInner`]; all access goes through the `RwLock`.
#[derive(Default)]
pub struct MacroMap(RwLock<MacroMapInner>);
87
/// A bijective map storing the link index and the associated loc denoting the key
/// to the block containing the macros.
pub struct MacroMapInner {
    /// Next index to hand out; starts at 1 and only ever increases.
    counter: NonZeroU32,
    /// Bijection between an index and the loc it was assigned to.
    bi_map: BiTigerHashMap<NonZeroU32, Loc>,
}
94
95impl Default for MacroMapInner {
96    fn default() -> Self {
97        Self { counter: NonZeroU32::new(1).unwrap(), bi_map: BiTigerHashMap::default() }
98    }
99}
100
101impl MacroMap {
102    /// Get the loc associated with the index
103    pub fn get_loc(&self, index: MacroMapIndex) -> Option<Loc> {
104        self.0.read().unwrap().bi_map.get_by_left(&index.0).copied()
105    }
106    /// Get the index associated with the loc
107    pub fn get_index(&self, loc: Loc) -> Option<MacroMapIndex> {
108        self.0.read().unwrap().bi_map.get_by_right(&loc).copied().map(MacroMapIndex)
109    }
110
111    /// Insert a loc that is not expected to be in the map yet, and return its index.
112    pub fn insert_or_get_loc(&self, loc: Loc) -> MacroMapIndex {
113        let mut guard = self.0.write().unwrap();
114        let counter = guard.counter;
115        if guard.bi_map.insert_no_overwrite(counter, loc).is_err() {
116            // The loc was already in the map. (The counter is always unique so that side can't have collided.)
117            return guard.bi_map.get_by_right(&loc).copied().map(MacroMapIndex).unwrap();
118        }
119        guard.counter =
120            guard.counter.checked_add(1).expect("internal error: 2^32 macro map entries");
121        MacroMapIndex(counter)
122    }
123
124    /// Get the index of a loc, inserting it if it was not yet stored
125    pub fn get_or_insert_loc(&self, loc: Loc) -> MacroMapIndex {
126        // First try with just a read lock, which allows for more parallelism than using a write lock.
127        if let Some(index) = self.get_index(loc) {
128            index
129        } else {
130            // We need a write lock.
131            self.insert_or_get_loc(loc)
132        }
133    }
134
135    /// Clear all entries. This will break all existing `MacroMapIndex` values!
136    pub(crate) fn clear(&self) {
137        let mut guard = self.0.write().unwrap();
138        guard.counter = NonZeroU32::new(1).unwrap();
139        guard.bi_map.clear();
140    }
141}
142
/// Type-safety wrapper.
///
/// Holding a `NonZeroU32` means `Option<MacroMapIndex>` is the same size as
/// `MacroMapIndex` itself (niche optimization).
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MacroMapIndex(NonZeroU32);