// tiger_lib — macros.rs
//! [`MacroCache`] to cache macro expansions, and [`MacroMap`] to track [`Loc`] use across macro expansions.

use std::hash::Hash;
use std::num::NonZeroU32;
use std::sync::{LazyLock, RwLock};

use crate::helpers::{BiTigerHashMap, TigerHashMap};
use crate::token::{Loc, Token};
use crate::tooltipped::Tooltipped;

/// Cache key identifying one specific macro expansion: the call site plus the
/// exact argument values and the validation context they were checked under.
/// Two expansions with the same key are guaranteed to validate identically.
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
struct MacroKey {
    /// the loc of the call site
    loc: Loc,
    /// lexically sorted macro arguments, so argument order at the call site
    /// does not affect key equality
    args: Vec<(&'static str, &'static str)>,
    /// whether the expansion was validated in a tooltipped context
    tooltipped: Tooltipped,
    /// only for triggers
    negated: bool,
}

impl MacroKey {
    /// Build a cache key from a call-site `loc`, the (unsorted) macro
    /// arguments, and the validation context (`tooltipped`, `negated`).
    pub fn new(
        mut loc: Loc,
        args: &[(&'static str, Token)],
        tooltipped: Tooltipped,
        negated: bool,
    ) -> Self {
        // The link index is not part of a key's identity; strip it so that
        // the same call site always produces the same key.
        loc.link_idx = None;
        // Sort the arguments lexically so key equality is order-independent.
        let mut sorted_args: Vec<(&'static str, &'static str)> =
            args.iter().map(|(name, value)| (*name, value.as_str())).collect();
        sorted_args.sort_unstable();
        Self { loc, args: sorted_args, tooltipped, negated }
    }
}

#[derive(Debug)]
/// A helper for scripted effects, triggers, and modifiers, all of which can
/// accept macro arguments and which need to be expanded for every macro call.
///
/// The cache helps avoid needless re-expansions for arguments that have already been validated.
pub struct MacroCache<T> {
    // Guarded by an `RwLock` so concurrent validation passes can share it;
    // reads (cache hits) take only the read lock.
    cache: RwLock<TigerHashMap<MacroKey, T>>,
}

impl<T> MacroCache<T> {
    /// Look up a previously cached expansion for `key` with the given
    /// arguments and context. On a hit, run `f` on the cached value and
    /// return `true`; on a miss, return `false` without calling `f`.
    pub fn perform<F: FnMut(&T)>(
        &self,
        key: &Token,
        args: &[(&'static str, Token)],
        tooltipped: Tooltipped,
        negated: bool,
        mut f: F,
    ) -> bool {
        let cache_key = MacroKey::new(key.loc, args, tooltipped, negated);
        // Only the read lock is needed for a lookup.
        match self.cache.read().unwrap().get(&cache_key) {
            Some(cached) => {
                f(cached);
                true
            }
            None => false,
        }
    }

    /// Record `value` as the result of expanding `key` with the given
    /// arguments and context, so later calls can skip re-validation.
    pub fn insert(
        &self,
        key: &Token,
        args: &[(&'static str, Token)],
        tooltipped: Tooltipped,
        negated: bool,
        value: T,
    ) {
        let cache_key = MacroKey::new(key.loc, args, tooltipped, negated);
        let mut guard = self.cache.write().unwrap();
        guard.insert(cache_key, value);
    }
}

impl<T> Default for MacroCache<T> {
    /// Create an empty cache.
    fn default() -> Self {
        Self { cache: RwLock::new(TigerHashMap::default()) }
    }
}

/// Global macro map, lazily initialized on first use.
/// Shared process-wide; all access goes through [`MacroMap`]'s locking methods.
pub(crate) static MACRO_MAP: LazyLock<MacroMap> = LazyLock::new(MacroMap::default);

/// Thread-safe wrapper around [`MacroMapInner`]; see that type for the
/// mapping it maintains.
#[derive(Default)]
pub struct MacroMap(RwLock<MacroMapInner>);

/// A bijective map storing the link index and the associated loc denoting the key
/// to the block containing the macros.
pub struct MacroMapInner {
    // Next index to hand out; starts at 1 and only ever increases,
    // so indices already in `bi_map` can never be reissued.
    counter: NonZeroU32,
    // Bijection between issued indices (left) and their locs (right).
    bi_map: BiTigerHashMap<NonZeroU32, Loc>,
}

impl Default for MacroMapInner {
    fn default() -> Self {
        Self { counter: NonZeroU32::new(1).unwrap(), bi_map: BiTigerHashMap::default() }
    }
}

impl MacroMap {
    /// Get the loc associated with the index
    pub fn get_loc(&self, index: MacroMapIndex) -> Option<Loc> {
        self.0.read().unwrap().bi_map.get_by_left(&index.0).copied()
    }
    /// Get the index associated with the loc
    pub fn get_index(&self, loc: Loc) -> Option<MacroMapIndex> {
        self.0.read().unwrap().bi_map.get_by_right(&loc).copied().map(MacroMapIndex)
    }

    /// Insert a loc that is not expected to be in the map yet, and return its index.
    pub fn insert_or_get_loc(&self, loc: Loc) -> MacroMapIndex {
        let mut guard = self.0.write().unwrap();
        let counter = guard.counter;
        if guard.bi_map.insert_no_overwrite(counter, loc).is_err() {
            // The loc was already in the map. (The counter is always unique so that side can't have collided.)
            return guard.bi_map.get_by_right(&loc).copied().map(MacroMapIndex).unwrap();
        }
        guard.counter =
            guard.counter.checked_add(1).expect("internal error: 2^32 macro map entries");
        MacroMapIndex(counter)
    }

    /// Get the index of a loc, inserting it if it was not yet stored
    pub fn get_or_insert_loc(&self, loc: Loc) -> MacroMapIndex {
        // First try with just a read lock, which allows for more parallelism than using a write lock.
        if let Some(index) = self.get_index(loc) {
            index
        } else {
            // We need a write lock.
            self.insert_or_get_loc(loc)
        }
    }

    /// Clear all entries. This will break all existing `MacroMapIndex` values!
    pub(crate) fn clear(&self) {
        let mut guard = self.0.write().unwrap();
        guard.counter = NonZeroU32::new(1).unwrap();
        guard.bi_map.clear();
    }
}

/// Type-safety wrapper.
/// Wraps the `NonZeroU32` index so it cannot be confused with other integer
/// values; `NonZeroU32` also makes `Option<MacroMapIndex>` pointer-free.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MacroMapIndex(NonZeroU32);