1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
use crate::nodes::SCOPE_SAMPLES;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use synfx_dsp::{AtomicFloat, AtomicFloatPair};
/// Lock-free shared state between a scope node (writer) and its
/// viewer (reader), passed around behind an [`Arc`].
///
/// All fields use atomic storage so no locking is required; accessor
/// methods below use `Ordering::Relaxed` throughout.
#[derive(Debug)]
pub struct ScopeHandle {
    // Three sample buffers of SCOPE_SAMPLES stereo (f32, f32) pairs each,
    // accessed via write()/read()/write_oversampled().
    bufs: [Vec<AtomicFloatPair>; 3],
    // Per-buffer "active" flag, driven by set_active_from_mask().
    active: [AtomicBool; 3],
    // Per-buffer (offset, gain) pair — presumably display parameters for
    // the viewer; see set_offs_gain()/get_offs_gain(). TODO confirm use.
    offs_gain: [AtomicFloatPair; 3],
    // Optional threshold encoded as (is_set flag, value); see
    // set_threshold()/get_threshold().
    threshold: (AtomicBool, AtomicFloat),
}
impl ScopeHandle {
pub fn new_shared() -> Arc<Self> {
let mut v1 = vec![];
v1.resize_with(SCOPE_SAMPLES, || AtomicFloatPair::default());
let mut v2 = vec![];
v2.resize_with(SCOPE_SAMPLES, || AtomicFloatPair::default());
let mut v3 = vec![];
v3.resize_with(SCOPE_SAMPLES, || AtomicFloatPair::default());
Arc::new(Self {
bufs: [v1, v2, v3],
active: [AtomicBool::new(false), AtomicBool::new(false), AtomicBool::new(false)],
offs_gain: [
AtomicFloatPair::default(),
AtomicFloatPair::default(),
AtomicFloatPair::default(),
],
threshold: (AtomicBool::new(false), AtomicFloat::default()),
})
}
pub fn write_oversampled(&self, buf_idx: usize, idx: usize, copies: usize, v: f32) {
let end = (idx + copies).min(SCOPE_SAMPLES);
for i in idx..end {
self.bufs[buf_idx % 3][i % SCOPE_SAMPLES].set((v, v));
}
}
pub fn set_offs_gain(&self, buf_idx: usize, offs: f32, gain: f32) {
self.offs_gain[buf_idx % 3].set((offs, gain));
}
pub fn get_offs_gain(&self, buf_idx: usize) -> (f32, f32) {
self.offs_gain[buf_idx % 3].get()
}
pub fn set_threshold(&self, thresh: Option<f32>) {
if let Some(t) = thresh {
self.threshold.1.set(t);
self.threshold.0.store(true, Ordering::Relaxed);
} else {
self.threshold.0.store(false, Ordering::Relaxed);
}
}
pub fn get_threshold(&self) -> Option<f32> {
if self.threshold.0.load(Ordering::Relaxed) {
Some(self.threshold.1.get())
} else {
None
}
}
pub fn write(&self, buf_idx: usize, idx: usize, v: (f32, f32)) {
self.bufs[buf_idx % 3][idx % SCOPE_SAMPLES].set(v);
}
pub fn read(&self, buf_idx: usize, idx: usize) -> (f32, f32) {
self.bufs[buf_idx % 3][idx % SCOPE_SAMPLES].get()
}
pub fn set_active_from_mask(&self, mask: u64) {
self.active[0].store(mask & 0x1 > 0x0, Ordering::Relaxed);
self.active[1].store(mask & 0x2 > 0x0, Ordering::Relaxed);
self.active[2].store(mask & 0x4 > 0x0, Ordering::Relaxed);
}
pub fn is_active(&self, idx: usize) -> bool {
self.active[idx % 3].load(Ordering::Relaxed)
}
pub fn len(&self) -> usize {
SCOPE_SAMPLES
}
}