-
Notifications
You must be signed in to change notification settings - Fork 6
Expand file tree
/
Copy pathSound.j
More file actions
138 lines (108 loc) · 4.57 KB
/
Sound.j
File metadata and controls
138 lines (108 loc) · 4.57 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
/*
This file is part of FrACT10, a vision test battery.
© 2021 Michael Bach, bach@uni-freiburg.de, <https://michaelbach.de>
Sound.j
2020-05-25 This class manages the FrACT10 feedback sounds
*/
@import <Foundation/Foundation.j>
@implementation Sound: CPObject {
    id audioContext, bufferTrialStart, bufferTrialYes, bufferTrialNo, bufferRunEnd, volumeNode;
    BOOL needsInitAfterUserinteraction;
    CPTimer _timer;
}


/**
 Re-reads the sound selection from Settings and reloads all 4 buffers.
 Forcing needsInitAfterUserinteraction makes initAfterUserinteraction run again,
 which (re-)creates the AudioContext if necessary and triggers the loaders.
 */
- (void) updateSoundFiles { //console.info("Sound>updateSoundFiles");
    needsInitAfterUserinteraction = YES;
    [self initAfterUserinteraction];
}


/**
 Shared asynchronous loader for one feedback sound, replacing 4 near-identical copies.
 Fetches `path` as an ArrayBuffer and decodes it via the Web Audio API.
 `assignBuffer` is a plain JS function that stores the decoded AudioBuffer into the
 caller's ivar — passing a closure lets each caller bind its own buffer, the same
 closure-assigns-ivar pattern the per-sound loaders already used successfully.
 On decode failure the error is logged and the target buffer stays null (→ silence).
 */
- (void) _loadSoundAtPath: (CPString) path assigningVia: (id) assignBuffer {
    const request = new XMLHttpRequest();
    request.open('GET', path, true);
    request.responseType = 'arraybuffer';
    request.onload = function() { //decode asynchronously once the bytes have arrived
        audioContext.decodeAudioData(request.response,
            function(buff) {assignBuffer(buff);},
            function() {console.log("Sound: could not decode " + path);});
    }
    request.send();
}


///for start of trial (BaLM)
- (void) loadSoundTrialStart { //console.info("Sound>loadSoundTrialStart");
    bufferTrialStart = null; //stays null (silent) until decoding succeeds
    [self _loadSoundAtPath: "Resources/sounds/trialStart/" + gSoundsTrialStart[[Settings soundTrialStartIndex]]
             assigningVia: function(buff) {bufferTrialStart = buff;}];
}


///for correct response
- (void) loadSoundTrialYes { //console.info("Sound>loadSoundTrialYes");
    bufferTrialYes = null;
    [self _loadSoundAtPath: "Resources/sounds/trialYes/" + gSoundsTrialYes[[Settings soundTrialYesIndex]]
             assigningVia: function(buff) {bufferTrialYes = buff;}];
}


///for incorrect responses
- (void) loadSoundTrialNo { //console.info("Sound>loadSoundTrialNo");
    bufferTrialNo = null;
    [self _loadSoundAtPath: "Resources/sounds/trialNo/" + gSoundsTrialNo[[Settings soundTrialNoIndex]]
             assigningVia: function(buff) {bufferTrialNo = buff;}];
}


///for end of run
- (void) loadSoundRunEnd { //console.info("Sound>loadSoundRunEnd");
    bufferRunEnd = null;
    [self _loadSoundAtPath: "Resources/sounds/runEnd/" + gSoundsRunEnd[[Settings soundRunEndIndex]]
             assigningVia: function(buff) {bufferRunEnd = buff;}];
}


/**
 Plays one decoded AudioBuffer at the volume taken from Settings.
 A nil/null buffer (not loaded, or decode failed) plays nothing — no error.
 Initialises the AudioContext lazily here, because browsers only allow
 starting audio from within a user interaction.
 */
- (void) playSoundFromBuffer: (id) buffer { //console.info("Sound>playSoundFromBuffer");
    if (needsInitAfterUserinteraction) [self initAfterUserinteraction];
    if (buffer === nil) return;
    const source = audioContext.createBufferSource();
    source.buffer = buffer;
    source.connect(volumeNode);
    volumeNode.gain.value = Math.pow([Settings soundVolume] / 100.0, 2); //a more physiologic transfer function IMHO
    source.start(0);
}


///Plays the sound selected by one of the kSound… constants.
- (void) playNumber: (int) number {
    switch (number) {
        case kSoundTrialStart: [self playSoundFromBuffer: bufferTrialStart]; break;
        case kSoundTrialYes: [self playSoundFromBuffer: bufferTrialYes]; break;
        case kSoundTrialNo: [self playSoundFromBuffer: bufferTrialNo]; break;
        case kSoundRunEnd: [self playSoundFromBuffer: bufferRunEnd]; break;
        default: alert("Sound>playNumber: unexpected sound number " + number); //programmer error: unknown kSound… constant
    }
}


///Like playNumber:, but delayed by 0.1 s (lets the display update first).
- (void) playDelayedNumber: (int) number {
    _timer = [CPTimer scheduledTimerWithTimeInterval: 0.1 target: self selector: @selector(onTimeout:) userInfo: number repeats: NO];
}


- (void) onTimeout: (CPTimer) timer { //console.info("Sound>onTimeout");
    [self playNumber: [timer userInfo]];
}


/**
 Creates the AudioContext (webkit-prefixed where necessary), the gain node
 for volume control, and kicks off loading of all 4 sound buffers.
 Must be triggered from a user interaction — browsers block audio otherwise.
 Idempotent: does nothing unless needsInitAfterUserinteraction is set.
 */
- (void) initAfterUserinteraction { //console.info("Sound>initAfterUserinteraction");
    if (!needsInitAfterUserinteraction) return;
    needsInitAfterUserinteraction = NO;
    if ('webkitAudioContext' in window) {
        audioContext = new window.webkitAudioContext();
    } else {
        audioContext = new window.AudioContext();
    }
    volumeNode = audioContext.createGain();
    volumeNode.gain.value = 0; //actual volume is applied per play in playSoundFromBuffer:
    volumeNode.connect(audioContext.destination);
    [self loadSoundTrialStart]; [self loadSoundTrialYes];
    [self loadSoundTrialNo]; [self loadSoundRunEnd];
}


- (id) init { //console.info("Sound>init");
    self = [super init];
    if (self) {
        //starting the AudioContext is not allowed unless by user interaction
        needsInitAfterUserinteraction = YES;
    }
    return self;
}
@end