// jfrontennd.js
let isPlaying = false;
const audioElement = new Audio();
const pressButton = document.getElementById("playButton");
const playIcon = '\u25B6'; // Unicode right-pointing triangle ("play")
const pauseIcon = 'I I';   // Simple two-bar glyph used as a "pause" icon
pressButton.textContent = playIcon;
// OpenAI
const { Configuration, OpenAIApi } = require("openai");
const configuration = new Configuration({ apiKey: "ENTER YOUR OPENAI API KEY HERE" });
const openai = new OpenAIApi(configuration);
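// Note: Configuration, OpenAIApi and the createChatCompletion call used in onresult below
// belong to the openai npm package's v3.x SDK; newer SDK versions expose a different client surface.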
// Music mapping: keys must exactly match the model's one-word reply
const musicMapping = {
    '1. Грустная': 'sad.mp3',           // Sad
    '2. Веселая': 'happy.mp3',          // Happy
    '3. Епическая': 'epic.mp3',         // Epic
    '3. Епическая музыка': 'epic.mp3',  // Epic ("музыка"/"music" variant)
    '4. Жуткая': 'creepy.mp3',          // Creepy
    '5. Романтическая': 'romantic.mp3', // Romantic
    '6. Таинственная': 'mystery.mp3'    // Mysterious
};
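// Example: if the model replies "2. Веселая", musicMapping['2. Веселая'] resolves to
// 'happy.mp3', which the onresult handler below loads from assets/happy.mp3.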
// Runs speech recognition; the OpenAI request and music playback are handled inside this function.
const runSpeechRecog = () => {
    console.log("Button clicked!"); // Check that the button click event is detected
    const output = document.getElementById('output');
    const action = document.getElementById('action');
    output.innerHTML = "Loading text...";
    const recognization = new webkitSpeechRecognition();
    recognization.continuous = true;
    recognization.interimResults = false;
    recognization.lang = "ru-RU";
    if (isPlaying) {
        // Already playing: stop recognition and pause the music
        console.log("Stopping speech recognition");
        recognization.stop();
        console.log("Pausing music");
        audioElement.pause();
        isPlaying = false;
    } else {
        // Not playing yet: start recognition and resume any previously loaded track
        console.log("Starting speech recognition");
        recognization.start();
        audioElement.play();
        isPlaying = true;
    }
    recognization.onstart = () => {
        action.innerHTML = "Listening...";
    };
    recognization.onaudiostart = () => {
        pressButton.textContent = pauseIcon;
        isPlaying = true;
        localStorage.setItem('microphonePermission', 'granted');
        const maxDuration = 15000; // Auto-stop recognition after 15 seconds
        setTimeout(() => {
            recognization.stop();
        }, maxDuration);
    };
    recognization.onresult = async (e) => {
        const transcript = e.results[0][0].transcript;
        output.innerHTML = transcript;
        const words = transcript.split(" ");
        const num_words = words.length;
        // Send the transcribed text to OpenAI; the prompt (in Russian) asks the model to read
        // the sentence and pick one of the numbered moods (Sad, Happy, Epic, Creepy, Romantic,
        // Mysterious), answering in a single word.
        const response = await openai.createChatCompletion({
            model: 'gpt-3.5-turbo',
            messages: [{
                role: 'user',
                content: `${transcript} Прочитай предложение и определи настроение музыки - 1. Грустная музыка, 2. Веселая музыка, 3. Епическая музыка, 4. Жуткая музыка 5. Романтическая музыка. 6. Таинственная музыка .Используй эти параметры: 1. Ответ одним словом.`
            }],
            temperature: 0,
            max_tokens: 500,
            top_p: 1.0,
            frequency_penalty: 0.0,
            presence_penalty: 0.0,
        });
        console.log(response.data.choices[0].message); // Log the ChatGPT reply
        const responseContent = response.data.choices[0].message.content;
        const musicFile = musicMapping[responseContent]; // Look up the music file name in the mapping
        // Audio visualizer
        const container = document.getElementById('container');
        const canvas = document.getElementById('canvas1');
        canvas.width = window.innerWidth;
        canvas.height = window.innerHeight;
        const ctx = canvas.getContext('2d');
        let audioSource;
        let analyser;
        if (musicFile) {
            // Load the corresponding music file and visualize it when the container is clicked
            audioElement.src = `assets/${musicFile}`;
            container.addEventListener('click', function () {
                // Build the Web Audio graph once and reuse it on later clicks
                if (!audioSource) {
                    const audioContext = new AudioContext();
                    audioSource = audioContext.createMediaElementSource(audioElement);
                    analyser = audioContext.createAnalyser();
                    audioSource.connect(analyser);
                    analyser.connect(audioContext.destination);
                    analyser.fftSize = 64;
                }
                audioElement.play();
                const bufferLength = analyser.frequencyBinCount;
                const dataArray = new Uint8Array(bufferLength);
                const barWidth = canvas.width / bufferLength;
                let barHeight;
                let x;
                // Draw one white bar per frequency bin, refreshed every animation frame
                function animate() {
                    x = 0;
                    ctx.clearRect(0, 0, canvas.width, canvas.height);
                    analyser.getByteFrequencyData(dataArray);
                    for (let i = 0; i < bufferLength; i++) {
                        barHeight = dataArray[i];
                        ctx.fillStyle = 'white';
                        ctx.fillRect(x, canvas.height - barHeight, barWidth, barHeight);
                        x += barWidth;
                    }
                    requestAnimationFrame(animate);
                }
                animate();
            });
        } else {
            // Error message in case the model's reply has no entry in the mapping
            console.log("Music not found", responseContent);
        }
    };
    recognization.onend = () => {
        pressButton.textContent = playIcon;
        output.classList.remove("hide");
        action.innerHTML = "";
        isPlaying = false;
    };
};
module.exports = { runSpeechRecog };
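// Usage sketch (an assumption; the wiring is not shown in this file): after bundling for the
// browser (e.g. with Browserify or webpack, since `require` is unavailable in plain scripts),
// the exported function could be attached to the play button like this:
//
//   const { runSpeechRecog } = require('./jfrontennd.js');
//   document.getElementById('playButton').addEventListener('click', runSpeechRecog);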