-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathmain.jl
99 lines (79 loc) · 2.43 KB
/
main.jl
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
# add LibSndFile PortAudio SampledSignals FileIO Whisper OpenAI
import LibSndFile
using PortAudio
using SampledSignals: s
using FileIO
using PortAudio
using Whisper, LibSndFile, FileIO, SampledSignals
using OpenAI
"""
    save_audio_from_mic(duration; device_prefix="macbook air microphone")

Record `duration` of audio from the first input device whose name starts with
`device_prefix` (case-insensitive match) and return the recorded sample buffer.

Throws an `ErrorException` when no matching input device is found (the
original `filter(...)[1]` raised a confusing `BoundsError` instead).
"""
function save_audio_from_mic(duration; device_prefix="macbook air microphone")
    matches = filter(d -> startswith(lowercase(d.name), device_prefix), PortAudio.devices())
    isempty(matches) && error("no audio input device matching \"$device_prefix\"")
    # 1 input channel, 0 output channels
    stream = PortAudioStream(matches[1], 1, 0)
    try
        return read(stream, duration)
    finally
        close(stream)  # always release the device, even if `read` throws
    end
end
#####
"""
    audio_buffer_to_text(buf; model="base.en", target_rate=16000)

Transcribe the audio in `buf` to text with Whisper.

Whisper expects Float32 samples at a 16 kHz sample rate, so `buf` is first
resampled from `samplerate(buf)` to `target_rate`. The `model` keyword selects
the Whisper model name passed to `transcribe`.
"""
function audio_buffer_to_text(buf; model="base.en", target_rate=16000)
    # Output length scaled by the resampling ratio.
    nsamples = round(Int, length(buf) * (target_rate / samplerate(buf)))
    sout = SampleBuf(Float32, target_rate, nsamples, nchannels(buf))
    write(SampleBufSink(sout), SampleBufSource(buf))  # resample into sout
    return transcribe(model, sout.data)
end
#####
"""
    ask_chatgpt(prompt; model="gpt-3.5-turbo")

Send `prompt` to the OpenAI chat-completion API and return the assistant's
reply text.

The API key is read from the `OPENAI_API_KEY` environment variable; the
original referenced an undefined `secret_key` global (its assignment was
commented out), which made every call fail with `UndefVarError`.
"""
function ask_chatgpt(prompt; model="gpt-3.5-turbo")
    # Read the key from the environment so it never lands in source control.
    secret_key = get(ENV, "OPENAI_API_KEY", "")
    isempty(secret_key) && error("set the OPENAI_API_KEY environment variable")
    r = create_chat(
        secret_key,
        model,
        [
            Dict("role" => "system", "content"=> "You are a julia coder. Only send back code, with a docstring and no explanations. Do not add using statements."),
            Dict("role" => "user", "content"=> prompt)
        ]
    )
    return r.response[:choices][begin][:message][:content]
end
####
# Mutable script-level state: `response` holds the last raw ChatGPT reply,
# `code` holds the code snippet extracted from it (consumed by `run`).
response = ""
code = ""
"""
    ask(duration)

Record `duration` of speech, transcribe it, and either:

  * if the transcript matches `r"\\s*command"`, evaluate the rest of the
    transcript as Julia code and return the result, or
  * otherwise forward the transcript to ChatGPT, store the raw reply in the
    global `response` and the extracted code in the global `code`, and return
    the reply.

NOTE(review): both branches ultimately `eval` text derived from speech or an
LLM reply — inherently unsafe; acceptable only as a personal toy.
"""
function ask(duration)
    audio = save_audio_from_mic(duration)
    text = lowercase(audio_buffer_to_text(audio))
    print(text)
    m = match(r"\s*command", text)
    if m !== nothing  # was `m ≠ nothing`; use identity comparison for Nothing
        @show "command", m
        # Skip the whole match, including any leading whitespace the `\s*`
        # consumed; the old `m.offset + length("command")` left stray
        # characters behind whenever the match began with whitespace.
        text = text[(m.offset + length(m.match)):end]
        @show text
        print("command: ", text)
        return eval(Meta.parse(text))
    end
    @show "not command"
    global response = ask_chatgpt(text)
    # Extract a fenced code block when present. The previous
    # `response[10:end-3]` assumed an exact "```julia\n...```" wrapper and
    # silently corrupted any reply that was not shaped that way.
    fence = match(r"```(?:julia)?[ \t]*\n?(.*?)```"s, response)
    global code = fence === nothing ? response : fence.captures[1]
    print(response)
    return response
end
"""
    run()

Show the most recently extracted `code`, ask for confirmation on stdin, and
evaluate the code only when the answer is exactly "y".

NOTE(review): this definition shadows `Base.run` in the script's namespace —
the commented examples at the bottom of the file call `Base.run` explicitly
for that reason.
"""
function run()
    isempty(code) && return @info "no code captured yet — call `ask` first"
    @show "run code? y/*", code
    answer = strip(readline())  # tolerate stray whitespace around the answer
    answer != "y" && return
    # TODO a better (reliable) way to get code
    return eval(Meta.parse(code))
end
# convenience
# `a(dur)` records for `dur`, times the whole round trip, returns the reply.
a(duration) = @time ask(duration)
# `r()` offers to evaluate the last extracted code snippet.
r = run
#####
# teach the coder ML
# Base.run(`openai tools fine_tunes.prepare_data -f finetune.csv`)
# Base.run(`openai api fine_tunes.create -t finetune.jsonl -m davinci`)
# Base.run(`export OPENAI_API_KEY=a`)
# Base.run(`echo $OPENAI_API_KEY`)