The app sends the audio to the server, but the UI is not adapting and some crashes happen

This commit is contained in:
Mathieu 2022-01-06 11:39:21 +01:00
parent cbcf615e57
commit 7baa44a8aa
8 changed files with 92 additions and 272 deletions

View File

@ -43,6 +43,8 @@
<entry key="../../../../../layout/compose-model-1641393136708.xml" value="0.3861111111111111" /> <entry key="../../../../../layout/compose-model-1641393136708.xml" value="0.3861111111111111" />
<entry key="../../../../../layout/compose-model-1641396021148.xml" value="0.2690033783783784" /> <entry key="../../../../../layout/compose-model-1641396021148.xml" value="0.2690033783783784" />
<entry key="../../../../../layout/compose-model-1641396094843.xml" value="0.33" /> <entry key="../../../../../layout/compose-model-1641396094843.xml" value="0.33" />
<entry key="../../../../../layout/compose-model-1641399721181.xml" value="2.0" />
<entry key="../../../../../layout/compose-model-1641463307897.xml" value="2.0" />
<entry key="app/src/main/res/drawable-v24/ic_launcher_foreground.xml" value="0.5307291666666667" /> <entry key="app/src/main/res/drawable-v24/ic_launcher_foreground.xml" value="0.5307291666666667" />
<entry key="app/src/main/res/drawable/ic_baseline_arrow_back_24.xml" value="0.38981481481481484" /> <entry key="app/src/main/res/drawable/ic_baseline_arrow_back_24.xml" value="0.38981481481481484" />
<entry key="app/src/main/res/drawable/ic_baseline_keyboard_24.xml" value="0.38981481481481484" /> <entry key="app/src/main/res/drawable/ic_baseline_keyboard_24.xml" value="0.38981481481481484" />

View File

@ -59,10 +59,9 @@ dependencies {
implementation 'com.google.accompanist:accompanist-permissions:0.22.0-rc' implementation 'com.google.accompanist:accompanist-permissions:0.22.0-rc'
implementation 'com.google.accompanist:accompanist-insets:0.22.0-rc' implementation 'com.google.accompanist:accompanist-insets:0.22.0-rc'
implementation 'androidx.lifecycle:lifecycle-runtime-ktx:2.4.0' implementation 'androidx.lifecycle:lifecycle-runtime-ktx:2.4.0'
implementation 'com.github.squti:Android-Wave-Recorder:1.6.0'
implementation("com.squareup.okhttp3:okhttp:4.9.3")
implementation 'androidx.activity:activity-compose:1.4.0' implementation 'androidx.activity:activity-compose:1.4.0'
testImplementation 'junit:junit:4.13.2'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0' androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'
androidTestImplementation "androidx.compose.ui:ui-test-junit4:$compose_version"
debugImplementation "androidx.compose.ui:ui-tooling:$compose_version" debugImplementation "androidx.compose.ui:ui-tooling:$compose_version"
} }

View File

@ -14,7 +14,8 @@
android:roundIcon="@mipmap/ic_launcher" android:roundIcon="@mipmap/ic_launcher"
android:supportsRtl="true" android:supportsRtl="true"
android:theme="@style/Theme.JarvisCompose" android:theme="@style/Theme.JarvisCompose"
android:fullBackupContent="true"> android:fullBackupContent="true"
android:usesCleartextTraffic="true">
<activity <activity
android:name=".MainActivity" android:name=".MainActivity"
android:exported="true" android:exported="true"

View File

@ -1,22 +1,59 @@
package ch.mathieubroillet.jarvis.android.audio package ch.mathieubroillet.jarvis.android.audio
import android.os.Handler import android.content.Context
import ch.mathieubroillet.jarvis.android.utils.contactServerWithFileAudioRecording
import com.github.squti.androidwaverecorder.RecorderState
import com.github.squti.androidwaverecorder.WaveRecorder
import java.io.File
import kotlin.concurrent.thread
private var audioTempFileOutput: String = ""
private var recordTask: RecordAudio = RecordAudio() private var waveRecorder: WaveRecorder? = null
private var isRecording: Boolean = false
fun startRecording() { fun startRecording() {
val handler = Handler() if (waveRecorder != null) {
handler.postDelayed({ waveRecorder!!.startRecording()
recordTask = RecordAudio()
recordTask.start()
recordTask.run()
}, 250)
} }
}
fun stopRecording() { fun stopRecording() {
recordTask.stop() if (waveRecorder != null) {
recordTask.interrupt() waveRecorder!!.stopRecording()
}
//byteArrayOutputStream.toByteArray();
thread {
contactServerWithFileAudioRecording(getOutputFile())
getOutputFile().delete()
}
}
/**
 * Returns a [File] handle for the temporary recording output path
 * (set by [registerRecorder]; empty path until the recorder is registered).
 */
fun getOutputFile(): File {
return File(audioTempFileOutput)
}
/**
 * Whether the recorder is currently capturing audio, as last reported by the
 * WaveRecorder state listener (see [registerRecorder]).
 */
fun isRecording(): Boolean {
return isRecording
}
/**
 * Lazily creates the singleton [WaveRecorder], writing to
 * `<filesDir>/temp_recording.wav`, with noise suppression enabled.
 * The recorder's state listener keeps the module-level `isRecording` flag in
 * sync (RECORDING -> true, STOP -> false).
 *
 * NOTE(review): `isRecording` is a plain var mutated from this listener; a
 * Compose UI that snapshots it once will not recompose on changes — likely
 * related to the "UI is not adapting" symptom. TODO confirm with the caller.
 */
fun registerRecorder(context: Context) {
if (waveRecorder == null) {
audioTempFileOutput = context.filesDir.absolutePath + "/temp_recording.wav"
waveRecorder = WaveRecorder(audioTempFileOutput)
// Reduce background noise in the captured audio.
waveRecorder!!.noiseSuppressorActive = true
waveRecorder!!.onStateChangeListener = {
when (it) {
RecorderState.RECORDING -> {
isRecording = true
}
RecorderState.STOP -> {
isRecording = false
}
else -> {}
}
}
}
} }

View File

@ -1,254 +0,0 @@
package ch.mathieubroillet.jarvis.android.audio;
import android.annotation.SuppressLint;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Environment;
import android.util.Log;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import ch.mathieubroillet.jarvis.android.MainActivity;
/**
 * Background thread that captures raw 16-bit mono PCM audio from the device
 * microphone at 44.1 kHz, writes only buffers containing at least one sample
 * above a fixed amplitude threshold to a temporary .raw file, and finally
 * converts that file into a timestamped .wav (44-byte RIFF header + PCM data)
 * under {@code <external storage>/AudioRecorder/}.
 *
 * NOTE(review): the capture loop's condition {@code currentThread().isAlive()}
 * is always true for the thread executing {@code run()}, so the loop never
 * exits on its own; if the thread is ended externally via the deprecated
 * {@code Thread.stop()}, the resulting ThreadDeath lands in the
 * {@code catch (Throwable)} block and the post-loop WAV conversion is skipped.
 * Likely a source of the reported crashes / missing output — TODO confirm
 * against the Kotlin caller.
 */
public class RecordAudio extends Thread {
// Log tag (reuses MainActivity's simple class name).
private static final String TAG = MainActivity.class.getSimpleName();
// Bits per recorded sample (PCM 16-bit).
private static final int RECORDER_BPP = 16;
private static final String AUDIO_RECORDER_FILE_EXT_WAV = ".wav";
private static final String AUDIO_RECORDER_FOLDER = "AudioRecorder";
private static final String AUDIO_RECORDER_TEMP_FILE = "record_temp.raw";
// Sample rate in Hz.
private final int frequency = 44100;
// Minimum AudioRecord buffer size in use; set in run().
private int bufferSize;
// Stream to the temporary raw PCM file.
private FileOutputStream os = null;
/**
 * Captures microphone audio until the thread is killed externally, then
 * wraps the captured raw PCM into a .wav file and deletes the temp file.
 */
@Override
public void run() {
super.run();
Log.w(TAG, "doInBackground");
try {
String filename = getTempFilename();
try {
os = new FileOutputStream(filename);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
// Mono, 16-bit PCM capture configuration.
int channelConfiguration = AudioFormat.CHANNEL_IN_MONO;
int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
bufferSize = AudioRecord.getMinBufferSize(frequency, channelConfiguration, audioEncoding);
@SuppressLint("MissingPermission")
AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, frequency, channelConfiguration, audioEncoding, bufferSize);
short[] buffer = new short[bufferSize];
audioRecord.startRecording();
// NOTE(review): always-true loop condition — see class comment.
while (currentThread().isAlive()) {
int bufferReadResult = audioRecord.read(buffer, 0, bufferSize);
if (AudioRecord.ERROR_INVALID_OPERATION != bufferReadResult) {
//check signal
//put a threshold
short threshold = 15000;
int foundPeak = searchThreshold(buffer, threshold);
if (foundPeak > -1) { //found signal
//record signal
// Only buffers with at least one loud sample are persisted,
// so quiet stretches are dropped entirely.
byte[] byteBuffer = shortToByte(buffer, bufferReadResult);
try {
os.write(byteBuffer);
} catch (IOException e) {
e.printStackTrace();
}
} else {//count the time
//don't save signal
}
//show results
//here, with publichProgress function, if you calculate the total saved samples,
//you can optionally show the recorded file length in seconds: publishProgress(elsapsedTime,0);
}
}
audioRecord.stop();
//close file
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
// Prepend the WAV header and emit the final timestamped file,
// then discard the raw temp data.
copyWaveFile(getTempFilename(), getFilename());
deleteTempFile();
} catch (Throwable t) {
t.printStackTrace();
Log.e("AudioRecord", "Recording Failed");
}
}
/**
 * Converts the first {@code elements} 16-bit samples of {@code input} to
 * bytes, low byte first (little-endian), producing 2 bytes per sample.
 */
byte[] shortToByte(short[] input, int elements) {
int short_index, byte_index;
byte[] buffer = new byte[elements * 2];
short_index = byte_index = 0;
while (short_index != elements) {
buffer[byte_index] = (byte) (input[short_index] & 0x00FF);
buffer[byte_index + 1] = (byte) ((input[short_index] & 0xFF00) >> 8);
++short_index;
byte_index += 2;
}
return buffer;
}
/**
 * Returns the index of the first sample whose amplitude reaches {@code thr}
 * in either direction (>= thr or <= -thr), or -1 if none does.
 */
int searchThreshold(short[] arr, short thr) {
int peakIndex;
int arrLen = arr.length;
for (peakIndex = 0; peakIndex < arrLen; peakIndex++) {
if ((arr[peakIndex] >= thr) || (arr[peakIndex] <= -thr)) {
return peakIndex;
}
}
return -1;
}
/**
 * Builds a unique output path
 * {@code <external storage>/AudioRecorder/<currentTimeMillis>.wav},
 * creating the folder if needed.
 */
private String getFilename() {
String filepath = Environment.getExternalStorageDirectory().getPath();
File file = new File(filepath, AUDIO_RECORDER_FOLDER);
if (!file.exists()) {
file.mkdirs();
}
return (file.getAbsolutePath() + "/" + System.currentTimeMillis() + AUDIO_RECORDER_FILE_EXT_WAV);
}
/**
 * Returns the temp raw-PCM path inside the AudioRecorder folder.
 *
 * NOTE(review): the {@code tempFile} deleted below lives at the storage
 * root ({@code filepath}), while the returned path is inside the
 * AudioRecorder folder — so a stale temp file at the returned location is
 * never actually deleted here. TODO confirm intended path.
 */
private String getTempFilename() {
String filepath = Environment.getExternalStorageDirectory().getPath();
File file = new File(filepath, AUDIO_RECORDER_FOLDER);
if (!file.exists()) {
file.mkdirs();
}
File tempFile = new File(filepath, AUDIO_RECORDER_TEMP_FILE);
if (tempFile.exists())
tempFile.delete();
return (file.getAbsolutePath() + "/" + AUDIO_RECORDER_TEMP_FILE);
}
/** Removes the temporary raw PCM file after conversion. */
private void deleteTempFile() {
File file = new File(getTempFilename());
file.delete();
}
/**
 * Copies raw PCM from {@code inFilename} into {@code outFilename}, prefixed
 * with a 44-byte canonical WAV header. {@code totalDataLen} is the audio
 * byte count plus 36 (header size minus the 8-byte RIFF preamble).
 */
private void copyWaveFile(String inFilename, String outFilename) {
FileInputStream in;
FileOutputStream out;
long totalAudioLen;
long totalDataLen;
long longSampleRate = frequency;
int channels = 1;
long byteRate = (long) RECORDER_BPP * frequency * channels / 8;
byte[] data = new byte[bufferSize];
try {
in = new FileInputStream(inFilename);
out = new FileOutputStream(outFilename);
totalAudioLen = in.getChannel().size();
totalDataLen = totalAudioLen + 36;
writeWaveFileHeader(out, totalAudioLen, totalDataLen, longSampleRate, channels, byteRate);
while (in.read(data) != -1) {
out.write(data);
}
in.close();
out.close();
} catch (IOException e) {
e.printStackTrace();
}
}
/**
 * Writes the canonical 44-byte RIFF/WAVE header for uncompressed PCM
 * (format tag 1). All multi-byte numeric fields are little-endian.
 */
private void writeWaveFileHeader(
FileOutputStream out, long totalAudioLen,
long totalDataLen, long longSampleRate, int channels,
long byteRate) throws IOException {
byte[] header = new byte[44];
header[0] = 'R'; // RIFF/WAVE header
header[1] = 'I';
header[2] = 'F';
header[3] = 'F';
header[4] = (byte) (totalDataLen & 0xff);
header[5] = (byte) ((totalDataLen >> 8) & 0xff);
header[6] = (byte) ((totalDataLen >> 16) & 0xff);
header[7] = (byte) ((totalDataLen >> 24) & 0xff);
header[8] = 'W';
header[9] = 'A';
header[10] = 'V';
header[11] = 'E';
header[12] = 'f'; // 'fmt ' chunk
header[13] = 'm';
header[14] = 't';
header[15] = ' ';
header[16] = 16; // 4 bytes: size of 'fmt ' chunk
header[17] = 0;
header[18] = 0;
header[19] = 0;
header[20] = 1; // format = 1
header[21] = 0;
header[22] = (byte) channels;
header[23] = 0;
header[24] = (byte) (longSampleRate & 0xff);
header[25] = (byte) ((longSampleRate >> 8) & 0xff);
header[26] = (byte) ((longSampleRate >> 16) & 0xff);
header[27] = (byte) ((longSampleRate >> 24) & 0xff);
header[28] = (byte) (byteRate & 0xff);
header[29] = (byte) ((byteRate >> 8) & 0xff);
header[30] = (byte) ((byteRate >> 16) & 0xff);
header[31] = (byte) ((byteRate >> 24) & 0xff);
header[32] = (byte) (channels * 16 / 8); // block align
header[33] = 0;
header[34] = RECORDER_BPP; // bits per sample
header[35] = 0;
header[36] = 'd';
header[37] = 'a';
header[38] = 't';
header[39] = 'a';
header[40] = (byte) (totalAudioLen & 0xff);
header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
out.write(header, 0, 44);
}
}

View File

@ -5,6 +5,7 @@ import androidx.compose.material.*
import androidx.compose.runtime.* import androidx.compose.runtime.*
import androidx.compose.ui.Alignment import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier import androidx.compose.ui.Modifier
import androidx.compose.ui.platform.LocalContext
import androidx.compose.ui.res.painterResource import androidx.compose.ui.res.painterResource
import androidx.compose.ui.res.stringResource import androidx.compose.ui.res.stringResource
import androidx.compose.ui.tooling.preview.Preview import androidx.compose.ui.tooling.preview.Preview
@ -14,6 +15,7 @@ import androidx.compose.ui.unit.sp
import androidx.navigation.NavController import androidx.navigation.NavController
import androidx.navigation.compose.rememberNavController import androidx.navigation.compose.rememberNavController
import ch.mathieubroillet.jarvis.android.R import ch.mathieubroillet.jarvis.android.R
import ch.mathieubroillet.jarvis.android.audio.*
import ch.mathieubroillet.jarvis.android.chat.ConversationUiState import ch.mathieubroillet.jarvis.android.chat.ConversationUiState
import ch.mathieubroillet.jarvis.android.chat.Message import ch.mathieubroillet.jarvis.android.chat.Message
import ch.mathieubroillet.jarvis.android.chat.Messages import ch.mathieubroillet.jarvis.android.chat.Messages
@ -22,6 +24,7 @@ import ch.mathieubroillet.jarvis.android.ui.theme.JarvisComposeTheme
import ch.mathieubroillet.jarvis.android.ui.theme.productSansFont import ch.mathieubroillet.jarvis.android.ui.theme.productSansFont
import ch.mathieubroillet.jarvis.android.utils.DefaultBox import ch.mathieubroillet.jarvis.android.utils.DefaultBox
import ch.mathieubroillet.jarvis.android.utils.IconAlertDialogTextField import ch.mathieubroillet.jarvis.android.utils.IconAlertDialogTextField
import com.github.squti.androidwaverecorder.WaveRecorder
//Draws the base of the main activity, that includes the 3-dots menu and the "hi text". //Draws the base of the main activity, that includes the 3-dots menu and the "hi text".
@ -96,10 +99,15 @@ fun StartRecordingFAB() {
verticalAlignment = Alignment.Bottom, verticalAlignment = Alignment.Bottom,
horizontalArrangement = Arrangement.Center horizontalArrangement = Arrangement.Center
) { ) {
var isRecording by remember { mutableStateOf(isRecording()) }
//Microphone floating button to manually start/stop listening //Microphone floating button to manually start/stop listening
FloatingActionButton(onClick = { /*TODO*/ }, modifier = Modifier.size(70.dp)) { FloatingActionButton(onClick = {
if (isRecording) stopRecording() else startRecording()
}, modifier = Modifier.size(70.dp)) {
Icon( Icon(
painter = painterResource(id = R.drawable.ic_baseline_mic_24), painter = painterResource(id = if (isRecording) R.drawable.ic_baseline_shield_24 else R.drawable.ic_baseline_mic_24),
contentDescription = "microphone" contentDescription = "microphone"
) )
} }
@ -109,6 +117,9 @@ fun StartRecordingFAB() {
@Composable @Composable
fun DisplayMainPage(navController: NavController, uiState: ConversationUiState) { fun DisplayMainPage(navController: NavController, uiState: ConversationUiState) {
registerRecorder(LocalContext.current)
//We create a main box with basic padding to avoid having stuff too close to every side. //We create a main box with basic padding to avoid having stuff too close to every side.
DefaultBox { DefaultBox {

View File

@ -0,0 +1,23 @@
package ch.mathieubroillet.jarvis.android.utils
import okhttp3.MediaType.Companion.toMediaType
import okhttp3.OkHttpClient
import okhttp3.Request
import okhttp3.RequestBody.Companion.asRequestBody
import java.io.File
import java.io.IOException
/**
 * Synchronously uploads a recorded audio file to the processing server and
 * prints the server's reply. Must be called off the main thread (blocking
 * network I/O).
 *
 * @param file the recording to upload (a WAV file produced by the recorder)
 * @param serverUrl endpoint to post to; defaults to the previous hardcoded
 *        development server, so existing callers are unaffected
 * @throws IOException if the request fails or returns a non-2xx status
 */
fun contactServerWithFileAudioRecording(
    file: File,
    serverUrl: String = "http://192.168.1.130:5000/process_audio_request_file"
) {
    val client = OkHttpClient()
    // The recorder produces WAV, not MP3: "audio/mpeg" was the wrong content
    // type, and a charset parameter is meaningless for a binary audio payload.
    val request = Request.Builder()
        .url(serverUrl)
        .post(file.asRequestBody("audio/wav".toMediaType()))
        .build()

    // use {} closes the response (and its body) even if reading throws.
    client.newCall(request).execute().use { response ->
        if (!response.isSuccessful) throw IOException("Unexpected code $response")
        // body is never null for a response returned by execute(); avoid !!.
        println(response.body?.string())
    }
}

View File

@ -4,6 +4,7 @@ dependencyResolutionManagement {
google() google()
mavenCentral() mavenCentral()
jcenter() // Warning: this repository is going to shut down soon jcenter() // Warning: this repository is going to shut down soon
maven { url "https://jitpack.io" }
} }
} }
rootProject.name = "Jarvis Compose" rootProject.name = "Jarvis Compose"