Use Assist pipeline STT/TTS on Wear OS (#3611)

* Use Assist pipeline STT/TTS on Wear OS

 - Update Assist pipeline support on Wear OS to use the pipelines' STT/TTS capabilities, if available and if the app has the required microphone permission (RECORD_AUDIO)
 - Move UrlHandler functions (app) to UrlUtil (common)

* Create a base AssistViewModel for sharing code

 - Creates AssistViewModelBase in common to share the Assist pipeline logic used by both the main app and the watch app

* Keep screen on during voice input to avoid interruption
Joris Pelgröm 2023-06-30 04:17:26 +02:00 committed by GitHub
parent 72722bd605
commit edf6ba5b9f
15 changed files with 501 additions and 302 deletions
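
At its core, the refactor extracts the pipeline plumbing into an abstract AssistViewModelBase in the common module, while each platform keeps its own Compose state for the input mode. A minimal sketch of the resulting contract, condensed from the diffs below (not a verbatim excerpt):

// Sketch: the shared contract, condensed from the AssistViewModelBase diff below.
abstract class AssistViewModelBase(
    private val serverManager: ServerManager,
    private val audioRecorder: AudioRecorder,
    private val audioUrlPlayer: AudioUrlPlayer,
    application: Application
) : AndroidViewModel(application) {
    enum class AssistInputMode { TEXT, TEXT_ONLY, VOICE_INACTIVE, VOICE_ACTIVE, BLOCKED }

    // Subclasses expose their own (Compose) input mode state:
    abstract fun getInput(): AssistInputMode?
    abstract fun setInput(inputMode: AssistInputMode)

    // Shared code runs a pipeline and reports messages via a callback:
    // onMessage(message, isInput /* true/false/null */, isError)
    protected fun runAssistPipelineInternal(
        text: String?, // null = voice (STT) run
        pipeline: AssistPipelineResponse?, // null = server default
        onMessage: (String, Boolean?, Boolean) -> Unit
    ) { /* full implementation in the new file below */ }
}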

View File: AssistViewModel.kt (app)

@ -1,29 +1,20 @@
package io.homeassistant.companion.android.assist
import android.app.Application
import android.content.pm.PackageManager
import android.util.Log
import androidx.compose.runtime.getValue
import androidx.compose.runtime.mutableStateListOf
import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.setValue
import androidx.lifecycle.AndroidViewModel
import androidx.lifecycle.viewModelScope
import dagger.hilt.android.lifecycle.HiltViewModel
import io.homeassistant.companion.android.assist.ui.AssistMessage
import io.homeassistant.companion.android.assist.ui.AssistUiPipeline
import io.homeassistant.companion.android.common.assist.AssistViewModelBase
import io.homeassistant.companion.android.common.data.servers.ServerManager
import io.homeassistant.companion.android.common.data.websocket.impl.entities.AssistPipelineError
import io.homeassistant.companion.android.common.data.websocket.impl.entities.AssistPipelineEventType
import io.homeassistant.companion.android.common.data.websocket.impl.entities.AssistPipelineIntentEnd
import io.homeassistant.companion.android.common.data.websocket.impl.entities.AssistPipelineResponse
import io.homeassistant.companion.android.common.data.websocket.impl.entities.AssistPipelineRunStart
import io.homeassistant.companion.android.common.data.websocket.impl.entities.AssistPipelineSttEnd
import io.homeassistant.companion.android.common.data.websocket.impl.entities.AssistPipelineTtsEnd
import io.homeassistant.companion.android.common.util.AudioRecorder
import io.homeassistant.companion.android.common.util.AudioUrlPlayer
import io.homeassistant.companion.android.util.UrlHandler
import kotlinx.coroutines.Job
import kotlinx.coroutines.launch
import javax.inject.Inject
import io.homeassistant.companion.android.common.R as commonR
@ -32,40 +23,22 @@ import io.homeassistant.companion.android.common.R as commonR
class AssistViewModel @Inject constructor(
val serverManager: ServerManager,
private val audioRecorder: AudioRecorder,
private val audioUrlPlayer: AudioUrlPlayer,
audioUrlPlayer: AudioUrlPlayer,
application: Application
) : AndroidViewModel(application) {
) : AssistViewModelBase(serverManager, audioRecorder, audioUrlPlayer, application) {
companion object {
const val TAG = "AssistViewModel"
}
enum class AssistInputMode {
TEXT,
TEXT_ONLY,
VOICE_INACTIVE,
VOICE_ACTIVE,
BLOCKED
}
private val app = application
private var filteredServerId: Int? = null
private var selectedServerId = ServerManager.SERVER_ID_ACTIVE
private val allPipelines = mutableMapOf<Int, List<AssistPipelineResponse>>()
private var selectedPipeline: AssistPipelineResponse? = null
private var recorderJob: Job? = null
private var recorderQueue: MutableList<ByteArray>? = null
private var recorderAutoStart = true
private var hasMicrophone = true
private var hasPermission = false
private var requestPermission: (() -> Unit)? = null
private var requestSilently = true
private var binaryHandlerId: Int? = null
private var conversationId: String? = null
private val startMessage = AssistMessage(application.getString(commonR.string.assist_how_can_i_assist), isInput = false)
private val _conversation = mutableStateListOf(startMessage)
val conversation: List<AssistMessage> = _conversation
@ -79,10 +52,6 @@ class AssistViewModel @Inject constructor(
var inputMode by mutableStateOf<AssistInputMode?>(null)
private set
init {
hasMicrophone = app.packageManager.hasSystemFeature(PackageManager.FEATURE_MICROPHONE)
}
fun onCreate(serverId: Int?, pipelineId: String?, startListening: Boolean?) {
viewModelScope.launch {
serverId?.let {
@ -125,6 +94,12 @@ class AssistViewModel @Inject constructor(
}
}
override fun getInput(): AssistInputMode? = inputMode
override fun setInput(inputMode: AssistInputMode) {
this.inputMode = inputMode
}
private suspend fun checkSupport(): Boolean? {
if (!serverManager.isRegistered()) return false
if (!serverManager.integrationRepository(selectedServerId).isHomeAssistantVersionAtLeast(2023, 5, 0)) return false
@ -175,8 +150,7 @@ class AssistViewModel @Inject constructor(
_conversation.clear()
_conversation.add(startMessage)
binaryHandlerId = null
conversationId = null
clearPipelineData()
if (hasMicrophone && it.sttEngine != null) {
if (recorderAutoStart && (hasPermission || requestSilently)) {
inputMode = AssistInputMode.VOICE_INACTIVE
@ -241,13 +215,7 @@ class AssistViewModel @Inject constructor(
}
if (recording) {
recorderQueue = mutableListOf()
recorderJob = viewModelScope.launch {
audioRecorder.audioBytes.collect {
recorderQueue?.add(it) ?: sendVoiceData(it)
}
}
setupRecorderQueue()
inputMode = AssistInputMode.VOICE_ACTIVE
runAssistPipeline(null)
} else {
@ -264,100 +232,20 @@ class AssistViewModel @Inject constructor(
if (!isVoice) _conversation.add(haMessage)
var message = if (isVoice) userMessage else haMessage
var job: Job? = null
job = viewModelScope.launch {
val flow = if (isVoice) {
serverManager.webSocketRepository(selectedServerId).runAssistPipelineForVoice(
sampleRate = AudioRecorder.SAMPLE_RATE,
outputTts = selectedPipeline?.ttsEngine?.isNotBlank() == true,
pipelineId = selectedPipeline?.id,
conversationId = conversationId
runAssistPipelineInternal(
text,
selectedPipeline
) { newMessage, isInput, isError ->
_conversation.indexOf(message).takeIf { pos -> pos >= 0 }?.let { index ->
_conversation[index] = message.copy(
message = newMessage,
isInput = isInput ?: message.isInput,
isError = isError
)
} else {
serverManager.webSocketRepository(selectedServerId).runAssistPipelineForText(
text = text!!,
pipelineId = selectedPipeline?.id,
conversationId = conversationId
)
}
flow?.collect {
when (it.type) {
AssistPipelineEventType.RUN_START -> {
if (!isVoice) return@collect
val data = (it.data as? AssistPipelineRunStart)?.runnerData
binaryHandlerId = data?.get("stt_binary_handler_id") as? Int
}
AssistPipelineEventType.STT_START -> {
viewModelScope.launch {
recorderQueue?.forEach { item ->
sendVoiceData(item)
}
recorderQueue = null
}
}
AssistPipelineEventType.STT_END -> {
stopRecording()
(it.data as? AssistPipelineSttEnd)?.sttOutput?.let { response ->
_conversation.indexOf(message).takeIf { pos -> pos >= 0 }?.let { index ->
_conversation[index] = message.copy(message = response["text"] as String)
}
}
_conversation.add(haMessage)
message = haMessage
}
AssistPipelineEventType.INTENT_END -> {
val data = (it.data as? AssistPipelineIntentEnd)?.intentOutput ?: return@collect
conversationId = data.conversationId
data.response.speech.plain["speech"]?.let { response ->
_conversation.indexOf(message).takeIf { pos -> pos >= 0 }?.let { index ->
_conversation[index] = message.copy(message = response)
}
}
}
AssistPipelineEventType.TTS_END -> {
if (!isVoice) return@collect
val audioPath = (it.data as? AssistPipelineTtsEnd)?.ttsOutput?.url
if (!audioPath.isNullOrBlank()) {
playAudio(audioPath)
}
}
AssistPipelineEventType.RUN_END -> {
stopRecording()
job?.cancel()
}
AssistPipelineEventType.ERROR -> {
val errorMessage = (it.data as? AssistPipelineError)?.message ?: return@collect
_conversation.indexOf(message).takeIf { pos -> pos >= 0 }?.let { index ->
_conversation[index] = message.copy(message = errorMessage, isError = true)
}
stopRecording()
job?.cancel()
}
else -> { /* Do nothing */ }
if (isInput == true) {
_conversation.add(haMessage)
message = haMessage
}
} ?: run {
_conversation.indexOf(message).takeIf { pos -> pos >= 0 }?.let { index ->
_conversation[index] = message.copy(message = app.getString(commonR.string.assist_error), isError = true)
}
stopRecording()
}
}
}
private fun sendVoiceData(data: ByteArray) {
binaryHandlerId?.let {
viewModelScope.launch {
// Launch to prevent blocking the output flow if the network is slow
serverManager.webSocketRepository(selectedServerId).sendVoiceData(it, data)
}
}
}
private fun playAudio(path: String) {
UrlHandler.handle(serverManager.getServer(selectedServerId)?.connection?.getUrl(), path)?.let {
viewModelScope.launch {
audioUrlPlayer.playAudio(it.toString())
}
}
}
@ -385,27 +273,4 @@ class AssistViewModel @Inject constructor(
stopRecording()
stopPlayback()
}
private fun stopRecording() {
audioRecorder.stopRecording()
recorderJob?.cancel()
recorderJob = null
if (binaryHandlerId != null) {
viewModelScope.launch {
recorderQueue?.forEach {
sendVoiceData(it)
}
recorderQueue = null
sendVoiceData(byteArrayOf()) // Empty message to indicate end of recording
binaryHandlerId = null
}
} else {
recorderQueue = null
}
if (inputMode == AssistInputMode.VOICE_ACTIVE) {
inputMode = AssistInputMode.VOICE_INACTIVE
}
}
private fun stopPlayback() = audioUrlPlayer.stop()
}

View File: AssistSheetView.kt (app)

@ -44,7 +44,6 @@ import androidx.compose.material.MaterialTheme
import androidx.compose.material.ModalBottomSheetLayout
import androidx.compose.material.ModalBottomSheetValue
import androidx.compose.material.OutlinedButton
import androidx.compose.material.Surface
import androidx.compose.material.Text
import androidx.compose.material.TextField
import androidx.compose.material.icons.Icons
@ -61,6 +60,7 @@ import androidx.compose.runtime.saveable.rememberSaveable
import androidx.compose.runtime.setValue
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.draw.clip
import androidx.compose.ui.draw.scale
import androidx.compose.ui.focus.FocusRequester
import androidx.compose.ui.focus.focusRequester
@ -78,7 +78,7 @@ import androidx.compose.ui.unit.sp
import com.mikepenz.iconics.compose.Image
import com.mikepenz.iconics.typeface.library.community.material.CommunityMaterial
import io.homeassistant.companion.android.R
import io.homeassistant.companion.android.assist.AssistViewModel
import io.homeassistant.companion.android.common.assist.AssistViewModelBase
import kotlinx.coroutines.launch
import io.homeassistant.companion.android.common.R as commonR
@ -87,7 +87,7 @@ import io.homeassistant.companion.android.common.R as commonR
fun AssistSheetView(
conversation: List<AssistMessage>,
pipelines: List<AssistUiPipeline>,
inputMode: AssistViewModel.AssistInputMode?,
inputMode: AssistViewModelBase.AssistInputMode?,
currentPipeline: AssistUiPipeline?,
fromFrontend: Boolean,
onSelectPipeline: (Int, String) -> Unit,
@ -227,7 +227,7 @@ fun AssistSheetHeader(
@Composable
fun AssistSheetControls(
inputMode: AssistViewModel.AssistInputMode?,
inputMode: AssistViewModelBase.AssistInputMode?,
onChangeInput: () -> Unit,
onTextInput: (String) -> Unit,
onMicrophoneInput: () -> Unit
@ -237,18 +237,18 @@ fun AssistSheetControls(
return
}
if (inputMode == AssistViewModel.AssistInputMode.BLOCKED) { // No info and not recoverable, no space
if (inputMode == AssistViewModelBase.AssistInputMode.BLOCKED) { // No info and not recoverable, no space
return
}
val focusRequester = remember { FocusRequester() }
LaunchedEffect(inputMode) {
if (inputMode == AssistViewModel.AssistInputMode.TEXT || inputMode == AssistViewModel.AssistInputMode.TEXT_ONLY) {
if (inputMode == AssistViewModelBase.AssistInputMode.TEXT || inputMode == AssistViewModelBase.AssistInputMode.TEXT_ONLY) {
focusRequester.requestFocus()
}
}
if (inputMode == AssistViewModel.AssistInputMode.TEXT || inputMode == AssistViewModel.AssistInputMode.TEXT_ONLY) {
if (inputMode == AssistViewModelBase.AssistInputMode.TEXT || inputMode == AssistViewModelBase.AssistInputMode.TEXT_ONLY) {
var text by rememberSaveable(stateSaver = TextFieldValue.Saver) {
mutableStateOf(TextFieldValue())
}
@ -273,13 +273,13 @@ fun AssistSheetControls(
if (text.text.isNotBlank()) {
onTextInput(text.text)
text = TextFieldValue("")
} else if (inputMode != AssistViewModel.AssistInputMode.TEXT_ONLY) {
} else if (inputMode != AssistViewModelBase.AssistInputMode.TEXT_ONLY) {
onChangeInput()
}
},
enabled = (inputMode != AssistViewModel.AssistInputMode.TEXT_ONLY || text.text.isNotBlank())
enabled = (inputMode != AssistViewModelBase.AssistInputMode.TEXT_ONLY || text.text.isNotBlank())
) {
val inputIsSend = text.text.isNotBlank() || inputMode == AssistViewModel.AssistInputMode.TEXT_ONLY
val inputIsSend = text.text.isNotBlank() || inputMode == AssistViewModelBase.AssistInputMode.TEXT_ONLY
Image(
asset = if (inputIsSend) CommunityMaterial.Icon3.cmd_send else CommunityMaterial.Icon3.cmd_microphone,
contentDescription = stringResource(
@ -296,7 +296,7 @@ fun AssistSheetControls(
modifier = Modifier.size(64.dp),
contentAlignment = Alignment.Center
) {
val inputIsActive = inputMode == AssistViewModel.AssistInputMode.VOICE_ACTIVE
val inputIsActive = inputMode == AssistViewModelBase.AssistInputMode.VOICE_ACTIVE
if (inputIsActive) {
val transition = rememberInfiniteTransition()
val scale by transition.animateFloat(
@ -307,11 +307,12 @@ fun AssistSheetControls(
repeatMode = RepeatMode.Reverse
)
)
Surface(
color = colorResource(commonR.color.colorSpeechText),
modifier = Modifier.size(48.dp).scale(scale),
shape = CircleShape,
content = {}
Box(
modifier = Modifier
.size(48.dp)
.scale(scale)
.background(color = colorResource(commonR.color.colorSpeechText), shape = CircleShape)
.clip(CircleShape)
)
}
OutlinedButton(

View File: NfcSetupActivity.kt (app)

@ -15,7 +15,7 @@ import com.google.accompanist.themeadapter.material.MdcTheme
import dagger.hilt.android.AndroidEntryPoint
import io.homeassistant.companion.android.BaseActivity
import io.homeassistant.companion.android.nfc.views.LoadNfcView
import io.homeassistant.companion.android.util.UrlHandler
import io.homeassistant.companion.android.util.UrlUtil
import kotlinx.coroutines.launch
import io.homeassistant.companion.android.common.R as commonR
@ -106,7 +106,7 @@ class NfcSetupActivity : BaseActivity() {
// Create new nfc tag
if (!viewModel.nfcEventShouldWrite) {
val url = NFCUtil.extractUrlFromNFCIntent(intent)
val nfcTagId = UrlHandler.splitNfcTagId(url)
val nfcTagId = UrlUtil.splitNfcTagId(url)
if (nfcTagId == null) {
viewModel.onNfcReadEmpty()
} else {

View File: TagReaderActivity.kt (app)

@ -13,7 +13,7 @@ import dagger.hilt.android.AndroidEntryPoint
import io.homeassistant.companion.android.BaseActivity
import io.homeassistant.companion.android.common.data.servers.ServerManager
import io.homeassistant.companion.android.nfc.views.TagReaderView
import io.homeassistant.companion.android.util.UrlHandler
import io.homeassistant.companion.android.util.UrlUtil
import kotlinx.coroutines.async
import kotlinx.coroutines.awaitAll
import kotlinx.coroutines.launch
@ -63,7 +63,7 @@ class TagReaderActivity : BaseActivity() {
private suspend fun handleTag(url: Uri?, isNfcTag: Boolean) {
// https://www.home-assistant.io/tag/5f0ba733-172f-430d-a7f8-e4ad940c88d7
val nfcTagId = UrlHandler.splitNfcTagId(url)
val nfcTagId = UrlUtil.splitNfcTagId(url)
Log.d(TAG, "Tag ID: $nfcTagId")
if (nfcTagId != null && serverManager.isRegistered()) {
serverManager.defaultServers.map {

View File: MessagingManager.kt (app)

@ -72,7 +72,7 @@ import io.homeassistant.companion.android.sensors.LocationSensorManager
import io.homeassistant.companion.android.sensors.NotificationSensorManager
import io.homeassistant.companion.android.sensors.SensorReceiver
import io.homeassistant.companion.android.settings.SettingsActivity
import io.homeassistant.companion.android.util.UrlHandler
import io.homeassistant.companion.android.util.UrlUtil
import io.homeassistant.companion.android.websocket.WebsocketManager
import io.homeassistant.companion.android.webview.WebViewActivity
import kotlinx.coroutines.CoroutineScope
@ -1169,8 +1169,8 @@ class MessagingManager @Inject constructor(
) {
data[ICON_URL]?.let {
val serverId = data[THIS_SERVER_ID]!!.toInt()
val url = UrlHandler.handle(serverManager.getServer(serverId)?.connection?.getUrl(), it)
val bitmap = getImageBitmap(serverId, url, !UrlHandler.isAbsoluteUrl(it))
val url = UrlUtil.handle(serverManager.getServer(serverId)?.connection?.getUrl(), it)
val bitmap = getImageBitmap(serverId, url, !UrlUtil.isAbsoluteUrl(it))
if (bitmap != null) {
builder.setLargeIcon(bitmap)
}
@ -1183,8 +1183,8 @@ class MessagingManager @Inject constructor(
) {
data[IMAGE_URL]?.let {
val serverId = data[THIS_SERVER_ID]!!.toInt()
val url = UrlHandler.handle(serverManager.getServer(serverId)?.connection?.getUrl(), it)
val bitmap = getImageBitmap(serverId, url, !UrlHandler.isAbsoluteUrl(it))
val url = UrlUtil.handle(serverManager.getServer(serverId)?.connection?.getUrl(), it)
val bitmap = getImageBitmap(serverId, url, !UrlUtil.isAbsoluteUrl(it))
if (bitmap != null) {
builder
.setLargeIcon(bitmap)
@ -1229,8 +1229,8 @@ class MessagingManager @Inject constructor(
) {
data[VIDEO_URL]?.let {
val serverId = data[THIS_SERVER_ID]!!.toInt()
val url = UrlHandler.handle(serverManager.getServer(serverId)?.connection?.getUrl(), it)
getVideoFrames(serverId, url, !UrlHandler.isAbsoluteUrl(it))?.let { frames ->
val url = UrlUtil.handle(serverManager.getServer(serverId)?.connection?.getUrl(), it)
getVideoFrames(serverId, url, !UrlUtil.isAbsoluteUrl(it))?.let { frames ->
Log.d(TAG, "Found ${frames.size} frames for video notification")
RemoteViews(context.packageName, R.layout.view_image_flipper).let { remoteViewFlipper ->
if (frames.isNotEmpty()) {
@ -1438,7 +1438,7 @@ class MessagingManager @Inject constructor(
): PendingIntent {
val serverId = data[THIS_SERVER_ID]!!.toInt()
val needsPackage = uri.startsWith(APP_PREFIX) || uri.startsWith(INTENT_PREFIX)
val otherApp = needsPackage || UrlHandler.isAbsoluteUrl(uri) || uri.startsWith(DEEP_LINK_PREFIX)
val otherApp = needsPackage || UrlUtil.isAbsoluteUrl(uri) || uri.startsWith(DEEP_LINK_PREFIX)
val intent = when {
uri.isBlank() -> {
WebViewActivity.newInstance(context, null, serverId)
@ -1456,7 +1456,7 @@ class MessagingManager @Inject constructor(
WebViewActivity.newInstance(context, null, serverId)
}
}
UrlHandler.isAbsoluteUrl(uri) || uri.startsWith(DEEP_LINK_PREFIX) -> {
UrlUtil.isAbsoluteUrl(uri) || uri.startsWith(DEEP_LINK_PREFIX) -> {
Intent(Intent.ACTION_VIEW).apply {
this.data = Uri.parse(
if (uri.startsWith(DEEP_LINK_PREFIX)) {

View File: UrlHandler.kt (app, deleted)

@ -1,33 +0,0 @@
package io.homeassistant.companion.android.util
import android.net.Uri
import okhttp3.HttpUrl.Companion.toHttpUrlOrNull
import java.net.URL
object UrlHandler {
fun handle(base: URL?, input: String): URL? {
return when {
isAbsoluteUrl(input) -> {
URL(input)
}
input.startsWith("homeassistant://navigate/") -> {
(base.toString() + input.removePrefix("homeassistant://navigate/")).toHttpUrlOrNull()?.toUrl()
}
else -> {
(base.toString() + input.removePrefix("/")).toHttpUrlOrNull()?.toUrl()
}
}
}
fun isAbsoluteUrl(it: String?): Boolean {
return Regex("^https?://").containsMatchIn(it.toString())
}
fun splitNfcTagId(it: Uri?): String? {
val matches =
Regex("^https?://www\\.home-assistant\\.io/tag/(.*)").find(
it.toString()
)
return matches?.groups?.get(1)?.value
}
}

View File: WebViewPresenterImpl.kt (app)

@ -15,7 +15,7 @@ import io.homeassistant.companion.android.common.util.DisabledLocationHandler
import io.homeassistant.companion.android.matter.MatterFrontendCommissioningStatus
import io.homeassistant.companion.android.matter.MatterManager
import io.homeassistant.companion.android.thread.ThreadManager
import io.homeassistant.companion.android.util.UrlHandler
import io.homeassistant.companion.android.util.UrlUtil
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.Job
@ -89,7 +89,7 @@ class WebViewPresenterImpl @Inject constructor(
urlForServer = server?.id
if (path != null && !path.startsWith("entityId:")) {
url = UrlHandler.handle(url, path)
url = UrlUtil.handle(url, path)
}
/*

View File: build.gradle.kts (common)

@ -62,6 +62,9 @@ dependencies {
implementation("com.google.dagger:hilt-android:2.46.1")
kapt("com.google.dagger:hilt-android-compiler:2.46.1")
implementation("androidx.lifecycle:lifecycle-runtime-ktx:2.6.1")
implementation("androidx.lifecycle:lifecycle-viewmodel-ktx:2.6.1")
api("androidx.room:room-runtime:2.5.1")
api("androidx.room:room-ktx:2.5.1")
kapt("androidx.room:room-compiler:2.5.1")

View File: AssistViewModelBase.kt (common, new file)

@ -0,0 +1,189 @@
package io.homeassistant.companion.android.common.assist
import android.app.Application
import android.content.pm.PackageManager
import androidx.lifecycle.AndroidViewModel
import androidx.lifecycle.viewModelScope
import io.homeassistant.companion.android.common.R
import io.homeassistant.companion.android.common.data.servers.ServerManager
import io.homeassistant.companion.android.common.data.websocket.impl.entities.AssistPipelineError
import io.homeassistant.companion.android.common.data.websocket.impl.entities.AssistPipelineEventType
import io.homeassistant.companion.android.common.data.websocket.impl.entities.AssistPipelineIntentEnd
import io.homeassistant.companion.android.common.data.websocket.impl.entities.AssistPipelineResponse
import io.homeassistant.companion.android.common.data.websocket.impl.entities.AssistPipelineRunStart
import io.homeassistant.companion.android.common.data.websocket.impl.entities.AssistPipelineSttEnd
import io.homeassistant.companion.android.common.data.websocket.impl.entities.AssistPipelineTtsEnd
import io.homeassistant.companion.android.common.util.AudioRecorder
import io.homeassistant.companion.android.common.util.AudioUrlPlayer
import io.homeassistant.companion.android.util.UrlUtil
import kotlinx.coroutines.Job
import kotlinx.coroutines.launch
abstract class AssistViewModelBase(
private val serverManager: ServerManager,
private val audioRecorder: AudioRecorder,
private val audioUrlPlayer: AudioUrlPlayer,
application: Application
) : AndroidViewModel(application) {
enum class AssistInputMode {
TEXT,
TEXT_ONLY,
VOICE_INACTIVE,
VOICE_ACTIVE,
BLOCKED
}
protected val app = application
protected var selectedServerId = ServerManager.SERVER_ID_ACTIVE
private var recorderJob: Job? = null
private var recorderQueue: MutableList<ByteArray>? = null
protected val hasMicrophone = app.packageManager.hasSystemFeature(PackageManager.FEATURE_MICROPHONE)
protected var hasPermission = false
private var binaryHandlerId: Int? = null
private var conversationId: String? = null
abstract fun getInput(): AssistInputMode?
abstract fun setInput(inputMode: AssistInputMode)
protected fun clearPipelineData() {
binaryHandlerId = null
conversationId = null
}
/**
* @param text input to run an intent pipeline with, or `null` to run an STT pipeline (check if
* STT is supported _before_ calling this function)
* @param pipeline information about the pipeline, or `null` to use the server's default
* @param onMessage callback for messages that should be posted for this pipeline run, with 3
* arguments: the message, whether the message is input (`true`), output (`false`) or
* undetermined (`null`), and whether the message is an error message
*/
protected fun runAssistPipelineInternal(
text: String?,
pipeline: AssistPipelineResponse?,
onMessage: (String, Boolean?, Boolean) -> Unit
) {
val isVoice = text == null
var job: Job? = null
job = viewModelScope.launch {
val flow = if (isVoice) {
serverManager.webSocketRepository(selectedServerId).runAssistPipelineForVoice(
sampleRate = AudioRecorder.SAMPLE_RATE,
outputTts = pipeline?.ttsEngine?.isNotBlank() == true,
pipelineId = pipeline?.id,
conversationId = conversationId
)
} else {
serverManager.integrationRepository(selectedServerId).getAssistResponse(
text = text!!,
pipelineId = pipeline?.id,
conversationId = conversationId
)
}
flow?.collect {
when (it.type) {
AssistPipelineEventType.RUN_START -> {
if (!isVoice) return@collect
val data = (it.data as? AssistPipelineRunStart)?.runnerData
binaryHandlerId = data?.get("stt_binary_handler_id") as? Int
}
AssistPipelineEventType.STT_START -> {
viewModelScope.launch {
recorderQueue?.forEach { item ->
sendVoiceData(item)
}
recorderQueue = null
}
}
AssistPipelineEventType.STT_END -> {
stopRecording()
(it.data as? AssistPipelineSttEnd)?.sttOutput?.let { response ->
onMessage(response["text"] as String, true, false)
}
}
AssistPipelineEventType.INTENT_END -> {
val data = (it.data as? AssistPipelineIntentEnd)?.intentOutput ?: return@collect
conversationId = data.conversationId
data.response.speech.plain["speech"]?.let { response ->
onMessage(response, false, false)
}
}
AssistPipelineEventType.TTS_END -> {
if (!isVoice) return@collect
val audioPath = (it.data as? AssistPipelineTtsEnd)?.ttsOutput?.url
if (!audioPath.isNullOrBlank()) {
playAudio(audioPath)
}
}
AssistPipelineEventType.RUN_END -> {
stopRecording()
job?.cancel()
}
AssistPipelineEventType.ERROR -> {
val errorMessage = (it.data as? AssistPipelineError)?.message ?: return@collect
onMessage(errorMessage, null, true)
stopRecording()
job?.cancel()
}
else -> { /* Do nothing */ }
}
} ?: run {
onMessage(app.getString(R.string.assist_error), null, true)
}
}
}
protected fun setupRecorderQueue() {
recorderQueue = mutableListOf()
recorderJob = viewModelScope.launch {
audioRecorder.audioBytes.collect {
recorderQueue?.add(it) ?: sendVoiceData(it)
}
}
}
private fun sendVoiceData(data: ByteArray) {
binaryHandlerId?.let {
viewModelScope.launch {
// Launch to prevent blocking the output flow if the network is slow
serverManager.webSocketRepository(selectedServerId).sendVoiceData(it, data)
}
}
}
private fun playAudio(path: String) {
UrlUtil.handle(serverManager.getServer(selectedServerId)?.connection?.getUrl(), path)?.let {
viewModelScope.launch {
audioUrlPlayer.playAudio(it.toString())
}
}
}
protected fun stopRecording() {
audioRecorder.stopRecording()
recorderJob?.cancel()
recorderJob = null
if (binaryHandlerId != null) {
viewModelScope.launch {
recorderQueue?.forEach {
sendVoiceData(it)
}
recorderQueue = null
sendVoiceData(byteArrayOf()) // Empty message to indicate end of recording
binaryHandlerId = null
}
} else {
recorderQueue = null
}
if (getInput() == AssistInputMode.VOICE_ACTIVE) {
setInput(AssistInputMode.VOICE_INACTIVE)
}
}
protected fun stopPlayback() = audioUrlPlayer.stop()
}
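
For orientation, both subclasses below drive this function the same way: the onMessage callback rewrites the current chat bubble in place and, once the STT transcript arrives (isInput == true), starts a fresh response bubble. A condensed usage sketch mirroring that pattern (this restates the AssistViewModel/ConversationViewModel diffs, it is not new behavior):

// Sketch: how a subclass consumes runAssistPipelineInternal.
private fun runAssistPipeline(text: String?) {
    val userMessage = AssistMessage(text ?: "", isInput = true)
    _conversation.add(userMessage)
    val haMessage = AssistMessage("", isInput = false)
    if (text != null) _conversation.add(haMessage)
    var message = if (text == null) userMessage else haMessage

    runAssistPipelineInternal(text, currentPipeline) { newMessage, isInput, isError ->
        _conversation.indexOf(message).takeIf { pos -> pos >= 0 }?.let { index ->
            _conversation[index] = message.copy(
                message = newMessage,
                isInput = isInput ?: message.isInput,
                isError = isError
            )
        }
        if (isInput == true) { // transcript received: add the response bubble
            _conversation.add(haMessage)
            message = haMessage
        }
    }
}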

View File: UrlUtil.kt (common)

@ -1,10 +1,12 @@
package io.homeassistant.companion.android.util
import android.net.Uri
import io.homeassistant.companion.android.common.data.MalformedHttpUrlException
import io.homeassistant.companion.android.common.data.authentication.impl.AuthenticationService
import okhttp3.HttpUrl
import okhttp3.HttpUrl.Companion.toHttpUrl
import okhttp3.HttpUrl.Companion.toHttpUrlOrNull
import java.net.URL
object UrlUtil {
fun formattedUrlString(url: String): String {
@ -35,4 +37,30 @@ object UrlUtil {
.build()
.toString()
}
fun handle(base: URL?, input: String): URL? {
return when {
isAbsoluteUrl(input) -> {
URL(input)
}
input.startsWith("homeassistant://navigate/") -> {
(base.toString() + input.removePrefix("homeassistant://navigate/")).toHttpUrlOrNull()?.toUrl()
}
else -> {
(base.toString() + input.removePrefix("/")).toHttpUrlOrNull()?.toUrl()
}
}
}
fun isAbsoluteUrl(it: String?): Boolean {
return Regex("^https?://").containsMatchIn(it.toString())
}
fun splitNfcTagId(it: Uri?): String? {
val matches =
Regex("^https?://www\\.home-assistant\\.io/tag/(.*)").find(
it.toString()
)
return matches?.groups?.get(1)?.value
}
}
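
For reference, the expected behaviour of the helpers moved into UrlUtil, derived from the regexes and URL concatenation above (values are illustrative only):

// Illustrative expectations; not part of the diff.
// isAbsoluteUrl matches only when the string starts with http(s)://
UrlUtil.isAbsoluteUrl("https://example.com/lovelace") // true
UrlUtil.isAbsoluteUrl("/lovelace/0") // false

// handle() resolves relative paths and homeassistant://navigate/ links
// against the server's base URL, and passes absolute URLs through:
val base = URL("http://homeassistant.local:8123/")
UrlUtil.handle(base, "/api/tts_proxy/abc.mp3")
// -> http://homeassistant.local:8123/api/tts_proxy/abc.mp3
UrlUtil.handle(base, "homeassistant://navigate/lovelace/0")
// -> http://homeassistant.local:8123/lovelace/0

// splitNfcTagId extracts the tag from a www.home-assistant.io/tag/ URL
// (the format shown in the TagReaderActivity comment above):
UrlUtil.splitNfcTagId(Uri.parse("https://www.home-assistant.io/tag/5f0ba733-172f-430d-a7f8-e4ad940c88d7"))
// -> "5f0ba733-172f-430d-a7f8-e4ad940c88d7"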

View File: AndroidManifest.xml (wear)

@ -7,6 +7,7 @@
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
<uses-permission android:name="android.permission.ACCESS_BACKGROUND_LOCATION" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.ACTIVITY_RECOGNITION" />
<uses-permission android:name="android.permission.READ_PHONE_STATE" />
<uses-permission android:name="android.permission.BODY_SENSORS" />
@ -21,6 +22,7 @@
<uses-permission android:name="android.permission.BLUETOOTH_SCAN" />
<uses-feature android:name="android.hardware.type.watch" />
<uses-feature android:name="android.hardware.microphone" android:required="false" />
<uses-sdk tools:overrideLibrary="androidx.health.services.client" />

View File: ConversationActivity.kt (wear)

@ -1,7 +1,9 @@
package io.homeassistant.companion.android.conversation
import android.Manifest
import android.content.Context
import android.content.Intent
import android.content.pm.PackageManager
import android.os.Bundle
import android.os.PowerManager
import android.speech.RecognizerIntent
@ -9,6 +11,7 @@ import androidx.activity.ComponentActivity
import androidx.activity.compose.setContent
import androidx.activity.result.contract.ActivityResultContracts
import androidx.activity.viewModels
import androidx.core.content.ContextCompat
import androidx.core.content.getSystemService
import androidx.lifecycle.lifecycleScope
import dagger.hilt.android.AndroidEntryPoint
@ -27,7 +30,7 @@ class ConversationActivity : ComponentActivity() {
}
}
private var searchResults = registerForActivityResult(ActivityResultContracts.StartActivityForResult()) { result ->
private val searchResults = registerForActivityResult(ActivityResultContracts.StartActivityForResult()) { result ->
if (result.resultCode == RESULT_OK) {
conversationViewModel.updateSpeechResult(
result.data?.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS).let {
@ -37,6 +40,10 @@ class ConversationActivity : ComponentActivity() {
}
}
private val requestPermission = registerForActivityResult(
ActivityResultContracts.RequestPermission()
) { conversationViewModel.onPermissionResult(it, this::launchVoiceInputIntent) }
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
@ -50,13 +57,21 @@ class ConversationActivity : ComponentActivity() {
setContent {
LoadAssistView(
conversationViewModel = conversationViewModel,
onMicrophoneInput = this::launchVoiceInputIntent
onVoiceInputIntent = this::launchVoiceInputIntent
)
}
}
override fun onResume() {
super.onResume()
conversationViewModel.setPermissionInfo(
ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED
) { requestPermission.launch(Manifest.permission.RECORD_AUDIO) }
}
override fun onPause() {
super.onPause()
conversationViewModel.onPause()
val pm = applicationContext.getSystemService<PowerManager>()
if (pm?.isInteractive == false && conversationViewModel.conversation.size >= 3) {
finish()

View File: ConversationViewModel.kt (wear)

@ -1,40 +1,41 @@
package io.homeassistant.companion.android.conversation
import android.app.Application
import android.util.Log
import androidx.compose.runtime.getValue
import androidx.compose.runtime.mutableStateListOf
import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.setValue
import androidx.lifecycle.AndroidViewModel
import androidx.lifecycle.viewModelScope
import dagger.hilt.android.lifecycle.HiltViewModel
import io.homeassistant.companion.android.common.R
import io.homeassistant.companion.android.common.assist.AssistViewModelBase
import io.homeassistant.companion.android.common.data.prefs.WearPrefsRepository
import io.homeassistant.companion.android.common.data.servers.ServerManager
import io.homeassistant.companion.android.common.data.websocket.impl.entities.AssistPipelineError
import io.homeassistant.companion.android.common.data.websocket.impl.entities.AssistPipelineEventType
import io.homeassistant.companion.android.common.data.websocket.impl.entities.AssistPipelineIntentEnd
import io.homeassistant.companion.android.common.data.websocket.impl.entities.AssistPipelineResponse
import io.homeassistant.companion.android.common.util.AudioRecorder
import io.homeassistant.companion.android.common.util.AudioUrlPlayer
import io.homeassistant.companion.android.conversation.views.AssistMessage
import kotlinx.coroutines.Job
import kotlinx.coroutines.launch
import javax.inject.Inject
import io.homeassistant.companion.android.common.R as commonR
@HiltViewModel
class ConversationViewModel @Inject constructor(
application: Application,
private val serverManager: ServerManager,
private val wearPrefsRepository: WearPrefsRepository
) : AndroidViewModel(application) {
private val audioRecorder: AudioRecorder,
audioUrlPlayer: AudioUrlPlayer,
private val wearPrefsRepository: WearPrefsRepository,
application: Application
) : AssistViewModelBase(serverManager, audioRecorder, audioUrlPlayer, application) {
private val app = application
companion object {
private const val TAG = "ConvViewModel"
}
private var conversationId: String? = null
private var useAssistPipeline = false
private var useAssistPipelineStt = false
var useAssistPipeline by mutableStateOf(false)
private set
var allowInput by mutableStateOf(false)
var inputMode by mutableStateOf(AssistInputMode.BLOCKED)
private set
var isHapticEnabled by mutableStateOf(false)
@ -43,10 +44,13 @@ class ConversationViewModel @Inject constructor(
var currentPipeline by mutableStateOf<AssistPipelineResponse?>(null)
private set
private var requestPermission: (() -> Unit)? = null
private var requestSilently = true
private val _pipelines = mutableStateListOf<AssistPipelineResponse>()
val pipelines: List<AssistPipelineResponse> = _pipelines
private val startMessage = AssistMessage(application.getString(R.string.assist_how_can_i_assist), isInput = false)
private val startMessage = AssistMessage(application.getString(commonR.string.assist_how_can_i_assist), isInput = false)
private val _conversation = mutableStateListOf(startMessage)
val conversation: List<AssistMessage> = _conversation
@ -56,12 +60,12 @@ class ConversationViewModel @Inject constructor(
if (!serverManager.isRegistered()) {
_conversation.clear()
_conversation.add(
AssistMessage(app.getString(R.string.not_registered), isInput = false)
AssistMessage(app.getString(commonR.string.not_registered), isInput = false)
)
} else if (supported == null) { // Couldn't get config
_conversation.clear()
_conversation.add(
AssistMessage(app.getString(R.string.assist_connnect), isInput = false)
AssistMessage(app.getString(commonR.string.assist_connnect), isInput = false)
)
} else if (!supported) { // Core too old or missing component
val usingPipelines = serverManager.getServer()?.version?.isAtLeast(2023, 5) == true
@ -69,9 +73,9 @@ class ConversationViewModel @Inject constructor(
_conversation.add(
AssistMessage(
if (usingPipelines) {
app.getString(R.string.no_assist_support, "2023.5", app.getString(R.string.no_assist_support_assist_pipeline))
app.getString(commonR.string.no_assist_support, "2023.5", app.getString(commonR.string.no_assist_support_assist_pipeline))
} else {
app.getString(R.string.no_assist_support, "2023.1", app.getString(R.string.no_assist_support_conversation))
app.getString(commonR.string.no_assist_support, "2023.1", app.getString(commonR.string.no_assist_support_conversation))
},
isInput = false
)
@ -89,6 +93,12 @@ class ConversationViewModel @Inject constructor(
return false
}
override fun getInput(): AssistInputMode = inputMode
override fun setInput(inputMode: AssistInputMode) {
this.inputMode = inputMode
}
private suspend fun checkAssistSupport(): Boolean? {
isHapticEnabled = wearPrefsRepository.getWearHapticFeedback()
if (!serverManager.isRegistered()) return false
@ -111,8 +121,14 @@ class ConversationViewModel @Inject constructor(
pipelines?.let { _pipelines.addAll(it.pipelines) }
}
fun usePipelineStt(): Boolean = useAssistPipelineStt
fun changePipeline(id: String) = viewModelScope.launch {
if (id == currentPipeline?.id) return@launch
stopRecording()
stopPlayback()
setPipeline(id)
}
@ -123,71 +139,117 @@ class ConversationViewModel @Inject constructor(
null
}
useAssistPipelineStt = false
if (pipeline != null || !useAssistPipeline) {
currentPipeline = pipeline
_conversation.clear()
_conversation.add(startMessage)
conversationId = null
allowInput = true
clearPipelineData()
if (pipeline != null && hasMicrophone && pipeline.sttEngine != null) {
if (hasPermission || requestSilently) {
inputMode = AssistInputMode.VOICE_INACTIVE
useAssistPipelineStt = true
onMicrophoneInput()
} else {
inputMode = AssistInputMode.TEXT
}
} else {
inputMode = AssistInputMode.TEXT
}
} else {
allowInput = false
inputMode = AssistInputMode.BLOCKED
_conversation.clear()
_conversation.add(
AssistMessage(app.getString(R.string.assist_error), isInput = false)
AssistMessage(app.getString(commonR.string.assist_error), isInput = false)
)
}
return allowInput // Currently, always launch voice input when setting the pipeline
return inputMode == AssistInputMode.TEXT
}
fun updateSpeechResult(result: String) = runAssistPipeline(result)
fun updateSpeechResult(commonResult: String) = runAssistPipeline(commonResult)
fun onMicrophoneInput() {
if (!hasPermission) {
requestPermission?.let { it() }
return
}
if (inputMode == AssistInputMode.VOICE_ACTIVE) {
stopRecording()
return
}
val recording = try {
audioRecorder.startRecording()
} catch (e: Exception) {
Log.e(TAG, "Exception while starting recording", e)
false
}
if (recording) {
setupRecorderQueue()
inputMode = AssistInputMode.VOICE_ACTIVE
runAssistPipeline(null)
} else {
_conversation.add(AssistMessage(app.getString(commonR.string.assist_error), isInput = false, isError = true))
}
}
private fun runAssistPipeline(text: String?) {
if (text.isNullOrBlank()) return // Voice support is not ready yet
val isVoice = text == null
val userMessage = AssistMessage(text ?: "", isInput = true)
_conversation.add(userMessage)
val haMessage = AssistMessage("", isInput = false)
_conversation.add(haMessage)
if (!isVoice) _conversation.add(haMessage)
var message = if (isVoice) userMessage else haMessage
var job: Job? = null
job = viewModelScope.launch {
val flow = serverManager.integrationRepository().getAssistResponse(
text = text,
pipelineId = currentPipeline?.id,
conversationId = conversationId
)
flow?.collect {
when (it.type) {
AssistPipelineEventType.INTENT_END -> {
val data = (it.data as? AssistPipelineIntentEnd)?.intentOutput ?: return@collect
conversationId = data.conversationId
data.response.speech.plain["speech"]?.let { response ->
_conversation.indexOf(haMessage).takeIf { pos -> pos >= 0 }?.let { index ->
_conversation[index] = haMessage.copy(message = response)
}
}
}
AssistPipelineEventType.RUN_END -> {
job?.cancel()
}
AssistPipelineEventType.ERROR -> {
val errorMessage = (it.data as? AssistPipelineError)?.message ?: return@collect
_conversation.indexOf(haMessage).takeIf { pos -> pos >= 0 }?.let { index ->
_conversation[index] = haMessage.copy(message = errorMessage, isError = true)
}
job?.cancel()
}
else -> { /* Do nothing */ }
}
} ?: run {
_conversation.indexOf(haMessage).takeIf { pos -> pos >= 0 }?.let { index ->
_conversation[index] = haMessage.copy(message = app.getString(R.string.assist_error), isError = true)
runAssistPipelineInternal(
text,
currentPipeline
) { newMessage, isInput, isError ->
_conversation.indexOf(message).takeIf { pos -> pos >= 0 }?.let { index ->
_conversation[index] = message.copy(
message = newMessage,
isInput = isInput ?: message.isInput,
isError = isError
)
if (isInput == true) {
_conversation.add(haMessage)
message = haMessage
}
}
}
}
fun setPermissionInfo(hasPermission: Boolean, callback: () -> Unit) {
this.hasPermission = hasPermission
requestPermission = callback
}
fun onPermissionResult(granted: Boolean, voiceInputIntent: (() -> Unit)) {
hasPermission = granted
useAssistPipelineStt = currentPipeline?.sttEngine != null && granted
if (granted) {
inputMode = AssistInputMode.VOICE_INACTIVE
onMicrophoneInput()
} else if (requestSilently) { // Don't notify the user if they haven't explicitly requested
inputMode = AssistInputMode.TEXT
voiceInputIntent()
}
requestSilently = false
}
fun onConversationScreenHidden() {
stopRecording()
stopPlayback()
}
fun onPause() {
requestPermission = null
stopRecording()
stopPlayback()
}
}
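
Read together with the ConversationActivity diff above, the watch permission flow is: onResume reports the current RECORD_AUDIO state plus a launcher callback; the first denial silently falls back to the system speech recognizer (requestSilently), while later mic taps re-request visibly. A condensed sketch of the two halves, assuming the standard Activity Result API used above:

// Sketch: activity + view model halves of the permission flow (condensed).
class ConversationActivity : ComponentActivity() {
    private val requestPermission = registerForActivityResult(
        ActivityResultContracts.RequestPermission()
    ) { granted -> conversationViewModel.onPermissionResult(granted, ::launchVoiceInputIntent) }

    override fun onResume() {
        super.onResume()
        conversationViewModel.setPermissionInfo(
            ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) ==
                PackageManager.PERMISSION_GRANTED
        ) { requestPermission.launch(Manifest.permission.RECORD_AUDIO) }
    }
}
// In the view model: granted -> VOICE_INACTIVE + onMicrophoneInput();
// denied while requestSilently -> TEXT mode + the system voice input intent.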

View File: ConversationViews.kt (wear)

@ -1,5 +1,11 @@
package io.homeassistant.companion.android.conversation.views
import androidx.compose.animation.core.LinearEasing
import androidx.compose.animation.core.RepeatMode
import androidx.compose.animation.core.animateFloat
import androidx.compose.animation.core.infiniteRepeatable
import androidx.compose.animation.core.rememberInfiniteTransition
import androidx.compose.animation.core.tween
import androidx.compose.foundation.background
import androidx.compose.foundation.clickable
import androidx.compose.foundation.layout.Arrangement
@ -11,10 +17,14 @@ import androidx.compose.foundation.layout.height
import androidx.compose.foundation.layout.padding
import androidx.compose.foundation.layout.size
import androidx.compose.foundation.shape.AbsoluteRoundedCornerShape
import androidx.compose.foundation.shape.CircleShape
import androidx.compose.runtime.Composable
import androidx.compose.runtime.LaunchedEffect
import androidx.compose.runtime.getValue
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.draw.clip
import androidx.compose.ui.draw.scale
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.graphics.ColorFilter
import androidx.compose.ui.hapticfeedback.HapticFeedbackType
@ -42,10 +52,12 @@ import androidx.wear.compose.navigation.rememberSwipeDismissableNavController
import com.mikepenz.iconics.compose.Image
import com.mikepenz.iconics.typeface.library.community.material.CommunityMaterial
import io.homeassistant.companion.android.common.R
import io.homeassistant.companion.android.common.assist.AssistViewModelBase
import io.homeassistant.companion.android.common.data.websocket.impl.entities.AssistPipelineResponse
import io.homeassistant.companion.android.conversation.ConversationViewModel
import io.homeassistant.companion.android.home.views.TimeText
import io.homeassistant.companion.android.theme.WearAppTheme
import io.homeassistant.companion.android.util.KeepScreenOn
import io.homeassistant.companion.android.views.ListHeader
import io.homeassistant.companion.android.views.ThemeLazyColumn
@ -55,7 +67,7 @@ private const val SCREEN_PIPELINES = "pipelines"
@Composable
fun LoadAssistView(
conversationViewModel: ConversationViewModel,
onMicrophoneInput: () -> Unit
onVoiceInputIntent: () -> Unit
) {
WearAppTheme {
val swipeDismissableNavController = rememberSwipeDismissableNavController()
@ -66,13 +78,20 @@ fun LoadAssistView(
composable(SCREEN_CONVERSATION) {
ConversationResultView(
conversation = conversationViewModel.conversation,
allowInput = conversationViewModel.allowInput,
inputMode = conversationViewModel.inputMode,
currentPipeline = conversationViewModel.currentPipeline,
hapticFeedback = conversationViewModel.isHapticEnabled,
onChangePipeline = {
conversationViewModel.onConversationScreenHidden()
swipeDismissableNavController.navigate(SCREEN_PIPELINES)
},
onMicrophoneInput = onMicrophoneInput
onMicrophoneInput = {
if (conversationViewModel.usePipelineStt()) {
conversationViewModel.onMicrophoneInput()
} else {
onVoiceInputIntent()
}
}
)
}
composable(SCREEN_PIPELINES) {
@ -91,7 +110,7 @@ fun LoadAssistView(
@Composable
fun ConversationResultView(
conversation: List<AssistMessage>,
allowInput: Boolean,
inputMode: AssistViewModelBase.AssistInputMode,
currentPipeline: AssistPipelineResponse?,
hapticFeedback: Boolean,
onChangePipeline: () -> Unit,
@ -108,7 +127,9 @@ fun ConversationResultView(
timeText = { TimeText(scalingLazyListState = scrollState) }
) {
LaunchedEffect(conversation.size) {
scrollState.scrollToItem(if (allowInput) conversation.size else (conversation.size - 1))
scrollState.scrollToItem(
if (inputMode != AssistViewModelBase.AssistInputMode.BLOCKED) conversation.size else (conversation.size - 1)
)
}
if (hapticFeedback) {
val haptic = LocalHapticFeedback.current
@ -152,18 +173,47 @@ fun ConversationResultView(
items(conversation) {
SpeechBubble(text = it.message, isResponse = !it.isInput)
}
if (allowInput) {
if (inputMode != AssistViewModelBase.AssistInputMode.BLOCKED) {
item {
Button(
modifier = Modifier.padding(top = 16.dp),
onClick = { onMicrophoneInput() },
colors = ButtonDefaults.secondaryButtonColors()
Box(
modifier = Modifier.size(64.dp),
contentAlignment = Alignment.Center
) {
Image(
asset = CommunityMaterial.Icon3.cmd_microphone,
contentDescription = stringResource(R.string.assist_start_listening),
colorFilter = ColorFilter.tint(LocalContentColor.current)
)
val inputIsActive = inputMode == AssistViewModelBase.AssistInputMode.VOICE_ACTIVE
if (inputIsActive) {
KeepScreenOn()
val transition = rememberInfiniteTransition()
val scale by transition.animateFloat(
initialValue = 1f,
targetValue = 1.2f,
animationSpec = infiniteRepeatable(
animation = tween(600, easing = LinearEasing),
repeatMode = RepeatMode.Reverse
)
)
Box(
modifier = Modifier
.size(48.dp)
.scale(scale)
.background(color = colorResource(R.color.colorSpeechText), shape = CircleShape)
.clip(CircleShape)
)
}
Button(
onClick = { onMicrophoneInput() },
colors =
if (inputIsActive) {
ButtonDefaults.secondaryButtonColors(backgroundColor = Color.Transparent, contentColor = Color.Black)
} else {
ButtonDefaults.secondaryButtonColors()
}
) {
Image(
asset = CommunityMaterial.Icon3.cmd_microphone,
contentDescription = stringResource(R.string.assist_start_listening),
colorFilter = ColorFilter.tint(LocalContentColor.current)
)
}
}
}
}

View File: KeepScreenOn.kt (wear, new file)

@ -0,0 +1,17 @@
package io.homeassistant.companion.android.util
import androidx.compose.runtime.Composable
import androidx.compose.runtime.DisposableEffect
import androidx.compose.ui.platform.LocalView
/** Keep the screen on while in composition. From https://stackoverflow.com/a/71293123/4214819. */
@Composable
fun KeepScreenOn() {
val currentView = LocalView.current
DisposableEffect(Unit) {
currentView.keepScreenOn = true
onDispose {
currentView.keepScreenOn = false
}
}
}
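
Usage on the watch is shown in the conversation view above: because the composable is only in composition while recording, the window flag is set and cleared automatically with the state change. A minimal sketch, assuming the inputMode state from the view model:

// Sketch: keep the screen on only while actively listening.
if (inputMode == AssistViewModelBase.AssistInputMode.VOICE_ACTIVE) {
    KeepScreenOn()
}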