android+linux: add head tracking and gestures

Kavish Devar
2025-03-14 17:32:47 +05:30
parent 55d06d2f65
commit 22d5ae60b6
54 changed files with 3532 additions and 1483 deletions

View File

@@ -64,4 +64,4 @@ dependencies {
androidTestImplementation(libs.androidx.ui.test.junit4)
debugImplementation(libs.androidx.ui.tooling)
debugImplementation(libs.androidx.ui.test.manifest)
}
}

View File

@@ -24,6 +24,8 @@
tools:ignore="UnusedAttribute" />
<uses-permission android:name="android.permission.BLUETOOTH_ADVERTISE" />
<uses-permission android:name="android.permission.READ_PHONE_STATE" />
<uses-permission android:name="android.permission.ANSWER_PHONE_CALLS" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
<application
android:allowBackup="true"

View File

@@ -64,9 +64,11 @@ import com.google.accompanist.permissions.rememberMultiplePermissionsState
import me.kavishdevar.aln.screens.AirPodsSettingsScreen
import me.kavishdevar.aln.screens.AppSettingsScreen
import me.kavishdevar.aln.screens.DebugScreen
import me.kavishdevar.aln.screens.HeadTrackingScreen
import me.kavishdevar.aln.screens.LongPress
import me.kavishdevar.aln.screens.RenameScreen
import me.kavishdevar.aln.services.AirPodsService
import me.kavishdevar.aln.services.ServiceManager
import me.kavishdevar.aln.ui.theme.ALNTheme
import me.kavishdevar.aln.utils.AirPodsNotifications
import me.kavishdevar.aln.utils.CrossDevice
@@ -135,7 +137,9 @@ fun Main() {
"android.permission.BLUETOOTH_SCAN",
"android.permission.BLUETOOTH_ADVERTISE",
"android.permission.POST_NOTIFICATIONS",
"android.permission.READ_PHONE_STATE"
"android.permission.READ_PHONE_STATE",
"android.permission.ANSWER_PHONE_CALLS",
"android.permission.MODIFY_AUDIO_SETTINGS"
)
)
val airPodsService = remember { mutableStateOf<AirPodsService?>(null) }
@@ -250,6 +254,9 @@ fun Main() {
composable("app_settings") {
AppSettingsScreen(navController)
}
composable("head_tracking") {
HeadTrackingScreen(navController)
}
}
}
serviceConnection = remember {
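
The new destination plugs into the existing Compose Navigation graph registered above. A minimal, hypothetical caller (assuming NavigationButton, used later in the settings screen, ultimately boils down to a plain navigate call on this route string):

// Hypothetical trigger — any UI element can open the new screen once the
// composable("head_tracking") destination above is registered.
Button(onClick = { navController.navigate("head_tracking") }) {
    Text("Head Tracking")
}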

View File

@@ -1,17 +1,17 @@
/*
* AirPods like Normal (ALN) - Bringing Apple-only features to Linux and Android for seamless AirPods functionality!
*
*
* Copyright (C) 2024 Kavish Devar
*
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License.
*
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
@@ -48,7 +48,7 @@ import androidx.compose.ui.unit.sp
import me.kavishdevar.aln.services.AirPodsService
@Composable
fun IndependentToggle(name: String, service: AirPodsService, functionName: String, sharedPreferences: SharedPreferences, default: Boolean = false) {
fun IndependentToggle(name: String, service: AirPodsService? = null, functionName: String? = null, sharedPreferences: SharedPreferences, default: Boolean = false) {
val isDarkTheme = isSystemInDarkTheme()
val textColor = if (isDarkTheme) Color.White else Color.Black
@@ -77,9 +77,10 @@ fun IndependentToggle(name: String, service: AirPodsService, functionName: Strin
.edit()
.putBoolean(snakeCasedName, checked)
.apply()
val method = service::class.java.getMethod(functionName, Boolean::class.java)
method.invoke(service, checked)
if (functionName != null && service != null) {
val method = service::class.java.getMethod(functionName, Boolean::class.java)
method.invoke(service, checked)
}
}
)
},
@@ -98,8 +99,11 @@ fun IndependentToggle(name: String, service: AirPodsService, functionName: Strin
onCheckedChange = {
checked = it
sharedPreferences.edit().putBoolean(snakeCasedName, it).apply()
val method = service::class.java.getMethod(functionName, Boolean::class.java)
method.invoke(service, it)
if (functionName != null && service != null) {
val method =
service::class.java.getMethod(functionName, Boolean::class.java)
method.invoke(service, it)
}
},
)
}
@@ -110,4 +114,4 @@ fun IndependentToggle(name: String, service: AirPodsService, functionName: Strin
@Composable
fun IndependentTogglePreview() {
IndependentToggle("Test", AirPodsService(), "test", LocalContext.current.getSharedPreferences("preview", 0), true)
}
}
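
With service and functionName now optional, the toggle can persist a preference without reflectively calling into the service — which is how the new head-tracking screen uses it. A sketch of the two call styles (prefs stands in for the settings SharedPreferences; the setter name in the second call is a placeholder, the reflective lookup itself is unchanged from the code above):

// Preference-only toggle: just writes the snake_cased key to SharedPreferences.
IndependentToggle(name = "Head Gestures", sharedPreferences = prefs)

// Reflective toggle: the named Boolean setter is looked up on the service and
// invoked whenever the switch flips. "setSomeFeature" is an illustrative name only.
IndependentToggle(
    name = "Some Feature",
    service = ServiceManager.getService()!!,
    functionName = "setSomeFeature",
    sharedPreferences = prefs
)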

View File

@@ -1,17 +1,17 @@
/*
* AirPods like Normal (ALN) - Bringing Apple-only features to Linux and Android for seamless AirPods functionality!
*
*
* Copyright (C) 2024 Kavish Devar
*
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License.
*
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
@@ -81,6 +81,8 @@ fun PressAndHoldSettings(navController: NavController) {
modifier = Modifier.padding(8.dp, bottom = 2.dp)
)
Spacer(modifier = Modifier.height(1.dp))
Column(
modifier = Modifier
.fillMaxWidth()

View File

@@ -305,6 +305,21 @@ fun AirPodsSettingsScreen(dev: BluetoothDevice?, service: AirPodsService,
Spacer(modifier = Modifier.height(32.dp))
NoiseControlSettings(service = service)
Spacer(modifier = Modifier.height(16.dp))
Text(
text = stringResource(R.string.head_gestures).uppercase(),
style = TextStyle(
fontSize = 14.sp,
fontWeight = FontWeight.Light,
color = (if (isSystemInDarkTheme()) Color.White else Color.Black).copy(alpha = 0.6f),
fontFamily = FontFamily(Font(R.font.sf_pro))
),
modifier = Modifier.padding(8.dp, bottom = 2.dp)
)
Spacer(modifier = Modifier.height(2.dp))
NavigationButton(to = "head_tracking", "Head Tracking", navController)
Spacer(modifier = Modifier.height(16.dp))
PressAndHoldSettings(navController = navController)

View File

@@ -1,17 +1,17 @@
/*
* AirPods like Normal (ALN) - Bringing Apple-only features to Linux and Android for seamless AirPods functionality!
*
*
* Copyright (C) 2024 Kavish Devar
*
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License.
*
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
@@ -22,6 +22,8 @@ package me.kavishdevar.aln.screens
import android.annotation.SuppressLint
import android.content.Context
import android.os.Build
import androidx.annotation.RequiresApi
import androidx.compose.foundation.background
import androidx.compose.foundation.clickable
import androidx.compose.foundation.isSystemInDarkTheme
@@ -32,7 +34,6 @@ import androidx.compose.foundation.layout.Spacer
import androidx.compose.foundation.layout.fillMaxSize
import androidx.compose.foundation.layout.fillMaxWidth
import androidx.compose.foundation.layout.height
import androidx.compose.foundation.layout.imePadding
import androidx.compose.foundation.layout.padding
import androidx.compose.foundation.layout.size
import androidx.compose.foundation.layout.width
@@ -86,6 +87,7 @@ import kotlinx.coroutines.flow.MutableStateFlow
import me.kavishdevar.aln.R
import me.kavishdevar.aln.services.ServiceManager
@RequiresApi(Build.VERSION_CODES.Q)
@OptIn(ExperimentalMaterial3Api::class, ExperimentalLayoutApi::class)
@SuppressLint("UnusedMaterial3ScaffoldPaddingParameter", "UnspecifiedRegisterReceiverFlag")
@Composable
@@ -154,7 +156,7 @@ fun DebugScreen(navController: NavController) {
Column(
modifier = Modifier
.fillMaxSize()
.imePadding()
// .imePadding()
.haze(hazeState)
.padding(top = paddingValues.calculateTopPadding())
) {
@@ -198,7 +200,7 @@ fun DebugScreen(navController: NavController) {
Column {
Text(
text =
if (isSent) message.substring(5).take(60) + (if (message.substring(5).length > 60) "..." else "")
if (isSent) message.substring(5).take(60) + (if (message.substring(5).length > 60) "..." else "")
else message.substring(9).take(60) + (if (message.substring(9).length > 60) "..." else ""),
style = MaterialTheme.typography.bodySmall,
)
@@ -261,4 +263,4 @@ fun DebugScreen(navController: NavController) {
}
}
}
}
}

View File

@@ -0,0 +1,808 @@
/*
* AirPods like Normal (ALN) - Bringing Apple-only features to Linux and Android for seamless AirPods functionality!
*
* Copyright (C) 2024 Kavish Devar
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package me.kavishdevar.aln.screens
import android.content.Context
import android.os.Build
import android.util.Log
import androidx.annotation.RequiresApi
import androidx.compose.animation.AnimatedContent
import androidx.compose.animation.ExperimentalAnimationApi
import androidx.compose.animation.core.tween
import androidx.compose.animation.fadeIn
import androidx.compose.animation.fadeOut
import androidx.compose.animation.slideInVertically
import androidx.compose.animation.togetherWith
import androidx.compose.foundation.Canvas
import androidx.compose.foundation.isSystemInDarkTheme
import androidx.compose.foundation.layout.Box
import androidx.compose.foundation.layout.Column
import androidx.compose.foundation.layout.Spacer
import androidx.compose.foundation.layout.aspectRatio
import androidx.compose.foundation.layout.fillMaxSize
import androidx.compose.foundation.layout.fillMaxWidth
import androidx.compose.foundation.layout.height
import androidx.compose.foundation.layout.padding
import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.automirrored.filled.KeyboardArrowLeft
import androidx.compose.material.icons.filled.PlayArrow
import androidx.compose.material3.Button
import androidx.compose.material3.ButtonDefaults
import androidx.compose.material3.Card
import androidx.compose.material3.CardDefaults
import androidx.compose.material3.CenterAlignedTopAppBar
import androidx.compose.material3.ExperimentalMaterial3Api
import androidx.compose.material3.Icon
import androidx.compose.material3.IconButton
import androidx.compose.material3.Scaffold
import androidx.compose.material3.Text
import androidx.compose.material3.TextButton
import androidx.compose.material3.TopAppBarDefaults
import androidx.compose.runtime.Composable
import androidx.compose.runtime.DisposableEffect
import androidx.compose.runtime.LaunchedEffect
import androidx.compose.runtime.collectAsState
import androidx.compose.runtime.getValue
import androidx.compose.runtime.mutableFloatStateOf
import androidx.compose.runtime.mutableLongStateOf
import androidx.compose.runtime.mutableStateListOf
import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.remember
import androidx.compose.runtime.rememberCoroutineScope
import androidx.compose.runtime.setValue
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.draw.scale
import androidx.compose.ui.geometry.CornerRadius
import androidx.compose.ui.geometry.Offset
import androidx.compose.ui.geometry.Size
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.graphics.Path
import androidx.compose.ui.graphics.SolidColor
import androidx.compose.ui.graphics.StrokeCap
import androidx.compose.ui.graphics.asAndroidPath
import androidx.compose.ui.graphics.drawscope.Stroke
import androidx.compose.ui.graphics.nativeCanvas
import androidx.compose.ui.graphics.toArgb
import androidx.compose.ui.graphics.vector.ImageVector
import androidx.compose.ui.graphics.vector.path
import androidx.compose.ui.platform.LocalContext
import androidx.compose.ui.res.stringResource
import androidx.compose.ui.text.TextStyle
import androidx.compose.ui.text.drawText
import androidx.compose.ui.text.font.Font
import androidx.compose.ui.text.font.FontFamily
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.text.rememberTextMeasurer
import androidx.compose.ui.text.style.TextAlign
import androidx.compose.ui.tooling.preview.Preview
import androidx.compose.ui.unit.dp
import androidx.compose.ui.unit.sp
import androidx.navigation.NavController
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.delay
import kotlinx.coroutines.launch
import me.kavishdevar.aln.R
import me.kavishdevar.aln.composables.IndependentToggle
import me.kavishdevar.aln.services.ServiceManager
import me.kavishdevar.aln.utils.HeadTracking
import kotlin.math.abs
import kotlin.math.cos
import kotlin.math.sin
import kotlin.random.Random
@RequiresApi(Build.VERSION_CODES.Q)
@OptIn(ExperimentalMaterial3Api::class, ExperimentalAnimationApi::class)
@Composable
fun HeadTrackingScreen(navController: NavController) {
DisposableEffect(Unit) {
ServiceManager.getService()?.startHeadTracking()
onDispose {
ServiceManager.getService()?.stopHeadTracking()
}
}
val sharedPreferences = LocalContext.current.getSharedPreferences("settings", Context.MODE_PRIVATE)
val isDarkTheme = isSystemInDarkTheme()
val backgroundColor = if (isDarkTheme) Color(0xFF1C1C1E) else Color(0xFFFFFFFF)
val textColor = if (isDarkTheme) Color.White else Color.Black
Scaffold(
topBar = {
CenterAlignedTopAppBar(
title = {
Text(
stringResource(R.string.head_tracking),
fontFamily = FontFamily(Font(R.font.sf_pro)),
)
},
navigationIcon = {
TextButton(
onClick = {
navController.popBackStack()
if (ServiceManager.getService()?.isHeadTrackingActive == true) ServiceManager.getService()?.stopHeadTracking()
},
shape = RoundedCornerShape(8.dp),
) {
Icon(
Icons.AutoMirrored.Filled.KeyboardArrowLeft,
contentDescription = "Back",
tint = if (isDarkTheme) Color(0xFF007AFF) else Color(0xFF3C6DF5),
modifier = Modifier.scale(1.5f)
)
Text(
sharedPreferences.getString("name", "AirPods")!!,
style = TextStyle(
fontSize = 18.sp,
fontWeight = FontWeight.Medium,
color = if (isDarkTheme) Color(0xFF007AFF) else Color(0xFF3C6DF5),
fontFamily = FontFamily(Font(R.font.sf_pro))
),
)
}
},
colors = TopAppBarDefaults.topAppBarColors(
containerColor = Color.Transparent
),
actions = {
var isActive by remember { mutableStateOf(ServiceManager.getService()?.isHeadTrackingActive == true) }
IconButton(
onClick = {
if (ServiceManager.getService()?.isHeadTrackingActive == false) {
ServiceManager.getService()?.startHeadTracking()
Log.d("HeadTrackingScreen", "Head tracking started")
isActive = true
} else {
ServiceManager.getService()?.stopHeadTracking()
Log.d("HeadTrackingScreen", "Head tracking stopped")
isActive = false
}
},
) {
Icon(
if (isActive) {
ImageVector.Builder(
name = "Pause",
defaultWidth = 24.dp,
defaultHeight = 24.dp,
viewportWidth = 24f,
viewportHeight = 24f
).apply {
path(
fill = SolidColor(Color.Black),
pathBuilder = {
moveTo(6f, 5f)
lineTo(10f, 5f)
lineTo(10f, 19f)
lineTo(6f, 19f)
lineTo(6f, 5f)
moveTo(14f, 5f)
lineTo(18f, 5f)
lineTo(18f, 19f)
lineTo(14f, 19f)
lineTo(14f, 5f)
}
)
}.build()
} else Icons.Filled.PlayArrow,
contentDescription = "Start",
tint = if (isDarkTheme) Color(0xFF007AFF) else Color(0xFF3C6DF5),
modifier = Modifier.scale(1.5f)
)
}
}
)
},
containerColor = if (isSystemInDarkTheme()) Color(0xFF000000)
else Color(0xFFF2F2F7),
) { paddingValues ->
Column (
modifier = Modifier
.fillMaxSize()
.padding(paddingValues = paddingValues)
.padding(horizontal = 16.dp)
.padding(top = 8.dp)
) {
val sharedPreferences =
LocalContext.current.getSharedPreferences("settings", Context.MODE_PRIVATE)
var gestureText by remember { mutableStateOf("") }
val coroutineScope = rememberCoroutineScope()
IndependentToggle(name = "Head Gestures", sharedPreferences = sharedPreferences)
Spacer(modifier = Modifier.height(2.dp))
Text(
stringResource(R.string.head_gestures_details),
style = TextStyle(
fontSize = 14.sp,
fontWeight = FontWeight.Normal,
fontFamily = FontFamily(Font(R.font.sf_pro)),
color = textColor.copy(0.6f)
),
modifier = Modifier.padding(start = 4.dp)
)
Spacer(modifier = Modifier.height(16.dp))
Text(
"Head Orientation",
style = TextStyle(
fontSize = 18.sp,
fontWeight = FontWeight.Medium,
fontFamily = FontFamily(Font(R.font.sf_pro)),
color = textColor
),
modifier = Modifier.padding(start = 4.dp, bottom = 8.dp, top = 8.dp)
)
HeadVisualization()
Spacer(modifier = Modifier.height(16.dp))
Text(
"Acceleration",
style = TextStyle(
fontSize = 18.sp,
fontWeight = FontWeight.Medium,
fontFamily = FontFamily(Font(R.font.sf_pro)),
color = textColor
),
modifier = Modifier.padding(start = 4.dp, bottom = 8.dp, top = 8.dp)
)
AccelerationPlot()
Spacer(modifier = Modifier.height(16.dp))
Button (
onClick = {
gestureText = "Shake your head or nod!"
coroutineScope.launch {
val accepted = ServiceManager.getService()?.testHeadGestures() ?: false
gestureText = if (accepted) "\"Yes\" gesture detected." else "\"No\" gesture detected."
}
},
modifier = Modifier
.fillMaxWidth()
.height(55.dp),
colors = ButtonDefaults.buttonColors(
containerColor = backgroundColor
),
shape = RoundedCornerShape(8.dp)
) {
Text(
"Test Head Gestures",
style = TextStyle(
fontSize = 16.sp,
fontWeight = FontWeight.Medium,
fontFamily = FontFamily(Font(R.font.sf_pro)),
color = textColor
),
)
}
var lastClickTime by remember { mutableLongStateOf(0L) }
var shouldExplode by remember { mutableStateOf(false) }
LaunchedEffect(gestureText) {
if (gestureText.isNotEmpty()) {
lastClickTime = System.currentTimeMillis()
delay(3000)
if (System.currentTimeMillis() - lastClickTime >= 3000) {
shouldExplode = true
}
}
}
Box(
contentAlignment = Alignment.Center,
modifier = Modifier.padding(top = 12.dp)
) {
AnimatedContent(
targetState = gestureText,
transitionSpec = {
(fadeIn(
animationSpec = tween(300)
) + slideInVertically(
initialOffsetY = { 40 },
animationSpec = tween(300)
)).togetherWith(fadeOut(animationSpec = tween(150)))
}
) { text ->
if (shouldExplode) {
LaunchedEffect(Unit) {
CoroutineScope(coroutineScope.coroutineContext).launch {
delay(750)
gestureText = ""
}
}
ParticleText(
text = text,
style = TextStyle(
fontSize = 20.sp,
fontWeight = FontWeight.Medium,
fontFamily = FontFamily(Font(R.font.sf_pro)),
color = textColor,
textAlign = TextAlign.Center
),
onAnimationComplete = {
shouldExplode = false
},
)
} else {
Text(
text = text,
style = TextStyle(
fontSize = 20.sp,
fontWeight = FontWeight.Medium,
fontFamily = FontFamily(Font(R.font.sf_pro)),
color = textColor,
textAlign = TextAlign.Center
),
modifier = Modifier
.fillMaxWidth()
)
}
}
}
}
}
}
private data class Particle(
val initialPosition: Offset,
val velocity: Offset,
var alpha: Float = 1f
)
@Composable
private fun ParticleText(
text: String,
style: TextStyle,
onAnimationComplete: () -> Unit,
) {
val particles = remember { mutableStateListOf<Particle>() }
val textMeasurer = rememberTextMeasurer()
var isAnimating by remember { mutableStateOf(true) }
var textVisible by remember { mutableStateOf(true) }
Canvas(modifier = Modifier.fillMaxWidth()) {
val textLayoutResult = textMeasurer.measure(text, style)
val textBounds = textLayoutResult.size
val centerX = (size.width - textBounds.width) / 2
val centerY = size.height / 2
if (textVisible && particles.isEmpty()) {
drawText(
textMeasurer = textMeasurer,
text = text,
style = style,
topLeft = Offset(centerX, centerY - textBounds.height / 2)
)
}
if (particles.isEmpty()) {
val random = Random(System.currentTimeMillis())
for (i in 0..100) {
val x = centerX + random.nextFloat() * textBounds.width
val y = centerY - textBounds.height / 2 + random.nextFloat() * textBounds.height
val vx = (random.nextFloat() - 0.5f) * 20
val vy = (random.nextFloat() - 0.5f) * 20
particles.add(Particle(Offset(x, y), Offset(vx, vy)))
}
textVisible = false
}
particles.forEach { particle ->
drawCircle(
color = style.color.copy(alpha = particle.alpha),
radius = 0.5.dp.toPx(),
center = particle.initialPosition
)
}
}
LaunchedEffect(text) {
while (isAnimating) {
delay(16)
particles.forEachIndexed { index, particle ->
particles[index] = particle.copy(
initialPosition = particle.initialPosition + particle.velocity,
alpha = (particle.alpha - 0.02f).coerceAtLeast(0f)
)
}
if (particles.all { it.alpha <= 0f }) {
isAnimating = false
onAnimationComplete()
}
}
}
}
@Composable
private fun HeadVisualization() {
val orientation by HeadTracking.orientation.collectAsState()
val darkTheme = isSystemInDarkTheme()
val backgroundColor = if (darkTheme) Color(0xFF1C1C1E) else Color.White
val strokeColor = if (darkTheme) Color.White else Color.Black
Card(
modifier = Modifier
.fillMaxWidth()
.aspectRatio(2f),
colors = CardDefaults.cardColors(
containerColor = backgroundColor
)
) {
Box(
modifier = Modifier.fillMaxSize(),
contentAlignment = Alignment.Center
) {
Canvas(
modifier = Modifier
.fillMaxSize()
.padding(16.dp)
) {
val width = size.width
val height = size.height
val center = Offset(width / 2, height / 2)
val faceRadius = height * 0.35f
val pitch = Math.toRadians(orientation.pitch.toDouble())
val yaw = Math.toRadians(orientation.yaw.toDouble())
val cosY = cos(yaw).toFloat()
val sinY = sin(yaw).toFloat()
val cosP = cos(pitch).toFloat()
val sinP = sin(pitch).toFloat()
fun rotate3D(point: Triple<Float, Float, Float>): Triple<Float, Float, Float> {
val (x, y, z) = point
val x1 = x * cosY - z * sinY
val y1 = y
val z1 = x * sinY + z * cosY
val x2 = x1
val y2 = y1 * cosP - z1 * sinP
val z2 = y1 * sinP + z1 * cosP
return Triple(x2, y2, z2)
}
fun project(point: Triple<Float, Float, Float>): Pair<Float, Float> {
val (x, y, z) = point
val scale = 1f + (z / width)
return Pair(center.x + x * scale, center.y + y * scale)
}
val earWidth = height * 0.08f
val earHeight = height * 0.2f
val earOffsetX = height * 0.4f
val earOffsetY = 0f
val earZ = 0f
for (xSign in listOf(-1f, 1f)) {
val rotated = rotate3D(Triple(earOffsetX * xSign, earOffsetY, earZ))
val (earX, earY) = project(rotated)
drawRoundRect(
color = strokeColor,
topLeft = Offset(earX - earWidth/2, earY - earHeight/2),
size = Size(earWidth, earHeight),
cornerRadius = CornerRadius(earWidth/2),
style = Stroke(width = 4.dp.toPx())
)
}
val spherePath = Path()
val firstPoint = project(rotate3D(Triple(faceRadius, 0f, 0f)))
spherePath.moveTo(firstPoint.first, firstPoint.second)
for (i in 1..32) {
val angle = (i * 2 * Math.PI / 32).toFloat()
val point = project(rotate3D(Triple(
cos(angle) * faceRadius,
sin(angle) * faceRadius,
0f
)))
spherePath.lineTo(point.first, point.second)
}
spherePath.close()
drawContext.canvas.nativeCanvas.apply {
val paint = android.graphics.Paint().apply {
style = android.graphics.Paint.Style.FILL
shader = android.graphics.RadialGradient(
center.x + sinY * faceRadius * 0.3f,
center.y - sinP * faceRadius * 0.3f,
faceRadius * 1.4f,
intArrayOf(
backgroundColor.copy(alpha = 1f).toArgb(),
backgroundColor.copy(alpha = 0.95f).toArgb(),
backgroundColor.copy(alpha = 0.9f).toArgb(),
backgroundColor.copy(alpha = 0.8f).toArgb(),
backgroundColor.copy(alpha = 0.7f).toArgb()
),
floatArrayOf(0.3f, 0.5f, 0.7f, 0.8f, 1f),
android.graphics.Shader.TileMode.CLAMP
)
}
drawPath(spherePath.asAndroidPath(), paint)
val highlightPaint = android.graphics.Paint().apply {
style = android.graphics.Paint.Style.FILL
shader = android.graphics.RadialGradient(
center.x - faceRadius * 0.4f - sinY * faceRadius * 0.5f,
center.y - faceRadius * 0.4f - sinP * faceRadius * 0.5f,
faceRadius * 0.9f,
intArrayOf(
android.graphics.Color.WHITE,
android.graphics.Color.argb(100, 255, 255, 255),
android.graphics.Color.TRANSPARENT
),
floatArrayOf(0f, 0.3f, 1f),
android.graphics.Shader.TileMode.CLAMP
)
alpha = if (darkTheme) 30 else 60
}
drawPath(spherePath.asAndroidPath(), highlightPaint)
val secondaryHighlightPaint = android.graphics.Paint().apply {
style = android.graphics.Paint.Style.FILL
shader = android.graphics.RadialGradient(
center.x + faceRadius * 0.3f + sinY * faceRadius * 0.3f,
center.y + faceRadius * 0.3f - sinP * faceRadius * 0.3f,
faceRadius * 0.7f,
intArrayOf(
android.graphics.Color.WHITE,
android.graphics.Color.TRANSPARENT
),
floatArrayOf(0f, 1f),
android.graphics.Shader.TileMode.CLAMP
)
alpha = if (darkTheme) 15 else 30
}
drawPath(spherePath.asAndroidPath(), secondaryHighlightPaint)
val shadowPaint = android.graphics.Paint().apply {
style = android.graphics.Paint.Style.FILL
shader = android.graphics.RadialGradient(
center.x + sinY * faceRadius * 0.5f,
center.y - sinP * faceRadius * 0.5f,
faceRadius * 1.1f,
intArrayOf(
android.graphics.Color.TRANSPARENT,
android.graphics.Color.BLACK
),
floatArrayOf(0.7f, 1f),
android.graphics.Shader.TileMode.CLAMP
)
alpha = if (darkTheme) 40 else 20
}
drawPath(spherePath.asAndroidPath(), shadowPaint)
}
drawPath(
path = spherePath,
color = strokeColor,
style = Stroke(width = 4.dp.toPx())
)
val smileRadius = faceRadius * 0.5f
val smileStartAngle = -340f
val smileSweepAngle = 140f
val smileOffsetY = faceRadius * 0.1f
val smilePath = Path()
for (i in 0..32) {
val angle = Math.toRadians(smileStartAngle + (smileSweepAngle * i / 32.0))
val x = cos(angle.toFloat()) * smileRadius
val y = sin(angle.toFloat()) * smileRadius + smileOffsetY
val rotated = rotate3D(Triple(x, y, 0f))
val projected = project(rotated)
if (i == 0) {
smilePath.moveTo(projected.first, projected.second)
} else {
smilePath.lineTo(projected.first, projected.second)
}
}
drawPath(
path = smilePath,
color = strokeColor,
style = Stroke(
width = 4.dp.toPx(),
cap = StrokeCap.Round
)
)
val eyeOffsetX = height * 0.15f
val eyeOffsetY = height * 0.1f
val eyeLength = height * 0.08f
for (xSign in listOf(-1f, 1f)) {
val rotated = rotate3D(Triple(eyeOffsetX * xSign, -eyeOffsetY, 0f))
val (eyeX, eyeY) = project(rotated)
drawLine(
color = strokeColor,
start = Offset(eyeX, eyeY - eyeLength/2),
end = Offset(eyeX, eyeY + eyeLength/2),
strokeWidth = 4.dp.toPx(),
cap = StrokeCap.Round
)
}
drawContext.canvas.nativeCanvas.apply {
val paint = android.graphics.Paint().apply {
color = if (darkTheme) android.graphics.Color.WHITE else android.graphics.Color.BLACK
textSize = 12.sp.toPx()
textAlign = android.graphics.Paint.Align.RIGHT
typeface = android.graphics.Typeface.create(
"SF Pro",
android.graphics.Typeface.NORMAL
)
}
val pitch = orientation.pitch.toInt()
val yaw = orientation.yaw.toInt()
val text = "Pitch: ${pitch}° Yaw: ${yaw}°"
drawText(
text,
width - 8.dp.toPx(),
height - 8.dp.toPx(),
paint
)
}
}
}
}
}
@Composable
private fun AccelerationPlot() {
val acceleration by HeadTracking.acceleration.collectAsState()
val maxPoints = 100
val points = remember { mutableStateListOf<Pair<Float, Float>>() }
val darkTheme = isSystemInDarkTheme()
var maxAbs by remember { mutableFloatStateOf(1000f) }
LaunchedEffect(acceleration) {
points.add(Pair(acceleration.horizontal, acceleration.vertical))
if (points.size > maxPoints) {
points.removeAt(0)
}
val currentMax = points.maxOf { maxOf(abs(it.first), abs(it.second)) }
maxAbs = maxOf(currentMax * 1.2f, 1000f)
}
Card(
modifier = Modifier
.fillMaxWidth()
.height(300.dp),
colors = CardDefaults.cardColors(
containerColor = if (darkTheme) Color(0xFF1C1C1E) else Color.White
)
) {
Box(
modifier = Modifier
.fillMaxSize()
.padding(16.dp)
) {
Canvas(
modifier = Modifier.fillMaxSize()
) {
val width = size.width
val height = size.height
val xScale = width / maxPoints
val yScale = (height - 40.dp.toPx()) / (maxAbs * 2)
val zeroY = height / 2
val gridColor = if (darkTheme) Color.White.copy(alpha = 0.1f) else Color.Black.copy(alpha = 0.1f)
for (i in 0..maxPoints step 10) {
val x = i * xScale
drawLine(
color = gridColor,
start = Offset(x, 0f),
end = Offset(x, height),
strokeWidth = 1.dp.toPx()
)
}
val gridStep = maxAbs / 4
for (value in (-maxAbs.toInt()..maxAbs.toInt()) step gridStep.toInt()) {
val y = zeroY - value * yScale
drawLine(
color = gridColor,
start = Offset(0f, y),
end = Offset(width, y),
strokeWidth = 1.dp.toPx()
)
}
drawLine(
color = if (darkTheme) Color.White.copy(alpha = 0.3f) else Color.Black.copy(alpha = 0.3f),
start = Offset(0f, zeroY),
end = Offset(width, zeroY),
strokeWidth = 1.5f.dp.toPx()
)
if (points.size > 1) {
for (i in 0 until points.size - 1) {
val x1 = i * xScale
val x2 = (i + 1) * xScale
drawLine(
color = Color(0xFF007AFF),
start = Offset(x1, zeroY - points[i].first * yScale),
end = Offset(x2, zeroY - points[i + 1].first * yScale),
strokeWidth = 2.dp.toPx()
)
drawLine(
color = Color(0xFFFF3B30),
start = Offset(x1, zeroY - points[i].second * yScale),
end = Offset(x2, zeroY - points[i + 1].second * yScale),
strokeWidth = 2.dp.toPx()
)
}
}
drawContext.canvas.nativeCanvas.apply {
val paint = android.graphics.Paint().apply {
color = if (darkTheme) android.graphics.Color.WHITE else android.graphics.Color.BLACK
textSize = 12.sp.toPx()
textAlign = android.graphics.Paint.Align.RIGHT
}
drawText("${maxAbs.toInt()}", 30.dp.toPx(), 20.dp.toPx(), paint)
drawText("0", 30.dp.toPx(), height/2, paint)
drawText("-${maxAbs.toInt()}", 30.dp.toPx(), height - 10.dp.toPx(), paint)
}
val legendY = 15.dp.toPx()
val textOffsetY = legendY + 5.dp.toPx() / 2
drawCircle(Color(0xFF007AFF), 5.dp.toPx(), Offset(width - 150.dp.toPx(), legendY))
drawContext.canvas.nativeCanvas.apply {
val paint = android.graphics.Paint().apply {
color = if (darkTheme) android.graphics.Color.WHITE else android.graphics.Color.BLACK
textSize = 12.sp.toPx()
textAlign = android.graphics.Paint.Align.LEFT
}
drawText("Horizontal", width - 140.dp.toPx(), textOffsetY, paint)
}
drawCircle(Color(0xFFFF3B30), 5.dp.toPx(), Offset(width - 70.dp.toPx(), legendY))
drawContext.canvas.nativeCanvas.apply {
val paint = android.graphics.Paint().apply {
color = if (darkTheme) android.graphics.Color.WHITE else android.graphics.Color.BLACK
textSize = 12.sp.toPx()
textAlign = android.graphics.Paint.Align.LEFT
}
drawText("Vertical", width - 60.dp.toPx(), textOffsetY, paint)
}
}
}
}
}
@RequiresApi(Build.VERSION_CODES.Q)
@Preview
@Composable
fun HeadTrackingScreenPreview() {
HeadTrackingScreen(navController = NavController(LocalContext.current))
}
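
HeadVisualization above rotates each face point first around the vertical axis (yaw) and then around the horizontal axis (pitch) before a simple depth-scaled projection. A standalone sketch of that math, runnable outside Compose, assuming pitch and yaw arrive in degrees as in the screen above:

import kotlin.math.cos
import kotlin.math.sin

// Rotate a 3D point by yaw (around Y) then pitch (around X), then project to 2D
// by scaling with depth — the same sequence rotate3D/project use above.
fun rotateAndProject(
    x: Float, y: Float, z: Float,
    pitchDeg: Float, yawDeg: Float,
    centerX: Float, centerY: Float, width: Float
): Pair<Float, Float> {
    val pitch = Math.toRadians(pitchDeg.toDouble())
    val yaw = Math.toRadians(yawDeg.toDouble())
    val cosY = cos(yaw).toFloat()
    val sinY = sin(yaw).toFloat()
    val cosP = cos(pitch).toFloat()
    val sinP = sin(pitch).toFloat()
    // Yaw rotation (around the vertical axis).
    val x1 = x * cosY - z * sinY
    val z1 = x * sinY + z * cosY
    // Pitch rotation (around the horizontal axis).
    val y2 = y * cosP - z1 * sinP
    val z2 = y * sinP + z1 * cosP
    // Points with larger depth are drawn slightly farther from the center.
    val scale = 1f + (z2 / width)
    return Pair(centerX + x1 * scale, centerY + y2 * scale)
}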

View File

@@ -216,7 +216,7 @@ fun LongPressElement(name: String, checked: MutableState<Boolean>, id: String, e
sharedPreferences.getBoolean("long_press_adaptive", false)
)
ServiceManager.getService()
?.updateLongPress(originalLongPressArray, newLongPressArray)
?.updateLongPress(originalLongPressArray, newLongPressArray, offListeningMode)
}
val shape = when {
isFirst -> RoundedCornerShape(topStart = 14.dp, topEnd = 14.dp)

View File

@@ -36,6 +36,7 @@ import android.content.Context
import android.content.Intent
import android.content.IntentFilter
import android.content.SharedPreferences
import android.content.pm.PackageManager
import android.content.res.Resources
import android.media.AudioManager
import android.os.BatteryManager
@@ -45,22 +46,27 @@ import android.os.Handler
import android.os.IBinder
import android.os.Looper
import android.os.ParcelUuid
import android.telecom.TelecomManager
import android.telephony.PhoneStateListener
import android.telephony.TelephonyManager
import android.util.Log
import android.util.TypedValue
import android.view.View
import android.widget.RemoteViews
import android.widget.Toast
import androidx.annotation.RequiresApi
import androidx.annotation.RequiresPermission
import androidx.compose.material3.ExperimentalMaterial3Api
import androidx.core.app.NotificationCompat
import androidx.core.content.edit
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.ExperimentalCoroutinesApi
import kotlinx.coroutines.delay
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.StateFlow
import kotlinx.coroutines.launch
import kotlinx.coroutines.suspendCancellableCoroutine
import me.kavishdevar.aln.MainActivity
import me.kavishdevar.aln.R
import me.kavishdevar.aln.utils.AirPodsNotifications
@@ -70,15 +76,19 @@ import me.kavishdevar.aln.utils.BatteryStatus
import me.kavishdevar.aln.utils.CrossDevice
import me.kavishdevar.aln.utils.CrossDevicePackets
import me.kavishdevar.aln.utils.Enums
import me.kavishdevar.aln.utils.GestureDetector
import me.kavishdevar.aln.utils.HeadTracking
import me.kavishdevar.aln.utils.IslandType
import me.kavishdevar.aln.utils.IslandWindow
import me.kavishdevar.aln.utils.LongPressMode
import me.kavishdevar.aln.utils.LongPressPackets
import me.kavishdevar.aln.utils.MediaController
import me.kavishdevar.aln.utils.PopupWindow
import me.kavishdevar.aln.utils.determinePacket
import me.kavishdevar.aln.utils.isHeadTrackingData
import me.kavishdevar.aln.widgets.BatteryWidget
import me.kavishdevar.aln.widgets.NoiseControlWidget
import org.lsposed.hiddenapibypass.HiddenApiBypass
import java.nio.ByteBuffer
import java.nio.ByteOrder
object ServiceManager {
private var service: AirPodsService? = null
@@ -152,7 +162,7 @@ class AirPodsService : Service() {
}
fun clearLogs() {
clearPacketLogs() // Expose a method to clear logs
clearPacketLogs()
_packetLogsFlow.value = emptySet()
}
@@ -160,6 +170,18 @@ class AirPodsService : Service() {
return LocalBinder()
}
private var gestureDetector: GestureDetector? = null
private var isInCall = false
private var callNumber: String? = null
@RequiresApi(Build.VERSION_CODES.Q)
private fun initGestureDetector() {
if (gestureDetector == null) {
gestureDetector = GestureDetector(this)
}
}
var popupShown = false
fun showPopup(service: Service, name: String) {
@@ -525,6 +547,98 @@ class AirPodsService : Service() {
notificationManager.notify(1, updatedNotification)
}
@RequiresApi(Build.VERSION_CODES.Q)
fun handleIncomingCall() {
if (isInCall) return
initGestureDetector()
gestureDetector?.startDetection { accepted ->
if (accepted) {
answerCall()
} else {
rejectCall()
}
}
}
@OptIn(ExperimentalCoroutinesApi::class)
@RequiresApi(Build.VERSION_CODES.Q)
suspend fun testHeadGestures(): Boolean {
return suspendCancellableCoroutine { continuation ->
gestureDetector?.startDetection(doNotStop = true) { accepted ->
if (continuation.isActive) {
continuation.resume(accepted) {
gestureDetector?.stopDetection()
}
}
}
}
}
private fun answerCall() {
try {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
val telecomManager = getSystemService(TELECOM_SERVICE) as TelecomManager
if (checkSelfPermission(Manifest.permission.ANSWER_PHONE_CALLS) == PackageManager.PERMISSION_GRANTED) {
telecomManager.acceptRingingCall()
}
} else {
val telephonyService = getSystemService(TELEPHONY_SERVICE) as TelephonyManager
val telephonyClass = Class.forName(telephonyService.javaClass.name)
val method = telephonyClass.getDeclaredMethod("getITelephony")
method.isAccessible = true
val telephonyInterface = method.invoke(telephonyService)
val answerCallMethod = telephonyInterface.javaClass.getDeclaredMethod("answerRingingCall")
answerCallMethod.invoke(telephonyInterface)
}
sendToast("Call answered via head gesture")
} catch (e: Exception) {
e.printStackTrace()
sendToast("Failed to answer call: ${e.message}")
} finally {
islandWindow?.close()
}
}
private fun rejectCall() {
try {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
val telecomManager = getSystemService(Context.TELECOM_SERVICE) as TelecomManager
if (checkSelfPermission(Manifest.permission.ANSWER_PHONE_CALLS) == PackageManager.PERMISSION_GRANTED) {
telecomManager.endCall()
}
} else {
val telephonyService = getSystemService(Context.TELEPHONY_SERVICE) as TelephonyManager
val telephonyClass = Class.forName(telephonyService.javaClass.name)
val method = telephonyClass.getDeclaredMethod("getITelephony")
method.isAccessible = true
val telephonyInterface = method.invoke(telephonyService)
val endCallMethod = telephonyInterface.javaClass.getDeclaredMethod("endCall")
endCallMethod.invoke(telephonyInterface)
}
sendToast("Call rejected via head gesture")
} catch (e: Exception) {
e.printStackTrace()
sendToast("Failed to reject call: ${e.message}")
} finally {
islandWindow?.close()
}
}
fun sendToast(message: String) {
Handler(Looper.getMainLooper()).post {
Toast.makeText(applicationContext, message, Toast.LENGTH_SHORT).show()
}
}
@RequiresApi(Build.VERSION_CODES.R)
fun processHeadTrackingData(data: ByteArray) {
val horizontal = ByteBuffer.wrap(data, 51, 2).order(ByteOrder.LITTLE_ENDIAN).short.toInt()
val vertical = ByteBuffer.wrap(data, 53, 2).order(ByteOrder.LITTLE_ENDIAN).short.toInt()
gestureDetector?.processHeadOrientation(horizontal, vertical)
}
private lateinit var connectionReceiver: BroadcastReceiver
private lateinit var disconnectionReceiver: BroadcastReceiver
@@ -533,6 +647,7 @@ class AirPodsService : Service() {
Log.d("AirPodsService", "Service started")
ServiceManager.setService(this)
startForegroundNotification()
initGestureDetector()
val audioManager =
this@AirPodsService.getSystemService(AUDIO_SERVICE) as AudioManager
MediaController.initialize(
@@ -559,9 +674,19 @@ class AirPodsService : Service() {
when (state) {
TelephonyManager.CALL_STATE_RINGING -> {
if (CrossDevice.isAvailable && !isConnectedLocally && earDetectionNotification.status.contains(0x00)) takeOver()
if (sharedPreferences.getBoolean("head_gestures", false)) {
callNumber = phoneNumber
handleIncomingCall()
}
}
TelephonyManager.CALL_STATE_OFFHOOK -> {
if (CrossDevice.isAvailable && !isConnectedLocally && earDetectionNotification.status.contains(0x00)) takeOver()
isInCall = true
}
TelephonyManager.CALL_STATE_IDLE -> {
isInCall = false
callNumber = null
gestureDetector?.stopDetection()
}
}
}
@@ -711,7 +836,7 @@ class AirPodsService : Service() {
}
if (!isConnectedLocally && !CrossDevice.isAvailable) {
clearPacketLogs() // Clear logs when device is not available
clearPacketLogs()
}
return START_STICKY
@@ -729,6 +854,7 @@ class AirPodsService : Service() {
}
}
@RequiresApi(Build.VERSION_CODES.R)
@SuppressLint("MissingPermission")
fun takeOver() {
Log.d("AirPodsService", "Taking over audio")
@@ -750,6 +876,7 @@ class AirPodsService : Service() {
CrossDevice.isAvailable = false
}
@RequiresApi(Build.VERSION_CODES.R)
@SuppressLint("MissingPermission", "UnspecifiedRegisterReceiverFlag")
fun connectToSocket(device: BluetoothDevice) {
HiddenApiBypass.addHiddenApiExemptions("Landroid/bluetooth/BluetoothSocket;")
@@ -824,7 +951,6 @@ class AirPodsService : Service() {
var data: ByteArray = byteArrayOf()
if (bytesRead > 0) {
data = buffer.copyOfRange(0, bytesRead)
logPacket(data, "AirPods")
sendBroadcast(Intent(AirPodsNotifications.Companion.AIRPODS_DATA).apply {
putExtra("data", buffer.copyOfRange(0, bytesRead))
})
@@ -836,7 +962,10 @@ class AirPodsService : Service() {
sharedPreferences.getString("name", device.name),
batteryNotification.getBattery()
)
Log.d("AirPods Data", "Data received: $formattedHex")
if (!isHeadTrackingData(data)) {
Log.d("AirPods Data", "Data received: $formattedHex")
logPacket(data, "AirPods")
}
} else if (bytesRead == -1) {
Log.d("AirPods Service", "Socket closed (bytesRead = -1)")
sendBroadcast(Intent(AirPodsNotifications.Companion.AIRPODS_DISCONNECTED))
@@ -844,6 +973,7 @@ class AirPodsService : Service() {
}
var inEar = false
var inEarData = listOf<Boolean>()
processData(data)
if (earDetectionNotification.isEarDetectionData(data)) {
earDetectionNotification.setStatus(data)
sendBroadcast(Intent(AirPodsNotifications.Companion.EAR_DETECTION_DATA).apply {
@@ -1042,7 +1172,9 @@ class AirPodsService : Service() {
"AirPods Parser",
"Conversation Awareness: ${conversationAwarenessNotification.status}"
)
} else {
}
else if (isHeadTrackingData(data)) {
processHeadTrackingData(data)
}
}
}
@@ -1391,27 +1523,136 @@ class AirPodsService : Service() {
fun updateLongPress(
oldLongPressArray: BooleanArray,
newLongPressArray: BooleanArray,
offListeningMode: Boolean
) {
if (oldLongPressArray.contentEquals(newLongPressArray)) {
return
}
val oldModes = mutableSetOf<LongPressMode>()
val newModes = mutableSetOf<LongPressMode>()
val oldOffEnabled = oldLongPressArray[0]
val oldAncEnabled = oldLongPressArray[1]
val oldTransparencyEnabled = oldLongPressArray[2]
val oldAdaptiveEnabled = oldLongPressArray[3]
if (oldLongPressArray[0]) oldModes.add(LongPressMode.OFF)
if (oldLongPressArray[1]) oldModes.add(LongPressMode.ANC)
if (oldLongPressArray[2]) oldModes.add(LongPressMode.TRANSPARENCY)
if (oldLongPressArray[3]) oldModes.add(LongPressMode.ADAPTIVE)
if (newLongPressArray[0]) newModes.add(LongPressMode.OFF)
if (newLongPressArray[1]) newModes.add(LongPressMode.ANC)
if (newLongPressArray[2]) newModes.add(LongPressMode.TRANSPARENCY)
if (newLongPressArray[3]) newModes.add(LongPressMode.ADAPTIVE)
val newOffEnabled = newLongPressArray[0]
val newAncEnabled = newLongPressArray[1]
val newTransparencyEnabled = newLongPressArray[2]
val newAdaptiveEnabled = newLongPressArray[3]
val changedIndex = findChangedIndex(oldLongPressArray, newLongPressArray)
val newEnabled = newLongPressArray[changedIndex]
Log.d("AirPodsService", "changedIndex: $changedIndex")
var packet: ByteArray? = null
if (offListeningMode) {
packet = when (changedIndex) {
0 -> {
if (newOffEnabled) {
when {
oldAncEnabled && oldTransparencyEnabled && oldAdaptiveEnabled -> LongPressPackets.ENABLE_EVERYTHING.value
oldAncEnabled && oldTransparencyEnabled -> LongPressPackets.ENABLE_OFF_FROM_TRANSPARENCY_AND_ANC.value
oldAncEnabled && oldAdaptiveEnabled -> LongPressPackets.ENABLE_OFF_FROM_ADAPTIVE_AND_ANC.value
oldTransparencyEnabled && oldAdaptiveEnabled -> LongPressPackets.ENABLE_OFF_FROM_TRANSPARENCY_AND_ADAPTIVE.value
else -> null
}
} else {
when {
oldAncEnabled && oldTransparencyEnabled && oldAdaptiveEnabled -> LongPressPackets.DISABLE_OFF_FROM_EVERYTHING.value
oldAncEnabled && oldTransparencyEnabled -> LongPressPackets.DISABLE_OFF_FROM_TRANSPARENCY_AND_ANC.value
oldAncEnabled && oldAdaptiveEnabled -> LongPressPackets.DISABLE_OFF_FROM_ADAPTIVE_AND_ANC.value
oldTransparencyEnabled && oldAdaptiveEnabled -> LongPressPackets.DISABLE_OFF_FROM_TRANSPARENCY_AND_ADAPTIVE.value
else -> null
}
}
}
val packet = determinePacket(changedIndex, newEnabled, oldModes, newModes)
1 -> {
if (newAncEnabled) {
when {
oldOffEnabled && oldTransparencyEnabled && oldAdaptiveEnabled -> LongPressPackets.ENABLE_EVERYTHING.value
oldOffEnabled && oldTransparencyEnabled -> LongPressPackets.ENABLE_ANC_FROM_OFF_AND_TRANSPARENCY.value
oldOffEnabled && oldAdaptiveEnabled -> LongPressPackets.ENABLE_ANC_FROM_OFF_AND_ADAPTIVE.value
oldTransparencyEnabled && oldAdaptiveEnabled -> LongPressPackets.ENABLE_OFF_FROM_TRANSPARENCY_AND_ADAPTIVE.value
else -> null
}
} else {
when {
oldOffEnabled && oldTransparencyEnabled && oldAdaptiveEnabled -> LongPressPackets.DISABLE_ANC_FROM_EVERYTHING.value
oldOffEnabled && oldTransparencyEnabled -> LongPressPackets.DISABLE_ANC_FROM_OFF_AND_TRANSPARENCY.value
oldOffEnabled && oldAdaptiveEnabled -> LongPressPackets.DISABLE_ANC_FROM_OFF_AND_ADAPTIVE.value
oldTransparencyEnabled && oldAdaptiveEnabled -> LongPressPackets.DISABLE_OFF_FROM_TRANSPARENCY_AND_ADAPTIVE.value
else -> null
}
}
}
2 -> {
if (newTransparencyEnabled) {
when {
oldOffEnabled && oldAncEnabled && oldAdaptiveEnabled -> LongPressPackets.ENABLE_EVERYTHING.value
oldOffEnabled && oldAncEnabled -> LongPressPackets.ENABLE_TRANSPARENCY_FROM_OFF_AND_ANC.value
oldOffEnabled && oldAdaptiveEnabled -> LongPressPackets.ENABLE_TRANSPARENCY_FROM_OFF_AND_ADAPTIVE.value
oldAncEnabled && oldAdaptiveEnabled -> LongPressPackets.ENABLE_TRANSPARENCY_FROM_ADAPTIVE_AND_ANC.value
else -> null
}
} else {
when {
oldOffEnabled && oldAncEnabled && oldAdaptiveEnabled -> LongPressPackets.DISABLE_TRANSPARENCY_FROM_EVERYTHING.value
oldOffEnabled && oldAncEnabled -> LongPressPackets.DISABLE_TRANSPARENCY_FROM_OFF_AND_ANC.value
oldOffEnabled && oldAdaptiveEnabled -> LongPressPackets.DISABLE_TRANSPARENCY_FROM_OFF_AND_ADAPTIVE.value
oldAncEnabled && oldAdaptiveEnabled -> LongPressPackets.DISABLE_TRANSPARENCY_FROM_ADAPTIVE_AND_ANC.value
else -> null
}
}
}
3 -> {
if (newAdaptiveEnabled) {
when {
oldOffEnabled && oldAncEnabled && oldTransparencyEnabled -> LongPressPackets.ENABLE_EVERYTHING.value
oldOffEnabled && oldAncEnabled -> LongPressPackets.ENABLE_ADAPTIVE_FROM_OFF_AND_ANC.value
oldOffEnabled && oldTransparencyEnabled -> LongPressPackets.ENABLE_ADAPTIVE_FROM_OFF_AND_TRANSPARENCY.value
oldAncEnabled && oldTransparencyEnabled -> LongPressPackets.ENABLE_ADAPTIVE_FROM_TRANSPARENCY_AND_ANC.value
else -> null
}
} else {
when {
oldOffEnabled && oldAncEnabled && oldTransparencyEnabled -> LongPressPackets.DISABLE_ADAPTIVE_FROM_EVERYTHING.value
oldOffEnabled && oldAncEnabled -> LongPressPackets.DISABLE_ADAPTIVE_FROM_OFF_AND_ANC.value
oldOffEnabled && oldTransparencyEnabled -> LongPressPackets.DISABLE_ADAPTIVE_FROM_OFF_AND_TRANSPARENCY.value
oldAncEnabled && oldTransparencyEnabled -> LongPressPackets.DISABLE_ADAPTIVE_FROM_TRANSPARENCY_AND_ANC.value
else -> null
}
}
}
else -> null
}
} else {
when (changedIndex) {
1 -> {
packet = if (newLongPressArray[1]) {
LongPressPackets.ENABLE_EVERYTHING_OFF_DISABLED.value
} else {
LongPressPackets.DISABLE_ANC_OFF_DISABLED.value
}
}
2 -> {
packet = if (newLongPressArray[2]) {
LongPressPackets.ENABLE_EVERYTHING_OFF_DISABLED.value
} else {
LongPressPackets.DISABLE_TRANSPARENCY_OFF_DISABLED.value
}
}
3 -> {
packet = if (newLongPressArray[3]) {
LongPressPackets.ENABLE_EVERYTHING_OFF_DISABLED.value
} else {
LongPressPackets.DISABLE_ADAPTIVE_OFF_DISABLED.value
}
}
}
}
packet?.let {
Log.d("AirPodsService", "Sending packet: ${it.joinToString(" ") { "%02X".format(it) }}")
sendPacket(it)
@@ -1442,11 +1683,29 @@ class AirPodsService : Service() {
e.printStackTrace()
}
telephonyManager.listen(phoneStateListener, PhoneStateListener.LISTEN_NONE)
// Reset state variables
isConnectedLocally = false
CrossDevice.isAvailable = true
super.onDestroy()
}
var isHeadTrackingActive = false
fun startHeadTracking() {
isHeadTrackingActive = true
socket.outputStream.write(Enums.START_HEAD_TRACKING.value)
HeadTracking.reset()
}
fun stopHeadTracking() {
socket.outputStream.write(Enums.STOP_HEAD_TRACKING.value)
isHeadTrackingActive = false
}
fun processData(data: ByteArray) {
if (isHeadTrackingActive && isHeadTrackingData(data)) {
HeadTracking.processPacket(data)
}
}
}
private fun Int.dpToPx(): Int {
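
processHeadTrackingData above reads the two orientation components as little-endian 16-bit values at fixed offsets 51 and 53 of the incoming AirPods packet. A self-contained sketch of that decoding step — the packet bytes below are synthetic; only the offsets and byte order come from the code above:

import java.nio.ByteBuffer
import java.nio.ByteOrder

// Decode the horizontal/vertical orientation shorts the same way the service does.
fun decodeHeadOrientation(data: ByteArray): Pair<Int, Int> {
    require(data.size >= 55) { "head-tracking packets carry at least 55 bytes" }
    val horizontal = ByteBuffer.wrap(data, 51, 2).order(ByteOrder.LITTLE_ENDIAN).short.toInt()
    val vertical = ByteBuffer.wrap(data, 53, 2).order(ByteOrder.LITTLE_ENDIAN).short.toInt()
    return horizontal to vertical
}

fun main() {
    val packet = ByteArray(64)                        // synthetic packet, zeros everywhere…
    packet[51] = 0x2C.toByte(); packet[52] = 0x01.toByte()  // …except horizontal = 0x012C = 300
    packet[53] = 0x9C.toByte(); packet[54] = 0xFF.toByte()  // and vertical = 0xFF9C = -100 (signed)
    println(decodeHeadOrientation(packet))            // prints (300, -100)
}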

View File

@@ -143,9 +143,7 @@ object CrossDevice {
}
fun sendReceivedPacket(packet: ByteArray) {
Log.d("CrossDevice", "Sending packet to remote device")
if (clientSocket == null || clientSocket!!.outputStream != null) {
Log.d("CrossDevice", "Client socket is null")
return
}
clientSocket?.outputStream?.write(CrossDevicePackets.AIRPODS_DATA_HEADER.packet + packet)
@@ -257,7 +255,6 @@ object CrossDevice {
fun sendRemotePacket(byteArray: ByteArray) {
if (clientSocket == null || clientSocket!!.outputStream == null) {
Log.d("CrossDevice", "Client socket is null")
return
}
clientSocket?.outputStream?.write(byteArray)

View File

@@ -0,0 +1,441 @@
package me.kavishdevar.aln.utils
import android.os.Build
import android.util.Log
import androidx.annotation.RequiresApi
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.Job
import kotlinx.coroutines.delay
import kotlinx.coroutines.launch
import kotlinx.coroutines.withContext
import me.kavishdevar.aln.services.AirPodsService
import me.kavishdevar.aln.services.ServiceManager
import java.util.Collections
import java.util.concurrent.CopyOnWriteArrayList
import kotlin.math.abs
import kotlin.math.max
import kotlin.math.min
import kotlin.math.pow
@RequiresApi(Build.VERSION_CODES.Q)
class GestureDetector(
private val airPodsService: AirPodsService,
) {
companion object {
private const val TAG = "GestureDetector"
private const val START_CMD = "04 00 04 00 17 00 00 00 10 00 10 00 08 A1 02 42 0B 08 0E 10 02 1A 05 01 40 9C 00 00"
private const val STOP_CMD = "04 00 04 00 17 00 00 00 10 00 11 00 08 7E 10 02 42 0B 08 4E 10 02 1A 05 01 00 00 00 00"
private const val IMMEDIATE_FEEDBACK_THRESHOLD = 600
private const val DIRECTION_CHANGE_SENSITIVITY = 150
private const val FAST_MOVEMENT_THRESHOLD = 300.0
private const val MIN_REQUIRED_EXTREMES = 3
private const val MAX_REQUIRED_EXTREMES = 4
}
val audio = GestureFeedback(ServiceManager.getService()?.baseContext!!)
private val horizontalBuffer = Collections.synchronizedList(ArrayList<Double>())
private val verticalBuffer = Collections.synchronizedList(ArrayList<Double>())
private val horizontalAvgBuffer = Collections.synchronizedList(ArrayList<Double>())
private val verticalAvgBuffer = Collections.synchronizedList(ArrayList<Double>())
private var prevHorizontal: Double = 0.0
private var prevVertical: Double = 0.0
private val horizontalPeaks = CopyOnWriteArrayList<Triple<Int, Double, Long>>()
private val horizontalTroughs = CopyOnWriteArrayList<Triple<Int, Double, Long>>()
private val verticalPeaks = CopyOnWriteArrayList<Triple<Int, Double, Long>>()
private val verticalTroughs = CopyOnWriteArrayList<Triple<Int, Double, Long>>()
private var lastPeakTime: Long = 0
private val peakIntervals = Collections.synchronizedList(ArrayList<Double>())
private val movementSpeedIntervals = Collections.synchronizedList(ArrayList<Long>())
private val peakThreshold = 400
private val directionChangeThreshold = DIRECTION_CHANGE_SENSITIVITY
private val rhythmConsistencyThreshold = 0.5
private var horizontalIncreasing: Boolean? = null
private var verticalIncreasing: Boolean? = null
private var detectionTimeout = 15000L
private val minConfidenceThreshold = 0.7
private var isRunning = false
private var detectionJob: Job? = null
private var gestureDetectedCallback: ((Boolean) -> Unit)? = null
private var significantMotion = false
private var lastSignificantMotionTime = 0L
init {
while (horizontalAvgBuffer.size < 3) horizontalAvgBuffer.add(0.0)
while (verticalAvgBuffer.size < 3) verticalAvgBuffer.add(0.0)
}
/**
* Start head gesture detection
*/
fun startDetection(doNotStop: Boolean = false, onGestureDetected: (Boolean) -> Unit) {
if (isRunning) return
Log.d(TAG, "Starting gesture detection...")
isRunning = true
gestureDetectedCallback = onGestureDetected
clearData()
prevHorizontal = 0.0
prevVertical = 0.0
airPodsService.sendPacket(START_CMD)
detectionJob = CoroutineScope(Dispatchers.Default).launch {
val startTime = System.currentTimeMillis()
while (isRunning && (System.currentTimeMillis() - startTime < detectionTimeout)) {
delay(50)
val gesture = detectGestures()
if (gesture != null) {
withContext(Dispatchers.Main) {
audio.playConfirmation(gesture)
gestureDetectedCallback?.invoke(gesture)
stopDetection(doNotStop)
}
break
}
}
if (isRunning) {
Log.d(TAG, "Gesture detection timed out")
withContext(Dispatchers.Main) {
stopDetection(doNotStop)
}
}
}
}
/**
* Stop head gesture detection
*/
fun stopDetection(doNotStop: Boolean = false) {
if (!isRunning) return
Log.d(TAG, "Stopping gesture detection")
isRunning = false
if (!doNotStop) airPodsService.sendPacket(STOP_CMD)
detectionJob?.cancel()
detectionJob = null
gestureDetectedCallback = null
}
/**
* Process head orientation data received from AirPods
*/
@RequiresApi(Build.VERSION_CODES.R)
fun processHeadOrientation(horizontal: Int, vertical: Int) {
if (!isRunning) return
val horizontalDelta = horizontal - prevHorizontal
val verticalDelta = vertical - prevVertical
val significantHorizontal = abs(horizontalDelta) > IMMEDIATE_FEEDBACK_THRESHOLD
val significantVertical = abs(verticalDelta) > IMMEDIATE_FEEDBACK_THRESHOLD
if (significantHorizontal && (!significantVertical || abs(horizontalDelta) > abs(verticalDelta))) {
CoroutineScope(Dispatchers.Main).launch {
audio.playDirectional(isVertical = false, value = horizontalDelta)
}
significantMotion = true
lastSignificantMotionTime = System.currentTimeMillis()
Log.d(TAG, "Significant HORIZONTAL movement: $horizontalDelta")
}
else if (significantVertical) {
CoroutineScope(Dispatchers.Main).launch {
audio.playDirectional(isVertical = true, value = verticalDelta)
}
significantMotion = true
lastSignificantMotionTime = System.currentTimeMillis()
Log.d(TAG, "Significant VERTICAL movement: $verticalDelta")
}
else if (significantMotion &&
(System.currentTimeMillis() - lastSignificantMotionTime) > 300) {
significantMotion = false
}
prevHorizontal = horizontal.toDouble()
prevVertical = vertical.toDouble()
val smoothHorizontal = applySmoothing(horizontal.toDouble(), horizontalAvgBuffer)
val smoothVertical = applySmoothing(vertical.toDouble(), verticalAvgBuffer)
synchronized(horizontalBuffer) {
horizontalBuffer.add(smoothHorizontal)
if (horizontalBuffer.size > 100) horizontalBuffer.removeAt(0)
}
synchronized(verticalBuffer) {
verticalBuffer.add(smoothVertical)
if (verticalBuffer.size > 100) verticalBuffer.removeAt(0)
}
detectPeaksAndTroughs()
}
/**
* Apply moving average smoothing
*/
private fun applySmoothing(newValue: Double, buffer: MutableList<Double>): Double {
synchronized(buffer) {
buffer.add(newValue)
if (buffer.size > 3) buffer.removeAt(0)
return buffer.average()
}
}
/**
* Detect motion direction changes
*/
private fun detectPeaksAndTroughs() {
if (horizontalBuffer.size < 4 || verticalBuffer.size < 4) return
val hValues = horizontalBuffer.takeLast(4)
val vValues = verticalBuffer.takeLast(4)
val hVariance = calculateVariance(hValues)
val vVariance = calculateVariance(vValues)
processDirectionChanges(
horizontalBuffer,
horizontalIncreasing,
hVariance,
horizontalPeaks,
horizontalTroughs
)?.let { horizontalIncreasing = it }
processDirectionChanges(
verticalBuffer,
verticalIncreasing,
vVariance,
verticalPeaks,
verticalTroughs
)?.let { verticalIncreasing = it }
}
/**
* Process direction changes and detect peaks/troughs
*/
private fun processDirectionChanges(
buffer: List<Double>,
isIncreasing: Boolean?,
variance: Double,
peaks: MutableList<Triple<Int, Double, Long>>,
troughs: MutableList<Triple<Int, Double, Long>>
): Boolean? {
if (buffer.size < 2) return isIncreasing
val current = buffer.last()
val prev = buffer[buffer.size - 2]
var increasing = isIncreasing ?: (current > prev)
val dynamicThreshold = max(50.0, min(directionChangeThreshold.toDouble(), variance / 3))
val now = System.currentTimeMillis()
if (increasing && current < prev - dynamicThreshold) {
if (abs(prev) > peakThreshold) {
peaks.add(Triple(buffer.size - 1, prev, now))
if (lastPeakTime > 0) {
val interval = (now - lastPeakTime) / 1000.0
val timeDiff = now - lastPeakTime
synchronized(peakIntervals) {
peakIntervals.add(interval)
if (peakIntervals.size > 5) peakIntervals.removeAt(0)
}
synchronized(movementSpeedIntervals) {
movementSpeedIntervals.add(timeDiff)
if (movementSpeedIntervals.size > 5) movementSpeedIntervals.removeAt(0)
}
}
lastPeakTime = now
}
increasing = false
} else if (!increasing && current > prev + dynamicThreshold) {
if (abs(prev) > peakThreshold) {
troughs.add(Triple(buffer.size - 1, prev, now))
if (lastPeakTime > 0) {
val interval = (now - lastPeakTime) / 1000.0
val timeDiff = now - lastPeakTime
synchronized(peakIntervals) {
peakIntervals.add(interval)
if (peakIntervals.size > 5) peakIntervals.removeAt(0)
}
synchronized(movementSpeedIntervals) {
movementSpeedIntervals.add(timeDiff)
if (movementSpeedIntervals.size > 5) movementSpeedIntervals.removeAt(0)
}
}
lastPeakTime = now
}
increasing = true
}
return increasing
}
/**
* Calculate variance of a list of values
*/
private fun calculateVariance(values: List<Double>): Double {
if (values.size <= 1) return 0.0
val mean = values.average()
val squaredDiffs = values.map { (it - mean) * (it - mean) }
return squaredDiffs.average()
}
/**
* Calculate how consistent the timing between peaks is
*/
private fun calculateRhythmConsistency(): Double {
if (peakIntervals.size < 2) return 0.0
val meanInterval = peakIntervals.average()
if (meanInterval == 0.0) return 0.0
val variances = peakIntervals.map { (it / meanInterval - 1.0).pow(2) }
val consistency = 1.0 - min(1.0, variances.average() / rhythmConsistencyThreshold)
return max(0.0, consistency)
}
/**
* Calculate confidence score for gesture detection
*/
private fun calculateConfidenceScore(extremes: List<Triple<Int, Double, Long>>, isVertical: Boolean): Double {
if (extremes.size < getRequiredExtremes()) return 0.0
val sortedExtremes = extremes.sortedBy { it.first }
val recent = sortedExtremes.takeLast(getRequiredExtremes())
val avgAmplitude = recent.map { abs(it.second) }.average()
val amplitudeFactor = min(1.0, avgAmplitude / 600)
val rhythmFactor = calculateRhythmConsistency()
val signs = recent.map { if (it.second > 0) 1 else -1 }
val alternating = (1 until signs.size).all { signs[it] != signs[it - 1] }
val alternationFactor = if (alternating) 1.0 else 0.5
val isolationFactor = if (isVertical) {
val vertAmplitude = recent.map { abs(it.second) }.average()
val horizVals = horizontalBuffer.takeLast(recent.size * 2)
val horizAmplitude = horizVals.map { abs(it) }.average()
min(1.0, vertAmplitude / (horizAmplitude + 0.1) * 1.2)
} else {
val horizAmplitude = recent.map { abs(it.second) }.average()
val vertVals = verticalBuffer.takeLast(recent.size * 2)
val vertAmplitude = vertVals.map { abs(it) }.average()
min(1.0, horizAmplitude / (vertAmplitude + 0.1) * 1.2)
}
return (
amplitudeFactor * 0.4 +
rhythmFactor * 0.2 +
alternationFactor * 0.2 +
isolationFactor * 0.2
)
}
/**
* Calculate the required number of extremes based on movement speed
* - Fast movements (short intervals) require more evidence (5 extremes)
 * - Slow, deliberate movements require less evidence (3 extremes)
*/
private fun getRequiredExtremes(): Int {
if (movementSpeedIntervals.isEmpty()) return MIN_REQUIRED_EXTREMES
val avgInterval = movementSpeedIntervals.average()
Log.d(TAG, "Average movement interval: $avgInterval ms")
return if (avgInterval < FAST_MOVEMENT_THRESHOLD) {
MAX_REQUIRED_EXTREMES
} else {
MIN_REQUIRED_EXTREMES
}
}
/**
* Detect gestures based on collected motion data
*/
private fun detectGestures(): Boolean? {
val requiredExtremes = getRequiredExtremes()
Log.d(TAG, "Current required extremes: $requiredExtremes")
if (verticalPeaks.size + verticalTroughs.size >= requiredExtremes) {
val allExtremes = (verticalPeaks + verticalTroughs).sortedBy { it.first }
val confidence = calculateConfidenceScore(allExtremes, isVertical = true)
Log.d(TAG, "Vertical motion confidence: $confidence (need $minConfidenceThreshold)")
if (confidence >= minConfidenceThreshold) {
Log.d(TAG, "\"Yes\" Gesture Detected (confidence: $confidence, extremes: ${allExtremes.size}/$requiredExtremes)")
return true
}
}
if (horizontalPeaks.size + horizontalTroughs.size >= requiredExtremes) {
val allExtremes = (horizontalPeaks + horizontalTroughs).sortedBy { it.first }
val confidence = calculateConfidenceScore(allExtremes, isVertical = false)
Log.d(TAG, "Horizontal motion confidence: $confidence (need $minConfidenceThreshold)")
if (confidence >= minConfidenceThreshold) {
Log.d(TAG, "\"No\" Gesture Detected (confidence: $confidence, extremes: ${allExtremes.size}/$requiredExtremes)")
return false
}
}
return null
}
/**
* Clear all buffers and tracking data
*/
private fun clearData() {
horizontalBuffer.clear()
verticalBuffer.clear()
horizontalPeaks.clear()
horizontalTroughs.clear()
verticalPeaks.clear()
verticalTroughs.clear()
peakIntervals.clear()
movementSpeedIntervals.clear()
horizontalIncreasing = null
verticalIncreasing = null
lastPeakTime = 0
significantMotion = false
lastSignificantMotionTime = 0L
}
/**
* Extension function for power calculation
*/
private fun Double.pow(exponent: Int): Double = this.pow(exponent.toDouble())
}
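
The helpers above boil down to two pieces of arithmetic: a 3-sample moving average (applySmoothing) and a mean-squared-deviation variance (calculateVariance) that feeds the dynamic direction-change threshold. A standalone sketch of that math follows; the function names and sample values here are illustrative only and are not part of this commit.

fun main() {
    val buffer = mutableListOf<Double>()

    // Same 3-sample moving average as applySmoothing()
    fun smooth(newValue: Double): Double {
        buffer.add(newValue)
        if (buffer.size > 3) buffer.removeAt(0)
        return buffer.average()
    }

    // Same mean-squared-deviation as calculateVariance()
    fun variance(values: List<Double>): Double {
        if (values.size <= 1) return 0.0
        val mean = values.average()
        return values.map { (it - mean) * (it - mean) }.average()
    }

    val raw = listOf(100.0, 640.0, -580.0, 720.0, -610.0) // synthetic vertical readings
    val smoothed = raw.map { smooth(it) }
    println("smoothed = $smoothed")
    println("variance of last 4 = ${variance(smoothed.takeLast(4))}")
}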

View File

@@ -0,0 +1,228 @@
@file:Suppress("PrivatePropertyName")
package me.kavishdevar.aln.utils
import android.content.Context
import android.media.AudioAttributes
import android.media.AudioDeviceInfo
import android.media.AudioFocusRequest
import android.media.AudioManager
import android.media.SoundPool
import android.os.Build
import android.os.SystemClock
import android.util.Log
import androidx.annotation.RequiresApi
import me.kavishdevar.aln.R
import java.util.concurrent.atomic.AtomicBoolean
/**
 * Audio feedback for head gestures using the efficient SoundPool API
* - Simple audio cues for direction
* - Strict channel separation for clarity
* - Optimized for low latency
*/
@RequiresApi(Build.VERSION_CODES.Q)
class GestureFeedback(private val context: Context) {
private val TAG = "GestureFeedback"
private val soundsLoaded = AtomicBoolean(false)
private fun forceBluetoothRouting(audioManager: AudioManager) {
try {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
val devices = audioManager.getDevices(AudioManager.GET_DEVICES_OUTPUTS)
val bluetoothDevice = devices.find {
it.type == AudioDeviceInfo.TYPE_BLUETOOTH_A2DP ||
it.type == AudioDeviceInfo.TYPE_BLUETOOTH_SCO
}
bluetoothDevice?.let { device ->
val focusRequest = AudioFocusRequest.Builder(AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE)
.setAudioAttributes(AudioAttributes.Builder()
.setUsage(AudioAttributes.USAGE_ASSISTANCE_ACCESSIBILITY)
.setContentType(AudioAttributes.CONTENT_TYPE_SONIFICATION)
.build())
.build()
audioManager.requestAudioFocus(focusRequest)
if (!audioManager.isBluetoothScoOn) {
audioManager.isBluetoothScoOn = true
audioManager.startBluetoothSco()
}
Log.d(TAG, "Forced audio routing to Bluetooth device")
}
} else {
if (!audioManager.isBluetoothScoOn) {
audioManager.isBluetoothScoOn = true
audioManager.startBluetoothSco()
Log.d(TAG, "Started Bluetooth SCO")
}
}
} catch (e: Exception) {
Log.e(TAG, "Failed to force Bluetooth routing", e)
}
}
private val soundPool = SoundPool.Builder()
.setMaxStreams(3)
.setAudioAttributes(
AudioAttributes.Builder()
.setUsage(AudioAttributes.USAGE_ASSISTANCE_ACCESSIBILITY)
.setContentType(AudioAttributes.CONTENT_TYPE_SONIFICATION)
.setFlags(AudioAttributes.FLAG_LOW_LATENCY or
AudioAttributes.FLAG_AUDIBILITY_ENFORCED)
.build()
)
.build()
private var soundId = 0
private var confirmYesId = 0
private var confirmNoId = 0
private var lastHorizontalTime = 0L
private var lastLeftTime = 0L
private var lastRightTime = 0L
private var lastVerticalTime = 0L
private var lastUpTime = 0L
private var lastDownTime = 0L
private val MIN_TIME_BETWEEN_SOUNDS = 150L
private val MIN_TIME_BETWEEN_DIRECTION = 200L
private var currentHorizontalStreamId = 0
private var currentVerticalStreamId = 0
private val LEFT_VOLUME = Pair(1.0f, 0.0f)
private val RIGHT_VOLUME = Pair(0.0f, 1.0f)
private val VERTICAL_VOLUME = Pair(1.0f, 1.0f)
init {
soundId = soundPool.load(context, R.raw.blip_no, 1)
confirmYesId = soundPool.load(context, R.raw.confirm_yes, 1)
confirmNoId = soundPool.load(context, R.raw.confirm_no, 1)
soundPool.setOnLoadCompleteListener { _, _, _ ->
Log.d(TAG, "Sounds loaded")
soundsLoaded.set(true)
soundPool.play(soundId, 0.0f, 0.0f, 1, 0, 1.0f)
}
}
/**
* Play directional feedback sound with appropriate channel separation
* Optimized for minimal latency
*/
@RequiresApi(Build.VERSION_CODES.R)
fun playDirectional(isVertical: Boolean, value: Double) {
if (!soundsLoaded.get()) {
Log.d(TAG, "Sounds not yet loaded, skipping playback")
return
}
val now = SystemClock.uptimeMillis()
if (isVertical) {
val isUp = value > 0
if (now - lastVerticalTime < MIN_TIME_BETWEEN_SOUNDS) {
Log.d(TAG, "Skipping vertical sound due to general vertical debounce")
return
}
if (isUp && now - lastUpTime < MIN_TIME_BETWEEN_DIRECTION) {
Log.d(TAG, "Skipping UP sound due to direction debounce")
return
}
if (!isUp && now - lastDownTime < MIN_TIME_BETWEEN_DIRECTION) {
Log.d(TAG, "Skipping DOWN sound due to direction debounce")
return
}
if (currentVerticalStreamId > 0) {
soundPool.stop(currentVerticalStreamId)
}
val (leftVol, rightVol) = VERTICAL_VOLUME
currentVerticalStreamId = soundPool.play(soundId, leftVol, rightVol, 1, 0, 1.0f)
Log.d(TAG, "Playing VERTICAL sound: ${if (isUp) "UP" else "DOWN"} - streamID=$currentVerticalStreamId")
lastVerticalTime = now
if (isUp) {
lastUpTime = now
} else {
lastDownTime = now
}
} else {
if (now - lastHorizontalTime < MIN_TIME_BETWEEN_SOUNDS) {
Log.d(TAG, "Skipping horizontal sound due to general horizontal debounce")
return
}
val isRight = value > 0
if (isRight && now - lastRightTime < MIN_TIME_BETWEEN_DIRECTION) {
Log.d(TAG, "Skipping RIGHT sound due to direction debounce")
return
}
if (!isRight && now - lastLeftTime < MIN_TIME_BETWEEN_DIRECTION) {
Log.d(TAG, "Skipping LEFT sound due to direction debounce")
return
}
if (currentHorizontalStreamId > 0) {
soundPool.stop(currentHorizontalStreamId)
}
val (leftVol, rightVol) = if (isRight) RIGHT_VOLUME else LEFT_VOLUME
currentHorizontalStreamId = soundPool.play(soundId, leftVol, rightVol, 1, 0, 1.0f)
Log.d(TAG, "Playing HORIZONTAL sound: ${if (isRight) "RIGHT" else "LEFT"} - streamID=$currentHorizontalStreamId")
lastHorizontalTime = now
if (isRight) {
lastRightTime = now
} else {
lastLeftTime = now
}
}
}
/**
* Play confirmation sound for completed gesture
*/
fun playConfirmation(isYes: Boolean) {
if (currentHorizontalStreamId > 0) {
soundPool.stop(currentHorizontalStreamId)
}
if (currentVerticalStreamId > 0) {
soundPool.stop(currentVerticalStreamId)
}
val soundId = if (isYes) confirmYesId else confirmNoId
if (soundId != 0 && soundsLoaded.get()) {
val streamId = soundPool.play(soundId, 1.0f, 1.0f, 1, 0, 1.0f)
Log.d(TAG, "Playing ${if (isYes) "YES" else "NO"} confirmation - streamID=$streamId")
}
}
/**
* Clean up resources
*/
fun release() {
try {
soundPool.release()
} catch (e: Exception) {
Log.e(TAG, "Error releasing resources", e)
}
}
}
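
For reference, a minimal sketch of how a caller might drive GestureFeedback; the surrounding function and timing are assumptions, only the GestureFeedback API itself comes from the class above.

import android.content.Context
import android.os.Build
import androidx.annotation.RequiresApi

// Hypothetical caller; in practice wait for the SoundPool load callback before
// playing anything, since playDirectional() silently returns until sounds are loaded.
@RequiresApi(Build.VERSION_CODES.R)
fun demoFeedback(context: Context) {
    val feedback = GestureFeedback(context)
    feedback.playDirectional(isVertical = false, value = -1.0) // left blip (left channel only)
    feedback.playDirectional(isVertical = true, value = 1.0)   // up blip (both channels)
    feedback.playConfirmation(isYes = true)                    // nod confirmed
    feedback.release()
}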

View File

@@ -0,0 +1,84 @@
package me.kavishdevar.aln.utils
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.asStateFlow
import kotlin.math.roundToInt
data class Orientation(val pitch: Float = 0f, val yaw: Float = 0f)
data class Acceleration(val vertical: Float = 0f, val horizontal: Float = 0f)
object HeadTracking {
private val _orientation = MutableStateFlow(Orientation())
val orientation = _orientation.asStateFlow()
private val _acceleration = MutableStateFlow(Acceleration())
val acceleration = _acceleration.asStateFlow()
private val calibrationSamples = mutableListOf<Triple<Int, Int, Int>>()
private var isCalibrated = false
private var o1Neutral = 19000
private var o2Neutral = 0
private var o3Neutral = 0
private const val CALIBRATION_SAMPLE_COUNT = 10
private const val ORIENTATION_OFFSET = 5500
fun processPacket(packet: ByteArray) {
val o1 = bytesToInt(packet[43], packet[44])
val o2 = bytesToInt(packet[45], packet[46])
val o3 = bytesToInt(packet[47], packet[48])
val horizontalAccel = bytesToInt(packet[51], packet[52]).toFloat()
val verticalAccel = bytesToInt(packet[53], packet[54]).toFloat()
if (!isCalibrated) {
calibrationSamples.add(Triple(o1, o2, o3))
if (calibrationSamples.size >= CALIBRATION_SAMPLE_COUNT) {
calibrate()
}
return
}
val orientation = calculateOrientation(o1, o2, o3)
_orientation.value = orientation
_acceleration.value = Acceleration(verticalAccel, horizontalAccel)
}
private fun calibrate() {
if (calibrationSamples.size < 3) return
// Add offset during calibration
o1Neutral = calibrationSamples.map { it.first + ORIENTATION_OFFSET }.average().roundToInt()
o2Neutral = calibrationSamples.map { it.second + ORIENTATION_OFFSET }.average().roundToInt()
o3Neutral = calibrationSamples.map { it.third + ORIENTATION_OFFSET }.average().roundToInt()
isCalibrated = true
}
@Suppress("UnusedVariable")
private fun calculateOrientation(o1: Int, o2: Int, o3: Int): Orientation {
if (!isCalibrated) return Orientation()
        // Add offset before normalization
val o1Norm = (o1 + ORIENTATION_OFFSET) - o1Neutral
val o2Norm = (o2 + ORIENTATION_OFFSET) - o2Neutral
val o3Norm = (o3 + ORIENTATION_OFFSET) - o3Neutral
val pitch = (o2Norm + o3Norm) / 2f / 32000f * 180f
val yaw = (o2Norm - o3Norm) / 2f / 32000f * 180f
return Orientation(pitch, yaw)
}
private fun bytesToInt(b1: Byte, b2: Byte): Int {
return (b2.toInt() shl 8) or (b1.toInt() and 0xFF)
}
fun reset() {
calibrationSamples.clear()
isCalibrated = false
_orientation.value = Orientation()
_acceleration.value = Acceleration()
}
}
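
A minimal sketch of how a consumer might observe the exposed flows; the coroutine scope and logging are assumptions, the HeadTracking API is as defined above.

import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch

// Hypothetical consumer of the orientation StateFlow.
fun observeHeadTracking(scope: CoroutineScope) {
    scope.launch(Dispatchers.Default) {
        HeadTracking.orientation.collect { o ->
            // Pitch and yaw are in degrees, relative to the calibrated neutral pose.
            println("pitch=${o.pitch} yaw=${o.yaw}")
        }
    }
}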

View File

@@ -45,7 +45,8 @@ import me.kavishdevar.aln.services.ServiceManager
enum class IslandType {
CONNECTED,
TAKING_OVER,
MOVED_TO_REMOTE
MOVED_TO_REMOTE,
// CALL_GESTURE
}
class IslandWindow(context: Context) {
@@ -84,14 +85,20 @@ class IslandWindow(context: Context) {
close()
}
if (type == IslandType.TAKING_OVER) {
islandView.findViewById<TextView>(R.id.island_connected_text).text = getString(context, R.string.island_taking_over_text)
} else if (type == IslandType.MOVED_TO_REMOTE) {
islandView.findViewById<TextView>(R.id.island_connected_text).text = getString(context, R.string.island_moved_to_remote_text)
} else if (CrossDevice.isAvailable) {
islandView.findViewById<TextView>(R.id.island_connected_text).text = getString(context, R.string.island_connected_remote_text)
} else if (type == IslandType.CONNECTED) {
    islandView.findViewById<TextView>(R.id.island_connected_text).text = getString(context, R.string.island_connected_text)
}
when (type) {
IslandType.CONNECTED -> {
islandView.findViewById<TextView>(R.id.island_connected_text).text = getString(context, R.string.island_connected_text)
}
IslandType.TAKING_OVER -> {
islandView.findViewById<TextView>(R.id.island_connected_text).text = getString(context, R.string.island_taking_over_text)
}
IslandType.MOVED_TO_REMOTE -> {
islandView.findViewById<TextView>(R.id.island_connected_text).text = getString(context, R.string.island_moved_to_remote_text)
}
// IslandType.CALL_GESTURE -> {
// islandView.findViewById<TextView>(R.id.island_connected_text).text = "Incoming Call from $name"
// islandView.findViewById<TextView>(R.id.island_device_name).text = "Use Head Gestures to answer."
// }
}
val batteryProgressBar = islandView.findViewById<ProgressBar>(R.id.island_battery_progress)

View File

@@ -45,7 +45,9 @@ enum class Enums(val value: ByteArray) {
NOISE_CANCELLATION_ADAPTIVE(NOISE_CANCELLATION_PREFIX.value + Capabilities.NoiseCancellation.ADAPTIVE.value + SUFFIX.value),
SET_CONVERSATION_AWARENESS_OFF(PREFIX.value + SETTINGS.value + CONVERSATION_AWARENESS.value + Capabilities.ConversationAwareness.OFF.value + SUFFIX.value),
SET_CONVERSATION_AWARENESS_ON(PREFIX.value + SETTINGS.value + CONVERSATION_AWARENESS.value + Capabilities.ConversationAwareness.ON.value + SUFFIX.value),
CONVERSATION_AWARENESS_RECEIVE_PREFIX(PREFIX.value + byteArrayOf(0x4b, 0x00, 0x02, 0x00));
CONVERSATION_AWARENESS_RECEIVE_PREFIX(PREFIX.value + byteArrayOf(0x4b, 0x00, 0x02, 0x00)),
START_HEAD_TRACKING(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x17, 0x00, 0x00, 0x00, 0x10, 0x00, 0x10, 0x00, 0x08, 0xA1.toByte(), 0x02, 0x42, 0x0B, 0x08, 0x0E, 0x10, 0x02, 0x1A, 0x05, 0x01, 0x40, 0x9C.toByte(), 0x00, 0x00)),
STOP_HEAD_TRACKING(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x17, 0x00, 0x00, 0x00, 0x10, 0x00, 0x11, 0x00, 0x08, 0x7E.toByte(), 0x10, 0x02, 0x42, 0x0B, 0x08, 0x4E.toByte(), 0x10, 0x02, 0x1A, 0x05, 0x01, 0x00, 0x00, 0x00, 0x00));
}
object BatteryComponent {
@@ -241,36 +243,105 @@ class Capabilities {
}
}
enum class LongPressMode {
    OFF, TRANSPARENCY, ADAPTIVE, ANC
}
enum class LongPressPackets(val value: ByteArray) {
ENABLE_EVERYTHING(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0F, 0x00, 0x00, 0x00)),
DISABLE_OFF_FROM_EVERYTHING(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0e, 0x00, 0x00, 0x00)),
DISABLE_OFF_FROM_TRANSPARENCY_AND_ADAPTIVE(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0c, 0x00, 0x00, 0x00)),
DISABLE_OFF_FROM_TRANSPARENCY_AND_ANC(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x06, 0x00, 0x00, 0x00)),
DISABLE_OFF_FROM_ADAPTIVE_AND_ANC(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0a, 0x00, 0x00, 0x00)),
ENABLE_OFF_FROM_TRANSPARENCY_AND_ANC(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x07, 0x00, 0x00, 0x00)),
ENABLE_OFF_FROM_ADAPTIVE_AND_ANC(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0b, 0x00, 0x00, 0x00)),
ENABLE_OFF_FROM_TRANSPARENCY_AND_ADAPTIVE(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0d, 0x00, 0x00, 0x00)),
DISABLE_TRANSPARENCY_FROM_EVERYTHING(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0b, 0x00, 0x00, 0x00)),
DISABLE_TRANSPARENCY_FROM_OFF_AND_ANC(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x03, 0x00, 0x00, 0x00)),
DISABLE_TRANSPARENCY_FROM_ADAPTIVE_AND_ANC(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0a, 0x00, 0x00, 0x00)),
DISABLE_TRANSPARENCY_FROM_OFF_AND_ADAPTIVE(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x09, 0x00, 0x00, 0x00)),
ENABLE_TRANSPARENCY_FROM_OFF_AND_ANC(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x07, 0x00, 0x00, 0x00)),
ENABLE_TRANSPARENCY_FROM_ADAPTIVE_AND_ANC(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0e, 0x00, 0x00, 0x00)),
ENABLE_TRANSPARENCY_FROM_OFF_AND_ADAPTIVE(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0d, 0x00, 0x00, 0x00)),
DISABLE_ANC_FROM_EVERYTHING(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0D, 0x00, 0x00, 0x00)),
DISABLE_ANC_FROM_OFF_AND_TRANSPARENCY(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x05, 0x00, 0x00, 0x00)),
DISABLE_ANC_FROM_ADAPTIVE_AND_TRANSPARENCY(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0c, 0x00, 0x00, 0x00)),
DISABLE_ANC_FROM_OFF_AND_ADAPTIVE(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x09, 0x00, 0x00, 0x00)),
ENABLE_ANC_FROM_OFF_AND_TRANSPARENCY(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x07, 0x00, 0x00, 0x00)),
ENABLE_ANC_FROM_ADAPTIVE_AND_TRANSPARENCY(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0e, 0x00, 0x00, 0x00)),
ENABLE_ANC_FROM_OFF_AND_ADAPTIVE(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0b, 0x00, 0x00, 0x00)),
DISABLE_ADAPTIVE_FROM_EVERYTHING(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x07, 0x00, 0x00, 0x00)),
DISABLE_ADAPTIVE_FROM_OFF_AND_TRANSPARENCY(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x05, 0x00, 0x00, 0x00)),
DISABLE_ADAPTIVE_FROM_TRANSPARENCY_AND_ANC(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x06, 0x00, 0x00, 0x00)),
DISABLE_ADAPTIVE_FROM_OFF_AND_ANC(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x03, 0x00, 0x00, 0x00)),
ENABLE_ADAPTIVE_FROM_OFF_AND_TRANSPARENCY(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0d, 0x00, 0x00, 0x00)),
ENABLE_ADAPTIVE_FROM_TRANSPARENCY_AND_ANC(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0e, 0x00, 0x00, 0x00)),
ENABLE_ADAPTIVE_FROM_OFF_AND_ANC(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0b, 0x00, 0x00, 0x00)),
ENABLE_EVERYTHING_OFF_DISABLED(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0E, 0x00, 0x00, 0x00)),
DISABLE_TRANSPARENCY_OFF_DISABLED(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0A, 0x00, 0x00, 0x00)),
DISABLE_ADAPTIVE_OFF_DISABLED(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x06, 0x00, 0x00, 0x00)),
DISABLE_ANC_OFF_DISABLED(byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A, 0x0C, 0x00, 0x00, 0x00)),
}
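
The trailing byte of these packets appears to be a bitmask of the long-press modes left enabled (0x01 off, 0x02 transparency, 0x04 adaptive, 0x08 ANC), matching the retired calculateModeByte() kept in comments further down. A small sketch of that assumption:

// Assumed mapping only; the bit values come from the commented-out calculateModeByte() below.
fun modeMask(off: Boolean, transparency: Boolean, adaptive: Boolean, anc: Boolean): Byte {
    var mask = 0
    if (off) mask = mask or 0x01
    if (transparency) mask = mask or 0x02
    if (adaptive) mask = mask or 0x04
    if (anc) mask = mask or 0x08
    return mask.toByte()
}
// modeMask(true, true, true, true)   == 0x0F -> ENABLE_EVERYTHING
// modeMask(false, true, true, true)  == 0x0E -> DISABLE_OFF_FROM_EVERYTHING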
data class LongPressPacket(val modes: Set<LongPressMode>) {
val value: ByteArray
get() {
val baseArray = byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A)
val modeByte = calculateModeByte()
return baseArray + byteArrayOf(modeByte, 0x00, 0x00, 0x00)
}
//enum class LongPressMode {
// OFF, TRANSPARENCY, ADAPTIVE, ANC
//}
//
//data class LongPressPacket(val modes: Set<LongPressMode>) {
// val value: ByteArray
// get() {
// val baseArray = byteArrayOf(0x04, 0x00, 0x04, 0x00, 0x09, 0x00, 0x1A)
// val modeByte = calculateModeByte()
// return baseArray + byteArrayOf(modeByte, 0x00, 0x00, 0x00)
// }
//
// private fun calculateModeByte(): Byte {
// var modeByte: Byte = 0x00
// modes.forEach { mode ->
// modeByte = when (mode) {
// LongPressMode.OFF -> (modeByte + 0x01).toByte()
// LongPressMode.TRANSPARENCY -> (modeByte + 0x02).toByte()
// LongPressMode.ADAPTIVE -> (modeByte + 0x04).toByte()
// LongPressMode.ANC -> (modeByte + 0x08).toByte()
// }
// }
// return modeByte
// }
//}
//
//fun determinePacket(changedIndex: Int, newEnabled: Boolean, oldModes: Set<LongPressMode>, newModes: Set<LongPressMode>): ByteArray? {
// return if (newEnabled) {
// LongPressPacket(oldModes + newModes.elementAt(changedIndex)).value
// } else {
// LongPressPacket(oldModes - newModes.elementAt(changedIndex)).value
// }
//}
    private fun calculateModeByte(): Byte {
        var modeByte: Byte = 0x00
        modes.forEach { mode ->
            modeByte = when (mode) {
                LongPressMode.OFF -> (modeByte + 0x01).toByte()
                LongPressMode.TRANSPARENCY -> (modeByte + 0x02).toByte()
                LongPressMode.ADAPTIVE -> (modeByte + 0x04).toByte()
                LongPressMode.ANC -> (modeByte + 0x08).toByte()
            }
        }
        return modeByte
    }
}

fun determinePacket(changedIndex: Int, newEnabled: Boolean, oldModes: Set<LongPressMode>, newModes: Set<LongPressMode>): ByteArray? {
    return if (newEnabled) {
        LongPressPacket(oldModes + newModes.elementAt(changedIndex)).value
    } else {
        LongPressPacket(oldModes - newModes.elementAt(changedIndex)).value
    }
}

fun isHeadTrackingData(data: ByteArray): Boolean {
    // Check minimum size requirement first for efficiency
    if (data.size <= 60) return false
    // Check if the first 10 bytes match
    val prefixPattern = byteArrayOf(
        0x04, 0x00, 0x04, 0x00, 0x17, 0x00, 0x00, 0x00,
        0x10, 0x00
    )
    // Check prefix (first 10 bytes)
    for (i in prefixPattern.indices) {
        if (data[i] != prefixPattern[i].toByte()) return false
    }
    // Check if byte 11 is either 0x44 or 0x45
    if (data[10] != 0x44.toByte() && data[10] != 0x45.toByte()) return false
    // Check byte 12
    if (data[11] != 0x00.toByte()) return false
    return true
}
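
Tying the new pieces together, a hedged sketch of how a service might start head tracking and route incoming packets; the output stream and callback are assumptions, while Enums.START_HEAD_TRACKING, isHeadTrackingData() and HeadTracking.processPacket() are the pieces added in this commit.

import java.io.OutputStream

// Hypothetical glue code; `output` stands in for whatever socket stream the service owns.
fun startHeadTracking(output: OutputStream) {
    output.write(Enums.START_HEAD_TRACKING.value)
}

fun onPacketReceived(packet: ByteArray) {
    if (isHeadTrackingData(packet)) {
        HeadTracking.processPacket(packet) // feeds the orientation/acceleration flows
    }
}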

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -17,6 +17,7 @@
<string name="adaptive">Adaptive</string>
<string name="noise_cancellation">Noise Cancellation</string>
<string name="press_and_hold_airpods">Press and Hold AirPods</string>
<string name="head_gestures">Head Gestures</string>
<string name="left">Left</string>
<string name="right">Right</string>
<string name="adjusts_volume">Adjusts the volume of media in response to your environment</string>
@@ -46,4 +47,6 @@
<string name="island_connected_remote_text">Connected to Linux</string>
<string name="island_taking_over_text">Moved to phone</string>
<string name="island_moved_to_remote_text">Moved to Linux</string>
<string name="head_tracking">Head Tracking</string>
<string name="head_gestures_details">Nod to answer calls, and shake your head to decline.</string>
</resources>

View File

@@ -1,6 +1,6 @@
[versions]
accompanistPermissions = "0.36.0"
agp = "8.8.0"
agp = "8.8.2"
hiddenapibypass = "4.3"
kotlin = "2.0.0"
coreKtx = "1.15.0"