chore(database): Room migration 39→40 — drop legacy node tables

- Remove my_node, nodes, metadata tables via AutoMigration with @DeleteTable
- Parameterize all 34 PacketDao queries to accept myNodeNum directly
- Delete NodeInfoDao, NodeEntity, MyNodeEntity, MetadataEntity
- Delete CommonNodeInfoDaoTest (dead)
- Rewrite PacketRepositoryImpl to inject NodeRepository for myNodeNum
- Rewrite CommonPacketDaoTest and CommonPacketRepositoryTest
- Update MigrationTest to remove nodeInfoDao usage
- Update core/database README to reflect current schema

SDK is now sole source of truth for all node data.
Room only stores messages, logs, and user annotations.

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
This commit is contained in:
James Rich
2026-05-05 10:48:52 -05:00
parent 1dd3637b50
commit 19eba729f7
12 changed files with 951 additions and 1088 deletions

View File

@@ -21,6 +21,7 @@ import androidx.paging.PagingConfig
import androidx.paging.PagingData
import androidx.paging.map
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.distinctUntilChanged
import kotlinx.coroutines.flow.flatMapLatest
import kotlinx.coroutines.flow.map
import kotlinx.coroutines.flow.mapLatest
@@ -46,15 +47,26 @@ import org.meshtastic.core.repository.PacketRepository as SharedPacketRepository
@Suppress("TooManyFunctions", "LongParameterList")
@Single
class PacketRepositoryImpl(private val dbManager: DatabaseProvider, private val dispatchers: CoroutineDispatchers) :
SharedPacketRepository {
class PacketRepositoryImpl(
private val dbManager: DatabaseProvider,
private val dispatchers: CoroutineDispatchers,
private val nodeRepository: org.meshtastic.core.repository.NodeRepository,
) : SharedPacketRepository {
override fun getWaypoints(): Flow<List<DataPacket>> = dbManager.currentDb
.flatMapLatest { db -> db.packetDao().getAllWaypointsFlow() }
/** Current myNodeNum snapshot — 0 means "no node connected yet" (matches legacy behavior). */
private val currentMyNodeNum: Int get() = nodeRepository.myNodeInfo.value?.myNodeNum ?: 0
/** Reactive myNodeNum flow, only re-emits when the number actually changes. */
private val myNodeNumFlow: Flow<Int> = nodeRepository.myNodeInfo
.map { it?.myNodeNum ?: 0 }
.distinctUntilChanged()
override fun getWaypoints(): Flow<List<DataPacket>> = myNodeNumFlow
.flatMapLatest { num -> dbManager.currentDb.flatMapLatest { db -> db.packetDao().getAllWaypointsFlow(num) } }
.map { list -> list.map { it.data } }
override fun getContacts(): Flow<Map<String, DataPacket>> = dbManager.currentDb
.flatMapLatest { db -> db.packetDao().getContactKeys() }
override fun getContacts(): Flow<Map<String, DataPacket>> = myNodeNumFlow
.flatMapLatest { num -> dbManager.currentDb.flatMapLatest { db -> db.packetDao().getContactKeys(num) } }
.map { map -> map.mapValues { it.value.data } }
override fun getContactsPaged(): Flow<PagingData<DataPacket>> = Pager(
@@ -64,34 +76,34 @@ class PacketRepositoryImpl(private val dbManager: DatabaseProvider, private val
enablePlaceholders = false,
initialLoadSize = CONTACTS_PAGE_SIZE,
),
pagingSourceFactory = { dbManager.currentDb.value.packetDao().getContactKeysPaged() },
pagingSourceFactory = { dbManager.currentDb.value.packetDao().getContactKeysPaged(currentMyNodeNum) },
)
.flow
.map { pagingData -> pagingData.map { it.data } }
override suspend fun getMessageCount(contact: String): Int =
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().getMessageCount(contact) }
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().getMessageCount(currentMyNodeNum, contact) }
override suspend fun getUnreadCount(contact: String): Int =
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().getUnreadCount(contact) }
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().getUnreadCount(currentMyNodeNum, contact) }
override fun getUnreadCountFlow(contact: String): Flow<Int> =
dbManager.currentDb.flatMapLatest { db -> db.packetDao().getUnreadCountFlow(contact) }
override fun getUnreadCountFlow(contact: String): Flow<Int> = myNodeNumFlow
.flatMapLatest { num -> dbManager.currentDb.flatMapLatest { db -> db.packetDao().getUnreadCountFlow(num, contact) } }
override fun getFirstUnreadMessageUuid(contact: String): Flow<Long?> =
dbManager.currentDb.flatMapLatest { db -> db.packetDao().getFirstUnreadMessageUuid(contact) }
override fun getFirstUnreadMessageUuid(contact: String): Flow<Long?> = myNodeNumFlow
.flatMapLatest { num -> dbManager.currentDb.flatMapLatest { db -> db.packetDao().getFirstUnreadMessageUuid(num, contact) } }
override fun hasUnreadMessages(contact: String): Flow<Boolean> =
dbManager.currentDb.flatMapLatest { db -> db.packetDao().hasUnreadMessages(contact) }
override fun hasUnreadMessages(contact: String): Flow<Boolean> = myNodeNumFlow
.flatMapLatest { num -> dbManager.currentDb.flatMapLatest { db -> db.packetDao().hasUnreadMessages(num, contact) } }
override fun getUnreadCountTotal(): Flow<Int> =
dbManager.currentDb.flatMapLatest { db -> db.packetDao().getUnreadCountTotal() }
override fun getUnreadCountTotal(): Flow<Int> = myNodeNumFlow
.flatMapLatest { num -> dbManager.currentDb.flatMapLatest { db -> db.packetDao().getUnreadCountTotal(num) } }
override suspend fun clearUnreadCount(contact: String, timestamp: Long) =
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().clearUnreadCount(contact, timestamp) }
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().clearUnreadCount(currentMyNodeNum, contact, timestamp) }
override suspend fun clearAllUnreadCounts() =
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().clearAllUnreadCounts() }
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().clearAllUnreadCounts(currentMyNodeNum) }
override suspend fun updateLastReadMessage(contact: String, messageUuid: Long, lastReadTimestamp: Long) =
withContext(dispatchers.io) {
@@ -110,7 +122,7 @@ class PacketRepositoryImpl(private val dbManager: DatabaseProvider, private val
}
override suspend fun getQueuedPackets(): List<DataPacket> = withContext(dispatchers.io) {
dbManager.currentDb.value.packetDao().getAllDataPackets().filter { it.status == MessageStatus.QUEUED }
dbManager.currentDb.value.packetDao().getAllDataPackets(currentMyNodeNum).filter { it.status == MessageStatus.QUEUED }
}
suspend fun insertRoomPacket(packet: RoomPacket) =
@@ -149,11 +161,12 @@ class PacketRepositoryImpl(private val dbManager: DatabaseProvider, private val
getNode: suspend (String?) -> Node,
): Flow<List<Message>> = withContext(dispatchers.io) {
val dao = dbManager.currentDb.value.packetDao()
val num = currentMyNodeNum
val flow =
when {
limit != null -> dao.getMessagesFrom(contact, limit)
!includeFiltered -> dao.getMessagesFrom(contact, includeFiltered = false)
else -> dao.getMessagesFrom(contact)
limit != null -> dao.getMessagesFrom(num, contact, limit)
!includeFiltered -> dao.getMessagesFrom(num, contact, includeFiltered = false)
else -> dao.getMessagesFrom(num, contact)
}
flow.mapLatest { packets ->
val cachedGetNode = memoize(getNode)
@@ -176,7 +189,7 @@ class PacketRepositoryImpl(private val dbManager: DatabaseProvider, private val
enablePlaceholders = false,
initialLoadSize = MESSAGES_PAGE_SIZE,
),
pagingSourceFactory = { dbManager.currentDb.value.packetDao().getMessagesFromPaged(contact) },
pagingSourceFactory = { dbManager.currentDb.value.packetDao().getMessagesFromPaged(currentMyNodeNum, contact) },
)
.flow
.map { pagingData ->
@@ -205,7 +218,7 @@ class PacketRepositoryImpl(private val dbManager: DatabaseProvider, private val
initialLoadSize = MESSAGES_PAGE_SIZE,
),
pagingSourceFactory = {
dbManager.currentDb.value.packetDao().getMessagesFromPaged(contactKey, includeFiltered)
dbManager.currentDb.value.packetDao().getMessagesFromPaged(currentMyNodeNum, contactKey, includeFiltered)
},
)
.flow
@@ -224,28 +237,29 @@ class PacketRepositoryImpl(private val dbManager: DatabaseProvider, private val
}
override suspend fun updateMessageStatus(d: DataPacket, m: MessageStatus) =
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().updateMessageStatus(d, m) }
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().updateMessageStatus(currentMyNodeNum, d, m) }
override suspend fun updateMessageId(d: DataPacket, id: Int) =
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().updateMessageId(d, id) }
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().updateMessageId(currentMyNodeNum, d, id) }
override suspend fun getPacketById(id: Int): DataPacket? =
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().getPacketById(id)?.data }
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().getPacketById(currentMyNodeNum, id)?.data }
override suspend fun getPacketByPacketId(packetId: Int): DataPacket? = withContext(dispatchers.io) {
dbManager.currentDb.value.packetDao().getPacketByPacketId(packetId)?.packet?.data
dbManager.currentDb.value.packetDao().getPacketByPacketId(currentMyNodeNum, packetId)?.packet?.data
}
private suspend fun getPacketByPacketIdInternal(packetId: Int) =
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().getPacketByPacketId(packetId) }
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().getPacketByPacketId(currentMyNodeNum, packetId) }
private suspend fun batchGetPacketsByIds(ids: List<Int>): Map<Int, PacketEntity> = if (ids.isEmpty()) {
emptyMap()
} else {
withContext(dispatchers.io) {
val dao = dbManager.currentDb.value.packetDao()
val num = currentMyNodeNum
ids.chunked(MAX_SQLITE_BIND_PARAMS)
.flatMap { dao.getPacketsByPacketIds(it) }
.flatMap { dao.getPacketsByPacketIds(num, it) }
.associateBy { it.packet.packetId }
}
}
@@ -283,8 +297,8 @@ class PacketRepositoryImpl(private val dbManager: DatabaseProvider, private val
override suspend fun update(packet: DataPacket, routingError: Int): Unit = withContext(dispatchers.io) {
val dao = dbManager.currentDb.value.packetDao()
// Match on key fields that identify the packet, rather than the entire data object
dao.findPacketsWithId(packet.id)
val num = currentMyNodeNum
dao.findPacketsWithId(num, packet.id)
.find { it.data.id == packet.id && it.data.from == packet.from && it.data.to == packet.to }
?.let { existing ->
val updated =
@@ -302,28 +316,28 @@ class PacketRepositoryImpl(private val dbManager: DatabaseProvider, private val
override suspend fun updateReaction(reaction: Reaction) = withContext(dispatchers.io) {
val dao = dbManager.currentDb.value.packetDao()
dao.findReactionsWithId(reaction.packetId)
dao.findReactionsWithId(currentMyNodeNum, reaction.packetId)
.find { it.userId == reaction.user.id && it.emoji == reaction.emoji }
?.let { dao.update(reaction.toEntity(it.myNodeNum)) } ?: Unit
}
override suspend fun getReactionByPacketId(packetId: Int): Reaction? = withContext(dispatchers.io) {
dbManager.currentDb.value.packetDao().getReactionByPacketId(packetId)?.toReaction { null }
dbManager.currentDb.value.packetDao().getReactionByPacketId(currentMyNodeNum, packetId)?.toReaction { null }
}
override suspend fun findPacketsWithId(packetId: Int): List<DataPacket> = withContext(dispatchers.io) {
dbManager.currentDb.value.packetDao().findPacketsWithId(packetId).map { it.data }
dbManager.currentDb.value.packetDao().findPacketsWithId(currentMyNodeNum, packetId).map { it.data }
}
private suspend fun findPacketsWithIdInternal(packetId: Int) =
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().findPacketsWithId(packetId) }
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().findPacketsWithId(currentMyNodeNum, packetId) }
override suspend fun findReactionsWithId(packetId: Int): List<Reaction> = withContext(dispatchers.io) {
dbManager.currentDb.value.packetDao().findReactionsWithId(packetId).toReaction { null }
dbManager.currentDb.value.packetDao().findReactionsWithId(currentMyNodeNum, packetId).toReaction { null }
}
private suspend fun findReactionsWithIdInternal(packetId: Int) =
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().findReactionsWithId(packetId) }
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().findReactionsWithId(currentMyNodeNum, packetId) }
@Suppress("CyclomaticComplexMethod")
override suspend fun updateSFPPStatus(
@@ -397,8 +411,9 @@ class PacketRepositoryImpl(private val dbManager: DatabaseProvider, private val
override suspend fun updateSFPPStatusByHash(hash: ByteArray, status: MessageStatus, rxTime: Long): Unit =
withContext(dispatchers.io) {
val dao = dbManager.currentDb.value.packetDao()
val num = currentMyNodeNum
val hashByteString = hash.toByteString()
dao.findPacketBySfppHash(hashByteString)?.let { packet ->
dao.findPacketBySfppHash(num, hashByteString)?.let { packet ->
// If it's already confirmed, don't downgrade it
if (packet.data.status == MessageStatus.SFPP_CONFIRMED && status == MessageStatus.SFPP_ROUTING) {
return@let
@@ -408,7 +423,7 @@ class PacketRepositoryImpl(private val dbManager: DatabaseProvider, private val
dao.update(packet.copy(data = updatedData, sfpp_hash = hashByteString, received_time = newTime))
}
dao.findReactionBySfppHash(hashByteString)?.let { reaction ->
dao.findReactionBySfppHash(num, hashByteString)?.let { reaction ->
if (reaction.status == MessageStatus.SFPP_CONFIRMED && status == MessageStatus.SFPP_ROUTING) {
return@let
}
@@ -419,17 +434,17 @@ class PacketRepositoryImpl(private val dbManager: DatabaseProvider, private val
}
override suspend fun deleteMessages(uuidList: List<Long>) = withContext(dispatchers.io) {
val num = currentMyNodeNum
for (chunk in uuidList.chunked(DELETE_CHUNK_SIZE)) {
// Fetch DAO per chunk to avoid holding a stale reference if the active DB switches
dbManager.currentDb.value.packetDao().deleteMessages(chunk)
dbManager.currentDb.value.packetDao().deleteMessages(num, chunk)
}
}
override suspend fun deleteContacts(contactList: List<String>) =
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().deleteContacts(contactList) }
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().deleteContacts(currentMyNodeNum, contactList) }
override suspend fun deleteWaypoint(id: Int) =
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().deleteWaypoint(id) }
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().deleteWaypoint(currentMyNodeNum, id) }
suspend fun delete(packet: RoomPacket) =
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().delete(packet) }
@@ -454,11 +469,11 @@ class PacketRepositoryImpl(private val dbManager: DatabaseProvider, private val
suspend fun updateReaction(reaction: RoomReaction) =
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().update(reaction) }
override fun getFilteredCountFlow(contactKey: String): Flow<Int> =
dbManager.currentDb.flatMapLatest { db -> db.packetDao().getFilteredCountFlow(contactKey) }
override fun getFilteredCountFlow(contactKey: String): Flow<Int> = myNodeNumFlow
.flatMapLatest { num -> dbManager.currentDb.flatMapLatest { db -> db.packetDao().getFilteredCountFlow(num, contactKey) } }
override suspend fun getFilteredCount(contactKey: String): Int =
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().getFilteredCount(contactKey) }
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().getFilteredCount(currentMyNodeNum, contactKey) }
override suspend fun setContactFilteringDisabled(contactKey: String, disabled: Boolean) =
withContext(dispatchers.io) {
@@ -475,11 +490,11 @@ class PacketRepositoryImpl(private val dbManager: DatabaseProvider, private val
override suspend fun updateFilteredBySender(senderId: String, filtered: Boolean) {
val pattern = "%\"from\":\"${senderId}\"%"
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().updateFilteredBySender(pattern, filtered) }
withContext(dispatchers.io) { dbManager.currentDb.value.packetDao().updateFilteredBySender(currentMyNodeNum, pattern, filtered) }
}
private fun org.meshtastic.core.database.dao.PacketDao.getAllWaypointsFlow(): Flow<List<RoomPacket>> =
getAllPackets(PortNum.WAYPOINT_APP.value)
private fun org.meshtastic.core.database.dao.PacketDao.getAllWaypointsFlow(myNodeNum: Int): Flow<List<RoomPacket>> =
getAllPackets(myNodeNum, PortNum.WAYPOINT_APP.value)
private fun ContactSettingsEntity.toShared() = ContactSettings(
contactKey = contact_key,

View File

@@ -18,10 +18,10 @@ package org.meshtastic.core.data.repository
import kotlinx.coroutines.test.UnconfinedTestDispatcher
import kotlinx.coroutines.test.runTest
import org.meshtastic.core.database.entity.MyNodeEntity
import org.meshtastic.core.di.CoroutineDispatchers
import org.meshtastic.core.model.DataPacket
import org.meshtastic.core.testing.FakeDatabaseProvider
import org.meshtastic.core.testing.FakeNodeRepository
import kotlin.test.AfterTest
import kotlin.test.Test
import kotlin.test.assertEquals
@@ -31,12 +31,13 @@ abstract class CommonPacketRepositoryTest {
protected lateinit var dbProvider: FakeDatabaseProvider
private val testDispatcher = UnconfinedTestDispatcher()
private val dispatchers = CoroutineDispatchers(main = testDispatcher, io = testDispatcher, default = testDispatcher)
private val nodeRepository = FakeNodeRepository()
protected lateinit var repository: PacketRepositoryImpl
fun setupRepo() {
dbProvider = FakeDatabaseProvider()
repository = PacketRepositoryImpl(dbProvider, dispatchers)
repository = PacketRepositoryImpl(dbProvider, dispatchers, nodeRepository)
}
@AfterTest
@@ -49,23 +50,8 @@ abstract class CommonPacketRepositoryTest {
val myNodeNum = 1
val contact = "contact"
// Ensure my_node is present so getMessageCount finds the packet
dbProvider.currentDb.value
.nodeInfoDao()
.setMyNodeInfo(
MyNodeEntity(
myNodeNum = myNodeNum,
model = "model",
firmwareVersion = "1.0",
couldUpdate = false,
shouldUpdate = false,
currentPacketId = 0L,
messageTimeoutMsec = 0,
minAppVersion = 0,
maxChannels = 0,
hasWifi = false,
),
)
// Set the current node number so PacketRepositoryImpl can pass it to queries
nodeRepository.setMyNodeInfo(org.meshtastic.core.testing.TestDataFactory.createMyNodeInfo(myNodeNum = myNodeNum))
val packet = DataPacket(to = "0!ffffffff", bytes = okio.ByteString.EMPTY, dataType = 1, id = 123)

View File

@@ -6,19 +6,17 @@ This module provides the local Room database persistence layer for the applicati
- **`MeshtasticDatabase`**: The main Room database class, defined in `commonMain`.
- **DAOs (Data Access Objects)**:
- `NodeInfoDao`: Manages storage and retrieval of node information (`NodeEntity`). Contains critical logic for handling Public Key Conflict (PKC) resolution and preventing identity wiping attacks.
- `PacketDao`: Handles storage of mesh packets, including text messages, waypoints, and reactions.
- `NodeMetadataDao`: Manages app-local node annotations (favorites, notes, muting).
- **Entities**:
- `NodeEntity`: Represents a node on the mesh.
- `Packet`: Represents a stored packet.
- `ReactionEntity`: Represents emoji reactions to packets.
- `NodeMetadataEntity`: Persists user annotations that survive process death.
## Security Considerations
## Notes
### Public Key Conflict (PKC) Handling
The `NodeInfoDao` implements specific logic to protect against impersonation and "wipe" attacks:
- **Wipe Protection**: Receiving an `is_licensed=true` packet (which normally clears the public key for compliance) will **not** clear an existing valid public key if one is already known. This prevents attackers from sending fake licensed packets to wipe keys from the DB.
- **Conflict Detection**: If a new key arrives for an existing node ID that conflicts with a known valid key, the key is set to `ERROR_BYTE_STRING` to flag the potential impersonation.
Node data (positions, telemetry, user info) is managed by the SDK's SqlDelight database.
The Room database only stores messages, logs, and user-local annotations.
## Module dependency graph

View File

@@ -0,0 +1,755 @@
{
"formatVersion": 1,
"database": {
"version": 40,
"identityHash": "24d17bdf342c1f3bfa50564b0e93e6f5",
"entities": [
{
"tableName": "node_metadata",
"createSql": "CREATE TABLE IF NOT EXISTS `${TABLE_NAME}` (`num` INTEGER NOT NULL, `is_favorite` INTEGER NOT NULL DEFAULT 0, `is_ignored` INTEGER NOT NULL DEFAULT 0, `is_muted` INTEGER NOT NULL DEFAULT 0, `notes` TEXT NOT NULL DEFAULT '', `manually_verified` INTEGER NOT NULL DEFAULT 0, PRIMARY KEY(`num`))",
"fields": [
{
"fieldPath": "num",
"columnName": "num",
"affinity": "INTEGER",
"notNull": true
},
{
"fieldPath": "isFavorite",
"columnName": "is_favorite",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "0"
},
{
"fieldPath": "isIgnored",
"columnName": "is_ignored",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "0"
},
{
"fieldPath": "isMuted",
"columnName": "is_muted",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "0"
},
{
"fieldPath": "notes",
"columnName": "notes",
"affinity": "TEXT",
"notNull": true,
"defaultValue": "''"
},
{
"fieldPath": "manuallyVerified",
"columnName": "manually_verified",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "0"
}
],
"primaryKey": {
"autoGenerate": false,
"columnNames": [
"num"
]
}
},
{
"tableName": "packet",
"createSql": "CREATE TABLE IF NOT EXISTS `${TABLE_NAME}` (`uuid` INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, `myNodeNum` INTEGER NOT NULL DEFAULT 0, `port_num` INTEGER NOT NULL, `contact_key` TEXT NOT NULL, `received_time` INTEGER NOT NULL, `read` INTEGER NOT NULL DEFAULT 1, `data` TEXT NOT NULL, `packet_id` INTEGER NOT NULL DEFAULT 0, `routing_error` INTEGER NOT NULL DEFAULT -1, `snr` REAL NOT NULL DEFAULT 0, `rssi` INTEGER NOT NULL DEFAULT 0, `hopsAway` INTEGER NOT NULL DEFAULT -1, `sfpp_hash` BLOB, `filtered` INTEGER NOT NULL DEFAULT 0)",
"fields": [
{
"fieldPath": "uuid",
"columnName": "uuid",
"affinity": "INTEGER",
"notNull": true
},
{
"fieldPath": "myNodeNum",
"columnName": "myNodeNum",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "0"
},
{
"fieldPath": "port_num",
"columnName": "port_num",
"affinity": "INTEGER",
"notNull": true
},
{
"fieldPath": "contact_key",
"columnName": "contact_key",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "received_time",
"columnName": "received_time",
"affinity": "INTEGER",
"notNull": true
},
{
"fieldPath": "read",
"columnName": "read",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "1"
},
{
"fieldPath": "data",
"columnName": "data",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "packetId",
"columnName": "packet_id",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "0"
},
{
"fieldPath": "routingError",
"columnName": "routing_error",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "-1"
},
{
"fieldPath": "snr",
"columnName": "snr",
"affinity": "REAL",
"notNull": true,
"defaultValue": "0"
},
{
"fieldPath": "rssi",
"columnName": "rssi",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "0"
},
{
"fieldPath": "hopsAway",
"columnName": "hopsAway",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "-1"
},
{
"fieldPath": "sfpp_hash",
"columnName": "sfpp_hash",
"affinity": "BLOB"
},
{
"fieldPath": "filtered",
"columnName": "filtered",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "0"
}
],
"primaryKey": {
"autoGenerate": true,
"columnNames": [
"uuid"
]
},
"indices": [
{
"name": "index_packet_myNodeNum",
"unique": false,
"columnNames": [
"myNodeNum"
],
"orders": [],
"createSql": "CREATE INDEX IF NOT EXISTS `index_packet_myNodeNum` ON `${TABLE_NAME}` (`myNodeNum`)"
},
{
"name": "index_packet_port_num",
"unique": false,
"columnNames": [
"port_num"
],
"orders": [],
"createSql": "CREATE INDEX IF NOT EXISTS `index_packet_port_num` ON `${TABLE_NAME}` (`port_num`)"
},
{
"name": "index_packet_contact_key",
"unique": false,
"columnNames": [
"contact_key"
],
"orders": [],
"createSql": "CREATE INDEX IF NOT EXISTS `index_packet_contact_key` ON `${TABLE_NAME}` (`contact_key`)"
},
{
"name": "index_packet_contact_key_port_num_received_time",
"unique": false,
"columnNames": [
"contact_key",
"port_num",
"received_time"
],
"orders": [],
"createSql": "CREATE INDEX IF NOT EXISTS `index_packet_contact_key_port_num_received_time` ON `${TABLE_NAME}` (`contact_key`, `port_num`, `received_time`)"
},
{
"name": "index_packet_packet_id",
"unique": false,
"columnNames": [
"packet_id"
],
"orders": [],
"createSql": "CREATE INDEX IF NOT EXISTS `index_packet_packet_id` ON `${TABLE_NAME}` (`packet_id`)"
},
{
"name": "index_packet_received_time",
"unique": false,
"columnNames": [
"received_time"
],
"orders": [],
"createSql": "CREATE INDEX IF NOT EXISTS `index_packet_received_time` ON `${TABLE_NAME}` (`received_time`)"
},
{
"name": "index_packet_filtered",
"unique": false,
"columnNames": [
"filtered"
],
"orders": [],
"createSql": "CREATE INDEX IF NOT EXISTS `index_packet_filtered` ON `${TABLE_NAME}` (`filtered`)"
},
{
"name": "index_packet_read",
"unique": false,
"columnNames": [
"read"
],
"orders": [],
"createSql": "CREATE INDEX IF NOT EXISTS `index_packet_read` ON `${TABLE_NAME}` (`read`)"
}
]
},
{
"tableName": "contact_settings",
"createSql": "CREATE TABLE IF NOT EXISTS `${TABLE_NAME}` (`contact_key` TEXT NOT NULL, `muteUntil` INTEGER NOT NULL, `last_read_message_uuid` INTEGER, `last_read_message_timestamp` INTEGER, `filtering_disabled` INTEGER NOT NULL DEFAULT 0, PRIMARY KEY(`contact_key`))",
"fields": [
{
"fieldPath": "contact_key",
"columnName": "contact_key",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "muteUntil",
"columnName": "muteUntil",
"affinity": "INTEGER",
"notNull": true
},
{
"fieldPath": "lastReadMessageUuid",
"columnName": "last_read_message_uuid",
"affinity": "INTEGER"
},
{
"fieldPath": "lastReadMessageTimestamp",
"columnName": "last_read_message_timestamp",
"affinity": "INTEGER"
},
{
"fieldPath": "filteringDisabled",
"columnName": "filtering_disabled",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "0"
}
],
"primaryKey": {
"autoGenerate": false,
"columnNames": [
"contact_key"
]
}
},
{
"tableName": "log",
"createSql": "CREATE TABLE IF NOT EXISTS `${TABLE_NAME}` (`uuid` TEXT NOT NULL, `type` TEXT NOT NULL, `received_date` INTEGER NOT NULL, `message` TEXT NOT NULL, `from_num` INTEGER NOT NULL DEFAULT 0, `port_num` INTEGER NOT NULL DEFAULT 0, `from_radio` BLOB NOT NULL DEFAULT x'', PRIMARY KEY(`uuid`))",
"fields": [
{
"fieldPath": "uuid",
"columnName": "uuid",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "message_type",
"columnName": "type",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "received_date",
"columnName": "received_date",
"affinity": "INTEGER",
"notNull": true
},
{
"fieldPath": "raw_message",
"columnName": "message",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "fromNum",
"columnName": "from_num",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "0"
},
{
"fieldPath": "portNum",
"columnName": "port_num",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "0"
},
{
"fieldPath": "fromRadio",
"columnName": "from_radio",
"affinity": "BLOB",
"notNull": true,
"defaultValue": "x''"
}
],
"primaryKey": {
"autoGenerate": false,
"columnNames": [
"uuid"
]
},
"indices": [
{
"name": "index_log_from_num",
"unique": false,
"columnNames": [
"from_num"
],
"orders": [],
"createSql": "CREATE INDEX IF NOT EXISTS `index_log_from_num` ON `${TABLE_NAME}` (`from_num`)"
},
{
"name": "index_log_port_num",
"unique": false,
"columnNames": [
"port_num"
],
"orders": [],
"createSql": "CREATE INDEX IF NOT EXISTS `index_log_port_num` ON `${TABLE_NAME}` (`port_num`)"
}
]
},
{
"tableName": "quick_chat",
"createSql": "CREATE TABLE IF NOT EXISTS `${TABLE_NAME}` (`uuid` INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, `name` TEXT NOT NULL, `message` TEXT NOT NULL, `mode` TEXT NOT NULL, `position` INTEGER NOT NULL)",
"fields": [
{
"fieldPath": "uuid",
"columnName": "uuid",
"affinity": "INTEGER",
"notNull": true
},
{
"fieldPath": "name",
"columnName": "name",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "message",
"columnName": "message",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "mode",
"columnName": "mode",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "position",
"columnName": "position",
"affinity": "INTEGER",
"notNull": true
}
],
"primaryKey": {
"autoGenerate": true,
"columnNames": [
"uuid"
]
}
},
{
"tableName": "reactions",
"createSql": "CREATE TABLE IF NOT EXISTS `${TABLE_NAME}` (`myNodeNum` INTEGER NOT NULL DEFAULT 0, `reply_id` INTEGER NOT NULL, `user_id` TEXT NOT NULL, `emoji` TEXT NOT NULL, `timestamp` INTEGER NOT NULL, `snr` REAL NOT NULL DEFAULT 0, `rssi` INTEGER NOT NULL DEFAULT 0, `hopsAway` INTEGER NOT NULL DEFAULT -1, `packet_id` INTEGER NOT NULL DEFAULT 0, `status` INTEGER NOT NULL DEFAULT 0, `routing_error` INTEGER NOT NULL DEFAULT 0, `relays` INTEGER NOT NULL DEFAULT 0, `relay_node` INTEGER, `to` TEXT, `channel` INTEGER NOT NULL DEFAULT 0, `sfpp_hash` BLOB, PRIMARY KEY(`myNodeNum`, `reply_id`, `user_id`, `emoji`))",
"fields": [
{
"fieldPath": "myNodeNum",
"columnName": "myNodeNum",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "0"
},
{
"fieldPath": "replyId",
"columnName": "reply_id",
"affinity": "INTEGER",
"notNull": true
},
{
"fieldPath": "userId",
"columnName": "user_id",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "emoji",
"columnName": "emoji",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "timestamp",
"columnName": "timestamp",
"affinity": "INTEGER",
"notNull": true
},
{
"fieldPath": "snr",
"columnName": "snr",
"affinity": "REAL",
"notNull": true,
"defaultValue": "0"
},
{
"fieldPath": "rssi",
"columnName": "rssi",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "0"
},
{
"fieldPath": "hopsAway",
"columnName": "hopsAway",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "-1"
},
{
"fieldPath": "packetId",
"columnName": "packet_id",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "0"
},
{
"fieldPath": "status",
"columnName": "status",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "0"
},
{
"fieldPath": "routingError",
"columnName": "routing_error",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "0"
},
{
"fieldPath": "relays",
"columnName": "relays",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "0"
},
{
"fieldPath": "relayNode",
"columnName": "relay_node",
"affinity": "INTEGER"
},
{
"fieldPath": "to",
"columnName": "to",
"affinity": "TEXT"
},
{
"fieldPath": "channel",
"columnName": "channel",
"affinity": "INTEGER",
"notNull": true,
"defaultValue": "0"
},
{
"fieldPath": "sfpp_hash",
"columnName": "sfpp_hash",
"affinity": "BLOB"
}
],
"primaryKey": {
"autoGenerate": false,
"columnNames": [
"myNodeNum",
"reply_id",
"user_id",
"emoji"
]
},
"indices": [
{
"name": "index_reactions_reply_id",
"unique": false,
"columnNames": [
"reply_id"
],
"orders": [],
"createSql": "CREATE INDEX IF NOT EXISTS `index_reactions_reply_id` ON `${TABLE_NAME}` (`reply_id`)"
},
{
"name": "index_reactions_packet_id",
"unique": false,
"columnNames": [
"packet_id"
],
"orders": [],
"createSql": "CREATE INDEX IF NOT EXISTS `index_reactions_packet_id` ON `${TABLE_NAME}` (`packet_id`)"
}
]
},
{
"tableName": "device_hardware",
"createSql": "CREATE TABLE IF NOT EXISTS `${TABLE_NAME}` (`actively_supported` INTEGER NOT NULL, `architecture` TEXT NOT NULL, `display_name` TEXT NOT NULL, `has_ink_hud` INTEGER, `has_mui` INTEGER, `hwModel` INTEGER NOT NULL, `hw_model_slug` TEXT NOT NULL, `images` TEXT, `last_updated` INTEGER NOT NULL, `partition_scheme` TEXT, `platformio_target` TEXT NOT NULL, `requires_dfu` INTEGER, `support_level` INTEGER, `tags` TEXT, PRIMARY KEY(`platformio_target`))",
"fields": [
{
"fieldPath": "activelySupported",
"columnName": "actively_supported",
"affinity": "INTEGER",
"notNull": true
},
{
"fieldPath": "architecture",
"columnName": "architecture",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "displayName",
"columnName": "display_name",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "hasInkHud",
"columnName": "has_ink_hud",
"affinity": "INTEGER"
},
{
"fieldPath": "hasMui",
"columnName": "has_mui",
"affinity": "INTEGER"
},
{
"fieldPath": "hwModel",
"columnName": "hwModel",
"affinity": "INTEGER",
"notNull": true
},
{
"fieldPath": "hwModelSlug",
"columnName": "hw_model_slug",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "images",
"columnName": "images",
"affinity": "TEXT"
},
{
"fieldPath": "lastUpdated",
"columnName": "last_updated",
"affinity": "INTEGER",
"notNull": true
},
{
"fieldPath": "partitionScheme",
"columnName": "partition_scheme",
"affinity": "TEXT"
},
{
"fieldPath": "platformioTarget",
"columnName": "platformio_target",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "requiresDfu",
"columnName": "requires_dfu",
"affinity": "INTEGER"
},
{
"fieldPath": "supportLevel",
"columnName": "support_level",
"affinity": "INTEGER"
},
{
"fieldPath": "tags",
"columnName": "tags",
"affinity": "TEXT"
}
],
"primaryKey": {
"autoGenerate": false,
"columnNames": [
"platformio_target"
]
}
},
{
"tableName": "firmware_release",
"createSql": "CREATE TABLE IF NOT EXISTS `${TABLE_NAME}` (`id` TEXT NOT NULL, `page_url` TEXT NOT NULL, `release_notes` TEXT NOT NULL, `title` TEXT NOT NULL, `zip_url` TEXT NOT NULL, `last_updated` INTEGER NOT NULL, `release_type` TEXT NOT NULL, PRIMARY KEY(`id`))",
"fields": [
{
"fieldPath": "id",
"columnName": "id",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "pageUrl",
"columnName": "page_url",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "releaseNotes",
"columnName": "release_notes",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "title",
"columnName": "title",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "zipUrl",
"columnName": "zip_url",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "lastUpdated",
"columnName": "last_updated",
"affinity": "INTEGER",
"notNull": true
},
{
"fieldPath": "releaseType",
"columnName": "release_type",
"affinity": "TEXT",
"notNull": true
}
],
"primaryKey": {
"autoGenerate": false,
"columnNames": [
"id"
]
}
},
{
"tableName": "traceroute_node_position",
"createSql": "CREATE TABLE IF NOT EXISTS `${TABLE_NAME}` (`log_uuid` TEXT NOT NULL, `request_id` INTEGER NOT NULL, `node_num` INTEGER NOT NULL, `position` BLOB NOT NULL, PRIMARY KEY(`log_uuid`, `node_num`), FOREIGN KEY(`log_uuid`) REFERENCES `log`(`uuid`) ON UPDATE NO ACTION ON DELETE CASCADE )",
"fields": [
{
"fieldPath": "logUuid",
"columnName": "log_uuid",
"affinity": "TEXT",
"notNull": true
},
{
"fieldPath": "requestId",
"columnName": "request_id",
"affinity": "INTEGER",
"notNull": true
},
{
"fieldPath": "nodeNum",
"columnName": "node_num",
"affinity": "INTEGER",
"notNull": true
},
{
"fieldPath": "position",
"columnName": "position",
"affinity": "BLOB",
"notNull": true
}
],
"primaryKey": {
"autoGenerate": false,
"columnNames": [
"log_uuid",
"node_num"
]
},
"indices": [
{
"name": "index_traceroute_node_position_log_uuid",
"unique": false,
"columnNames": [
"log_uuid"
],
"orders": [],
"createSql": "CREATE INDEX IF NOT EXISTS `index_traceroute_node_position_log_uuid` ON `${TABLE_NAME}` (`log_uuid`)"
},
{
"name": "index_traceroute_node_position_request_id",
"unique": false,
"columnNames": [
"request_id"
],
"orders": [],
"createSql": "CREATE INDEX IF NOT EXISTS `index_traceroute_node_position_request_id` ON `${TABLE_NAME}` (`request_id`)"
}
],
"foreignKeys": [
{
"table": "log",
"onDelete": "CASCADE",
"onUpdate": "NO ACTION",
"columns": [
"log_uuid"
],
"referencedColumns": [
"uuid"
]
}
]
}
],
"setupQueries": [
"CREATE TABLE IF NOT EXISTS room_master_table (id INTEGER PRIMARY KEY,identity_hash TEXT)",
"INSERT OR REPLACE INTO room_master_table (id,identity_hash) VALUES(42, '24d17bdf342c1f3bfa50564b0e93e6f5')"
]
}
}

View File

@@ -29,7 +29,6 @@ import org.junit.runner.RunWith
import org.meshtastic.core.common.util.nowMillis
import org.meshtastic.core.database.MeshtasticDatabase
import org.meshtastic.core.database.MeshtasticDatabaseConstructor
import org.meshtastic.core.database.entity.MyNodeEntity
import org.meshtastic.core.database.entity.Packet
import org.meshtastic.core.model.DataPacket
import org.meshtastic.proto.ChannelSettings
@@ -42,21 +41,8 @@ import kotlin.test.assertEquals
class MigrationTest {
private lateinit var database: MeshtasticDatabase
private lateinit var packetDao: PacketDao
private lateinit var nodeInfoDao: NodeInfoDao
private val myNodeInfo: MyNodeEntity =
MyNodeEntity(
myNodeNum = 42424242,
model = null,
firmwareVersion = null,
couldUpdate = false,
shouldUpdate = false,
currentPacketId = 1L,
messageTimeoutMsec = 5 * 60 * 1000,
minAppVersion = 1,
maxChannels = 8,
hasWifi = false,
)
private val myNodeNum = 42424242
@Before
fun createDb(): Unit = runTest {
@@ -67,7 +53,6 @@ class MigrationTest {
factory = { MeshtasticDatabaseConstructor.initialize() },
)
.build()
nodeInfoDao = database.nodeInfoDao().apply { setMyNodeInfo(myNodeInfo) }
packetDao = database.packetDao()
}
@@ -78,26 +63,20 @@ class MigrationTest {
@Test
fun testMigrateChannelsByPSK_duplicatePSK() = runTest {
// PSK \"AQ==\" is base64 for single byte 0x01
val pskBytes = byteArrayOf(0x01).toByteString()
// Create packets for Channel 0
insertPacket(channel = 0, text = "Message Ch0")
// Old settings: Channel 0 has PSK_A
val oldSettings = listOf(ChannelSettings(psk = pskBytes, name = "LongFast"))
// New settings: Channel 0 has PSK_A, Channel 1 has PSK_A
val newSettings =
listOf(
ChannelSettings(psk = pskBytes, name = "LongFast"),
ChannelSettings(psk = pskBytes, name = "NewChan"),
)
// Perform migration
packetDao.migrateChannelsByPSK(oldSettings, newSettings)
// Check packet channel
val p = getFirstPacket()
assertEquals(0, p.data.channel, "Packet should remain on channel 0")
}
@@ -130,7 +109,6 @@ class MigrationTest {
val oldSettings = listOf(ChannelSettings(psk = pskA, name = "A1"), ChannelSettings(psk = pskA, name = "A2"))
// Swap positions but keep names and PSKs
val newSettings = listOf(ChannelSettings(psk = pskA, name = "A2"), ChannelSettings(psk = pskA, name = "A1"))
packetDao.migrateChannelsByPSK(oldSettings, newSettings)
@@ -148,7 +126,6 @@ class MigrationTest {
val oldSettings = listOf(ChannelSettings(psk = pskA, name = "A"))
// New settings has two identical channels (same PSK, same Name)
val newSettings = listOf(ChannelSettings(psk = pskA, name = "A"), ChannelSettings(psk = pskA, name = "A"))
packetDao.migrateChannelsByPSK(oldSettings, newSettings)
@@ -161,7 +138,7 @@ class MigrationTest {
val packet =
Packet(
uuid = 0L,
myNodeNum = 42424242,
myNodeNum = myNodeNum,
port_num = PortNum.TEXT_MESSAGE_APP.value,
contact_key = "$channel!broadcast",
received_time = nowMillis,
@@ -171,7 +148,7 @@ class MigrationTest {
packetDao.insert(packet)
}
private suspend fun getAllPackets() = packetDao.getAllPackets(PortNum.TEXT_MESSAGE_APP.value).first()
private suspend fun getAllPackets() = packetDao.getAllPackets(myNodeNum, PortNum.TEXT_MESSAGE_APP.value).first()
private suspend fun getFirstPacket() = getAllPackets().first()
}

View File

@@ -29,7 +29,6 @@ import org.meshtastic.core.common.util.ioDispatcher
import org.meshtastic.core.database.dao.DeviceHardwareDao
import org.meshtastic.core.database.dao.FirmwareReleaseDao
import org.meshtastic.core.database.dao.MeshLogDao
import org.meshtastic.core.database.dao.NodeInfoDao
import org.meshtastic.core.database.dao.NodeMetadataDao
import org.meshtastic.core.database.dao.PacketDao
import org.meshtastic.core.database.dao.QuickChatActionDao
@@ -38,9 +37,6 @@ import org.meshtastic.core.database.entity.ContactSettings
import org.meshtastic.core.database.entity.DeviceHardwareEntity
import org.meshtastic.core.database.entity.FirmwareReleaseEntity
import org.meshtastic.core.database.entity.MeshLog
import org.meshtastic.core.database.entity.MetadataEntity
import org.meshtastic.core.database.entity.MyNodeEntity
import org.meshtastic.core.database.entity.NodeEntity
import org.meshtastic.core.database.entity.NodeMetadataEntity
import org.meshtastic.core.database.entity.Packet
import org.meshtastic.core.database.entity.QuickChatAction
@@ -50,15 +46,12 @@ import org.meshtastic.core.database.entity.TracerouteNodePositionEntity
@Database(
entities =
[
MyNodeEntity::class,
NodeEntity::class,
NodeMetadataEntity::class,
Packet::class,
ContactSettings::class,
MeshLog::class,
QuickChatAction::class,
ReactionEntity::class,
MetadataEntity::class,
DeviceHardwareEntity::class,
FirmwareReleaseEntity::class,
TracerouteNodePositionEntity::class,
@@ -101,15 +94,15 @@ import org.meshtastic.core.database.entity.TracerouteNodePositionEntity
AutoMigration(from = 36, to = 37),
AutoMigration(from = 37, to = 38),
AutoMigration(from = 38, to = 39, spec = AutoMigration38to39::class),
AutoMigration(from = 39, to = 40, spec = AutoMigration39to40::class),
],
version = 39,
version = 40,
exportSchema = true,
)
@androidx.room3.ConstructedBy(MeshtasticDatabaseConstructor::class)
@TypeConverters(Converters::class)
@androidx.room3.DaoReturnTypeConverters(androidx.room3.paging.PagingSourceDaoReturnTypeConverter::class)
abstract class MeshtasticDatabase : RoomDatabase() {
abstract fun nodeInfoDao(): NodeInfoDao
abstract fun nodeMetadataDao(): NodeMetadataDao
@@ -160,3 +153,9 @@ class AutoMigration38to39 : AutoMigrationSpec {
)
}
}
/** Drops legacy node tables — SDK is now the source of truth for node data. */
@DeleteTable(tableName = "my_node")
@DeleteTable(tableName = "nodes")
@DeleteTable(tableName = "metadata")
class AutoMigration39to40 : AutoMigrationSpec

View File

@@ -1,406 +0,0 @@
/*
* Copyright (c) 2026 Meshtastic LLC
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package org.meshtastic.core.database.dao
import androidx.room3.Dao
import androidx.room3.MapColumn
import androidx.room3.Query
import androidx.room3.Transaction
import androidx.room3.Upsert
import kotlinx.coroutines.flow.Flow
import okio.ByteString
import org.meshtastic.core.database.entity.MetadataEntity
import org.meshtastic.core.database.entity.MyNodeEntity
import org.meshtastic.core.database.entity.NodeEntity
import org.meshtastic.core.database.entity.NodeWithRelations
import org.meshtastic.proto.HardwareModel
@Suppress("TooManyFunctions")
@Dao
interface NodeInfoDao {
companion object {
const val KEY_SIZE = 32
/** SQLite has a limit of ~999 bind parameters per query. */
const val MAX_BIND_PARAMS = 999
}
/**
* Verifies a [NodeEntity] before an upsert operation. It handles populating the publicKey for lazy migration,
* checks for public key conflicts with new nodes, and manages updates to existing nodes, particularly in cases of
* public key mismatches to prevent potential impersonation or data corruption.
*
* @param incomingNode The node entity to be verified.
* @return A [NodeEntity] that is safe to upsert, or null if the upsert should be aborted (e.g., due to an
* impersonation attempt, though this logic is currently commented out).
*/
private suspend fun getVerifiedNodeForUpsert(incomingNode: NodeEntity): NodeEntity {
// Populate the NodeEntity.publicKey field from the User.publicKey for consistency
// and to support lazy migration.
incomingNode.publicKey = incomingNode.user.public_key
// Populate denormalized name columns from the User protobuf for search functionality
// Only populate if the user is not a placeholder (hwModel != UNSET); otherwise keep them null
if (incomingNode.user.hw_model != HardwareModel.UNSET) {
incomingNode.longName = incomingNode.user.long_name
incomingNode.shortName = incomingNode.user.short_name
} else {
incomingNode.longName = null
incomingNode.shortName = null
}
val existingNodeEntity = getNodeByNum(incomingNode.num)?.node
return if (existingNodeEntity == null) {
handleNewNodeUpsertValidation(incomingNode)
} else {
handleExistingNodeUpsertValidation(existingNodeEntity, incomingNode)
}
}
/** Validates a new node before it is inserted into the database. */
private suspend fun handleNewNodeUpsertValidation(newNode: NodeEntity): NodeEntity {
// Check if the new node's public key (if present and not empty)
// is already claimed by another existing node.
if ((newNode.publicKey?.size ?: 0) > 0) {
val nodeWithSamePK = findNodeByPublicKey(newNode.publicKey)
if (nodeWithSamePK != null && nodeWithSamePK.num != newNode.num) {
// This is a potential impersonation attempt.
return nodeWithSamePK
}
}
// If no conflicting public key is found, or if the impersonation check is not active,
// the new node is considered safe to add.
return newNode
}
/**
* Resolves the public key for an existing node during an update.
*
* This function implements safety checks to prevent public key conflicts (PKC) and ensure robust handling of key
* updates.
*
* @param existingNode The current state of the node in the database.
* @param incomingNode The new node data being upserted.
* @return The resolved [ByteString] for the public key:
* - [NodeEntity.ERROR_BYTE_STRING]: If there is a mismatch between a valid existing key and a new incoming key.
* - `incomingNode.publicKey`: If the incoming key is new, matches the existing one, or if recovering from an error
* state.
* - `existingNode.publicKey`: If the incoming update has no key, or if the user is licensed but already has a valid
* key (prevents wiping).
* - [ByteString.EMPTY]: If the user is licensed and didn't previously have a key (or if key is explicitly cleared).
*/
private fun resolvePublicKey(existingNode: NodeEntity, incomingNode: NodeEntity): ByteString? {
val existingKey = existingNode.publicKey ?: existingNode.user.public_key
val incomingKey = incomingNode.publicKey
val incomingHasKey = (incomingKey?.size ?: 0) == KEY_SIZE
val existingHasKey = existingKey.size == KEY_SIZE && existingKey != NodeEntity.ERROR_BYTE_STRING
return when {
incomingHasKey -> {
if (existingHasKey && incomingKey != existingKey) {
// Actual mismatch between two non-empty keys
NodeEntity.ERROR_BYTE_STRING
} else {
// New key, same key, or recovery from Error state
incomingKey
}
}
existingHasKey -> existingKey
incomingNode.user.is_licensed -> ByteString.EMPTY
else -> existingKey
}
}
/**
* Handles the validation logic when upserting an existing node.
*
* It distinguishes between two scenarios:
* 1. **Preservation**: If the incoming update is a placeholder (unset HW model) with a default name, and the
* existing node has full user info, we preserve the existing identity (user, keys, names, verification) while
* updating telemetry and status fields from the incoming packet.
* 2. **Update**: If it's a normal update, we validate the public key using [resolvePublicKey] to prevent conflicts
* or accidental key wiping, and then update the node.
*/
@Suppress("CyclomaticComplexMethod", "MagicNumber")
private fun handleExistingNodeUpsertValidation(existingNode: NodeEntity, incomingNode: NodeEntity): NodeEntity {
val resolvedNotes = incomingNode.notes.ifBlank { existingNode.notes }
val isPlaceholder = incomingNode.user.hw_model == HardwareModel.UNSET
val hasExistingUser = existingNode.user.hw_model != HardwareModel.UNSET
val isDefaultName = incomingNode.user.long_name.matches(Regex("^Meshtastic [0-9a-fA-F]{4}$"))
if (hasExistingUser && isPlaceholder && isDefaultName) {
return incomingNode.copy(
user = existingNode.user,
publicKey = existingNode.publicKey,
longName = existingNode.longName,
shortName = existingNode.shortName,
manuallyVerified = existingNode.manuallyVerified,
notes = resolvedNotes,
)
}
val resolvedKey = resolvePublicKey(existingNode, incomingNode)
return incomingNode.copy(
user = incomingNode.user.copy(public_key = resolvedKey ?: ByteString.EMPTY),
publicKey = resolvedKey,
notes = resolvedNotes,
)
}
@Query("SELECT * FROM my_node")
fun getMyNodeInfo(): Flow<MyNodeEntity?>
@Upsert suspend fun setMyNodeInfo(myInfo: MyNodeEntity)
@Query("DELETE FROM my_node")
suspend fun clearMyNodeInfo()
@Query(
"""
SELECT * FROM nodes
ORDER BY CASE
WHEN num = (SELECT myNodeNum FROM my_node LIMIT 1) THEN 0
ELSE 1
END,
last_heard DESC
""",
)
@Transaction
fun nodeDBbyNum(): Flow<
Map<
@MapColumn(columnName = "num")
Int,
NodeWithRelations,
>,
>
@Query(
"""
WITH OurNode AS (
SELECT latitude, longitude
FROM nodes
WHERE num = (SELECT myNodeNum FROM my_node LIMIT 1)
)
SELECT * FROM nodes
WHERE (:includeUnknown = 1 OR short_name IS NOT NULL)
AND (:filter = ''
OR (long_name LIKE '%' || :filter || '%'
OR short_name LIKE '%' || :filter || '%'
OR printf('!%08x', CASE WHEN num < 0 THEN num + 4294967296 ELSE num END) LIKE '%' || :filter || '%'
OR CAST(CASE WHEN num < 0 THEN num + 4294967296 ELSE num END AS TEXT) LIKE '%' || :filter || '%'))
AND (:lastHeardMin = -1 OR last_heard >= :lastHeardMin)
AND (:hopsAwayMax = -1 OR (hops_away <= :hopsAwayMax AND hops_away >= 0) OR num = (SELECT myNodeNum FROM my_node LIMIT 1))
ORDER BY CASE
WHEN num = (SELECT myNodeNum FROM my_node LIMIT 1) THEN 0
ELSE 1
END,
CASE
WHEN :sort = 'last_heard' THEN last_heard * -1
WHEN :sort = 'alpha' THEN UPPER(long_name)
WHEN :sort = 'distance' THEN
CASE
WHEN latitude IS NULL OR longitude IS NULL OR
(latitude = 0.0 AND longitude = 0.0) THEN 999999999
ELSE
(latitude - (SELECT latitude FROM OurNode)) *
(latitude - (SELECT latitude FROM OurNode)) +
(longitude - (SELECT longitude FROM OurNode)) *
(longitude - (SELECT longitude FROM OurNode))
END
WHEN :sort = 'hops_away' THEN
CASE
WHEN hops_away = -1 THEN 999999999
ELSE hops_away
END
WHEN :sort = 'channel' THEN channel
WHEN :sort = 'via_mqtt' THEN via_mqtt
WHEN :sort = 'via_favorite' THEN is_favorite * -1
ELSE 0
END ASC,
last_heard DESC
""",
)
@Transaction
fun getNodes(
sort: String,
filter: String,
includeUnknown: Boolean,
hopsAwayMax: Int,
lastHeardMin: Int,
): Flow<List<NodeWithRelations>>
@Transaction
suspend fun clearNodeInfo(preserveFavorites: Boolean) {
if (preserveFavorites) {
deleteNonFavoriteNodes()
} else {
deleteAllNodes()
}
}
@Query("DELETE FROM nodes WHERE is_favorite = 0")
suspend fun deleteNonFavoriteNodes()
@Query("DELETE FROM nodes")
suspend fun deleteAllNodes()
@Query("DELETE FROM nodes WHERE num=:num")
suspend fun deleteNode(num: Int)
@Query("DELETE FROM nodes WHERE num IN (:nodeNums)")
suspend fun deleteNodes(nodeNums: List<Int>)
@Query("SELECT * FROM nodes WHERE last_heard < :lastHeard")
suspend fun getNodesOlderThan(lastHeard: Int): List<NodeEntity>
@Query("SELECT * FROM nodes WHERE short_name IS NULL")
suspend fun getUnknownNodes(): List<NodeEntity>
@Upsert suspend fun upsert(meta: MetadataEntity)
@Query("DELETE FROM metadata WHERE num=:num")
suspend fun deleteMetadata(num: Int)
@Query("SELECT * FROM nodes WHERE num=:num")
@Transaction
suspend fun getNodeByNum(num: Int): NodeWithRelations?
@Query("SELECT * FROM nodes WHERE num IN (:nodeNums)")
suspend fun getNodeEntitiesByNums(nodeNums: List<Int>): List<NodeEntity>
@Query("SELECT * FROM nodes WHERE public_key = :publicKey LIMIT 1")
suspend fun findNodeByPublicKey(publicKey: ByteString?): NodeEntity?
@Query("SELECT * FROM nodes WHERE public_key IN (:publicKeys)")
suspend fun findNodesByPublicKeys(publicKeys: List<ByteString>): List<NodeEntity>
@Upsert suspend fun doUpsert(node: NodeEntity)
@Transaction
suspend fun upsert(node: NodeEntity) {
val verifiedNode = getVerifiedNodeForUpsert(node)
doUpsert(verifiedNode)
}
@Upsert suspend fun putAll(nodes: List<NodeEntity>)
@Query("UPDATE nodes SET notes = :notes WHERE num = :num")
suspend fun setNodeNotes(num: Int, notes: String)
/**
* Batch version of [getVerifiedNodeForUpsert]. Pre-fetches all existing nodes and public-key conflicts in two
* queries instead of N individual queries, then processes each node in memory.
*/
@Suppress("NestedBlockDepth")
private suspend fun getVerifiedNodesForUpsert(incomingNodes: List<NodeEntity>): List<NodeEntity> {
// Prepare all incoming nodes (populate denormalized fields)
incomingNodes.forEach { node ->
node.publicKey = node.user.public_key
if (node.user.hw_model != HardwareModel.UNSET) {
node.longName = node.user.long_name
node.shortName = node.user.short_name
} else {
node.longName = null
node.shortName = null
}
}
// Batch fetch all existing nodes by num (chunked for SQLite bind-param limit)
val existingNodesMap =
incomingNodes
.map { it.num }
.chunked(MAX_BIND_PARAMS)
.flatMap { getNodeEntitiesByNums(it) }
.associateBy { it.num }
// Partition into updates vs. inserts and resolve existing nodes in-memory
val result = mutableListOf<NodeEntity>()
val newNodes = mutableListOf<NodeEntity>()
for (incoming in incomingNodes) {
val existing = existingNodesMap[incoming.num]
if (existing != null) {
result.add(handleExistingNodeUpsertValidation(existing, incoming))
} else {
newNodes.add(incoming)
}
}
// Batch validate new nodes' public keys (one query instead of N)
val publicKeysToCheck = newNodes.mapNotNull { node -> node.publicKey?.takeIf { it.size > 0 } }.distinct()
val pkConflicts =
if (publicKeysToCheck.isNotEmpty()) {
publicKeysToCheck
.chunked(MAX_BIND_PARAMS)
.flatMap { findNodesByPublicKeys(it) }
.associateBy { it.publicKey }
} else {
emptyMap()
}
for (newNode in newNodes) {
if ((newNode.publicKey?.size ?: 0) > 0) {
val conflicting = pkConflicts[newNode.publicKey]
if (conflicting != null && conflicting.num != newNode.num) {
result.add(conflicting)
} else {
result.add(newNode)
}
} else {
result.add(newNode)
}
}
return result
}
@Transaction
suspend fun installConfig(mi: MyNodeEntity, nodes: List<NodeEntity>) {
clearMyNodeInfo()
setMyNodeInfo(mi)
putAll(getVerifiedNodesForUpsert(nodes))
}
/**
* Backfills longName and shortName columns from the user protobuf for nodes where these columns are NULL. This
* ensures search functionality works for all nodes. Skips placeholder/default users (hwModel == UNSET).
*/
@Transaction
suspend fun backfillDenormalizedNames() {
val nodes = getAllNodesSnapshot()
val nodesToUpdate =
nodes
.filter { node ->
// Only backfill if columns are NULL AND the user is not a placeholder (hwModel != UNSET)
(node.longName == null || node.shortName == null) && node.user.hw_model != HardwareModel.UNSET
}
.map { node -> node.copy(longName = node.user.long_name, shortName = node.user.short_name) }
if (nodesToUpdate.isNotEmpty()) {
putAll(nodesToUpdate)
}
}
@Query("SELECT * FROM nodes")
suspend fun getAllNodesSnapshot(): List<NodeEntity>
}

View File

@@ -43,22 +43,22 @@ interface PacketDao {
@Query(
"""
SELECT * FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND port_num = :portNum
ORDER BY received_time ASC
""",
)
fun getAllPackets(portNum: Int): Flow<List<Packet>>
fun getAllPackets(myNodeNum: Int, portNum: Int): Flow<List<Packet>>
@Query(
"""
SELECT * FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND port_num = 1 AND filtered = 0
ORDER BY received_time DESC
""",
)
fun getContactKeys(): Flow<
fun getContactKeys(myNodeNum: Int): Flow<
Map<
@MapColumn(columnName = "contact_key")
String,
@@ -72,93 +72,93 @@ interface PacketDao {
INNER JOIN (
SELECT contact_key, MAX(received_time) as max_time
FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND port_num = 1 AND filtered = 0
GROUP BY contact_key
) latest ON p.contact_key = latest.contact_key AND p.received_time = latest.max_time
WHERE (p.myNodeNum = 0 OR p.myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (p.myNodeNum = 0 OR p.myNodeNum = :myNodeNum)
AND p.port_num = 1 AND p.filtered = 0
GROUP BY p.contact_key
ORDER BY p.received_time DESC
""",
)
fun getContactKeysPaged(): PagingSource<Int, Packet>
fun getContactKeysPaged(myNodeNum: Int): PagingSource<Int, Packet>
@Query(
"""
SELECT COUNT(*) FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND port_num = 1 AND contact_key = :contact
""",
)
suspend fun getMessageCount(contact: String): Int
suspend fun getMessageCount(myNodeNum: Int, contact: String): Int
@Query(
"""
SELECT COUNT(*) FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND port_num = 1 AND contact_key = :contact AND read = 0 AND filtered = 0
""",
)
suspend fun getUnreadCount(contact: String): Int
suspend fun getUnreadCount(myNodeNum: Int, contact: String): Int
@Query(
"""
SELECT COUNT(*) FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND port_num = 1 AND contact_key = :contact AND read = 0 AND filtered = 0
""",
)
fun getUnreadCountFlow(contact: String): Flow<Int>
fun getUnreadCountFlow(myNodeNum: Int, contact: String): Flow<Int>
@Query(
"""
SELECT uuid FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND port_num = 1 AND contact_key = :contact AND read = 0 AND filtered = 0
ORDER BY received_time ASC
LIMIT 1
""",
)
fun getFirstUnreadMessageUuid(contact: String): Flow<Long?>
fun getFirstUnreadMessageUuid(myNodeNum: Int, contact: String): Flow<Long?>
@Query(
"""
SELECT COUNT(*) > 0 FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND port_num = 1 AND contact_key = :contact AND read = 0 AND filtered = 0
""",
)
fun hasUnreadMessages(contact: String): Flow<Boolean>
fun hasUnreadMessages(myNodeNum: Int, contact: String): Flow<Boolean>
@Query(
"""
SELECT COUNT(*) FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND port_num = 1 AND read = 0 AND filtered = 0
""",
)
fun getUnreadCountTotal(): Flow<Int>
fun getUnreadCountTotal(myNodeNum: Int): Flow<Int>
@Query(
"""
UPDATE packet
SET read = 1
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND port_num = 1 AND contact_key = :contact AND read = 0 AND filtered = 0 AND received_time <= :timestamp
""",
)
suspend fun clearUnreadCount(contact: String, timestamp: Long)
suspend fun clearUnreadCount(myNodeNum: Int, contact: String, timestamp: Long)
@Query(
"""
UPDATE packet
SET read = 1
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND port_num = 1 AND read = 0 AND filtered = 0
""",
)
suspend fun clearAllUnreadCounts()
suspend fun clearAllUnreadCounts(myNodeNum: Int)
@Upsert suspend fun insert(packet: Packet)
@@ -166,56 +166,56 @@ interface PacketDao {
@Query(
"""
SELECT * FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND port_num = 1 AND contact_key = :contact
ORDER BY received_time DESC
""",
)
fun getMessagesFrom(contact: String): Flow<List<PacketEntity>>
fun getMessagesFrom(myNodeNum: Int, contact: String): Flow<List<PacketEntity>>
@Transaction
@Query(
"""
SELECT * FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND port_num = 1 AND contact_key = :contact
ORDER BY received_time DESC
LIMIT :limit
""",
)
fun getMessagesFrom(contact: String, limit: Int): Flow<List<PacketEntity>>
fun getMessagesFrom(myNodeNum: Int, contact: String, limit: Int): Flow<List<PacketEntity>>
@Transaction
@Query(
"""
SELECT * FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND port_num = 1 AND contact_key = :contact
AND (filtered = 0 OR :includeFiltered = 1)
ORDER BY received_time DESC
""",
)
fun getMessagesFrom(contact: String, includeFiltered: Boolean): Flow<List<PacketEntity>>
fun getMessagesFrom(myNodeNum: Int, contact: String, includeFiltered: Boolean): Flow<List<PacketEntity>>
@Transaction
@Query(
"""
SELECT * FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND port_num = 1 AND contact_key = :contact
ORDER BY received_time DESC
""",
)
fun getMessagesFromPaged(contact: String): PagingSource<Int, PacketEntity>
fun getMessagesFromPaged(myNodeNum: Int, contact: String): PagingSource<Int, PacketEntity>
@Query(
"""
SELECT * FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND data = :data
""",
)
suspend fun findDataPacket(data: DataPacket): Packet?
suspend fun findDataPacket(myNodeNum: Int, data: DataPacket): Packet?
@Query("DELETE FROM packet WHERE uuid in (:uuidList)")
suspend fun deletePackets(uuidList: List<Long>)
@@ -223,11 +223,11 @@ interface PacketDao {
@Query(
"""
DELETE FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND contact_key IN (:contactList)
""",
)
suspend fun deleteContacts(contactList: List<String>)
suspend fun deleteContacts(myNodeNum: Int, contactList: List<String>)
@Query("DELETE FROM packet WHERE uuid=:uuid")
suspend fun delete(uuid: Long)
@@ -243,17 +243,17 @@ interface PacketDao {
@Query(
"""
DELETE FROM reactions
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND reply_id IN (:packetIds)
""",
)
suspend fun deleteReactions(packetIds: List<Int>)
suspend fun deleteReactions(myNodeNum: Int, packetIds: List<Int>)
@Transaction
suspend fun deleteMessages(uuidList: List<Long>) {
suspend fun deleteMessages(myNodeNum: Int, uuidList: List<Long>) {
val packetIds = getPacketIdsFrom(uuidList)
if (packetIds.isNotEmpty()) {
deleteReactions(packetIds)
deleteReactions(myNodeNum, packetIds)
}
deletePackets(uuidList)
}
@@ -261,19 +261,19 @@ interface PacketDao {
@Update suspend fun update(packet: Packet)
@Transaction
suspend fun updateMessageStatus(data: DataPacket, m: MessageStatus) {
suspend fun updateMessageStatus(myNodeNum: Int, data: DataPacket, m: MessageStatus) {
val new = data.copy(status = m)
// Match on key fields that identify the packet, rather than the entire data object
findPacketsWithId(data.id)
findPacketsWithId(myNodeNum, data.id)
.find { it.data.id == data.id && it.data.from == data.from && it.data.to == data.to }
?.let { update(it.copy(data = new)) }
}
@Transaction
suspend fun updateMessageId(data: DataPacket, id: Int) {
suspend fun updateMessageId(myNodeNum: Int, data: DataPacket, id: Int) {
val new = data.copy(id = id)
// Match on key fields that identify the packet
findPacketsWithId(data.id)
findPacketsWithId(myNodeNum, data.id)
.find { it.data.id == data.id && it.data.from == data.from && it.data.to == data.to }
?.let { update(it.copy(data = new, packetId = id)) }
}
@@ -281,88 +281,88 @@ interface PacketDao {
@Query(
"""
SELECT data FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
ORDER BY received_time ASC
""",
)
suspend fun getDataPackets(): List<DataPacket>
suspend fun getDataPackets(myNodeNum: Int): List<DataPacket>
@Transaction
@Query(
"""
SELECT * FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND packet_id = :requestId
ORDER BY received_time DESC
""",
)
suspend fun getPacketById(requestId: Int): Packet?
suspend fun getPacketById(myNodeNum: Int, requestId: Int): Packet?
@Transaction
@Query(
"""
SELECT * FROM packet
WHERE packet_id = :packetId
AND (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
AND (myNodeNum = 0 OR myNodeNum = :myNodeNum)
LIMIT 1
""",
)
suspend fun getPacketByPacketId(packetId: Int): PacketEntity?
suspend fun getPacketByPacketId(myNodeNum: Int, packetId: Int): PacketEntity?
@Transaction
@Query(
"""
SELECT * FROM packet
WHERE packet_id IN (:packetIds)
AND (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
AND (myNodeNum = 0 OR myNodeNum = :myNodeNum)
""",
)
suspend fun getPacketsByPacketIds(packetIds: List<Int>): List<PacketEntity>
suspend fun getPacketsByPacketIds(myNodeNum: Int, packetIds: List<Int>): List<PacketEntity>
@Query(
"""
SELECT * FROM packet
WHERE packet_id = :packetId
AND (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
AND (myNodeNum = 0 OR myNodeNum = :myNodeNum)
""",
)
suspend fun findPacketsWithId(packetId: Int): List<Packet>
suspend fun findPacketsWithId(myNodeNum: Int, packetId: Int): List<Packet>
@Transaction
@Query(
"""
SELECT * FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND substr(sfpp_hash, 1, 8) = substr(:hash, 1, 8)
""",
)
suspend fun findPacketBySfppHash(hash: ByteString): Packet?
suspend fun findPacketBySfppHash(myNodeNum: Int, hash: ByteString): Packet?
// Fetches all DataPackets for the current node, ordered by time.
// Callers should filter by status in Kotlin (avoids SQLite json_extract dependency).
@Query(
"""
SELECT data FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
ORDER BY received_time ASC
""",
)
suspend fun getAllDataPackets(): List<DataPacket>
suspend fun getAllDataPackets(myNodeNum: Int): List<DataPacket>
@Query(
"""
SELECT * FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND port_num = 8
ORDER BY received_time ASC
""",
)
suspend fun getAllWaypoints(): List<Packet>
suspend fun getAllWaypoints(myNodeNum: Int): List<Packet>
@Transaction
suspend fun deleteWaypoint(id: Int) {
val uuidList = getAllWaypoints().filter { it.data.waypoint?.id == id }.map { it.uuid }
deleteMessages(uuidList)
suspend fun deleteWaypoint(myNodeNum: Int, id: Int) {
val uuidList = getAllWaypoints(myNodeNum).filter { it.data.waypoint?.id == id }.map { it.uuid }
deleteMessages(myNodeNum, uuidList)
}
@Query("SELECT * FROM contact_settings")
@@ -407,60 +407,60 @@ interface PacketDao {
"""
SELECT * FROM reactions
WHERE packet_id = :packetId
AND (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
AND (myNodeNum = 0 OR myNodeNum = :myNodeNum)
""",
)
suspend fun findReactionsWithId(packetId: Int): List<ReactionEntity>
suspend fun findReactionsWithId(myNodeNum: Int, packetId: Int): List<ReactionEntity>
@Query(
"""
SELECT * FROM reactions
WHERE packet_id = :packetId
AND (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
AND (myNodeNum = 0 OR myNodeNum = :myNodeNum)
LIMIT 1
""",
)
suspend fun getReactionByPacketId(packetId: Int): ReactionEntity?
suspend fun getReactionByPacketId(myNodeNum: Int, packetId: Int): ReactionEntity?
@Transaction
@Query(
"""
SELECT * FROM reactions
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND substr(sfpp_hash, 1, 8) = substr(:hash, 1, 8)
""",
)
suspend fun findReactionBySfppHash(hash: ByteString): ReactionEntity?
suspend fun findReactionBySfppHash(myNodeNum: Int, hash: ByteString): ReactionEntity?
@Query(
"""
SELECT COUNT(*) FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND port_num = 1 AND contact_key = :contact AND filtered = 1
""",
)
suspend fun getFilteredCount(contact: String): Int
suspend fun getFilteredCount(myNodeNum: Int, contact: String): Int
@Query(
"""
SELECT COUNT(*) FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND port_num = 1 AND contact_key = :contact AND filtered = 1
""",
)
fun getFilteredCountFlow(contact: String): Flow<Int>
fun getFilteredCountFlow(myNodeNum: Int, contact: String): Flow<Int>
@Transaction
@Query(
"""
SELECT * FROM packet
WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum)
AND port_num = 1 AND contact_key = :contact
AND (filtered = 0 OR :includeFiltered = 1)
ORDER BY received_time DESC
""",
)
fun getMessagesFromPaged(contact: String, includeFiltered: Boolean): PagingSource<Int, PacketEntity>
fun getMessagesFromPaged(myNodeNum: Int, contact: String, includeFiltered: Boolean): PagingSource<Int, PacketEntity>
@Query("SELECT filtering_disabled FROM contact_settings WHERE contact_key = :contact")
suspend fun getContactFilteringDisabled(contact: String): Boolean?
@@ -544,7 +544,7 @@ interface PacketDao {
@Suppress("MaxLineLength")
@Query(
"UPDATE packet SET filtered = :filtered WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node)) AND data LIKE :senderIdPattern",
"UPDATE packet SET filtered = :filtered WHERE (myNodeNum = 0 OR myNodeNum = :myNodeNum) AND data LIKE :senderIdPattern",
)
suspend fun updateFilteredBySender(senderIdPattern: String, filtered: Boolean)
suspend fun updateFilteredBySender(myNodeNum: Int, senderIdPattern: String, filtered: Boolean)
}

View File

@@ -1,60 +0,0 @@
/*
* Copyright (c) 2026 Meshtastic LLC
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package org.meshtastic.core.database.entity
import androidx.room3.Entity
import androidx.room3.PrimaryKey
import org.meshtastic.core.model.MyNodeInfo
/**
 * Legacy Room entity for the "my_node" table: a snapshot of the locally connected
 * device's own node info (node number, firmware/hardware details, and protocol
 * limits). Declared `open` so subclasses can override [toMyNodeInfo].
 */
@Entity(tableName = "my_node")
@Suppress("LongParameterList")
open class MyNodeEntity(
    // Fixed primary key: the device's own node number (no auto-generation).
    @PrimaryKey(autoGenerate = false) val myNodeNum: Int,
    val model: String?,
    val firmwareVersion: String?,
    val couldUpdate: Boolean, // this application contains a software load we _could_ install if you want
    val shouldUpdate: Boolean, // this device has old firmware
    val currentPacketId: Long,
    val messageTimeoutMsec: Int,
    val minAppVersion: Int,
    val maxChannels: Int,
    val hasWifi: Boolean,
    val deviceId: String? = "unknown",
    val pioEnv: String? = null,
) {
    /** A human readable description of the software/hardware version */
    val firmwareString: String
        get() = "$model $firmwareVersion"

    /**
     * Maps this entity to the [MyNodeInfo] model. Fields not persisted in this
     * table (hasGPS, channelUtilization, airUtilTx) are filled with fixed defaults.
     */
    open fun toMyNodeInfo() = MyNodeInfo(
        myNodeNum = myNodeNum,
        hasGPS = false,
        model = model,
        firmwareVersion = firmwareVersion,
        couldUpdate = couldUpdate,
        shouldUpdate = shouldUpdate,
        currentPacketId = currentPacketId,
        messageTimeoutMsec = messageTimeoutMsec,
        minAppVersion = minAppVersion,
        maxChannels = maxChannels,
        hasWifi = hasWifi,
        channelUtilization = 0f,
        airUtilTx = 0f,
        deviceId = deviceId,
        pioEnv = pioEnv,
    )
}

View File

@@ -1,259 +0,0 @@
/*
* Copyright (c) 2026 Meshtastic LLC
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package org.meshtastic.core.database.entity
import androidx.room3.ColumnInfo
import androidx.room3.Embedded
import androidx.room3.Entity
import androidx.room3.Index
import androidx.room3.PrimaryKey
import androidx.room3.Relation
import okio.ByteString
import okio.ByteString.Companion.toByteString
import org.meshtastic.core.common.util.nowMillis
import org.meshtastic.core.common.util.nowSeconds
import org.meshtastic.core.model.DeviceMetrics
import org.meshtastic.core.model.EnvironmentMetrics
import org.meshtastic.core.model.MeshUser
import org.meshtastic.core.model.Node
import org.meshtastic.core.model.NodeInfo
import org.meshtastic.core.model.Position
import org.meshtastic.core.model.util.onlineTimeThreshold
import org.meshtastic.proto.DeviceMetadata
import org.meshtastic.proto.HardwareModel
import org.meshtastic.proto.MeshPacket
import org.meshtastic.proto.Paxcount
import org.meshtastic.proto.Telemetry
import org.meshtastic.proto.User
import org.meshtastic.proto.Position as WirePosition
/**
 * Joined query result pairing a [NodeEntity] row with its optional
 * [MetadataEntity] row, matched on the shared "num" column.
 */
data class NodeWithRelations(
    @Embedded val node: NodeEntity,
    @Relation(entity = MetadataEntity::class, parentColumn = "num", entityColumn = "num")
    val metadata: MetadataEntity? = null,
) {
    /**
     * Maps the joined rows to the domain [Node] model. Absent metric
     * sub-messages become empty protos, and the public key falls back to the
     * one embedded in the user proto.
     */
    fun toModel() = Node(
        num = node.num,
        metadata = metadata?.proto,
        user = node.user,
        position = node.position,
        snr = node.snr,
        rssi = node.rssi,
        lastHeard = node.lastHeard,
        deviceMetrics = node.deviceMetrics ?: org.meshtastic.proto.DeviceMetrics(),
        channel = node.channel,
        viaMqtt = node.viaMqtt,
        hopsAway = node.hopsAway,
        isFavorite = node.isFavorite,
        isIgnored = node.isIgnored,
        isMuted = node.isMuted,
        environmentMetrics = node.environmentMetrics ?: org.meshtastic.proto.EnvironmentMetrics(),
        powerMetrics = node.powerMetrics ?: org.meshtastic.proto.PowerMetrics(),
        paxcounter = node.paxcounter,
        publicKey = node.publicKey ?: node.user.public_key,
        notes = node.notes,
        manuallyVerified = node.manuallyVerified,
        nodeStatus = node.nodeStatus,
        lastTransport = node.lastTransport,
    )

    /**
     * Produces a standalone [NodeEntity] copy of the embedded row. The metadata
     * relation is not carried over; the public key falls back to the user proto's.
     */
    fun toEntity() = NodeEntity(
        num = node.num,
        user = node.user,
        position = node.position,
        snr = node.snr,
        rssi = node.rssi,
        lastHeard = node.lastHeard,
        deviceTelemetry = node.deviceTelemetry,
        channel = node.channel,
        viaMqtt = node.viaMqtt,
        hopsAway = node.hopsAway,
        isFavorite = node.isFavorite,
        isIgnored = node.isIgnored,
        isMuted = node.isMuted,
        environmentTelemetry = node.environmentTelemetry,
        powerTelemetry = node.powerTelemetry,
        paxcounter = node.paxcounter,
        publicKey = node.publicKey ?: node.user.public_key,
        notes = node.notes,
        manuallyVerified = node.manuallyVerified,
        nodeStatus = node.nodeStatus,
        lastTransport = node.lastTransport,
    )
}
/**
 * Legacy Room entity for the "metadata" table: the latest [DeviceMetadata]
 * proto reported by a node, keyed (and indexed) by node number.
 */
@Entity(tableName = "metadata", indices = [Index(value = ["num"])])
data class MetadataEntity(
    @PrimaryKey val num: Int,
    // Serialized DeviceMetadata proto, stored as a BLOB column.
    @ColumnInfo(name = "proto", typeAffinity = ColumnInfo.BLOB) val proto: DeviceMetadata,
    // Capture time in epoch milliseconds; defaults to "now" at insert.
    val timestamp: Long = nowMillis,
)
/**
 * Legacy Room entity for the "nodes" table: one row per mesh node, keyed by the
 * immutable node number. Telemetry and position protos are stored as BLOB
 * columns; latitude/longitude (and the name columns) are denormalized copies
 * kept for indexed lookups and filtering.
 */
@Suppress("MagicNumber")
@Entity(
    tableName = "nodes",
    indices =
    [
        Index(value = ["last_heard"]),
        Index(value = ["short_name"]),
        Index(value = ["long_name"]),
        Index(value = ["hops_away"]),
        Index(value = ["is_favorite"]),
        Index(value = ["last_heard", "is_favorite"]),
        Index(value = ["public_key"]),
    ],
)
data class NodeEntity(
    @PrimaryKey(autoGenerate = false) val num: Int, // This is immutable, and used as a key
    @ColumnInfo(typeAffinity = ColumnInfo.BLOB) var user: User = User(),
    @ColumnInfo(name = "long_name") var longName: String? = null,
    @ColumnInfo(name = "short_name") var shortName: String? = null, // used in includeUnknown filter
    @ColumnInfo(typeAffinity = ColumnInfo.BLOB) var position: WirePosition = WirePosition(),
    // Denormalized degrees, refreshed from `position` by setPosition().
    var latitude: Double = 0.0,
    var longitude: Double = 0.0,
    var snr: Float = Float.MAX_VALUE,
    var rssi: Int = Int.MAX_VALUE,
    @ColumnInfo(name = "last_heard") var lastHeard: Int = 0, // the last time we've seen this node in secs since 1970
    @ColumnInfo(name = "device_metrics", typeAffinity = ColumnInfo.BLOB) var deviceTelemetry: Telemetry = Telemetry(),
    var channel: Int = 0,
    @ColumnInfo(name = "via_mqtt") var viaMqtt: Boolean = false,
    @ColumnInfo(name = "hops_away") var hopsAway: Int = -1,
    @ColumnInfo(name = "is_favorite") var isFavorite: Boolean = false,
    @ColumnInfo(name = "is_ignored", defaultValue = "0") var isIgnored: Boolean = false,
    @ColumnInfo(name = "is_muted", defaultValue = "0") var isMuted: Boolean = false,
    @ColumnInfo(name = "environment_metrics", typeAffinity = ColumnInfo.BLOB)
    var environmentTelemetry: Telemetry = Telemetry(),
    @ColumnInfo(name = "power_metrics", typeAffinity = ColumnInfo.BLOB) var powerTelemetry: Telemetry = Telemetry(),
    @ColumnInfo(typeAffinity = ColumnInfo.BLOB) var paxcounter: Paxcount = Paxcount(),
    @ColumnInfo(name = "public_key") var publicKey: ByteString? = null,
    @ColumnInfo(name = "notes", defaultValue = "") var notes: String = "",
    @ColumnInfo(name = "manually_verified", defaultValue = "0")
    var manuallyVerified: Boolean = false, // ONLY set true when scanned/imported manually
    @ColumnInfo(name = "node_status") var nodeStatus: String? = null,
    /** The transport mechanism this node was last heard over (see [MeshPacket.TransportMechanism]). */
    @ColumnInfo(name = "last_transport", defaultValue = "0") var lastTransport: Int = 0,
) {
    /** Device metrics sub-message of the stored device telemetry, if present. */
    val deviceMetrics: org.meshtastic.proto.DeviceMetrics?
        get() = deviceTelemetry.device_metrics

    /** Environment metrics sub-message of the stored environment telemetry, if present. */
    val environmentMetrics: org.meshtastic.proto.EnvironmentMetrics?
        get() = environmentTelemetry.environment_metrics

    /** Power metrics sub-message of the stored power telemetry, if present. */
    val powerMetrics: org.meshtastic.proto.PowerMetrics?
        get() = powerTelemetry.power_metrics

    // A node whose hardware model was never reported is treated as unknown.
    val isUnknownUser
        get() = user.hw_model == HardwareModel.UNSET

    // True when a public key is available either directly or via the user proto.
    val hasPKC
        get() = (publicKey ?: user.public_key).size > 0

    /**
     * Stores [p] and refreshes the denormalized latitude/longitude columns.
     * A zero timestamp in [p] is replaced with [defaultTime].
     */
    fun setPosition(p: WirePosition, defaultTime: Int = currentTime()) {
        position = p.copy(time = if (p.time != 0) p.time else defaultTime)
        latitude = degD(p.latitude_i ?: 0)
        longitude = degD(p.longitude_i ?: 0)
    }

    /** true if the device was heard from recently */
    val isOnline: Boolean
        get() {
            return lastHeard > onlineTimeThreshold()
        }

    companion object {
        // Convert to a double representation of degrees
        fun degD(i: Int) = i * 1e-7

        /** Inverse of [degD]: degrees to the integer 1e-7-degree wire representation. */
        fun degI(d: Double) = (d * 1e7).toInt()

        // 32 zero bytes; sentinel value used by callers to mark a bad/unknown key.
        val ERROR_BYTE_STRING: ByteString = ByteArray(32) { 0 }.toByteString()

        /** Current wall-clock time in seconds since 1970. */
        fun currentTime() = nowSeconds.toInt()
    }

    /** Maps this row to the domain [Node] model; absent metric protos become empty defaults. */
    fun toModel() = Node(
        num = num,
        user = user,
        position = position,
        snr = snr,
        rssi = rssi,
        lastHeard = lastHeard,
        deviceMetrics = deviceMetrics ?: org.meshtastic.proto.DeviceMetrics(),
        channel = channel,
        viaMqtt = viaMqtt,
        hopsAway = hopsAway,
        isFavorite = isFavorite,
        isIgnored = isIgnored,
        isMuted = isMuted,
        environmentMetrics = environmentMetrics ?: org.meshtastic.proto.EnvironmentMetrics(),
        powerMetrics = powerMetrics ?: org.meshtastic.proto.PowerMetrics(),
        paxcounter = paxcounter,
        publicKey = publicKey ?: user.public_key,
        notes = notes,
        nodeStatus = nodeStatus,
        lastTransport = lastTransport,
    )

    /**
     * Maps this row to the legacy [NodeInfo] model. The user is included only
     * when its id is non-empty, and the position only when it passes isValid();
     * missing metric fields default to zero.
     */
    fun toNodeInfo() = NodeInfo(
        num = num,
        user =
        MeshUser(
            id = user.id,
            longName = user.long_name,
            shortName = user.short_name,
            hwModel = user.hw_model,
            role = user.role.value,
        )
            .takeIf { user.id.isNotEmpty() },
        position =
        Position(
            latitude = latitude,
            longitude = longitude,
            altitude = position.altitude ?: 0,
            time = position.time,
            satellitesInView = position.sats_in_view,
            groundSpeed = position.ground_speed ?: 0,
            groundTrack = position.ground_track ?: 0,
            precisionBits = position.precision_bits,
        )
            .takeIf { it.isValid() },
        snr = snr,
        rssi = rssi,
        lastHeard = lastHeard,
        deviceMetrics =
        DeviceMetrics(
            time = deviceTelemetry.time,
            batteryLevel = deviceMetrics?.battery_level ?: 0,
            voltage = deviceMetrics?.voltage ?: 0f,
            channelUtilization = deviceMetrics?.channel_utilization ?: 0f,
            airUtilTx = deviceMetrics?.air_util_tx ?: 0f,
            uptimeSeconds = deviceMetrics?.uptime_seconds ?: 0,
        ),
        channel = channel,
        environmentMetrics =
        EnvironmentMetrics.fromTelemetryProto(
            environmentTelemetry.environment_metrics ?: org.meshtastic.proto.EnvironmentMetrics(),
            environmentTelemetry.time,
        ),
        hopsAway = hopsAway,
        nodeStatus = nodeStatus,
    )
}

View File

@@ -1,115 +0,0 @@
/*
* Copyright (c) 2026 Meshtastic LLC
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package org.meshtastic.core.database.dao
import kotlinx.coroutines.flow.first
import kotlinx.coroutines.test.runTest
import org.meshtastic.core.common.util.nowMillis
import org.meshtastic.core.database.MeshtasticDatabase
import org.meshtastic.core.database.entity.MyNodeEntity
import org.meshtastic.core.database.entity.NodeEntity
import org.meshtastic.core.database.getInMemoryDatabaseBuilder
import org.meshtastic.proto.User
import kotlin.test.AfterTest
import kotlin.test.Test
import kotlin.test.assertEquals
import kotlin.test.assertNotNull
import kotlin.test.assertTrue
/**
 * Shared [NodeInfoDao] test suite backed by an in-memory database.
 * NOTE(review): [createDb] carries no Before annotation here — presumably
 * platform-specific subclasses invoke it before each test; confirm.
 */
abstract class CommonNodeInfoDaoTest {

    private lateinit var database: MeshtasticDatabase
    private lateinit var dao: NodeInfoDao

    // Fixture row representing the locally connected node.
    private val myNodeInfo: MyNodeEntity =
        MyNodeEntity(
            myNodeNum = 42424242,
            model = "TBEAM",
            firmwareVersion = "2.5.0",
            couldUpdate = false,
            shouldUpdate = false,
            currentPacketId = 1L,
            messageTimeoutMsec = 300000,
            minAppVersion = 1,
            maxChannels = 8,
            hasWifi = false,
        )

    suspend fun createDb() {
        database = getInMemoryDatabaseBuilder().build()
        dao = database.nodeInfoDao()
        dao.setMyNodeInfo(myNodeInfo)
    }

    @AfterTest
    fun closeDb() {
        database.close()
    }

    @Test
    fun testGetMyNodeInfo() = runTest {
        val stored = dao.getMyNodeInfo().first()
        assertNotNull(stored)
        assertEquals(myNodeInfo.myNodeNum, stored.myNodeNum)
    }

    @Test
    fun testUpsertNode() = runTest {
        val entity =
            NodeEntity(
                num = 1234,
                user = User(long_name = "Test Node", id = "!test", hw_model = org.meshtastic.proto.HardwareModel.TBEAM),
                lastHeard = (nowMillis / 1000).toInt(),
            )
        dao.upsert(entity)

        val loaded = dao.getNodeByNum(1234)
        assertNotNull(loaded)
        assertEquals("Test Node", loaded.node.longName)
    }

    @Test
    fun testNodeDBbyNum() = runTest {
        dao.putAll(
            listOf(
                NodeEntity(num = 1, user = User(id = "!1")),
                NodeEntity(num = 2, user = User(id = "!2")),
            ),
        )

        val byNum = dao.nodeDBbyNum().first()
        assertEquals(2, byNum.size)
        assertTrue(byNum.containsKey(1))
        assertTrue(byNum.containsKey(2))
    }

    @Test
    fun testDeleteNode() = runTest {
        dao.upsert(NodeEntity(num = 1, user = User(id = "!1")))
        dao.deleteNode(1)
        assertEquals(null, dao.getNodeByNum(1))
    }

    @Test
    fun testClearNodeInfo() = runTest {
        dao.putAll(
            listOf(
                NodeEntity(num = 1, user = User(id = "!1"), isFavorite = true),
                NodeEntity(num = 2, user = User(id = "!2"), isFavorite = false),
            ),
        )
        dao.clearNodeInfo(preserveFavorites = true)

        // Only the favorite (num = 1) should survive the clear.
        val remaining = dao.nodeDBbyNum().first()
        assertEquals(1, remaining.size)
        assertTrue(remaining.containsKey(1))
    }
}

View File

@@ -21,7 +21,6 @@ import kotlinx.coroutines.test.runTest
import okio.ByteString.Companion.toByteString
import org.meshtastic.core.common.util.nowMillis
import org.meshtastic.core.database.MeshtasticDatabase
import org.meshtastic.core.database.entity.MyNodeEntity
import org.meshtastic.core.database.entity.Packet
import org.meshtastic.core.database.entity.ReactionEntity
import org.meshtastic.core.database.getInMemoryDatabaseBuilder
@@ -37,33 +36,17 @@ import kotlin.test.assertTrue
abstract class CommonPacketDaoTest {
private lateinit var database: MeshtasticDatabase
private lateinit var nodeInfoDao: NodeInfoDao
private lateinit var packetDao: PacketDao
private val myNodeInfo: MyNodeEntity =
MyNodeEntity(
myNodeNum = 42424242,
model = null,
firmwareVersion = null,
couldUpdate = false,
shouldUpdate = false,
currentPacketId = 1L,
messageTimeoutMsec = 5 * 60 * 1000,
minAppVersion = 1,
maxChannels = 8,
hasWifi = false,
)
private val myNodeNum: Int
get() = myNodeInfo.myNodeNum
private val myNodeNum = 42424242
private val testContactKeys = listOf("0${DataPacket.ID_BROADCAST}", "1!test1234")
private fun generateTestPackets(myNodeNum: Int) = testContactKeys.flatMap { contactKey ->
private fun generateTestPackets(nodeNum: Int) = testContactKeys.flatMap { contactKey ->
List(SAMPLE_SIZE) {
Packet(
uuid = 0L,
myNodeNum = myNodeNum,
myNodeNum = nodeNum,
port_num = PortNum.TEXT_MESSAGE_APP.value,
contact_key = contactKey,
received_time = nowMillis + it,
@@ -80,8 +63,6 @@ abstract class CommonPacketDaoTest {
suspend fun createDb() {
database = getInMemoryDatabaseBuilder().build()
nodeInfoDao = database.nodeInfoDao().apply { setMyNodeInfo(myNodeInfo) }
packetDao =
database.packetDao().apply {
generateTestPackets(42424243).forEach { insert(it) }
@@ -97,7 +78,7 @@ abstract class CommonPacketDaoTest {
@Test
fun testGetMessagesFrom() = runTest {
val contactKey = testContactKeys.first()
val messages = packetDao.getMessagesFrom(contactKey).first()
val messages = packetDao.getMessagesFrom(myNodeNum, contactKey).first()
assertEquals(SAMPLE_SIZE, messages.size)
assertTrue(messages.all { it.packet.myNodeNum == myNodeNum })
assertTrue(messages.all { it.packet.contact_key == contactKey })
@@ -106,42 +87,40 @@ abstract class CommonPacketDaoTest {
@Test
fun testGetMessageCount() = runTest {
val contactKey = testContactKeys.first()
assertEquals(SAMPLE_SIZE, packetDao.getMessageCount(contactKey))
assertEquals(SAMPLE_SIZE, packetDao.getMessageCount(myNodeNum, contactKey))
}
@Test
fun testGetUnreadCount() = runTest {
val contactKey = testContactKeys.first()
assertEquals(SAMPLE_SIZE, packetDao.getUnreadCount(contactKey))
assertEquals(SAMPLE_SIZE, packetDao.getUnreadCount(myNodeNum, contactKey))
}
@Test
fun testClearUnreadCount() = runTest {
val contactKey = testContactKeys.first()
packetDao.clearUnreadCount(contactKey, nowMillis + SAMPLE_SIZE)
assertEquals(0, packetDao.getUnreadCount(contactKey))
packetDao.clearUnreadCount(myNodeNum, contactKey, nowMillis + SAMPLE_SIZE)
assertEquals(0, packetDao.getUnreadCount(myNodeNum, contactKey))
}
@Test
fun testClearAllUnreadCounts() = runTest {
packetDao.clearAllUnreadCounts()
testContactKeys.forEach { assertEquals(0, packetDao.getUnreadCount(it)) }
packetDao.clearAllUnreadCounts(myNodeNum)
testContactKeys.forEach { assertEquals(0, packetDao.getUnreadCount(myNodeNum, it)) }
}
@Test
fun testUpdateMessageStatus() = runTest {
val contactKey = testContactKeys.first()
val messages = packetDao.getMessagesFrom(contactKey).first()
val messages = packetDao.getMessagesFrom(myNodeNum, contactKey).first()
val packet = messages.first().packet.data
val originalStatus = packet.status
// Ensure packet has a valid ID for updating
val packetWithId = packet.copy(id = 999, from = "!$myNodeNum")
val updatedRoomPacket = messages.first().packet.copy(data = packetWithId, packetId = 999)
packetDao.update(updatedRoomPacket)
packetDao.updateMessageStatus(packetWithId, MessageStatus.DELIVERED)
val updatedMessages = packetDao.getMessagesFrom(contactKey).first()
packetDao.updateMessageStatus(myNodeNum, packetWithId, MessageStatus.DELIVERED)
val updatedMessages = packetDao.getMessagesFrom(myNodeNum, contactKey).first()
assertEquals(MessageStatus.DELIVERED, updatedMessages.first { it.packet.data.id == 999 }.packet.data.status)
}
@@ -164,7 +143,7 @@ abstract class CommonPacketDaoTest {
),
)
packetDao.insert(queuedPacket)
val queued = packetDao.getAllDataPackets().filter { it.status == MessageStatus.QUEUED }
val queued = packetDao.getAllDataPackets(myNodeNum).filter { it.status == MessageStatus.QUEUED }
assertNotNull(queued)
assertEquals(1, queued.size)
assertEquals("Queued", queued.first().text)
@@ -173,13 +152,13 @@ abstract class CommonPacketDaoTest {
@Test
fun testDeleteMessages() = runTest {
val contactKey = testContactKeys.first()
packetDao.deleteContacts(listOf(contactKey))
assertEquals(0, packetDao.getMessageCount(contactKey))
packetDao.deleteContacts(myNodeNum, listOf(contactKey))
assertEquals(0, packetDao.getMessageCount(myNodeNum, contactKey))
}
@Test
fun testGetContactKeys() = runTest {
val contacts = packetDao.getContactKeys().first()
val contacts = packetDao.getContactKeys(myNodeNum).first()
assertEquals(testContactKeys.size, contacts.size)
testContactKeys.forEach { assertTrue(contacts.containsKey(it)) }
}
@@ -202,9 +181,8 @@ abstract class CommonPacketDaoTest {
),
)
packetDao.insert(waypointPacket)
val waypoints = packetDao.getAllWaypoints()
val waypoints = packetDao.getAllWaypoints(myNodeNum)
assertEquals(1, waypoints.size)
// Waypoints aren't text messages, so they don't resolve a string text.
}
@Test
@@ -221,7 +199,7 @@ abstract class CommonPacketDaoTest {
val filteredMessages = listOf("Filtered 1")
normalMessages.forEachIndexed { index, text ->
val packet =
packetDao.insert(
Packet(
uuid = 0L,
myNodeNum = myNodeNum,
@@ -229,19 +207,18 @@ abstract class CommonPacketDaoTest {
contact_key = contactKey,
received_time = nowMillis + index,
read = false,
data =
DataPacket(
data = DataPacket(
to = DataPacket.ID_BROADCAST,
bytes = text.encodeToByteArray().toByteString(),
dataType = PortNum.TEXT_MESSAGE_APP.value,
),
filtered = false,
)
packetDao.insert(packet)
),
)
}
filteredMessages.forEachIndexed { index, text ->
val packet =
packetDao.insert(
Packet(
uuid = 0L,
myNodeNum = myNodeNum,
@@ -249,35 +226,31 @@ abstract class CommonPacketDaoTest {
contact_key = contactKey,
received_time = nowMillis + normalMessages.size + index,
read = true,
data =
DataPacket(
data = DataPacket(
to = DataPacket.ID_BROADCAST,
bytes = text.encodeToByteArray().toByteString(),
dataType = PortNum.TEXT_MESSAGE_APP.value,
),
filtered = true,
)
packetDao.insert(packet)
),
)
}
val allMessages = packetDao.getMessagesFrom(contactKey).first()
val allMessages = packetDao.getMessagesFrom(myNodeNum, contactKey).first()
assertEquals(normalMessages.size + filteredMessages.size, allMessages.size)
val includingFiltered = packetDao.getMessagesFrom(contactKey, includeFiltered = true).first()
val includingFiltered = packetDao.getMessagesFrom(myNodeNum, contactKey, includeFiltered = true).first()
assertEquals(normalMessages.size + filteredMessages.size, includingFiltered.size)
val excludingFiltered = packetDao.getMessagesFrom(contactKey, includeFiltered = false).first()
val excludingFiltered = packetDao.getMessagesFrom(myNodeNum, contactKey, includeFiltered = false).first()
assertEquals(normalMessages.size, excludingFiltered.size)
assertFalse(excludingFiltered.any { it.packet.filtered })
}
@Test
fun testGetPacketsByPacketIdsChunked() = runTest {
// Regression test for SQLITE_MAX_VARIABLE_NUMBER (999) limit. Inserting >999 packets and
// looking them up by id must not throw; callers are expected to chunk, and each chunk
// must return the correct rows.
val totalPackets = 2000
val chunkSize = NodeInfoDao.MAX_BIND_PARAMS
val chunkSize = MAX_SQLITE_BIND_PARAMS
val contactKey = "chunk-test"
val baseTime = nowMillis
val packetIds = (1..totalPackets).toList()
@@ -291,8 +264,7 @@ abstract class CommonPacketDaoTest {
contact_key = contactKey,
received_time = baseTime + id,
read = false,
data =
DataPacket(
data = DataPacket(
to = DataPacket.ID_BROADCAST,
bytes = "Chunk $id".encodeToByteArray().toByteString(),
dataType = PortNum.TEXT_MESSAGE_APP.value,
@@ -302,12 +274,13 @@ abstract class CommonPacketDaoTest {
)
}
val fetched = packetIds.chunked(chunkSize).flatMap { packetDao.getPacketsByPacketIds(it) }
val fetched = packetIds.chunked(chunkSize).flatMap { packetDao.getPacketsByPacketIds(myNodeNum, it) }
assertEquals(totalPackets, fetched.size)
assertEquals(packetIds.toSet(), fetched.map { it.packet.packetId }.toSet())
}
companion object {
private const val SAMPLE_SIZE = 10
private const val MAX_SQLITE_BIND_PARAMS = 999
}
}