Mirror of https://github.com/meshtastic/Meshtastic-Android.git
fix: node search by keeping denormalized name columns up to date and backfilling existing nodes (#3839)
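The denormalized longName/shortName columns exist so that node search can match names directly in SQL instead of deserializing the user protobuf for every row. As a rough illustration only (the actual search query, DAO method name, and column names in the app may differ), a Room query over those columns could look like:

    // Hypothetical sketch of a name search using the denormalized columns.
    // searchNodesByName and the longName/shortName column names are assumptions, not the app's actual API.
    @Query(
        "SELECT * FROM nodes " +
            "WHERE longName LIKE '%' || :query || '%' OR shortName LIKE '%' || :query || '%'"
    )
    suspend fun searchNodesByName(query: String): List<NodeEntity>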
@@ -37,4 +37,6 @@ interface NodeInfoWriteDataSource {
    suspend fun upsert(metadata: MetadataEntity)

    suspend fun setNodeNotes(num: Int, notes: String)

    suspend fun backfillDenormalizedNames()
}
@@ -57,4 +57,7 @@ constructor(

    override suspend fun setNodeNotes(num: Int, notes: String) =
        withContext(dispatchers.io) { dbManager.withDb { it.nodeInfoDao().setNodeNotes(num, notes) } }

    override suspend fun backfillDenormalizedNames() =
        withContext(dispatchers.io) { dbManager.withDb { it.nodeInfoDao().backfillDenormalizedNames() } }
}
@@ -54,6 +54,13 @@ constructor(
    private val nodeInfoWriteDataSource: NodeInfoWriteDataSource,
    private val dispatchers: CoroutineDispatchers,
) {
    init {
        // Backfill denormalized name columns for existing nodes on startup
        processLifecycle.coroutineScope.launchWhenCreated {
            withContext(dispatchers.io) { nodeInfoWriteDataSource.backfillDenormalizedNames() }
        }
    }

    // hardware info about our local device (can be null)
    val myNodeInfo: StateFlow<MyNodeEntity?> =
        nodeInfoReadDataSource
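Timing note: launchWhenCreated on the process lifecycle scope runs the block once the process reaches CREATED, so the backfill effectively happens once per app start, on the IO dispatcher. Because the DAO method shown further down only rewrites rows whose name columns are still NULL, repeating it on every start is cheap after the first pass.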
@@ -30,6 +30,7 @@ import org.meshtastic.core.database.entity.MetadataEntity
import org.meshtastic.core.database.entity.MyNodeEntity
import org.meshtastic.core.database.entity.NodeEntity
import org.meshtastic.core.database.entity.NodeWithRelations
import org.meshtastic.proto.MeshProtos

@Suppress("TooManyFunctions")
@Dao
@@ -49,6 +50,13 @@ interface NodeInfoDao {
        // and to support lazy migration.
        incomingNode.publicKey = incomingNode.user.publicKey

        // Populate denormalized name columns from the User protobuf for search functionality
        // Only populate if the user is not a placeholder (hwModel != UNSET)
        if (incomingNode.user.hwModel != MeshProtos.HardwareModel.UNSET) {
            incomingNode.longName = incomingNode.user.longName
            incomingNode.shortName = incomingNode.user.shortName
        }

        val existingNodeEntity = getNodeByNum(incomingNode.num)?.node

        return if (existingNodeEntity == null) {
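The assignments above assume NodeEntity carries nullable longName/shortName fields next to the serialized user message. A minimal sketch of what those denormalized columns could look like on the entity (the real entity has more columns and its annotations may differ):

    // Hypothetical sketch, not the actual NodeEntity definition.
    import androidx.room.Entity
    import androidx.room.PrimaryKey
    import org.meshtastic.proto.MeshProtos

    @Entity(tableName = "nodes")
    data class NodeEntity(
        @PrimaryKey val num: Int,
        val user: MeshProtos.User,      // persisted via a type converter; not directly queryable in SQL
        var longName: String? = null,   // denormalized copy of user.longName for SQL search
        var shortName: String? = null,  // denormalized copy of user.shortName for SQL search
        // ... other columns (publicKey, position, etc.) omitted
    )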
@@ -240,4 +248,27 @@ interface NodeInfoDao {
        setMyNodeInfo(mi)
        putAll(nodes.map { getVerifiedNodeForUpsert(it) })
    }

    /**
     * Backfills longName and shortName columns from the user protobuf for nodes where these columns are NULL. This
     * ensures search functionality works for all nodes. Skips placeholder/default users (hwModel == UNSET).
     */
    @Transaction
    fun backfillDenormalizedNames() {
        val nodes = getAllNodesSnapshot()
        val nodesToUpdate =
            nodes
                .filter { node ->
                    // Only backfill if columns are NULL AND the user is not a placeholder (hwModel != UNSET)
                    (node.longName == null || node.shortName == null) &&
                        node.user.hwModel != MeshProtos.HardwareModel.UNSET
                }
                .map { node -> node.copy(longName = node.user.longName, shortName = node.user.shortName) }
        if (nodesToUpdate.isNotEmpty()) {
            putAll(nodesToUpdate)
        }
    }

    @Query("SELECT * FROM nodes")
    fun getAllNodesSnapshot(): List<NodeEntity>
}
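Design note: the backfill runs in Kotlin rather than as a single SQL UPDATE because the authoritative names live inside the user protobuf, which is presumably stored as a serialized blob via a type converter and so cannot be parsed from plain SQL. Reading a snapshot, filtering in memory, and reusing putAll keeps the whole operation inside one @Transaction, and since only rows with NULL name columns are rewritten, the method is idempotent across repeated startups.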
@@ -108,7 +108,7 @@ constructor(
            )
            .map { list ->
                list
-                   .filter { it.isIgnored == filter.showIgnored }
+                   .filter { filter.showIgnored || !it.isIgnored }
                    .filter { node ->
                        if (filter.excludeInfrastructure) {
                            val role = node.user.role
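The last hunk is a one-line behavior fix to the ignored-node filter: the old predicate it.isIgnored == filter.showIgnored kept only ignored nodes when the "show ignored" toggle was on (hiding everything else), while the new predicate keeps all nodes when the toggle is on and only non-ignored nodes when it is off. A small self-contained sketch of the two predicates, using the names from the diff:

    // Compares the old and new filter predicates from the diff.
    fun keptByOldFilter(isIgnored: Boolean, showIgnored: Boolean) = isIgnored == showIgnored
    fun keptByNewFilter(isIgnored: Boolean, showIgnored: Boolean) = showIgnored || !isIgnored

    fun main() {
        // With showIgnored = true, the old predicate drops ordinary (non-ignored) nodes...
        println(keptByOldFilter(isIgnored = false, showIgnored = true)) // false
        // ...while the new predicate keeps them alongside the ignored ones.
        println(keptByNewFilter(isIgnored = false, showIgnored = true)) // true
        // With showIgnored = false, both predicates hide ignored nodes.
        println(keptByOldFilter(isIgnored = true, showIgnored = false)) // false
        println(keptByNewFilter(isIgnored = true, showIgnored = false)) // false
    }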