Merge pull request #6585 from vector-im/feature/bca/fix_backup_regression

Fix backup looping on the same keys
Authored by Valere on 2022-07-20 13:52:47 +02:00, committed by GitHub
commit a8cd77c267
6 changed files with 12 additions and 34 deletions

changelog.d/6585.bugfix Normal file

@@ -0,0 +1 @@
Fix backup saving the same keys several times
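Read together with the RealmCryptoStore hunk at the end of this diff, the regression appears to be that re-persisting an inbound group session rebuilt its Realm row with backedUp reset to false, so the key backup kept treating already-uploaded sessions as pending. A minimal, self-contained Kotlin sketch of that failure mode, using simplified stand-in names rather than the SDK's real classes:

data class SessionRow(val key: String, var backedUp: Boolean = false)

class FakeStore {
    private val rows = mutableMapOf<String, SessionRow>()

    // Pre-fix behaviour: the row is rebuilt, so backedUp silently falls back to false.
    fun storeResettingFlag(key: String) {
        rows[key] = SessionRow(key)
    }

    // Post-fix behaviour, mirroring `backedUp = existing?.backedUp ?: false` in the hunk below.
    fun storePreservingFlag(key: String) {
        rows[key] = SessionRow(key, backedUp = rows[key]?.backedUp ?: false)
    }

    fun markBackedUp(key: String) { rows[key]?.backedUp = true }

    fun pendingBackup(): List<SessionRow> = rows.values.filter { !it.backedUp }
}

With storeResettingFlag, a session that is re-stored after being backed up reappears in pendingBackup(), so the backup re-uploads the same keys on every pass; with storePreservingFlag the pending list empties and the loop terminates.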

KeysBackupTest.kt

@@ -24,7 +24,6 @@ import org.junit.Assert.assertNotNull
import org.junit.Assert.assertNull
import org.junit.Assert.assertTrue
import org.junit.FixMethodOrder
import org.junit.Ignore
import org.junit.Rule
import org.junit.Test
import org.junit.runner.RunWith
@@ -56,7 +55,6 @@ import java.util.concurrent.CountDownLatch
@RunWith(AndroidJUnit4::class)
@FixMethodOrder(MethodSorters.JVM)
@LargeTest
@Ignore
class KeysBackupTest : InstrumentedTest {
@get:Rule val rule = RetryTestRule(3)
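This hunk re-enables KeysBackupTest (the @Ignore import and annotation are dropped) and relies on RetryTestRule(3) to absorb flakiness instead. The SDK ships its own RetryTestRule; purely as an illustration of what such a retrying JUnit 4 rule typically looks like, under that assumption:

import org.junit.rules.TestRule
import org.junit.runner.Description
import org.junit.runners.model.Statement

// Illustrative only: not the SDK's RetryTestRule, just the usual shape of a retry rule.
class SimpleRetryRule(private val retryCount: Int) : TestRule {
    override fun apply(base: Statement, description: Description): Statement =
            object : Statement() {
                override fun evaluate() {
                    var lastError: Throwable? = null
                    repeat(retryCount) {
                        try {
                            base.evaluate() // run the test body
                            return          // success: stop retrying
                        } catch (t: Throwable) {
                            lastError = t   // remember the failure and try again
                        }
                    }
                    throw lastError ?: IllegalStateException("retryCount must be positive")
                }
            }
}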

InboundGroupSessionStore.kt

@@ -21,13 +21,10 @@ import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.launch
import kotlinx.coroutines.sync.Mutex
import org.matrix.android.sdk.api.MatrixCoroutineDispatchers
import org.matrix.android.sdk.api.extensions.tryOrNull
import org.matrix.android.sdk.api.logger.LoggerTag
import org.matrix.android.sdk.internal.crypto.model.MXInboundMegolmSessionWrapper
import org.matrix.android.sdk.internal.crypto.store.IMXCryptoStore
import timber.log.Timber
import java.util.Timer
import java.util.TimerTask
import javax.inject.Inject
internal data class InboundGroupSessionHolder(
@@ -57,18 +54,13 @@ internal class InboundGroupSessionStore @Inject constructor(
if (oldValue != null) {
cryptoCoroutineScope.launch(coroutineDispatchers.crypto) {
Timber.tag(loggerTag.value).v("## Inbound: entryRemoved ${oldValue.wrapper.roomId}-${oldValue.wrapper.senderKey}")
store.storeInboundGroupSessions(listOf(oldValue).map { it.wrapper })
// store.storeInboundGroupSessions(listOf(oldValue).map { it.wrapper })
oldValue.wrapper.session.releaseSession()
}
}
}
}
private val timer = Timer()
private var timerTask: TimerTask? = null
private val dirtySession = mutableListOf<InboundGroupSessionHolder>()
@Synchronized
fun clear() {
sessionCache.evictAll()
@@ -90,7 +82,6 @@ internal class InboundGroupSessionStore @Inject constructor(
@Synchronized
fun replaceGroupSession(old: InboundGroupSessionHolder, new: InboundGroupSessionHolder, sessionId: String, senderKey: String) {
Timber.tag(loggerTag.value).v("## Replacing outdated session ${old.wrapper.roomId}-${old.wrapper.senderKey}")
dirtySession.remove(old)
store.removeInboundGroupSession(sessionId, senderKey)
sessionCache.remove(CacheKey(sessionId, senderKey))
@@ -107,33 +98,14 @@ internal class InboundGroupSessionStore @Inject constructor(
private fun internalStoreGroupSession(holder: InboundGroupSessionHolder, sessionId: String, senderKey: String) {
Timber.tag(loggerTag.value).v("## Inbound: getInboundGroupSession mark as dirty ${holder.wrapper.roomId}-${holder.wrapper.senderKey}")
// We want to batch this a bit for performances
dirtySession.add(holder)
if (sessionCache[CacheKey(sessionId, senderKey)] == null) {
// first time seen, put it in memory cache while waiting for batch insert
// If it's already known, no need to update cache it's already there
sessionCache.put(CacheKey(sessionId, senderKey), holder)
}
timerTask?.cancel()
timerTask = object : TimerTask() {
override fun run() {
batchSave()
}
}
timer.schedule(timerTask!!, 300)
}
@Synchronized
private fun batchSave() {
val toSave = mutableListOf<InboundGroupSessionHolder>().apply { addAll(dirtySession) }
dirtySession.clear()
cryptoCoroutineScope.launch(coroutineDispatchers.crypto) {
Timber.tag(loggerTag.value).v("## Inbound: getInboundGroupSession batching save of ${toSave.size}")
tryOrNull {
store.storeInboundGroupSessions(toSave.map { it.wrapper })
}
store.storeInboundGroupSessions(listOf(holder.wrapper))
}
}
}
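With the Timer/TimerTask batching and the dirtySession list removed, storing a session collapses to a single coroutine write. Reassembled from the added lines in this hunk, internalStoreGroupSession plausibly ends up reading like this (a sketch, not guaranteed to match the file verbatim):

private fun internalStoreGroupSession(holder: InboundGroupSessionHolder, sessionId: String, senderKey: String) {
    Timber.tag(loggerTag.value).v("## Inbound: getInboundGroupSession mark as dirty ${holder.wrapper.roomId}-${holder.wrapper.senderKey}")

    if (sessionCache[CacheKey(sessionId, senderKey)] == null) {
        // First time seen: put it in the memory cache; a known session is already there.
        sessionCache.put(CacheKey(sessionId, senderKey), holder)
    }

    // The Timer-based batching is gone: persist this one session straight away on the crypto dispatcher.
    cryptoCoroutineScope.launch(coroutineDispatchers.crypto) {
        store.storeInboundGroupSessions(listOf(holder.wrapper))
    }
}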

MXOlmDevice.kt

@@ -808,7 +808,6 @@ internal class MXOlmDevice @Inject constructor(
}
replayAttackMap[messageIndexKey] = eventId
}
inboundGroupSessionStore.storeInBoundGroupSession(sessionHolder, sessionId, senderKey)
val payload = try {
val adapter = MoshiProvider.providesMoshi().adapter<JsonDict>(JSON_DICT_PARAMETERIZED_TYPE)
val payloadString = convertFromUTF8(decryptResult.mDecryptedMessage)

DefaultKeysBackupService.kt

@@ -1349,6 +1349,8 @@ internal class DefaultKeysBackupService @Inject constructor(
// Mark keys as backed up
cryptoStore.markBackupDoneForInboundGroupSessions(olmInboundGroupSessionWrappers)
// we can release the sessions now
olmInboundGroupSessionWrappers.onEach { it.session.releaseSession() }
if (olmInboundGroupSessionWrappers.size < KEY_BACKUP_SEND_KEYS_MAX_COUNT) {
Timber.v("backupKeys: All keys have been backed up")

RealmCryptoStore.kt

@@ -763,11 +763,17 @@ internal class RealmCryptoStore @Inject constructor(
// } ?: false
val key = OlmInboundGroupSessionEntity.createPrimaryKey(sessionIdentifier, wrapper.sessionData.senderKey)
val existing = realm.where<OlmInboundGroupSessionEntity>()
.equalTo(OlmInboundGroupSessionEntityFields.PRIMARY_KEY, key)
.findFirst()
val realmOlmInboundGroupSession = OlmInboundGroupSessionEntity().apply {
primaryKey = key
store(wrapper)
backedUp = existing?.backedUp ?: false
}
Timber.i("## CRYPTO | shouldShareHistory: ${wrapper.sessionData.sharedHistory} for $key")
Timber.v("## CRYPTO | shouldShareHistory: ${wrapper.sessionData.sharedHistory} for $key")
realm.insertOrUpdate(realmOlmInboundGroupSession)
}
}