mirror of
https://github.com/bitfireAT/davx5-ose
synced 2024-10-06 19:34:23 +00:00
WebDAV: refactor paging and caches (bitfireAT/davx5#502)
* WebDAV: don't try to load 0-byte segments * Extracted segmentation logic to SegmentedReader for testability * Reorganize and simplify caches * Refactor thumbnail cache * Use coroutines with Dispatchers.IO instead of custom Executor * Remove obsolete classes * Fix tests, simplify DiskCache * Paging reader: cache current page * PagingReader tests * Thumbnails: timeout for generation and not only for waiting * openDocumentThumbnail: actually cancel HTTP request when method is cancelled * Better KDoc * Add further tests
This commit is contained in:
parent
6ce0d35e6d
commit
357cf09be7
|
@ -1,78 +0,0 @@
|
|||
/***************************************************************************************************
|
||||
* Copyright © All Contributors. See LICENSE and AUTHORS in the root directory for details.
|
||||
**************************************************************************************************/
|
||||
|
||||
package at.bitfire.davdroid.webdav
|
||||
|
||||
import at.bitfire.davdroid.webdav.cache.MemoryCache
|
||||
import org.apache.commons.io.FileUtils
|
||||
import org.junit.Assert.*
|
||||
import org.junit.Before
|
||||
import org.junit.Test
|
||||
|
||||
class MemoryCacheTest {
|
||||
|
||||
companion object {
|
||||
val SAMPLE_KEY1 = "key1"
|
||||
val SAMPLE_CONTENT1 = "Sample Content 1".toByteArray()
|
||||
val SAMPLE_CONTENT2 = "Another Content".toByteArray()
|
||||
}
|
||||
|
||||
lateinit var storage: MemoryCache<String>
|
||||
|
||||
|
||||
@Before
|
||||
fun createStorage() {
|
||||
storage = MemoryCache(1*FileUtils.ONE_MB.toInt())
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
fun testGet() {
|
||||
// no entry yet, get should return null
|
||||
assertNull(storage.get(SAMPLE_KEY1))
|
||||
|
||||
// add entry
|
||||
storage.getOrPut(SAMPLE_KEY1) { SAMPLE_CONTENT1 }
|
||||
assertArrayEquals(SAMPLE_CONTENT1, storage.get(SAMPLE_KEY1))
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testGetOrPut() {
|
||||
assertNull(storage.get(SAMPLE_KEY1))
|
||||
// no entry yet; SAMPLE_CONTENT1 should be generated
|
||||
var calledGenerateSampleContent1 = false
|
||||
assertArrayEquals(SAMPLE_CONTENT1, storage.getOrPut(SAMPLE_KEY1) {
|
||||
calledGenerateSampleContent1 = true
|
||||
SAMPLE_CONTENT1
|
||||
})
|
||||
assertTrue(calledGenerateSampleContent1)
|
||||
assertNotNull(storage.get(SAMPLE_KEY1))
|
||||
|
||||
// now there's a SAMPLE_CONTENT1 entry, it should be returned while SAMPLE_CONTENT2 is not generated
|
||||
var calledGenerateSampleContent2 = false
|
||||
assertArrayEquals(SAMPLE_CONTENT1, storage.getOrPut(SAMPLE_KEY1) {
|
||||
calledGenerateSampleContent2 = true
|
||||
SAMPLE_CONTENT2
|
||||
})
|
||||
assertFalse(calledGenerateSampleContent2)
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testMaxCacheSize() {
|
||||
// Cache size is 1 MB. Add 11*100 kB -> the first entry should be gone then
|
||||
for (i in 0 until 11) {
|
||||
val key = "key$i"
|
||||
storage.getOrPut(key) {
|
||||
ByteArray(100 * FileUtils.ONE_KB.toInt()) { i.toByte() }
|
||||
}
|
||||
assertNotNull(storage.get(key))
|
||||
}
|
||||
|
||||
// now key0 should have been evicted and only key1..key11 should be there
|
||||
assertNull(storage.get("key0"))
|
||||
for (i in 1 until 11)
|
||||
assertNotNull(storage.get("key$i"))
|
||||
}
|
||||
|
||||
}
|
|
@ -1,159 +0,0 @@
|
|||
/***************************************************************************************************
|
||||
* Copyright © All Contributors. See LICENSE and AUTHORS in the root directory for details.
|
||||
**************************************************************************************************/
|
||||
|
||||
package at.bitfire.davdroid.webdav
|
||||
|
||||
import at.bitfire.davdroid.webdav.cache.Cache
|
||||
import at.bitfire.davdroid.webdav.cache.SegmentedCache
|
||||
import org.apache.commons.io.FileUtils
|
||||
import org.junit.Assert.assertArrayEquals
|
||||
import org.junit.Assert.assertEquals
|
||||
import org.junit.Test
|
||||
|
||||
class SegmentedCacheTest {
|
||||
|
||||
companion object {
|
||||
const val PAGE_SIZE = 100*FileUtils.ONE_KB.toInt()
|
||||
|
||||
const val SAMPLE_KEY1 = "key1"
|
||||
const val PAGE2_SIZE = 123
|
||||
}
|
||||
|
||||
val noCache = object: Cache<SegmentedCache.SegmentKey<String>> {
|
||||
override fun get(key: SegmentedCache.SegmentKey<String>) = null
|
||||
override fun getOrPut(key: SegmentedCache.SegmentKey<String>, generate: () -> ByteArray) = generate()
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testRead_AcrossPages() {
|
||||
val cache = SegmentedCache<String>(PAGE_SIZE, object: SegmentedCache.PageLoader<String> {
|
||||
override fun load(key: SegmentedCache.SegmentKey<String>, segmentSize: Int) =
|
||||
when (key.segment) {
|
||||
0 -> ByteArray(PAGE_SIZE) { 1 }
|
||||
1 -> ByteArray(PAGE2_SIZE) { 2 }
|
||||
else -> throw IndexOutOfBoundsException()
|
||||
}
|
||||
}, noCache)
|
||||
val dst = ByteArray(20)
|
||||
assertEquals(20, cache.read(SAMPLE_KEY1, (PAGE_SIZE - 10).toLong(), dst.size, dst))
|
||||
assertArrayEquals(ByteArray(20) { i ->
|
||||
if (i < 10)
|
||||
1
|
||||
else
|
||||
2
|
||||
}, dst)
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testRead_AcrossPagesAndEOF() {
|
||||
val cache = SegmentedCache<String>(PAGE_SIZE, object: SegmentedCache.PageLoader<String> {
|
||||
override fun load(key: SegmentedCache.SegmentKey<String>, segmentSize: Int) =
|
||||
when (key.segment) {
|
||||
0 -> ByteArray(PAGE_SIZE) { 1 }
|
||||
1 -> ByteArray(PAGE2_SIZE) { 2 }
|
||||
else -> throw IndexOutOfBoundsException()
|
||||
}
|
||||
}, noCache)
|
||||
val dst = ByteArray(10 + PAGE2_SIZE + 10)
|
||||
assertEquals(10 + PAGE2_SIZE, cache.read(SAMPLE_KEY1, (PAGE_SIZE - 10).toLong(), dst.size, dst))
|
||||
assertArrayEquals(ByteArray(10 + PAGE2_SIZE) { i ->
|
||||
if (i < 10)
|
||||
1
|
||||
else
|
||||
2
|
||||
}, dst.copyOf(10 + PAGE2_SIZE))
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testRead_ExactlyPageSize_BufferAlsoPageSize() {
|
||||
var loadCalled = 0
|
||||
val cache = SegmentedCache<String>(PAGE_SIZE, object: SegmentedCache.PageLoader<String> {
|
||||
override fun load(key: SegmentedCache.SegmentKey<String>, segmentSize: Int): ByteArray {
|
||||
loadCalled++
|
||||
if (key.segment == 0)
|
||||
return ByteArray(PAGE_SIZE)
|
||||
else
|
||||
throw IndexOutOfBoundsException()
|
||||
}
|
||||
}, noCache)
|
||||
val dst = ByteArray(PAGE_SIZE)
|
||||
assertEquals(PAGE_SIZE, cache.read(SAMPLE_KEY1, 0, dst.size, dst))
|
||||
assertEquals(1, loadCalled)
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testRead_ExactlyPageSize_ButLargerBuffer() {
|
||||
var loadCalled = 0
|
||||
val cache = SegmentedCache<String>(PAGE_SIZE, object: SegmentedCache.PageLoader<String> {
|
||||
override fun load(key: SegmentedCache.SegmentKey<String>, segmentSize: Int): ByteArray {
|
||||
loadCalled++
|
||||
if (key.segment == 0)
|
||||
return ByteArray(PAGE_SIZE)
|
||||
else
|
||||
throw IndexOutOfBoundsException()
|
||||
}
|
||||
}, noCache)
|
||||
val dst = ByteArray(PAGE_SIZE + 10) // 10 bytes more so that the second segment is read
|
||||
assertEquals(PAGE_SIZE, cache.read(SAMPLE_KEY1, 0, dst.size, dst))
|
||||
assertEquals(2, loadCalled)
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testRead_Offset() {
|
||||
val cache = SegmentedCache<String>(PAGE_SIZE, object: SegmentedCache.PageLoader<String> {
|
||||
override fun load(key: SegmentedCache.SegmentKey<String>, segmentSize: Int): ByteArray {
|
||||
if (key.segment == 0)
|
||||
return ByteArray(PAGE_SIZE) { 1 }
|
||||
else
|
||||
throw IndexOutOfBoundsException()
|
||||
}
|
||||
}, noCache)
|
||||
val dst = ByteArray(PAGE_SIZE)
|
||||
assertEquals(PAGE_SIZE - 100, cache.read(SAMPLE_KEY1, 100, dst.size, dst))
|
||||
assertArrayEquals(ByteArray(PAGE_SIZE) { i ->
|
||||
if (i < PAGE_SIZE - 100)
|
||||
1
|
||||
else
|
||||
0
|
||||
}, dst)
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testRead_OnlyOnePageSmallerThanPageSize_From0() {
|
||||
val contentSize = 123
|
||||
val cache = SegmentedCache<String>(PAGE_SIZE, object: SegmentedCache.PageLoader<String> {
|
||||
override fun load(key: SegmentedCache.SegmentKey<String>, segmentSize: Int) =
|
||||
when (key.segment) {
|
||||
0 -> ByteArray(contentSize) { it.toByte() }
|
||||
else -> throw IndexOutOfBoundsException()
|
||||
}
|
||||
}, noCache)
|
||||
|
||||
// read less than content size
|
||||
var dst = ByteArray(10) // 10 < contentSize
|
||||
assertEquals(10, cache.read(SAMPLE_KEY1, 0, dst.size, dst))
|
||||
assertArrayEquals(ByteArray(10) { it.toByte() }, dst)
|
||||
|
||||
// read more than content size
|
||||
dst = ByteArray(1000) // 1000 > contentSize
|
||||
assertEquals(contentSize, cache.read(SAMPLE_KEY1, 0, dst.size, dst))
|
||||
assertArrayEquals(ByteArray(1000) { i ->
|
||||
if (i < contentSize)
|
||||
i.toByte()
|
||||
else
|
||||
0
|
||||
}, dst)
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testRead_ZeroByteFile() {
|
||||
val cache = SegmentedCache<String>(PAGE_SIZE, object: SegmentedCache.PageLoader<String> {
|
||||
override fun load(key: SegmentedCache.SegmentKey<String>, segmentSize: Int) =
|
||||
throw IndexOutOfBoundsException()
|
||||
}, noCache)
|
||||
val dst = ByteArray(10)
|
||||
assertEquals(0, cache.read(SAMPLE_KEY1, 10, dst.size, dst))
|
||||
}
|
||||
|
||||
}
|
|
@ -13,10 +13,14 @@ import androidx.room.ForeignKey
|
|||
import androidx.room.Index
|
||||
import androidx.room.PrimaryKey
|
||||
import at.bitfire.davdroid.util.DavUtils.MEDIA_TYPE_OCTET_STREAM
|
||||
import at.bitfire.davdroid.webdav.DocumentState
|
||||
import okhttp3.HttpUrl
|
||||
import okhttp3.MediaType
|
||||
import okhttp3.MediaType.Companion.toMediaTypeOrNull
|
||||
import org.apache.commons.codec.digest.DigestUtils
|
||||
import org.apache.commons.digester.Digester
|
||||
import java.io.FileNotFoundException
|
||||
import java.time.Instant
|
||||
|
||||
@Entity(
|
||||
tableName = "webdav_document",
|
||||
|
@ -58,6 +62,12 @@ data class WebDavDocument(
|
|||
|
||||
) {
|
||||
|
||||
fun cacheKey(): CacheKey? {
|
||||
if (eTag != null || lastModified != null)
|
||||
return CacheKey(id, DocumentState(eTag, lastModified?.let { ts -> Instant.ofEpochMilli(ts) }))
|
||||
return null
|
||||
}
|
||||
|
||||
@SuppressLint("InlinedApi")
|
||||
fun toBundle(parent: WebDavDocument?): Bundle {
|
||||
if (parent?.isDirectory == false)
|
||||
|
@ -116,4 +126,13 @@ data class WebDavDocument(
|
|||
return builder.build()
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Represents a WebDAV document in a given state (with a given ETag/Last-Modified).
|
||||
*/
|
||||
data class CacheKey(
|
||||
val docId: Long,
|
||||
val documentState: DocumentState
|
||||
)
|
||||
|
||||
}
|
|
@ -1,22 +0,0 @@
|
|||
/***************************************************************************************************
|
||||
* Copyright © All Contributors. See LICENSE and AUTHORS in the root directory for details.
|
||||
**************************************************************************************************/
|
||||
|
||||
package at.bitfire.davdroid.webdav
|
||||
|
||||
import java.util.concurrent.BlockingQueue
|
||||
import java.util.concurrent.LinkedBlockingDeque
|
||||
import java.util.concurrent.TimeUnit
|
||||
|
||||
class BlockingLifoQueue<E>(
|
||||
val base: LinkedBlockingDeque<E> = LinkedBlockingDeque<E>()
|
||||
): BlockingQueue<E> by base {
|
||||
|
||||
override fun element(): E = base.last
|
||||
override fun peek(): E? = base.peekLast()
|
||||
override fun poll(): E? = base.pollLast()
|
||||
override fun poll(timeout: Long, unit: TimeUnit?): E? = base.pollLast(timeout, unit)
|
||||
override fun remove(): E = base.removeLast()
|
||||
override fun take(): E = base.takeLast()
|
||||
|
||||
}
|
|
@ -17,7 +17,6 @@ import android.graphics.Point
|
|||
import android.media.ThumbnailUtils
|
||||
import android.net.ConnectivityManager
|
||||
import android.os.Build
|
||||
import android.os.Bundle
|
||||
import android.os.CancellationSignal
|
||||
import android.os.ParcelFileDescriptor
|
||||
import android.os.storage.StorageManager
|
||||
|
@ -39,11 +38,20 @@ import at.bitfire.davdroid.network.HttpClient
|
|||
import at.bitfire.davdroid.network.MemoryCookieStore
|
||||
import at.bitfire.davdroid.ui.webdav.WebdavMountsActivity
|
||||
import at.bitfire.davdroid.webdav.DavDocumentsProvider.DavDocumentsActor
|
||||
import at.bitfire.davdroid.webdav.cache.HeadResponseCache
|
||||
import at.bitfire.davdroid.webdav.cache.HeadResponseCacheBuilder
|
||||
import at.bitfire.davdroid.webdav.cache.ThumbnailCache
|
||||
import dagger.hilt.EntryPoint
|
||||
import dagger.hilt.InstallIn
|
||||
import dagger.hilt.android.EntryPointAccessors
|
||||
import dagger.hilt.components.SingletonComponent
|
||||
import kotlinx.coroutines.CoroutineScope
|
||||
import kotlinx.coroutines.Dispatchers
|
||||
import kotlinx.coroutines.async
|
||||
import kotlinx.coroutines.cancel
|
||||
import kotlinx.coroutines.launch
|
||||
import kotlinx.coroutines.runBlocking
|
||||
import kotlinx.coroutines.runInterruptible
|
||||
import kotlinx.coroutines.withTimeout
|
||||
import okhttp3.CookieJar
|
||||
import okhttp3.MediaType.Companion.toMediaTypeOrNull
|
||||
import okhttp3.RequestBody.Companion.toRequestBody
|
||||
|
@ -53,7 +61,6 @@ import java.io.FileNotFoundException
|
|||
import java.net.HttpURLConnection
|
||||
import java.util.concurrent.*
|
||||
import java.util.logging.Level
|
||||
import kotlin.math.min
|
||||
|
||||
/**
|
||||
* Provides functionality on WebDav documents.
|
||||
|
@ -82,15 +89,15 @@ class DavDocumentsProvider: DocumentsProvider() {
|
|||
)
|
||||
|
||||
const val MAX_NAME_ATTEMPTS = 5
|
||||
const val THUMBNAIL_TIMEOUT = 15L
|
||||
const val THUMBNAIL_TIMEOUT_MS = 15000L
|
||||
|
||||
fun notifyMountsChanged(context: Context) {
|
||||
context.contentResolver.notifyChange(buildRootsUri(context.getString(R.string.webdav_authority)), null)
|
||||
}
|
||||
}
|
||||
|
||||
val ourContext by lazy { context!! } // requireContext() requires API level 30
|
||||
val authority by lazy { ourContext.getString(R.string.webdav_authority) }
|
||||
private val ourContext by lazy { context!! } // requireContext() requires API level 30
|
||||
private val authority by lazy { ourContext.getString(R.string.webdav_authority) }
|
||||
|
||||
private val db by lazy { EntryPointAccessors.fromApplication(ourContext, DavDocumentsProviderEntryPoint::class.java).appDatabase() }
|
||||
private val mountDao by lazy { db.webDavMountDao() }
|
||||
|
@ -98,15 +105,12 @@ class DavDocumentsProvider: DocumentsProvider() {
|
|||
|
||||
private val credentialsStore by lazy { CredentialsStore(ourContext) }
|
||||
private val cookieStore by lazy { mutableMapOf<Long, CookieJar>() }
|
||||
private val headResponseCache by lazy { HeadResponseCache() }
|
||||
private val thumbnailCache by lazy { ThumbnailCache(ourContext) }
|
||||
private val headResponseCache by lazy { HeadResponseCacheBuilder.getInstance() }
|
||||
private val thumbnailCache by lazy { ThumbnailCache.getInstance(ourContext) }
|
||||
|
||||
private val connectivityManager by lazy { ourContext.getSystemService<ConnectivityManager>()!! }
|
||||
private val storageManager by lazy { ourContext.getSystemService<StorageManager>()!! }
|
||||
|
||||
private val executor by lazy {
|
||||
ThreadPoolExecutor(1, min(Runtime.getRuntime().availableProcessors(), 4), 30, TimeUnit.SECONDS, BlockingLifoQueue())
|
||||
}
|
||||
/** List of currently active [queryChildDocuments] runners.
|
||||
*
|
||||
* Key: document ID (directory) for which children are listed.
|
||||
|
@ -118,10 +122,6 @@ class DavDocumentsProvider: DocumentsProvider() {
|
|||
|
||||
override fun onCreate() = true
|
||||
|
||||
override fun shutdown() {
|
||||
executor.shutdown()
|
||||
}
|
||||
|
||||
|
||||
/*** query ***/
|
||||
|
||||
|
@ -222,10 +222,10 @@ class DavDocumentsProvider: DocumentsProvider() {
|
|||
|
||||
// Dispatch worker querying for the children and keep track of it
|
||||
val running = runningQueryChildren.getOrPut(parentId) {
|
||||
executor.submit {
|
||||
CoroutineScope(Dispatchers.IO).launch {
|
||||
actor.queryChildren(parent)
|
||||
// Once the query is done, set query as finished (not running)
|
||||
runningQueryChildren.put(parentId, false)
|
||||
runningQueryChildren[parentId] = false
|
||||
// .. and notify - effectively calling this method again
|
||||
ourContext.contentResolver.notifyChange(notificationUri, null)
|
||||
}
|
||||
|
@ -462,12 +462,18 @@ class DavDocumentsProvider: DocumentsProvider() {
|
|||
else -> throw UnsupportedOperationException("Mode $mode not supported by WebDAV")
|
||||
}
|
||||
|
||||
val fileInfo = headResponseCache.get(doc) {
|
||||
val deferredFileInfo = executor.submit(HeadInfoDownloader(client, url))
|
||||
signal?.setOnCancelListener {
|
||||
deferredFileInfo.cancel(true)
|
||||
val fileInfo = headResponseCache.getOrPutIfNotNull(doc.cacheKey()) {
|
||||
val response = CoroutineScope(Dispatchers.IO).async {
|
||||
runInterruptible {
|
||||
HeadResponse.fromUrl(client, url)
|
||||
}
|
||||
}
|
||||
signal?.setOnCancelListener {
|
||||
response.cancel("Cancelled by signal")
|
||||
}
|
||||
runBlocking {
|
||||
response.await()
|
||||
}
|
||||
deferredFileInfo.get()
|
||||
}
|
||||
Logger.log.fine("Received file info: $fileInfo")
|
||||
|
||||
|
@ -519,45 +525,51 @@ class DavDocumentsProvider: DocumentsProvider() {
|
|||
}
|
||||
|
||||
val doc = documentDao.get(documentId.toLong()) ?: throw FileNotFoundException()
|
||||
val thumbFile = thumbnailCache.get(doc, sizeHint) {
|
||||
|
||||
val docCacheKey = doc.cacheKey()
|
||||
if (docCacheKey == null) {
|
||||
Logger.log.warning("openDocumentThumbnail won't generate thumbnails when document state (ETag/Last-Modified) is unknown")
|
||||
return null
|
||||
}
|
||||
|
||||
val thumbFile = thumbnailCache.get(docCacheKey, sizeHint) {
|
||||
// create thumbnail
|
||||
val result = executor.submit(Callable<ByteArray> {
|
||||
actor.httpClient(doc.mountId).use { client ->
|
||||
val url = doc.toHttpUrl(db)
|
||||
val dav = DavResource(client.okHttpClient, url)
|
||||
var result: ByteArray? = null
|
||||
dav.get("image/*", null) { response ->
|
||||
response.body?.byteStream()?.use { data ->
|
||||
BitmapFactory.decodeStream(data)?.let { bitmap ->
|
||||
val thumb = ThumbnailUtils.extractThumbnail(bitmap, sizeHint.x, sizeHint.y)
|
||||
val baos = ByteArrayOutputStream()
|
||||
thumb.compress(Bitmap.CompressFormat.JPEG, 95, baos)
|
||||
result = baos.toByteArray()
|
||||
val job = CoroutineScope(Dispatchers.IO).async {
|
||||
withTimeout(THUMBNAIL_TIMEOUT_MS) {
|
||||
actor.httpClient(doc.mountId).use { client ->
|
||||
val url = doc.toHttpUrl(db)
|
||||
val dav = DavResource(client.okHttpClient, url)
|
||||
var result: ByteArray? = null
|
||||
runInterruptible {
|
||||
dav.get("image/*", null) { response ->
|
||||
response.body?.byteStream()?.use { data ->
|
||||
BitmapFactory.decodeStream(data)?.let { bitmap ->
|
||||
val thumb = ThumbnailUtils.extractThumbnail(bitmap, sizeHint.x, sizeHint.y)
|
||||
val baos = ByteArrayOutputStream()
|
||||
thumb.compress(Bitmap.CompressFormat.JPEG, 95, baos)
|
||||
result = baos.toByteArray()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
result
|
||||
}
|
||||
result
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
signal.setOnCancelListener {
|
||||
Logger.log.fine("Cancelling thumbnail for ${doc.name}")
|
||||
result.cancel(true)
|
||||
job.cancel("Cancelled by signal")
|
||||
}
|
||||
|
||||
val finalResult =
|
||||
try {
|
||||
result.get(THUMBNAIL_TIMEOUT, TimeUnit.SECONDS)
|
||||
} catch (e: TimeoutException) {
|
||||
Logger.log.warning("Couldn't generate thumbnail in time, cancelling")
|
||||
result.cancel(true)
|
||||
null
|
||||
} catch (e: Exception) {
|
||||
Logger.log.log(Level.WARNING, "Couldn't generate thumbnail", e)
|
||||
null
|
||||
try {
|
||||
runBlocking {
|
||||
job.await()
|
||||
}
|
||||
|
||||
finalResult
|
||||
} catch (e: Exception) {
|
||||
Logger.log.log(Level.WARNING, "Couldn't generate thumbnail", e)
|
||||
null
|
||||
}
|
||||
}
|
||||
|
||||
if (thumbFile != null)
|
||||
|
|
|
@ -4,7 +4,6 @@
|
|||
|
||||
package at.bitfire.davdroid.webdav
|
||||
|
||||
import at.bitfire.davdroid.webdav.cache.CacheUtils
|
||||
import java.time.Instant
|
||||
|
||||
data class DocumentState(
|
||||
|
@ -17,14 +16,10 @@ data class DocumentState(
|
|||
throw IllegalArgumentException("Either ETag or Last-Modified is required")
|
||||
}
|
||||
|
||||
fun asString(): String =
|
||||
when {
|
||||
eTag != null ->
|
||||
CacheUtils.md5("eTag", eTag)
|
||||
lastModified != null ->
|
||||
CacheUtils.md5("lastModified", lastModified)
|
||||
else ->
|
||||
throw IllegalStateException()
|
||||
}
|
||||
override fun toString() =
|
||||
if (eTag != null)
|
||||
"eTag=$eTag"
|
||||
else
|
||||
"lastModified=$lastModified"
|
||||
|
||||
}
|
|
@ -1,50 +0,0 @@
|
|||
/***************************************************************************************************
|
||||
* Copyright © All Contributors. See LICENSE and AUTHORS in the root directory for details.
|
||||
**************************************************************************************************/
|
||||
|
||||
package at.bitfire.davdroid.webdav
|
||||
|
||||
import at.bitfire.dav4jvm.DavResource
|
||||
import at.bitfire.dav4jvm.HttpUtils
|
||||
import at.bitfire.dav4jvm.property.GetETag
|
||||
import at.bitfire.davdroid.network.HttpClient
|
||||
import okhttp3.HttpUrl
|
||||
import java.time.Instant
|
||||
import java.util.*
|
||||
import java.util.concurrent.Callable
|
||||
|
||||
class HeadInfoDownloader(
|
||||
val client: HttpClient,
|
||||
val url: HttpUrl
|
||||
): Callable<HeadResponse> {
|
||||
|
||||
override fun call(): HeadResponse {
|
||||
var size: Long? = null
|
||||
var eTag: String? = null
|
||||
var lastModified: Instant? = null
|
||||
var supportsPartial: Boolean? = null
|
||||
|
||||
DavResource(client.okHttpClient, url).head { response ->
|
||||
response.header("ETag", null)?.let {
|
||||
val getETag = GetETag(it)
|
||||
if (!getETag.weak)
|
||||
eTag = getETag.eTag
|
||||
}
|
||||
response.header("Last-Modified", null)?.let {
|
||||
lastModified = HttpUtils.parseDate(it)?.toInstant()
|
||||
}
|
||||
response.headers["Content-Length"]?.let {
|
||||
size = it.toLong()
|
||||
}
|
||||
response.headers["Accept-Ranges"]?.let { acceptRangesStr ->
|
||||
val acceptRanges = acceptRangesStr.split(',').map { it.trim().lowercase() }
|
||||
when {
|
||||
acceptRanges.contains("none") -> supportsPartial = false
|
||||
acceptRanges.contains("bytes") -> supportsPartial = true
|
||||
}
|
||||
}
|
||||
}
|
||||
return HeadResponse(size, eTag, lastModified, supportsPartial)
|
||||
}
|
||||
|
||||
}
|
|
@ -4,6 +4,11 @@
|
|||
|
||||
package at.bitfire.davdroid.webdav
|
||||
|
||||
import at.bitfire.dav4jvm.DavResource
|
||||
import at.bitfire.dav4jvm.HttpUtils
|
||||
import at.bitfire.dav4jvm.property.GetETag
|
||||
import at.bitfire.davdroid.network.HttpClient
|
||||
import okhttp3.HttpUrl
|
||||
import java.time.Instant
|
||||
|
||||
/**
|
||||
|
@ -17,9 +22,44 @@ data class HeadResponse(
|
|||
|
||||
val supportsPartial: Boolean? = null
|
||||
) {
|
||||
|
||||
companion object {
|
||||
|
||||
fun fromUrl(client: HttpClient, url: HttpUrl): HeadResponse {
|
||||
var size: Long? = null
|
||||
var eTag: String? = null
|
||||
var lastModified: Instant? = null
|
||||
var supportsPartial: Boolean? = null
|
||||
|
||||
DavResource(client.okHttpClient, url).head { response ->
|
||||
response.header("ETag", null)?.let {
|
||||
val getETag = GetETag(it)
|
||||
if (!getETag.weak)
|
||||
eTag = getETag.eTag
|
||||
}
|
||||
response.header("Last-Modified", null)?.let {
|
||||
lastModified = HttpUtils.parseDate(it)?.toInstant()
|
||||
}
|
||||
response.headers["Content-Length"]?.let {
|
||||
size = it.toLong()
|
||||
}
|
||||
response.headers["Accept-Ranges"]?.let { acceptRangesStr ->
|
||||
val acceptRanges = acceptRangesStr.split(',').map { it.trim().lowercase() }
|
||||
when {
|
||||
acceptRanges.contains("none") -> supportsPartial = false
|
||||
acceptRanges.contains("bytes") -> supportsPartial = true
|
||||
}
|
||||
}
|
||||
}
|
||||
return HeadResponse(size, eTag, lastModified, supportsPartial)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
fun toDocumentState(): DocumentState? =
|
||||
if (eTag != null || lastModified != null)
|
||||
DocumentState(eTag, lastModified)
|
||||
else
|
||||
null
|
||||
|
||||
}
|
144
app/src/main/kotlin/at/bitfire/davdroid/webdav/PagingReader.kt
Normal file
144
app/src/main/kotlin/at/bitfire/davdroid/webdav/PagingReader.kt
Normal file
|
@ -0,0 +1,144 @@
|
|||
/*
|
||||
* Copyright © All Contributors. See LICENSE and AUTHORS in the root directory for details.
|
||||
*/
|
||||
|
||||
package at.bitfire.davdroid.webdav
|
||||
|
||||
import java.io.IOException
|
||||
import java.util.logging.Logger
|
||||
import kotlin.math.min
|
||||
|
||||
/**
|
||||
* Splits a resource into pages (segments) so that read accesses can be cached per page.
|
||||
*
|
||||
* For instance, if [fileSize] is 3 MB and [pageSize] is 2 MB, multiple read requests within the
|
||||
* first 2 MB will cause only the first page (0 – 2 MB) to be loaded once and then fulfilled
|
||||
* from within the cache. For requests between 2 MB and 3 MB, the second page (and in this case last)
|
||||
* is loaded and used.
|
||||
*
|
||||
* @param fileSize file size (must not change between read operations)
|
||||
* @param pageSize page size (big enough to cache efficiently, small enough to avoid unnecessary traffic and spare memory)
|
||||
* @param loader loads page content from the actual data source
|
||||
*/
|
||||
@Suppress("LocalVariableName")
|
||||
class PagingReader(
|
||||
private val fileSize: Long,
|
||||
private val pageSize: Int,
|
||||
private val loader: PageLoader
|
||||
) {
|
||||
|
||||
val logger: Logger = Logger.getLogger(javaClass.name)
|
||||
|
||||
/**
|
||||
* Interface for loading a page from the data source (like a local file
|
||||
* or a WebDAV resource).
|
||||
*/
|
||||
fun interface PageLoader {
|
||||
/**
|
||||
* Loads the requested data from the data source. For instance this could cause
|
||||
*
|
||||
* - a file seek + read or
|
||||
* - a ranged WebDAV request.
|
||||
*
|
||||
* @param offset position to start
|
||||
* @param size number of bytes to load
|
||||
*
|
||||
* @return array with bytes fetched from data source
|
||||
*/
|
||||
fun loadPage(offset: Long, size: Int): ByteArray
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a loaded page (meta information + data).
|
||||
*/
|
||||
class CachedPage(
|
||||
val idx: Long,
|
||||
val start: Long,
|
||||
val end: Long,
|
||||
val data: ByteArray
|
||||
)
|
||||
|
||||
/** currently loaded page */
|
||||
private var currentPage: CachedPage? = null
|
||||
|
||||
/**
|
||||
* Reads a given number of bytes from a given position.
|
||||
*
|
||||
* Will split the request into multiple page access operations, if necessary.
|
||||
*
|
||||
* @param offset starting position
|
||||
* @param _size number of bytes to read
|
||||
* @param dst destination where data are read into
|
||||
*
|
||||
* @return number of bytes read (may be smaller than [_size] if the file is not that big)
|
||||
*/
|
||||
fun read(offset: Long, _size: Int, dst: ByteArray): Int {
|
||||
// input validation
|
||||
if (offset > fileSize)
|
||||
throw IndexOutOfBoundsException()
|
||||
var remaining = min(_size.toLong(), fileSize - offset).toInt()
|
||||
|
||||
var transferred = 0
|
||||
while (remaining > 0) {
|
||||
val nrBytes = readPage(offset + transferred, remaining, dst, transferred)
|
||||
if (nrBytes == 0) // EOF
|
||||
break
|
||||
transferred += nrBytes
|
||||
remaining -= nrBytes
|
||||
}
|
||||
|
||||
return transferred
|
||||
}
|
||||
|
||||
/**
|
||||
* Tries to read a given number of bytes from a given position, but stays
|
||||
* within one page – it will not read across two pages.
|
||||
*
|
||||
* This method will determine the page that contains [position] and read only
|
||||
* from this page.
|
||||
*
|
||||
* @param position starting position
|
||||
* @param size number of bytes requested
|
||||
* @param dst destination where data are read into
|
||||
* @param dstOffset starting offset within destination array
|
||||
*
|
||||
* @return number of bytes read (may be less than [size] when the page ends before);
|
||||
* 0 guarantees that there are no more bytes (EOF)
|
||||
*/
|
||||
@Synchronized
|
||||
fun readPage(position: Long, size: Int, dst: ByteArray, dstOffset: Int): Int {
|
||||
logger.fine("read(position=$position, size=$size, dstOffset=$dstOffset)")
|
||||
|
||||
// read max. 1 page
|
||||
val pgIdx = position / pageSize
|
||||
val page = currentPage?.takeIf { it.idx == pgIdx } ?: run {
|
||||
val pgStart = pgIdx * pageSize
|
||||
val pgEnd = min((pgIdx + 1) * pageSize, fileSize)
|
||||
val pgSize = (pgEnd - pgStart).toInt()
|
||||
|
||||
val pageData =
|
||||
if (pgSize == 0)
|
||||
ByteArray(0) // don't load 0-byte pages
|
||||
else
|
||||
loader.loadPage(pgStart, pgSize)
|
||||
if (pageData.size != pgSize)
|
||||
throw IOException("Couldn't fetch whole file segment (expected $pgSize bytes, got ${pageData.size} bytes)")
|
||||
|
||||
val newPage = CachedPage(pgIdx, pgStart, pgEnd, pageData)
|
||||
currentPage = newPage
|
||||
newPage
|
||||
}
|
||||
|
||||
val pgSize = (page.end - page.start).toInt()
|
||||
logger.fine("pgIdx=${page.idx}, pgStart=${page.start}, pgEnd=${page.end}, pgSize=$pgSize")
|
||||
|
||||
val inPageStart = (position - page.start).toInt()
|
||||
val len = min(pgSize - inPageStart, size) // use the remaining number of bytes in the page, or less if less were requested
|
||||
logger.fine("inPageStart=$inPageStart, len=$len")
|
||||
|
||||
System.arraycopy(page.data, inPageStart, dst, dstOffset, len)
|
||||
|
||||
return len
|
||||
}
|
||||
|
||||
}
|
|
@ -5,7 +5,6 @@
|
|||
package at.bitfire.davdroid.webdav
|
||||
|
||||
import android.annotation.TargetApi
|
||||
import android.app.ActivityManager
|
||||
import android.content.Context
|
||||
import android.os.CancellationSignal
|
||||
import android.os.Handler
|
||||
|
@ -15,7 +14,6 @@ import android.system.ErrnoException
|
|||
import android.system.OsConstants
|
||||
import androidx.core.app.NotificationCompat
|
||||
import androidx.core.app.NotificationManagerCompat
|
||||
import androidx.core.content.getSystemService
|
||||
import at.bitfire.dav4jvm.DavResource
|
||||
import at.bitfire.dav4jvm.HttpUtils
|
||||
import at.bitfire.dav4jvm.exception.DavException
|
||||
|
@ -27,8 +25,7 @@ import at.bitfire.davdroid.ui.NotificationUtils
|
|||
import at.bitfire.davdroid.ui.NotificationUtils.notifyIfPossible
|
||||
import at.bitfire.davdroid.util.DavUtils
|
||||
import at.bitfire.davdroid.webdav.RandomAccessCallback.Wrapper.Companion.TIMEOUT_INTERVAL
|
||||
import at.bitfire.davdroid.webdav.cache.MemoryCache
|
||||
import at.bitfire.davdroid.webdav.cache.SegmentedCache
|
||||
import at.bitfire.davdroid.webdav.cache.PageCacheBuilder
|
||||
import okhttp3.Headers
|
||||
import okhttp3.HttpUrl
|
||||
import okhttp3.MediaType
|
||||
|
@ -46,47 +43,21 @@ import ru.nsk.kstatemachine.processEventBlocking
|
|||
import ru.nsk.kstatemachine.state
|
||||
import ru.nsk.kstatemachine.transitionOn
|
||||
import java.io.InterruptedIOException
|
||||
import java.lang.ref.WeakReference
|
||||
import java.net.HttpURLConnection
|
||||
import java.util.Timer
|
||||
import java.util.TimerTask
|
||||
import java.util.logging.Level
|
||||
import kotlin.concurrent.schedule
|
||||
|
||||
typealias MemorySegmentCache = MemoryCache<SegmentedCache.SegmentKey<RandomAccessCallback.DocumentKey>>
|
||||
|
||||
@TargetApi(26)
|
||||
class RandomAccessCallback private constructor(
|
||||
val context: Context,
|
||||
val httpClient: HttpClient,
|
||||
val url: HttpUrl,
|
||||
val mimeType: MediaType?,
|
||||
val headResponse: HeadResponse,
|
||||
val cancellationSignal: CancellationSignal?
|
||||
): ProxyFileDescriptorCallback(), SegmentedCache.PageLoader<RandomAccessCallback.DocumentKey> {
|
||||
|
||||
companion object {
|
||||
/** one GET request per 2 MB */
|
||||
const val PAGE_SIZE: Int = (2*FileUtils.ONE_MB).toInt()
|
||||
|
||||
private var _memoryCache: WeakReference<MemorySegmentCache>? = null
|
||||
|
||||
@Synchronized
|
||||
fun getMemoryCache(context: Context): MemorySegmentCache {
|
||||
val cache = _memoryCache?.get()
|
||||
if (cache != null)
|
||||
return cache
|
||||
|
||||
Logger.log.fine("Creating memory cache")
|
||||
val maxHeapSizeMB = context.getSystemService<ActivityManager>()!!.memoryClass
|
||||
val cacheSize = maxHeapSizeMB * FileUtils.ONE_MB.toInt() / 2
|
||||
val newCache = MemorySegmentCache(cacheSize)
|
||||
|
||||
_memoryCache = WeakReference(newCache)
|
||||
return newCache
|
||||
}
|
||||
|
||||
}
|
||||
headResponse: HeadResponse,
|
||||
private val cancellationSignal: CancellationSignal?
|
||||
): ProxyFileDescriptorCallback(), PagingReader.PageLoader {
|
||||
|
||||
private val dav = DavResource(httpClient.okHttpClient, url)
|
||||
|
||||
|
@ -102,11 +73,10 @@ class RandomAccessCallback private constructor(
|
|||
.setSubText(FileUtils.byteCountToDisplaySize(fileSize))
|
||||
.setSmallIcon(R.drawable.ic_storage_notify)
|
||||
.setOngoing(true)
|
||||
val notificationTag = url.toString()
|
||||
|
||||
val memoryCache = getMemoryCache(context)
|
||||
val cache = SegmentedCache(PAGE_SIZE, this, memoryCache)
|
||||
private val notificationTag = url.toString()
|
||||
|
||||
private val pagingReader = PagingReader(fileSize, PageCacheBuilder.MAX_PAGE_SIZE, this)
|
||||
private val pageCache = PageCacheBuilder.getInstance()
|
||||
|
||||
override fun onFsync() { /* not used */ }
|
||||
|
||||
|
@ -132,10 +102,9 @@ class RandomAccessCallback private constructor(
|
|||
)
|
||||
|
||||
try {
|
||||
val docKey = DocumentKey(url, documentState)
|
||||
return cache.read(docKey, offset, size, data)
|
||||
return pagingReader.read(offset, size, data)
|
||||
} catch (e: Exception) {
|
||||
Logger.log.log(Level.WARNING, "Couldn't read remote file", e)
|
||||
Logger.log.log(Level.WARNING, "Couldn't read from WebDAV resource", e)
|
||||
throw e.toErrNoException("onRead")
|
||||
}
|
||||
}
|
||||
|
@ -152,38 +121,35 @@ class RandomAccessCallback private constructor(
|
|||
}
|
||||
|
||||
|
||||
override fun load(key: SegmentedCache.SegmentKey<DocumentKey>, segmentSize: Int): ByteArray {
|
||||
if (key.documentKey.resource != url || key.documentKey.state != documentState)
|
||||
throw IllegalArgumentException()
|
||||
Logger.log.fine("Loading page $key")
|
||||
override fun loadPage(offset: Long, size: Int): ByteArray {
|
||||
Logger.log.fine("Loading page $url $offset/$size")
|
||||
return pageCache.getOrPut(PageCacheBuilder.PageIdentifier(url, offset, size)) {
|
||||
val ifMatch: Headers =
|
||||
documentState.eTag?.let { eTag ->
|
||||
Headers.headersOf("If-Match", "\"$eTag\"")
|
||||
} ?:
|
||||
documentState.lastModified?.let { lastModified ->
|
||||
Headers.headersOf("If-Unmodified-Since", HttpUtils.formatDate(lastModified))
|
||||
} ?: throw IllegalStateException("ETag/Last-Modified required for random access")
|
||||
|
||||
val ifMatch: Headers =
|
||||
documentState.eTag?.let { eTag ->
|
||||
Headers.headersOf("If-Match", "\"$eTag\"")
|
||||
} ?:
|
||||
documentState.lastModified?.let { lastModified ->
|
||||
Headers.headersOf("If-Unmodified-Since", HttpUtils.formatDate(lastModified))
|
||||
} ?: throw IllegalStateException("ETag/Last-Modified required for random access")
|
||||
var result: ByteArray? = null
|
||||
dav.getRange(
|
||||
DavUtils.acceptAnything(preferred = mimeType),
|
||||
offset,
|
||||
size,
|
||||
ifMatch
|
||||
) { response ->
|
||||
if (response.code == 200) // server doesn't support ranged requests
|
||||
throw PartialContentNotSupportedException()
|
||||
else if (response.code != 206)
|
||||
throw HttpException(response)
|
||||
|
||||
var result: ByteArray? = null
|
||||
dav.getRange(
|
||||
DavUtils.acceptAnything(preferred = mimeType),
|
||||
key.segment * PAGE_SIZE.toLong(),
|
||||
PAGE_SIZE,
|
||||
ifMatch
|
||||
) { response ->
|
||||
if (response.code == 200) // server doesn't support ranged requests
|
||||
throw PartialContentNotSupportedException()
|
||||
else if (response.code != 206)
|
||||
throw HttpException(response)
|
||||
|
||||
result = response.body?.bytes()
|
||||
result = response.body?.bytes()
|
||||
}
|
||||
return@getOrPut result ?: throw DavException("No response body")
|
||||
}
|
||||
|
||||
return result ?: throw DavException("No response body")
|
||||
}
|
||||
|
||||
|
||||
private fun throwIfCancelled(functionName: String) {
|
||||
if (cancellationSignal?.isCanceled == true) {
|
||||
Logger.log.warning("Random file access cancelled, throwing ErrnoException(EINTR)")
|
||||
|
@ -200,6 +166,7 @@ class RandomAccessCallback private constructor(
|
|||
HttpURLConnection.HTTP_NOT_FOUND -> OsConstants.ENOENT
|
||||
else -> OsConstants.EIO
|
||||
}
|
||||
is IndexOutOfBoundsException -> OsConstants.ENXIO // no such [device or] address, see man lseek (2)
|
||||
is InterruptedIOException -> OsConstants.EINTR
|
||||
is PartialContentNotSupportedException -> OsConstants.EOPNOTSUPP
|
||||
else -> OsConstants.EIO
|
||||
|
|
|
@ -1,70 +0,0 @@
|
|||
/***************************************************************************************************
|
||||
* Copyright © All Contributors. See LICENSE and AUTHORS in the root directory for details.
|
||||
**************************************************************************************************/
|
||||
|
||||
package at.bitfire.davdroid.webdav
|
||||
|
||||
import android.content.Context
|
||||
import android.graphics.Point
|
||||
import android.os.Build
|
||||
import android.os.storage.StorageManager
|
||||
import androidx.annotation.WorkerThread
|
||||
import androidx.core.content.getSystemService
|
||||
import at.bitfire.davdroid.db.WebDavDocument
|
||||
import at.bitfire.davdroid.log.Logger
|
||||
import at.bitfire.davdroid.webdav.cache.CacheUtils
|
||||
import at.bitfire.davdroid.webdav.cache.DiskCache
|
||||
import org.apache.commons.io.FileUtils
|
||||
import java.io.File
|
||||
import java.util.LinkedList
|
||||
import java.util.UUID
|
||||
|
||||
@WorkerThread
|
||||
class ThumbnailCache(context: Context) {
|
||||
|
||||
val cache: DiskCache
|
||||
|
||||
init {
|
||||
val storageManager = context.getSystemService<StorageManager>()!!
|
||||
val cacheDir = File(context.cacheDir, "webdav/thumbnail")
|
||||
val maxBytes = if (Build.VERSION.SDK_INT >= 26)
|
||||
storageManager.getCacheQuotaBytes(storageManager.getUuidForPath(cacheDir)) / 2
|
||||
else
|
||||
50*FileUtils.ONE_MB
|
||||
Logger.log.info("Initializing WebDAV thumbnail cache with ${FileUtils.byteCountToDisplaySize(maxBytes)}")
|
||||
|
||||
cache = DiskCache(cacheDir, maxBytes)
|
||||
}
|
||||
|
||||
|
||||
fun get(doc: WebDavDocument, sizeHint: Point, generate: () -> ByteArray?): File? {
|
||||
val key = docToKey(doc, sizeHint)
|
||||
return cache.getFile(key, generate)
|
||||
}
|
||||
|
||||
private fun docToKey(doc: WebDavDocument, sizeHint: Point): String {
|
||||
val segments = LinkedList<Any>()
|
||||
segments += doc.id
|
||||
segments += sizeHint.x
|
||||
segments += sizeHint.y
|
||||
|
||||
when {
|
||||
doc.eTag != null -> {
|
||||
segments += "eTag"
|
||||
segments += doc.eTag!!
|
||||
}
|
||||
doc.lastModified != null -> {
|
||||
segments += "lastModified"
|
||||
segments += doc.lastModified!!
|
||||
}
|
||||
doc.size != null -> {
|
||||
segments += "size"
|
||||
segments += doc.size!!
|
||||
}
|
||||
else ->
|
||||
segments += UUID.randomUUID()
|
||||
}
|
||||
return CacheUtils.md5(*segments.toArray())
|
||||
}
|
||||
|
||||
}
|
|
@ -1,12 +0,0 @@
|
|||
/***************************************************************************************************
|
||||
* Copyright © All Contributors. See LICENSE and AUTHORS in the root directory for details.
|
||||
**************************************************************************************************/
|
||||
|
||||
package at.bitfire.davdroid.webdav.cache
|
||||
|
||||
interface Cache<K> {
|
||||
|
||||
fun get(key: K): ByteArray?
|
||||
fun getOrPut(key: K, generate: () -> ByteArray): ByteArray
|
||||
|
||||
}
|
|
@ -1,21 +0,0 @@
|
|||
/***************************************************************************************************
|
||||
* Copyright © All Contributors. See LICENSE and AUTHORS in the root directory for details.
|
||||
**************************************************************************************************/
|
||||
|
||||
package at.bitfire.davdroid.webdav.cache
|
||||
|
||||
import java.net.URLEncoder
|
||||
import java.security.MessageDigest
|
||||
|
||||
object CacheUtils {
|
||||
|
||||
fun md5(vararg data: Any): String {
|
||||
val str = data.joinToString("/") { entry ->
|
||||
URLEncoder.encode(entry.toString(), "UTF-8")
|
||||
}
|
||||
|
||||
val md5 = MessageDigest.getInstance("MD5").digest(str.toByteArray())
|
||||
return md5.joinToString("") { b -> String.format("%02x", b) }
|
||||
}
|
||||
|
||||
}
|
|
@ -9,7 +9,6 @@ import org.apache.commons.io.FileUtils
|
|||
import java.io.File
|
||||
import java.io.IOException
|
||||
import java.util.logging.Level
|
||||
import kotlin.math.min
|
||||
|
||||
/**
|
||||
* Disk-based cache that maps [String]s to [ByteArray]s.
|
||||
|
@ -37,61 +36,6 @@ class DiskCache(
|
|||
throw IllegalArgumentException("Couldn't create cache in $cacheDir")
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Gets the cached value with the given key. If the key is not in the cache, the value is being generated from the
|
||||
* callback, stored in the cache and returned.
|
||||
*
|
||||
* @param key key of the cached entry
|
||||
* @param offset used if only a part of the value is required
|
||||
* @param maxSize used if only a part of the value is required
|
||||
* @param generate callback that generates the whole value (not only the part given by [offset] and [maxSize]!)
|
||||
*
|
||||
* @return the value (either taken from the cache or from [generate]), limited [offset]/[maxSize]
|
||||
*/
|
||||
fun get(key: String, offset: Long = 0, maxSize: Int = Int.MAX_VALUE, generate: () -> ByteArray?): ByteArray? {
|
||||
synchronized(this) {
|
||||
val file = File(cacheDir, key)
|
||||
if (file.exists()) {
|
||||
// cache hit
|
||||
file.inputStream().use { input ->
|
||||
if (offset != 0L)
|
||||
if (input.skip(offset) != offset)
|
||||
throw IllegalStateException("Couldn't skip first $offset bytes of $file")
|
||||
|
||||
val size = min(
|
||||
maxSize.toLong(),
|
||||
file.length() - offset
|
||||
).toInt()
|
||||
val buffer = ByteArray(size)
|
||||
input.read(buffer)
|
||||
return buffer
|
||||
}
|
||||
} else {
|
||||
// file does't exist yet; cache miss
|
||||
val result = generate() ?: return null
|
||||
|
||||
file.outputStream().use { output ->
|
||||
try {
|
||||
output.write(result)
|
||||
} catch (e: IOException) {
|
||||
// write error; disk full?
|
||||
Logger.log.log(Level.WARNING, "Couldn't write cache entry $key", e)
|
||||
file.delete()
|
||||
}
|
||||
}
|
||||
|
||||
if (writeCounter++.mod(CLEANUP_RATE) == 0)
|
||||
trim()
|
||||
|
||||
if (maxSize != -1)
|
||||
return result.copyOfRange(offset.toInt(), min(offset.toInt() + maxSize, result.size))
|
||||
else
|
||||
return result
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the file that contains the given key. If the key is not in the cache, the value is being generated from the
|
||||
* callback, stored in the cache and the backing file is returned.
|
||||
|
@ -104,7 +48,7 @@ class DiskCache(
|
|||
*
|
||||
* @return the file that contains the value
|
||||
*/
|
||||
fun getFile(key: String, generate: () -> ByteArray?): File? {
|
||||
fun getFileOrPut(key: String, generate: () -> ByteArray?): File? {
|
||||
synchronized(this) {
|
||||
val file = File(cacheDir, key)
|
||||
if (file.exists()) {
|
||||
|
|
48
app/src/main/kotlin/at/bitfire/davdroid/webdav/cache/ExtendedLruCache.kt
vendored
Normal file
48
app/src/main/kotlin/at/bitfire/davdroid/webdav/cache/ExtendedLruCache.kt
vendored
Normal file
|
@ -0,0 +1,48 @@
|
|||
/*
|
||||
* Copyright © All Contributors. See LICENSE and AUTHORS in the root directory for details.
|
||||
*/
|
||||
|
||||
package at.bitfire.davdroid.webdav.cache
|
||||
|
||||
import android.util.LruCache
|
||||
|
||||
/**
|
||||
* Simple thread-safe cache class based on [LruCache] that provides atomic [getOrPut]
|
||||
* and [getOrPutIfNotNull] methods.
|
||||
*/
|
||||
class ExtendedLruCache<K, V>(maxSize: Int) : LruCache<K, V>(maxSize) {
|
||||
|
||||
/**
|
||||
* Retrieves data from the cache, if available. Otherwise calls a callback to
|
||||
* compute the data and puts it into the cache.
|
||||
*
|
||||
* @param key cache key to request
|
||||
* @param compute callback that computes the data for the cache key
|
||||
*
|
||||
* @return data (from cache in case of a cache hit or generated by [compute] in case of a cache miss)
|
||||
*/
|
||||
@Synchronized
|
||||
fun getOrPut(key: K, compute: () -> V): V {
|
||||
// use cached value, if possible
|
||||
val data = get(key)
|
||||
if (data != null)
|
||||
return data
|
||||
|
||||
// compute new value otherwise
|
||||
val newValue = compute()
|
||||
put(key, newValue)
|
||||
return newValue
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as [getOrPut], but allows [key] to be `null`. In this case, the
|
||||
* cache will be bypassed and the callback will always be executed.
|
||||
*/
|
||||
@Synchronized
|
||||
fun getOrPutIfNotNull(key: K?, compute: () -> V): V =
|
||||
if (key == null)
|
||||
compute()
|
||||
else
|
||||
getOrPut(key, compute)
|
||||
|
||||
}
|
|
@ -1,44 +0,0 @@
|
|||
/***************************************************************************************************
|
||||
* Copyright © All Contributors. See LICENSE and AUTHORS in the root directory for details.
|
||||
**************************************************************************************************/
|
||||
|
||||
package at.bitfire.davdroid.webdav.cache
|
||||
|
||||
import android.util.LruCache
|
||||
import at.bitfire.davdroid.db.WebDavDocument
|
||||
import at.bitfire.davdroid.webdav.DocumentState
|
||||
import at.bitfire.davdroid.webdav.HeadResponse
|
||||
import java.time.Instant
|
||||
import java.util.*
|
||||
|
||||
class HeadResponseCache {
|
||||
|
||||
companion object {
|
||||
const val MAX_SIZE = 50
|
||||
}
|
||||
|
||||
data class Key(
|
||||
val docId: Long,
|
||||
val documentState: DocumentState
|
||||
)
|
||||
|
||||
val cache = LruCache<Key, HeadResponse>(MAX_SIZE)
|
||||
|
||||
|
||||
@Synchronized
|
||||
fun get(doc: WebDavDocument, generate: () -> HeadResponse): HeadResponse {
|
||||
var key: Key? = null
|
||||
if (doc.eTag != null || doc.lastModified != null) {
|
||||
key = Key(doc.id, DocumentState(doc.eTag, doc.lastModified?.let { ts -> Instant.ofEpochMilli(ts) }))
|
||||
cache.get(key)?.let { info ->
|
||||
return info
|
||||
}
|
||||
}
|
||||
|
||||
val info = generate()
|
||||
if (key != null)
|
||||
cache.put(key, info)
|
||||
return info
|
||||
}
|
||||
|
||||
}
|
31
app/src/main/kotlin/at/bitfire/davdroid/webdav/cache/HeadResponseCacheBuilder.kt
vendored
Normal file
31
app/src/main/kotlin/at/bitfire/davdroid/webdav/cache/HeadResponseCacheBuilder.kt
vendored
Normal file
|
@ -0,0 +1,31 @@
|
|||
/***************************************************************************************************
|
||||
* Copyright © All Contributors. See LICENSE and AUTHORS in the root directory for details.
|
||||
**************************************************************************************************/
|
||||
|
||||
package at.bitfire.davdroid.webdav.cache
|
||||
|
||||
import at.bitfire.davdroid.db.WebDavDocument
|
||||
import at.bitfire.davdroid.webdav.HeadResponse
|
||||
import java.lang.ref.WeakReference
|
||||
|
||||
/**
|
||||
* Memory cache for HEAD responses. Using a [WebDavDocument.CacheKey] as key guarantees that
|
||||
* the cached response won't be used anymore if the ETag changes.
|
||||
*/
|
||||
typealias HeadResponseCache = ExtendedLruCache<WebDavDocument.CacheKey, HeadResponse>
|
||||
|
||||
object HeadResponseCacheBuilder {
|
||||
|
||||
private const val MAX_ENTRIES = 50
|
||||
|
||||
private var _cache: WeakReference<HeadResponseCache>? = null
|
||||
|
||||
@Synchronized
|
||||
fun getInstance(): HeadResponseCache {
|
||||
_cache?.get()?.let { return it }
|
||||
val newCache = HeadResponseCache(MAX_ENTRIES)
|
||||
_cache = WeakReference(newCache)
|
||||
return newCache
|
||||
}
|
||||
|
||||
}
|
|
@ -1,30 +0,0 @@
|
|||
/***************************************************************************************************
|
||||
* Copyright © All Contributors. See LICENSE and AUTHORS in the root directory for details.
|
||||
**************************************************************************************************/
|
||||
|
||||
package at.bitfire.davdroid.webdav.cache
|
||||
|
||||
import android.util.LruCache
|
||||
|
||||
class MemoryCache<K>(maxMemory: Int): Cache<K> {
|
||||
|
||||
private val storage = object: LruCache<K, ByteArray>(maxMemory) {
|
||||
// measure cache size by ByteArray size
|
||||
override fun sizeOf(key: K, value: ByteArray) = value.size
|
||||
}
|
||||
|
||||
override fun get(key: K): ByteArray? = storage.get(key)
|
||||
|
||||
override fun getOrPut(key: K, generate: () -> ByteArray): ByteArray {
|
||||
synchronized(storage) {
|
||||
val cached = storage[key]
|
||||
if (cached != null)
|
||||
return cached
|
||||
|
||||
val data = generate()
|
||||
storage.put(key, data)
|
||||
return data
|
||||
}
|
||||
}
|
||||
|
||||
}
|
39
app/src/main/kotlin/at/bitfire/davdroid/webdav/cache/PageCacheBuilder.kt
vendored
Normal file
39
app/src/main/kotlin/at/bitfire/davdroid/webdav/cache/PageCacheBuilder.kt
vendored
Normal file
|
@ -0,0 +1,39 @@
|
|||
/*
|
||||
* Copyright © All Contributors. See LICENSE and AUTHORS in the root directory for details.
|
||||
*/
|
||||
|
||||
package at.bitfire.davdroid.webdav.cache
|
||||
|
||||
import okhttp3.HttpUrl
|
||||
import org.apache.commons.io.FileUtils
|
||||
import java.lang.ref.WeakReference
|
||||
|
||||
/**
|
||||
* Memory cache for pages of a WebDAV resources (as they're requested by a [at.bitfire.davdroid.webdav.PagingReader.PageLoader]).
|
||||
*
|
||||
* Allows that multiple pages are kept in memory.
|
||||
*/
|
||||
typealias PageCache = ExtendedLruCache<PageCacheBuilder.PageIdentifier, ByteArray>
|
||||
|
||||
object PageCacheBuilder {
|
||||
|
||||
const val MAX_PAGE_SIZE = 2 * FileUtils.ONE_MB.toInt()
|
||||
private const val MAX_ENTRIES = 2 // cache up to 2 pages (4 MB in total) in memory
|
||||
|
||||
private var _cache: WeakReference<PageCache>? = null
|
||||
|
||||
@Synchronized
|
||||
fun getInstance(): PageCache {
|
||||
_cache?.get()?.let { return it }
|
||||
val newCache = PageCache(MAX_ENTRIES)
|
||||
_cache = WeakReference(newCache)
|
||||
return newCache
|
||||
}
|
||||
|
||||
data class PageIdentifier(
|
||||
val url: HttpUrl,
|
||||
val offset: Long,
|
||||
val size: Int
|
||||
)
|
||||
|
||||
}
|
|
@ -1,85 +0,0 @@
|
|||
/***************************************************************************************************
|
||||
* Copyright © All Contributors. See LICENSE and AUTHORS in the root directory for details.
|
||||
**************************************************************************************************/
|
||||
|
||||
package at.bitfire.davdroid.webdav.cache
|
||||
|
||||
import at.bitfire.davdroid.log.Logger
|
||||
|
||||
class SegmentedCache<K>(
|
||||
val pageSize: Int,
|
||||
val loader: PageLoader<K>,
|
||||
val backingCache: Cache<SegmentKey<K>>
|
||||
) {
|
||||
|
||||
fun read(key: K, offset: Long, size: Int, dst: ByteArray): Int {
|
||||
var transferred = 0
|
||||
|
||||
var pageIdx = (offset / pageSize).toInt()
|
||||
var pageOffset = offset.mod(pageSize)
|
||||
|
||||
while (true) {
|
||||
Logger.log.fine("Reading $key from offset: $offset, size: $size")
|
||||
|
||||
// read next chunk
|
||||
val data = try {
|
||||
val pageKey = SegmentKey(key, pageIdx)
|
||||
backingCache.getOrPut(pageKey) {
|
||||
loader.load(pageKey, pageSize)
|
||||
}
|
||||
} catch (e: IndexOutOfBoundsException) {
|
||||
// pageIdx is beyond the last page; return immediately
|
||||
break
|
||||
}
|
||||
Logger.log.fine("Got page $pageIdx with ${data.size} bytes")
|
||||
|
||||
/* Calculate the number of bytes we can actually copy. There are two cases when less
|
||||
* than the full page (data.size) has to be copied:
|
||||
* 1. At the beginnnig, we may not need the full page (pageOffset != 0).
|
||||
* 2. At the end, we need only the requested number of bytes. */
|
||||
var usableBytes = data.size - pageOffset
|
||||
if (usableBytes > size - transferred)
|
||||
usableBytes = size - transferred
|
||||
|
||||
// copy to destination
|
||||
System.arraycopy(data, pageOffset, dst, transferred, usableBytes)
|
||||
transferred += usableBytes
|
||||
|
||||
/* We have two termination conditions:
|
||||
* 1. It was the last page returned by the loader. We can know this by
|
||||
* a) data.size < pageSize, or
|
||||
* b) loader.load throws an IndexOutOfBoundsException.
|
||||
* 2. The number of requested bytes to transfer has been reached. */
|
||||
if (data.size < pageSize || transferred == size)
|
||||
break
|
||||
|
||||
pageIdx++
|
||||
pageOffset = 0
|
||||
}
|
||||
|
||||
return transferred
|
||||
}
|
||||
|
||||
|
||||
data class SegmentKey<K>(
|
||||
val documentKey: K,
|
||||
val segment: Int
|
||||
)
|
||||
|
||||
interface PageLoader<K> {
|
||||
|
||||
/**
|
||||
* Loads the given segment of the document. For instance, this could send a ranged
|
||||
* HTTP request and return the result.
|
||||
*
|
||||
* @param key document and segment number
|
||||
* @param segmentSize segment size (used to calculate the requested byte position and number of bytes)
|
||||
*
|
||||
* @return data within the requested range
|
||||
* @throws IndexOutOfBoundsException if the requested segment doesn't exist
|
||||
*/
|
||||
fun load(key: SegmentedCache.SegmentKey<K>, segmentSize: Int): ByteArray
|
||||
|
||||
}
|
||||
|
||||
}
|
66
app/src/main/kotlin/at/bitfire/davdroid/webdav/cache/ThumbnailCache.kt
vendored
Normal file
66
app/src/main/kotlin/at/bitfire/davdroid/webdav/cache/ThumbnailCache.kt
vendored
Normal file
|
@ -0,0 +1,66 @@
|
|||
/*
|
||||
* Copyright © All Contributors. See LICENSE and AUTHORS in the root directory for details.
|
||||
*/
|
||||
|
||||
package at.bitfire.davdroid.webdav.cache
|
||||
|
||||
import android.content.Context
|
||||
import android.graphics.Point
|
||||
import android.os.Build
|
||||
import android.os.storage.StorageManager
|
||||
import androidx.core.content.getSystemService
|
||||
import at.bitfire.davdroid.db.WebDavDocument
|
||||
import at.bitfire.davdroid.log.Logger
|
||||
import org.apache.commons.codec.digest.DigestUtils
|
||||
import org.apache.commons.io.FileUtils
|
||||
import java.io.File
|
||||
|
||||
/**
|
||||
* Simple disk cache for image thumbnails.
|
||||
*/
|
||||
class ThumbnailCache private constructor(context: Context) {
|
||||
|
||||
companion object {
|
||||
|
||||
private var _instance: ThumbnailCache? = null
|
||||
|
||||
@Synchronized
|
||||
fun getInstance(context: Context): ThumbnailCache {
|
||||
_instance?.let { return it }
|
||||
|
||||
val newInstance = ThumbnailCache(context)
|
||||
_instance = newInstance
|
||||
return newInstance
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
val storage: DiskCache
|
||||
|
||||
init {
|
||||
val storageManager = context.getSystemService<StorageManager>()!!
|
||||
val cacheDir = File(context.cacheDir, "webdav/thumbnail")
|
||||
val maxBytes = if (Build.VERSION.SDK_INT >= 26)
|
||||
storageManager.getCacheQuotaBytes(storageManager.getUuidForPath(cacheDir)) / 2
|
||||
else
|
||||
50*FileUtils.ONE_MB
|
||||
Logger.log.info("Initializing WebDAV thumbnail cache with ${FileUtils.byteCountToDisplaySize(maxBytes)}")
|
||||
|
||||
storage = DiskCache(cacheDir, maxBytes)
|
||||
}
|
||||
|
||||
|
||||
fun get(docKey: WebDavDocument.CacheKey, sizeHint: Point, generate: () -> ByteArray?): File? {
|
||||
val key = Key(docKey, sizeHint)
|
||||
return storage.getFileOrPut(key.asString(), generate)
|
||||
}
|
||||
|
||||
data class Key(
|
||||
val document: WebDavDocument.CacheKey,
|
||||
val size: Point
|
||||
) {
|
||||
fun asString(): String =
|
||||
DigestUtils.sha1Hex("${document.docId} ${document.documentState.eTag} ${document.documentState.lastModified} ${size.x}x${size.y}")
|
||||
}
|
||||
|
||||
}
|
|
@ -1,56 +0,0 @@
|
|||
/***************************************************************************************************
|
||||
* Copyright © All Contributors. See LICENSE and AUTHORS in the root directory for details.
|
||||
**************************************************************************************************/
|
||||
|
||||
package at.bitfire.davdroid.webdav
|
||||
|
||||
import org.junit.Assert.assertEquals
|
||||
import org.junit.Before
|
||||
import org.junit.Test
|
||||
import java.util.concurrent.TimeUnit
|
||||
|
||||
class BlockingLifoQueueTest {
|
||||
|
||||
var queue = BlockingLifoQueue<Int>()
|
||||
|
||||
@Before
|
||||
fun prepare() {
|
||||
queue.clear()
|
||||
queue.addAll(arrayOf(1, 2))
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testElement() {
|
||||
assertEquals(2, queue.element())
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testPeek() {
|
||||
assertEquals(2, queue.peek())
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testPoll() {
|
||||
assertEquals(2, queue.poll())
|
||||
assertEquals(1, queue.poll())
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testPoll_Timeout() {
|
||||
assertEquals(2, queue.poll(1, TimeUnit.SECONDS))
|
||||
assertEquals(1, queue.poll(1, TimeUnit.SECONDS))
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testRemove() {
|
||||
assertEquals(2, queue.remove())
|
||||
assertEquals(1, queue.remove())
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testTake() {
|
||||
assertEquals(2, queue.take())
|
||||
assertEquals(1, queue.take())
|
||||
}
|
||||
|
||||
}
|
|
@ -8,7 +8,10 @@ import at.bitfire.davdroid.webdav.cache.DiskCache
|
|||
import org.apache.commons.io.FileUtils
|
||||
import org.apache.commons.io.IOUtils
|
||||
import org.junit.After
|
||||
import org.junit.Assert.*
|
||||
import org.junit.Assert.assertArrayEquals
|
||||
import org.junit.Assert.assertEquals
|
||||
import org.junit.Assert.assertNull
|
||||
import org.junit.Assert.assertTrue
|
||||
import org.junit.Before
|
||||
import org.junit.Rule
|
||||
import org.junit.Test
|
||||
|
@ -44,47 +47,13 @@ class DiskCacheTest {
|
|||
}
|
||||
|
||||
|
||||
@Test
|
||||
fun testGet_Null() {
|
||||
assertNull(cache.get(SOME_KEY) { null })
|
||||
assertEquals(0, cache.entries())
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testGet_NotNull() {
|
||||
assertArrayEquals(SOME_VALUE, cache.get(SOME_KEY) { SOME_VALUE })
|
||||
|
||||
// non-null value should have been written to cache
|
||||
assertEquals(1, cache.entries())
|
||||
assertArrayEquals(SOME_VALUE, cache.get(SOME_KEY) { SOME_OTHER_VALUE })
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testGet_NotNull_Partial() {
|
||||
assertArrayEquals(ByteArray(2) { (it+1).toByte() }, cache.get(SOME_KEY, 1, 2) { SOME_VALUE })
|
||||
|
||||
// full non-null value should have been written to cache
|
||||
assertEquals(1, cache.entries())
|
||||
assertArrayEquals(SOME_VALUE, cache.get(SOME_KEY) { SOME_OTHER_VALUE })
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testGet_NotNull_Partial_LargerThanSize() {
|
||||
assertArrayEquals(ByteArray(SOME_VALUE_LENGTH - 1) { (it+1).toByte() }, cache.get(SOME_KEY, 1, SOME_VALUE_LENGTH*2) { SOME_VALUE })
|
||||
|
||||
// full non-null value should have been written to cache
|
||||
assertEquals(1, cache.entries())
|
||||
assertArrayEquals(SOME_VALUE, cache.get(SOME_KEY) { SOME_OTHER_VALUE })
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
fun testGetFile_Null() {
|
||||
assertNull(cache.getFile(SOME_KEY) { null })
|
||||
assertNull(cache.getFileOrPut(SOME_KEY) { null })
|
||||
|
||||
// null value shouldn't have been written to cache
|
||||
assertEquals(0, cache.entries())
|
||||
val file = cache.getFile(SOME_KEY) { SOME_VALUE }
|
||||
val file = cache.getFileOrPut(SOME_KEY) { SOME_VALUE }
|
||||
file!!.inputStream().use { input ->
|
||||
assertArrayEquals(SOME_VALUE, IOUtils.toByteArray(input))
|
||||
}
|
||||
|
@ -92,13 +61,13 @@ class DiskCacheTest {
|
|||
|
||||
@Test
|
||||
fun testGetFile_NotNull() {
|
||||
cache.getFile(SOME_KEY) { SOME_VALUE }!!.inputStream().use { input ->
|
||||
cache.getFileOrPut(SOME_KEY) { SOME_VALUE }!!.inputStream().use { input ->
|
||||
assertArrayEquals(SOME_VALUE, IOUtils.toByteArray(input))
|
||||
}
|
||||
|
||||
// non-null value should have been written to cache
|
||||
assertEquals(1, cache.entries())
|
||||
cache.getFile(SOME_KEY) { SOME_OTHER_VALUE }!!.inputStream().use { input ->
|
||||
cache.getFileOrPut(SOME_KEY) { SOME_OTHER_VALUE }!!.inputStream().use { input ->
|
||||
assertArrayEquals(SOME_VALUE, IOUtils.toByteArray(input))
|
||||
}
|
||||
}
|
||||
|
@ -107,7 +76,7 @@ class DiskCacheTest {
|
|||
@Test
|
||||
fun testClear() {
|
||||
for (i in 1..50) {
|
||||
cache.get(i.toString()) { i.toString().toByteArray() }
|
||||
cache.getFileOrPut(i.toString()) { i.toString().toByteArray() }
|
||||
}
|
||||
assertEquals(50, cache.entries())
|
||||
|
||||
|
@ -120,7 +89,7 @@ class DiskCacheTest {
|
|||
fun testTrim() {
|
||||
assertEquals(0, cache.entries())
|
||||
|
||||
cache.get(SOME_KEY) { SOME_VALUE }
|
||||
cache.getFileOrPut(SOME_KEY) { SOME_VALUE }
|
||||
assertEquals(1, cache.entries())
|
||||
|
||||
cache.trim()
|
||||
|
@ -128,7 +97,7 @@ class DiskCacheTest {
|
|||
|
||||
// add 11 x 1 MB
|
||||
for (i in 0..MAX_CACHE_MB) {
|
||||
cache.get(i.toString()) { ByteArray(FileUtils.ONE_MB.toInt()) }
|
||||
cache.getFileOrPut(i.toString()) { ByteArray(FileUtils.ONE_MB.toInt()) }
|
||||
Thread.sleep(5) // make sure that files are exactly sortable by modification date
|
||||
}
|
||||
// now in cache: SOME_KEY (some bytes) and "0" .. "10" (1 MB each), i.e. 11 MB + some bytes in total
|
||||
|
|
|
@ -0,0 +1,196 @@
|
|||
/*
|
||||
* Copyright © All Contributors. See LICENSE and AUTHORS in the root directory for details.
|
||||
*/
|
||||
|
||||
package at.bitfire.davdroid.webdav
|
||||
|
||||
import org.junit.Assert.assertArrayEquals
|
||||
import org.junit.Assert.assertEquals
|
||||
import org.junit.Test
|
||||
|
||||
/**
|
||||
In the context of these tests, a "small file" is a file that is smaller
|
||||
than the max page size. A "page-sized file" is a file that is exactly as
|
||||
large as the page size. A "large file" is a file that is larger than the page
|
||||
size, i.e. a file that comprises at least two pages.
|
||||
*/
|
||||
class PagingReaderTest {

    @Test
    fun testRead_AcrossThreePages() {
        // Reading 103 bytes at offset 99 of a 350-byte file (page size 100) must touch
        // exactly pages 0, 1 and 2: byte 99 of page 0, all of page 1, bytes 0..1 of page 2.
        var idx = 0
        val reader = PagingReader(350, 100) { offset, size ->
            assertEquals(idx * 100L, offset)
            assertEquals(
                when (idx) {
                    0, 1, 2 -> 100
                    3 -> 50     // last, partial page — not reached by this particular read
                    else -> throw AssertionError("idx=$idx, size=$size")
                },
                size
            )
            idx += 1
            // page 0 is filled with 1s, page 1 with 2s, page 2 with 3s
            ByteArray(size) { idx.toByte() }
        }
        val dst = ByteArray(103)
        assertEquals(103, reader.read(99, 103, dst))
        assertArrayEquals(
            ByteArray(1) { 1 } + ByteArray(100) { 2 } + ByteArray(2) { 3 },
            dst
        )
    }

    @Test
    fun testRead_AtBeginning_FewBytes() {
        // A small read at offset 0 still loads the full first page.
        val reader = PagingReader(200, 100) { offset, size ->
            assertEquals(0, offset)
            assertEquals(100, size)
            ByteArray(100) { 1 }
        }
        val dst = ByteArray(10)
        assertEquals(10, reader.read(0, 10, dst))
        assertArrayEquals(ByteArray(10) { 1 }, dst)
    }

    @Test
    fun testRead_AtEOF() {
        // Reading at (or past) EOF must return 0 without ever invoking the page loader.
        val reader = PagingReader(200, 100) { _, _ ->
            throw AssertionError("Must not be called with size=0")
        }
        assertEquals(0, reader.read(200, 10, ByteArray(10)))
    }


    @Test
    fun testReadPage_LargeFile_FromMid_ToMid() {
        // A read fully inside the first page loads that page once and copies the middle slice.
        val reader = PagingReader(200, 100) { offset, size ->
            assertEquals(0, offset)
            assertEquals(100, size)
            ByteArray(100) { 1 }
        }
        val dst = ByteArray(10)
        assertEquals(10, reader.readPage(50, 10, dst, 0))
        assertArrayEquals(ByteArray(10) { 1 }, dst)
    }

    @Test
    fun testReadPage_LargeFile_FromMid_BeyondPage() {
        // readPage never crosses a page boundary: asking for 100 bytes at offset 50
        // yields only the 50 bytes remaining in the first page.
        val reader = PagingReader(200, 100) { offset, size ->
            assertEquals(0, offset)
            assertEquals(100, size)
            ByteArray(100) { 1 }
        }
        val dst = ByteArray(100)
        assertEquals(50, reader.readPage(50, 100, dst, 0))
        assertArrayEquals(ByteArray(50) { 1 }, dst.copyOfRange(0, 50))
    }

    @Test
    fun testReadPage_LargeFile_FromStart_LessThanAPage() {
        val reader = PagingReader(200, 100) { offset, size ->
            assertEquals(0, offset)
            assertEquals(100, size)
            ByteArray(100) { 1 }
        }
        val dst = ByteArray(10)
        assertEquals(10, reader.readPage(0, 10, dst, 0))
        assertArrayEquals(ByteArray(10) { 1 }, dst)
    }

    @Test
    fun testReadPage_LargeFile_FromStart_OnePage() {
        val reader = PagingReader(200, 100) { offset, size ->
            assertEquals(0, offset)
            assertEquals(100, size)
            ByteArray(100) { 1 }
        }
        val dst = ByteArray(100)
        assertEquals(100, reader.readPage(0, 100, dst, 0))
        assertArrayEquals(ByteArray(100) { 1 }, dst)
    }

    @Test
    fun testReadPage_LargeFile_FromStart_MoreThanAvailable() {
        // Requesting more than one page is capped at the page size; the result is written
        // at the given destination offset (here: 100).
        val reader = PagingReader(200, 100) { offset, size ->
            assertEquals(0, offset)
            assertEquals(100, size)
            ByteArray(100) { 1 }
        }
        val dst = ByteArray(200)
        assertEquals(100, reader.readPage(0, 200, dst, 100))
        assertArrayEquals(ByteArray(100) { 1 }, dst.copyOfRange(100, 200))
    }


    @Test
    fun testReadPage_PageSizedFile_FromEnd() {
        // Offset == file size: nothing to read, loader must not be called, dst untouched.
        val reader = PagingReader(100, 100) { _, _ ->
            throw AssertionError()
        }
        val dst = ByteArray(100)
        assertEquals(0, reader.readPage(100, 100, dst, 0))
        assertArrayEquals(ByteArray(100), dst)
    }

    @Test
    fun testReadPage_PageSizedFile_FromStart_Complete() {
        val reader = PagingReader(100, 100) { offset, size ->
            assertEquals(0, offset)
            assertEquals(100, size)
            ByteArray(100) { 1 }
        }
        val dst = ByteArray(100)
        assertEquals(100, reader.readPage(0, 100, dst, 0))
        assertArrayEquals(ByteArray(100) { 1 }, dst)
    }

    @Test
    fun testReadPage_PageSizedFile_FromStart_MoreThanAvailable() {
        // The file ends with the page, so at most 100 bytes can be returned.
        val reader = PagingReader(100, 100) { offset, size ->
            assertEquals(0, offset)
            assertEquals(100, size)
            ByteArray(100) { 1 }
        }
        val dst = ByteArray(200)
        assertEquals(100, reader.readPage(0, 200, dst, 100))
        assertArrayEquals(ByteArray(100) { 1 }, dst.copyOfRange(100, 200))
    }


    @Test
    fun testReadPage_SmallFile_FromStart_Partial() {
        // The file (100 bytes) is smaller than the page size (1000), so the loader is
        // asked for the whole file at once, even though only 10 bytes are consumed.
        val reader = PagingReader(100, 1000) { offset, size ->
            assertEquals(0, offset)
            assertEquals(100, size)
            ByteArray(100) { 1 }
        }
        val dst = ByteArray(10)
        assertEquals(10, reader.readPage(0, 10, dst, 0))
        // fixed argument order: JUnit's assertArrayEquals takes (expected, actual)
        assertArrayEquals(ByteArray(10) { 1 }, dst)
    }

    @Test
    fun testReadPage_SmallFile_FromStart_Complete() {
        val reader = PagingReader(100, 1000) { offset, size ->
            assertEquals(0, offset)
            assertEquals(100, size)
            ByteArray(100) { 1 }
        }
        val dst = ByteArray(100)
        assertEquals(100, reader.readPage(0, 100, dst, 0))
        assertArrayEquals(ByteArray(100) { 1 }, dst)
    }

    @Test
    fun testReadPage_SmallFile_FromStart_MoreThanAvailable() {
        // Only the 100 bytes that exist are returned, written at destination offset 100.
        val reader = PagingReader(100, 1000) { offset, size ->
            assertEquals(0, offset)
            assertEquals(100, size)
            ByteArray(100) { 1 }
        }
        val dst = ByteArray(200)
        assertEquals(100, reader.readPage(0, 200, dst, 100))
        assertArrayEquals(ByteArray(100) { 1 }, dst.copyOfRange(100, 200))
    }

}
|
Loading…
Reference in a new issue