From 6e89ab61cb61a41e23788aef25a00f4abee2b637 Mon Sep 17 00:00:00 2001 From: Marius Constantin Date: Fri, 15 Dec 2023 09:57:27 +0100 Subject: [PATCH] RUM-2014 Use the synchronous equivalent of readNextBatch API into the uploader --- .../core/internal/data/upload/UploadWorker.kt | 41 +-- .../data/upload/v2/DataUploadRunnable.kt | 37 +- .../internal/persistence/AbstractStorage.kt | 45 +-- .../core/internal/persistence/BatchData.kt | 38 ++ .../persistence/ConsentAwareStorage.kt | 52 +-- .../core/internal/persistence/Storage.kt | 17 +- .../internal/data/upload/UploadWorkerTest.kt | 155 ++------- .../data/upload/v2/DataUploadRunnableTest.kt | 328 +++++------------- .../persistence/AbstractStorageTest.kt | 83 +---- .../internal/persistence/BatchDataTest.kt | 34 ++ .../persistence/ConsentAwareStorageTest.kt | 179 +++------- .../utils/forge/BatchDataForgeryFactory.kt | 29 ++ .../android/utils/forge/Configurator.kt | 1 + detekt_custom.yml | 1 + 14 files changed, 346 insertions(+), 694 deletions(-) create mode 100644 dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/persistence/BatchData.kt create mode 100644 dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/persistence/BatchDataTest.kt create mode 100644 dd-sdk-android-core/src/test/kotlin/com/datadog/android/utils/forge/BatchDataForgeryFactory.kt diff --git a/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/data/upload/UploadWorker.kt b/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/data/upload/UploadWorker.kt index dceee55521..09de7f46bb 100644 --- a/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/data/upload/UploadWorker.kt +++ b/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/data/upload/UploadWorker.kt @@ -22,8 +22,6 @@ import com.datadog.android.core.internal.metrics.RemovalReason import com.datadog.android.core.internal.utils.unboundInternalLogger import java.util.LinkedList import java.util.Queue -import java.util.concurrent.CountDownLatch -import java.util.concurrent.TimeUnit internal class UploadWorker( appContext: Context, @@ -87,32 +85,22 @@ internal class UploadWorker( val storage = feature.storage val uploader = feature.uploader - - // storage APIs may be async, so we need to block current thread to keep Worker alive - @Suppress("UnsafeThirdPartyFunctionCall") // safe to create, argument is not negative - val lock = CountDownLatch(1) - - storage.readNextBatch(noBatchCallback = { - lock.countDown() - }) { batchId, reader -> - val batch = reader.read() - val batchMeta = reader.currentMetadata() - - val uploadStatus = consumeBatch(context, batch, batchMeta, uploader) + val nextBatchData = storage.readNextBatch() + if (nextBatchData != null) { + val uploadStatus = consumeBatch( + context, + nextBatchData.data, + nextBatchData.metadata, + uploader + ) storage.confirmBatchRead( - batchId, - RemovalReason.IntakeCode(uploadStatus.code) - ) { confirmation -> - confirmation.markAsRead(deleteBatch = !uploadStatus.shouldRetry) - @Suppress("UnsafeThirdPartyFunctionCall") // safe to add - taskQueue.offer(UploadNextBatchTask(taskQueue, sdkCore, feature)) - lock.countDown() - } + nextBatchData.id, + RemovalReason.IntakeCode(uploadStatus.code), + deleteBatch = !uploadStatus.shouldRetry + ) + @Suppress("UnsafeThirdPartyFunctionCall") // safe to add + taskQueue.offer(UploadNextBatchTask(taskQueue, sdkCore, feature)) } - - @Suppress("UnsafeThirdPartyFunctionCall") // if interrupt happens, WorkManager - // will handle it - 
lock.await(LOCK_AWAIT_SECONDS, TimeUnit.SECONDS) } private fun consumeBatch( @@ -128,7 +116,6 @@ internal class UploadWorker( // endregion companion object { - const val LOCK_AWAIT_SECONDS = 30L const val MESSAGE_NOT_INITIALIZED = "Datadog has not been initialized." diff --git a/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/data/upload/v2/DataUploadRunnable.kt b/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/data/upload/v2/DataUploadRunnable.kt index baa41892b8..198b2e5c10 100644 --- a/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/data/upload/v2/DataUploadRunnable.kt +++ b/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/data/upload/v2/DataUploadRunnable.kt @@ -12,7 +12,6 @@ import com.datadog.android.api.context.DatadogContext import com.datadog.android.api.context.NetworkInfo import com.datadog.android.api.storage.RawBatchEvent import com.datadog.android.core.internal.ContextProvider -import com.datadog.android.core.internal.CoreFeature import com.datadog.android.core.internal.configuration.DataUploadConfiguration import com.datadog.android.core.internal.data.upload.UploadRunnable import com.datadog.android.core.internal.data.upload.UploadStatus @@ -22,7 +21,6 @@ import com.datadog.android.core.internal.persistence.BatchId import com.datadog.android.core.internal.persistence.Storage import com.datadog.android.core.internal.system.SystemInfoProvider import com.datadog.android.core.internal.utils.scheduleSafe -import java.util.concurrent.CountDownLatch import java.util.concurrent.ScheduledThreadPoolExecutor import java.util.concurrent.TimeUnit import kotlin.math.max @@ -37,7 +35,6 @@ internal class DataUploadRunnable( private val networkInfoProvider: NetworkInfoProvider, private val systemInfoProvider: SystemInfoProvider, uploadConfiguration: DataUploadConfiguration, - private val batchUploadWaitTimeoutMs: Long = CoreFeature.NETWORK_TIMEOUT_MS, private val internalLogger: InternalLogger ) : UploadRunnable { @@ -52,8 +49,6 @@ internal class DataUploadRunnable( override fun run() { if (isNetworkAvailable() && isSystemReady()) { val context = contextProvider.context - // TODO RUMM-0000 it should be already on the worker thread and if readNextBatch is async, - // we should wait until it completes before scheduling further var batchConsumerAvailableAttempts = maxBatchesPerJob var lastBatchUploadStatus: UploadStatus? do { @@ -90,27 +85,15 @@ internal class DataUploadRunnable( @Suppress("UnsafeThirdPartyFunctionCall") // called inside a dedicated executor private fun handleNextBatch(context: DatadogContext): UploadStatus? { var uploadStatus: UploadStatus? 
= null - val lock = CountDownLatch(1) - storage.readNextBatch( - noBatchCallback = { - lock.countDown() - } - ) { batchId, reader -> - try { - val batch = reader.read() - val batchMeta = reader.currentMetadata() - - uploadStatus = consumeBatch( - context, - batchId, - batch, - batchMeta - ) - } finally { - lock.countDown() - } + val nextBatchData = storage.readNextBatch() + if (nextBatchData != null) { + uploadStatus = consumeBatch( + context, + nextBatchData.id, + nextBatchData.data, + nextBatchData.metadata + ) } - lock.await(batchUploadWaitTimeoutMs, TimeUnit.MILLISECONDS) return uploadStatus } @@ -151,9 +134,7 @@ internal class DataUploadRunnable( } else { RemovalReason.IntakeCode(status.code) } - storage.confirmBatchRead(batchId, removalReason) { - it.markAsRead(deleteBatch = !status.shouldRetry) - } + storage.confirmBatchRead(batchId, removalReason, deleteBatch = !status.shouldRetry) return status } diff --git a/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/persistence/AbstractStorage.kt b/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/persistence/AbstractStorage.kt index 9329595265..a900958eaf 100644 --- a/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/persistence/AbstractStorage.kt +++ b/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/persistence/AbstractStorage.kt @@ -86,28 +86,13 @@ internal class AbstractStorage( } @WorkerThread - override fun readNextBatch( - noBatchCallback: () -> Unit, - readBatchCallback: (BatchId, BatchReader) -> Unit - ) { - val batch = grantedPersistenceStrategy.lockAndReadNext() - if (batch == null) { - noBatchCallback() - } else { - val batchId = BatchId(batch.batchId) - val reader = object : BatchReader { - - @WorkerThread - override fun currentMetadata(): ByteArray? { - return batch.metadata - } - - @WorkerThread - override fun read(): List { - return batch.events - } - } - readBatchCallback.invoke(batchId, reader) + override fun readNextBatch(): BatchData? { + return grantedPersistenceStrategy.lockAndReadNext()?.let { + BatchData( + id = BatchId(it.batchId), + data = it.events, + metadata = it.metadata + ) } } @@ -115,19 +100,13 @@ internal class AbstractStorage( override fun confirmBatchRead( batchId: BatchId, removalReason: RemovalReason, - callback: (BatchConfirmation) -> Unit + deleteBatch: Boolean ) { - val confirmation = object : BatchConfirmation { - @WorkerThread - override fun markAsRead(deleteBatch: Boolean) { - if (deleteBatch) { - grantedPersistenceStrategy.unlockAndDelete(batchId.id) - } else { - grantedPersistenceStrategy.unlockAndKeep(batchId.id) - } - } + if (deleteBatch) { + grantedPersistenceStrategy.unlockAndDelete(batchId.id) + } else { + grantedPersistenceStrategy.unlockAndKeep(batchId.id) } - callback(confirmation) } override fun dropAll() { diff --git a/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/persistence/BatchData.kt b/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/persistence/BatchData.kt new file mode 100644 index 0000000000..c4cbcc0289 --- /dev/null +++ b/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/persistence/BatchData.kt @@ -0,0 +1,38 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache License Version 2.0. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2016-Present Datadog, Inc. 
+ */ + +package com.datadog.android.core.internal.persistence + +import com.datadog.android.api.storage.RawBatchEvent + +internal data class BatchData( + val id: BatchId, + val data: List, + val metadata: ByteArray? = null +) { + override fun equals(other: Any?): Boolean { + if (this === other) return true + if (javaClass != other?.javaClass) return false + + other as BatchData + + if (id != other.id) return false + if (data != other.data) return false + if (metadata != null) { + if (other.metadata == null) return false + if (!metadata.contentEquals(other.metadata)) return false + } else if (other.metadata != null) return false + + return true + } + + override fun hashCode(): Int { + var result = id.hashCode() + result = 31 * result + data.hashCode() + result = 31 * result + (metadata?.contentHashCode() ?: 0) + return result + } +} diff --git a/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/persistence/ConsentAwareStorage.kt b/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/persistence/ConsentAwareStorage.kt index 27976f0aed..d4ac1143ab 100644 --- a/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/persistence/ConsentAwareStorage.kt +++ b/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/persistence/ConsentAwareStorage.kt @@ -10,7 +10,6 @@ import androidx.annotation.WorkerThread import com.datadog.android.api.InternalLogger import com.datadog.android.api.context.DatadogContext import com.datadog.android.api.storage.EventBatchWriter -import com.datadog.android.api.storage.RawBatchEvent import com.datadog.android.core.internal.metrics.MetricsDispatcher import com.datadog.android.core.internal.metrics.RemovalReason import com.datadog.android.core.internal.persistence.file.FileMover @@ -84,17 +83,10 @@ internal class ConsentAwareStorage( /** @inheritdoc */ @WorkerThread - override fun readNextBatch( - noBatchCallback: () -> Unit, - readBatchCallback: (BatchId, BatchReader) -> Unit - ) { + override fun readNextBatch(): BatchData? { val (batchFile, metaFile) = synchronized(lockedBatches) { val batchFile = grantedOrchestrator - .getReadableFile(lockedBatches.map { it.file }.toSet()) - if (batchFile == null) { - noBatchCallback() - return - } + .getReadableFile(lockedBatches.map { it.file }.toSet()) ?: return null val metaFile = grantedOrchestrator.getMetadataFile(batchFile) lockedBatches.add(Batch(batchFile, metaFile)) @@ -102,21 +94,14 @@ internal class ConsentAwareStorage( } val batchId = BatchId.fromFile(batchFile) - val reader = object : BatchReader { - - @WorkerThread - override fun currentMetadata(): ByteArray? 
{ - if (metaFile == null || !metaFile.existsSafe(internalLogger)) return null - - return batchMetadataReaderWriter.readData(metaFile) - } - - @WorkerThread - override fun read(): List { - return batchEventsReaderWriter.readData(batchFile) - } + val batchMetadata = if (metaFile == null || !metaFile.existsSafe(internalLogger)) { + null + } else { + batchMetadataReaderWriter.readData(metaFile) } - readBatchCallback(batchId, reader) + val batchData = batchEventsReaderWriter.readData(batchFile) + + return BatchData(id = batchId, data = batchData, metadata = batchMetadata) } /** @inheritdoc */ @@ -124,23 +109,18 @@ internal class ConsentAwareStorage( override fun confirmBatchRead( batchId: BatchId, removalReason: RemovalReason, - callback: (BatchConfirmation) -> Unit + deleteBatch: Boolean ) { val batch = synchronized(lockedBatches) { lockedBatches.firstOrNull { batchId.matchesFile(it.file) } } ?: return - val confirmation = object : BatchConfirmation { - @WorkerThread - override fun markAsRead(deleteBatch: Boolean) { - if (deleteBatch) { - deleteBatch(batch, removalReason) - } - synchronized(lockedBatches) { - lockedBatches.remove(batch) - } - } + + if (deleteBatch) { + deleteBatch(batch, removalReason) + } + synchronized(lockedBatches) { + lockedBatches.remove(batch) } - callback(confirmation) } /** @inheritdoc */ diff --git a/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/persistence/Storage.kt b/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/persistence/Storage.kt index be41562d05..4684799e37 100644 --- a/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/persistence/Storage.kt +++ b/dd-sdk-android-core/src/main/kotlin/com/datadog/android/core/internal/persistence/Storage.kt @@ -39,28 +39,23 @@ internal interface Storage { ) /** - * Utility to read a batch, asynchronously. - * @param noBatchCallback an optional callback which is called when there is no batch available to read. - * @param readBatchCallback an operation to perform with a [BatchId] and [BatchReader] that will target - * the next readable Batch + * Utility to read a batch, synchronously. */ @WorkerThread - fun readNextBatch( - noBatchCallback: () -> Unit = {}, - readBatchCallback: (BatchId, BatchReader) -> Unit - ) + fun readNextBatch(): BatchData? /** - * Utility to update the state of a batch, asynchronously. + * Utility to update the state of a batch, synchronously. * @param batchId the id of the Batch to confirm * @param removalReason the reason why the batch is being removed - * @param callback an operation to perform with a [BatchConfirmation] + * @param deleteBatch if `true` the batch will be deleted, otherwise it will be marked as + * not readable. 
*/ @WorkerThread fun confirmBatchRead( batchId: BatchId, removalReason: RemovalReason, - callback: (BatchConfirmation) -> Unit + deleteBatch: Boolean ) /** diff --git a/dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/data/upload/UploadWorkerTest.kt b/dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/data/upload/UploadWorkerTest.kt index 7deab9339e..7e63b6be7f 100644 --- a/dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/data/upload/UploadWorkerTest.kt +++ b/dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/data/upload/UploadWorkerTest.kt @@ -20,9 +20,8 @@ import com.datadog.android.core.InternalSdkCore import com.datadog.android.core.internal.SdkFeature import com.datadog.android.core.internal.data.upload.v2.DataUploader import com.datadog.android.core.internal.metrics.RemovalReason -import com.datadog.android.core.internal.persistence.BatchConfirmation +import com.datadog.android.core.internal.persistence.BatchData import com.datadog.android.core.internal.persistence.BatchId -import com.datadog.android.core.internal.persistence.BatchReader import com.datadog.android.core.internal.persistence.Storage import com.datadog.android.utils.config.ApplicationContextTestConfiguration import com.datadog.android.utils.config.InternalLoggerTestConfiguration @@ -49,9 +48,7 @@ import org.mockito.Mock import org.mockito.invocation.InvocationOnMock import org.mockito.junit.jupiter.MockitoExtension import org.mockito.junit.jupiter.MockitoSettings -import org.mockito.kotlin.any import org.mockito.kotlin.argThat -import org.mockito.kotlin.doAnswer import org.mockito.kotlin.doReturn import org.mockito.kotlin.eq import org.mockito.kotlin.mock @@ -60,7 +57,6 @@ import org.mockito.kotlin.verifyNoInteractions import org.mockito.kotlin.whenever import org.mockito.quality.Strictness import org.mockito.stubbing.Answer -import java.util.concurrent.Executor import java.util.concurrent.Executors @Extensions( @@ -83,9 +79,6 @@ internal class UploadWorkerTest { @Mock lateinit var mockStorageA: Storage - @Mock - lateinit var mockBatchReaderA: BatchReader - @Mock lateinit var mockUploaderA: DataUploader @@ -95,9 +88,6 @@ internal class UploadWorkerTest { @Mock lateinit var mockStorageB: Storage - @Mock - lateinit var mockBatchReaderB: BatchReader - @Mock lateinit var mockUploaderB: DataUploader @@ -151,24 +141,18 @@ internal class UploadWorkerTest { val batchAMetadata = forge.aNullable { batchAMeta.toByteArray() } val batchBMetadata = forge.aNullable { batchBMeta.toByteArray() } - val batchAConfirmation = mock() val batchId1 = mock() val batchId2 = mock() stubReadSequence( mockStorageA, - mockBatchReaderA, batchId1, - batchAConfirmation, batchA, batchAMetadata ) - val batchBConfirmation = mock() stubReadSequence( mockStorageB, - mockBatchReaderB, batchId2, - batchBConfirmation, batchB, batchBMetadata ) @@ -208,15 +192,13 @@ internal class UploadWorkerTest { verify(mockStorageA).confirmBatchRead( eq(batchId1), argThat { this.toString() == "intake-code-${uploadStatus1.code}" }, - any() + eq(true) ) verify(mockStorageB).confirmBatchRead( eq(batchId2), argThat { this.toString() == "intake-code-${uploadStatus2.code}" }, - any() + eq(true) ) - verify(batchAConfirmation).markAsRead(true) - verify(batchBConfirmation).markAsRead(true) assertThat(result) .isEqualTo(ListenableWorker.Result.success()) @@ -236,24 +218,18 @@ internal class UploadWorkerTest { val batchAMetadata = forge.aNullable { batchAMeta.toByteArray() } val batchBMetadata = 
forge.aNullable { batchBMeta.toByteArray() } - val batchAConfirmation = mock() val batchId1 = mock() stubReadSequence( mockStorageA, - mockBatchReaderA, batchId1, - batchAConfirmation, batchA, batchAMetadata ) - val batchBConfirmation = mock() val batchId2 = mock() stubReadSequence( mockStorageB, - mockBatchReaderB, batchId2, - batchBConfirmation, batchB, batchBMetadata ) @@ -290,15 +266,13 @@ internal class UploadWorkerTest { verify(mockStorageA).confirmBatchRead( eq(batchId1), argThat { this.toString() == "intake-code-${status.code}" }, - any() + eq(!status.shouldRetry) ) verify(mockStorageB).confirmBatchRead( eq(batchId2), argThat { this.toString() == "intake-code-${status.code}" }, - any() + eq(!status.shouldRetry) ) - verify(batchAConfirmation).markAsRead(!status.shouldRetry) - verify(batchBConfirmation).markAsRead(!status.shouldRetry) assertThat(result) .isEqualTo(ListenableWorker.Result.success()) @@ -314,28 +288,21 @@ internal class UploadWorkerTest { forge.aNullable { forge.aString().toByteArray() } } val aIds = forge.aList(batchesA.size) { mock() } - val aConfirmations = forge.aList(batchesA.size) { mock() } - val aReaders = forge.aList(batchesA.size) { mock() } val batchB = forge.aList { RawBatchEvent(aString().toByteArray()) } val batchBMeta = forge.aString().toByteArray() stubMultipleReadSequence( mockStorageA, - aReaders, aIds, - aConfirmations, batchesA, batchesAMeta ) - val batchBConfirmation = mock() val batchId = mock() stubReadSequence( mockStorageB, - mockBatchReaderB, batchId, - batchBConfirmation, batchB, batchBMeta ) @@ -373,10 +340,8 @@ internal class UploadWorkerTest { verify(mockStorageA).confirmBatchRead( eq(aIds[index]), argThat { this.toString() == "intake-code-${aStatuses[index].code}" }, - any() + eq(true) ) - - verify(aConfirmations[index]).markAsRead(true) } verify(mockUploaderB).upload( @@ -387,10 +352,8 @@ internal class UploadWorkerTest { verify(mockStorageB).confirmBatchRead( eq(batchId), argThat { this.toString() == "intake-code-${successStatus.code}" }, - any() + eq(true) ) - verify(batchBConfirmation).markAsRead(true) - assertThat(result) .isEqualTo(ListenableWorker.Result.success()) } @@ -405,8 +368,6 @@ internal class UploadWorkerTest { aNullable { aString().toByteArray() } } val aIds = forge.aList(batchesA.size) { mock() } - val aConfirmations = forge.aList(batchesA.size) { mock() } - val aReaders = forge.aList(batchesA.size) { mock() } val batchB = forge.aList { RawBatchEvent(aString().toByteArray()) } val batchBMeta = forge.aString().toByteArray() @@ -414,27 +375,22 @@ internal class UploadWorkerTest { stubMultipleReadSequence( mockStorageA, - aReaders, aIds, - aConfirmations, batchesA, batchesAMeta ) - val batchBConfirmation = mock() val batchId = mock() stubReadSequence( mockStorageB, - mockBatchReaderB, batchId, - batchBConfirmation, batchB, batchBMeta ) val executorService = Executors.newSingleThreadExecutor() - whenever(mockFeatureA.storage) doReturn AsyncStorageDelegate(mockStorageA, executorService) - whenever(mockFeatureB.storage) doReturn AsyncStorageDelegate(mockStorageB, executorService) + whenever(mockFeatureA.storage) doReturn StorageDelegate(mockStorageA) + whenever(mockFeatureB.storage) doReturn StorageDelegate(mockStorageB) batchesA.forEachIndexed { index, batch -> whenever( @@ -468,9 +424,8 @@ internal class UploadWorkerTest { verify(mockStorageA).confirmBatchRead( eq(aIds[index]), argThat { this.toString() == "intake-code-${aStatuses[index].code}" }, - any() + eq(true) ) - verify(aConfirmations[index]).markAsRead(true) } 
verify(mockUploaderB).upload( @@ -481,9 +436,8 @@ internal class UploadWorkerTest { verify(mockStorageB).confirmBatchRead( eq(batchId), argThat { this.toString() == "intake-code-${successStatus.code}" }, - any() + eq(true) ) - verify(batchBConfirmation).markAsRead(true) assertThat(result) .isEqualTo(ListenableWorker.Result.success()) @@ -505,8 +459,6 @@ internal class UploadWorkerTest { aNullable { aString().toByteArray() } } val aIds = forge.aList(batchesA.size) { mock() } - val aConfirmations = forge.aList(batchesA.size) { mock() } - val aReaders = forge.aList(batchesA.size) { mock() } val batchB = forge.aList { RawBatchEvent(aString().toByteArray()) } val batchBMeta = forge.aString().toByteArray() @@ -522,20 +474,15 @@ internal class UploadWorkerTest { stubMultipleReadSequence( mockStorageA, - aReaders, aIds, - aConfirmations, batchesA, batchesAMeta ) - val batchBConfirmation = mock() val batchId = mock() stubReadSequence( mockStorageB, - mockBatchReaderB, batchId, - batchBConfirmation, batchB, batchBMeta ) @@ -571,15 +518,18 @@ internal class UploadWorkerTest { ) if (index == failingBatchIndex) { - verify(aConfirmations[index]).markAsRead(!failingStatus.shouldRetry) + verify(mockStorageA).confirmBatchRead( + eq(aIds[index]), + argThat { this.toString() == "intake-code-${aStatuses[index].code}" }, + eq(!failingStatus.shouldRetry) + ) } else { - verify(aConfirmations[index]).markAsRead(true) + verify(mockStorageA).confirmBatchRead( + eq(aIds[index]), + argThat { this.toString() == "intake-code-${aStatuses[index].code}" }, + eq(true) + ) } - verify(mockStorageA).confirmBatchRead( - eq(aIds[index]), - argThat { this.toString() == "intake-code-${aStatuses[index].code}" }, - any() - ) } verify(mockUploaderB).upload( @@ -590,10 +540,8 @@ internal class UploadWorkerTest { verify(mockStorageB).confirmBatchRead( eq(batchId), argThat { this.toString() == "intake-code-${fakeUploadSuccess2.code}" }, - any() + eq(true) ) - verify(batchBConfirmation).markAsRead(true) - assertThat(result) .isEqualTo(ListenableWorker.Result.success()) } @@ -612,8 +560,8 @@ internal class UploadWorkerTest { InternalLogger.Target.USER, UploadWorker.MESSAGE_NOT_INITIALIZED ) - verifyNoInteractions(mockFeatureA, mockBatchReaderA, mockUploaderA) - verifyNoInteractions(mockFeatureB, mockBatchReaderB, mockUploaderB) + verifyNoInteractions(mockFeatureA, mockUploaderA) + verifyNoInteractions(mockFeatureB, mockUploaderB) assertThat(result) .isEqualTo(ListenableWorker.Result.success()) @@ -638,17 +586,13 @@ internal class UploadWorkerTest { private fun stubReadSequence( storage: Storage, - batchReader: BatchReader, batchId: BatchId, - batchConfirmation: BatchConfirmation, batch: List, batchMetadata: ByteArray? ) { stubMultipleReadSequence( storage, - listOf(batchReader), listOf(batchId), - listOf(batchConfirmation), listOf(batch), listOf(batchMetadata) ) @@ -656,46 +600,31 @@ internal class UploadWorkerTest { private fun stubMultipleReadSequence( storage: Storage, - batchReaders: List, batchIds: List, - batchConfirmations: List, batches: List>, batchMetadata: List ) { - whenever(storage.readNextBatch(any(), any())) - .thenAnswer(object : Answer { + whenever(storage.readNextBatch()) + .thenAnswer(object : Answer { var invocationCount: Int = 0 - override fun answer(invocation: InvocationOnMock) { + override fun answer(invocation: InvocationOnMock): BatchData? 
{ if (invocationCount >= batches.size) { - (invocation.getArgument<() -> Unit>(0)).invoke() - return + return null } - val reader = batchReaders[invocationCount] val batchId = batchIds[invocationCount] - val batchConfirmation = batchConfirmations[invocationCount] - - whenever(reader.read()) doReturn batches[invocationCount] - whenever(reader.currentMetadata()) doReturn batchMetadata[invocationCount] - - invocationCount++ - - whenever(storage.confirmBatchRead(eq(batchId), any(), any())) doAnswer { - (it.getArgument<(BatchConfirmation) -> Unit>(2)).invoke(batchConfirmation) - } - - (invocation.getArgument<(BatchId, BatchReader) -> Unit>(1)).invoke( - batchId, - reader + return BatchData( + id = batchId, + data = batches[invocationCount], + metadata = batchMetadata[invocationCount++] ) } }) } - private class AsyncStorageDelegate( - private val delegate: Storage, - private val executor: Executor + private class StorageDelegate( + private val delegate: Storage ) : Storage { override fun writeCurrentBatch( datadogContext: DatadogContext, @@ -705,24 +634,16 @@ internal class UploadWorkerTest { fail("we don't expect this one to be called") } - override fun readNextBatch( - noBatchCallback: () -> Unit, - readBatchCallback: (BatchId, BatchReader) -> Unit - ) { - executor.execute { - delegate.readNextBatch( - noBatchCallback, - readBatchCallback - ) - } + override fun readNextBatch(): BatchData? { + return delegate.readNextBatch() } override fun confirmBatchRead( batchId: BatchId, removalReason: RemovalReason, - callback: (BatchConfirmation) -> Unit + deleteBatch: Boolean ) { - executor.execute { delegate.confirmBatchRead(batchId, removalReason, callback) } + delegate.confirmBatchRead(batchId, removalReason, deleteBatch) } override fun dropAll() { diff --git a/dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/data/upload/v2/DataUploadRunnableTest.kt b/dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/data/upload/v2/DataUploadRunnableTest.kt index af06c19a32..b5a69a08c0 100644 --- a/dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/data/upload/v2/DataUploadRunnableTest.kt +++ b/dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/data/upload/v2/DataUploadRunnableTest.kt @@ -14,14 +14,12 @@ import com.datadog.android.core.internal.ContextProvider import com.datadog.android.core.internal.configuration.DataUploadConfiguration import com.datadog.android.core.internal.data.upload.UploadStatus import com.datadog.android.core.internal.net.info.NetworkInfoProvider -import com.datadog.android.core.internal.persistence.BatchConfirmation +import com.datadog.android.core.internal.persistence.BatchData import com.datadog.android.core.internal.persistence.BatchId -import com.datadog.android.core.internal.persistence.BatchReader import com.datadog.android.core.internal.persistence.Storage import com.datadog.android.core.internal.system.SystemInfo import com.datadog.android.core.internal.system.SystemInfoProvider import com.datadog.android.utils.forge.Configurator -import com.datadog.tools.unit.forge.aThrowable import fr.xgouchet.elmyr.Forge import fr.xgouchet.elmyr.annotation.Forgery import fr.xgouchet.elmyr.annotation.IntForgery @@ -43,10 +41,9 @@ import org.mockito.kotlin.any import org.mockito.kotlin.argumentCaptor import org.mockito.kotlin.doAnswer import org.mockito.kotlin.doReturn -import org.mockito.kotlin.doReturnConsecutively -import org.mockito.kotlin.doThrow import org.mockito.kotlin.eq import org.mockito.kotlin.mock +import 
org.mockito.kotlin.reset import org.mockito.kotlin.same import org.mockito.kotlin.times import org.mockito.kotlin.verify @@ -58,8 +55,6 @@ import org.mockito.stubbing.Answer import java.util.concurrent.ScheduledThreadPoolExecutor import java.util.concurrent.TimeUnit -// TODO: RUM-2014 Simplify / Refactor these tests when we're going to switch to the -// sync API for uploading data @Extensions( ExtendWith(MockitoExtension::class), ExtendWith(ForgeExtension::class) @@ -130,7 +125,6 @@ internal class DataUploadRunnableTest { mockNetworkInfoProvider, mockSystemInfoProvider, fakeDataUploadConfiguration, - TEST_BATCH_UPLOAD_WAIT_TIMEOUT_MS, mockInternalLogger ) } @@ -171,20 +165,9 @@ internal class DataUploadRunnableTest { ) val batchId = mock() - val batchReader = mock() - val batchConfirmation = mock() val batchMetadata = forge.aNullable { batchMeta.toByteArray() } - whenever(batchReader.read()) doReturn batch - whenever(batchReader.currentMetadata()) doReturn batchMetadata - - whenever(mockStorage.readNextBatch(any(), any())) doAnswer { - whenever(mockStorage.confirmBatchRead(eq(batchId), any(), any())) doAnswer { - it.getArgument<(BatchConfirmation) -> Unit>(2).invoke(batchConfirmation) - } - it.getArgument<(BatchId, BatchReader) -> Unit>(1).invoke(batchId, batchReader) - } - + whenever(mockStorage.readNextBatch()).thenReturn(BatchData(batchId, batch, batchMetadata)) whenever(mockSystemInfoProvider.getLatestSystemInfo()) doReturn fakeSystemInfo whenever( @@ -199,9 +182,11 @@ internal class DataUploadRunnableTest { testedRunnable.run() // Then - verify(batchConfirmation).markAsRead(true) - verifyNoMoreInteractions(batchConfirmation) - verify(batchReader, times(expectedBatchesHandled)).read() + verify(mockStorage, times(expectedBatchesHandled)).confirmBatchRead( + eq(batchId), + any(), + eq(true) + ) verify(mockDataUploader, times(expectedBatchesHandled)).upload(fakeContext, batch, batchMetadata) verify(mockThreadPoolExecutor).schedule( same(testedRunnable), @@ -225,20 +210,9 @@ internal class DataUploadRunnableTest { powerSaveMode = false ) val batchId = mock() - val batchReader = mock() - val batchConfirmation = mock() val batchMetadata = forge.aNullable { batchMeta.toByteArray() } - whenever(batchReader.read()) doReturn batch - whenever(batchReader.currentMetadata()) doReturn batchMetadata - - whenever(mockStorage.readNextBatch(any(), any())) doAnswer { - whenever(mockStorage.confirmBatchRead(eq(batchId), any(), any())) doAnswer { - it.getArgument<(BatchConfirmation) -> Unit>(2).invoke(batchConfirmation) - } - it.getArgument<(BatchId, BatchReader) -> Unit>(1).invoke(batchId, batchReader) - } - + whenever(mockStorage.readNextBatch()).thenReturn(BatchData(batchId, batch, batchMetadata)) whenever(mockSystemInfoProvider.getLatestSystemInfo()) doReturn fakeSystemInfo whenever( @@ -253,9 +227,11 @@ internal class DataUploadRunnableTest { testedRunnable.run() // Then - verify(batchConfirmation, times(expectedBatchesHandled)).markAsRead(true) - verifyNoMoreInteractions(batchConfirmation) - verify(batchReader, times(expectedBatchesHandled)).read() + verify(mockStorage, times(expectedBatchesHandled)).confirmBatchRead( + eq(batchId), + any(), + eq(true) + ) verify(mockDataUploader, times(expectedBatchesHandled)) .upload(fakeContext, batch, batchMetadata) verify(mockThreadPoolExecutor).schedule( @@ -279,20 +255,9 @@ internal class DataUploadRunnableTest { powerSaveMode = false ) val batchId = mock() - val batchReader = mock() - val batchConfirmation = mock() val batchMetadata = forge.aNullable { 
batchMeta.toByteArray() } - whenever(batchReader.read()) doReturn batch - whenever(batchReader.currentMetadata()) doReturn batchMetadata - - whenever(mockStorage.readNextBatch(any(), any())) doAnswer { - whenever(mockStorage.confirmBatchRead(eq(batchId), any(), any())) doAnswer { - it.getArgument<(BatchConfirmation) -> Unit>(2).invoke(batchConfirmation) - } - it.getArgument<(BatchId, BatchReader) -> Unit>(1).invoke(batchId, batchReader) - } - + whenever(mockStorage.readNextBatch()).thenReturn(BatchData(batchId, batch, batchMetadata)) whenever(mockSystemInfoProvider.getLatestSystemInfo()) doReturn fakeSystemInfo whenever( @@ -307,9 +272,11 @@ internal class DataUploadRunnableTest { testedRunnable.run() // Then - verify(batchConfirmation, times(expectedBatchesHandled)).markAsRead(true) - verifyNoMoreInteractions(batchConfirmation) - verify(batchReader, times(expectedBatchesHandled)).read() + verify(mockStorage, times(expectedBatchesHandled)).confirmBatchRead( + eq(batchId), + any(), + eq(true) + ) verify(mockDataUploader, times(expectedBatchesHandled)) .upload(fakeContext, batch, batchMetadata) verify(mockThreadPoolExecutor).schedule( @@ -422,15 +389,13 @@ internal class DataUploadRunnableTest { @Test fun `𝕄 do nothing 𝕎 no batch to send`() { // Given - whenever(mockStorage.readNextBatch(any(), any())) doAnswer { - it.getArgument<() -> Unit>(0)() - } + whenever(mockStorage.readNextBatch()).thenReturn(null) // When testedRunnable.run() // Then - verify(mockStorage).readNextBatch(any(), any()) + verify(mockStorage).readNextBatch() verifyNoMoreInteractions(mockStorage) verifyNoInteractions(mockDataUploader) verify(mockThreadPoolExecutor).schedule( @@ -448,20 +413,9 @@ internal class DataUploadRunnableTest { ) { // Given val batchId = mock() - val batchReader = mock() - val batchConfirmation = mock() val batchMetadata = forge.aNullable { batchMeta.toByteArray() } - whenever(batchReader.read()) doReturn batch - whenever(batchReader.currentMetadata()) doReturn batchMetadata - - whenever(mockStorage.readNextBatch(any(), any())) doAnswer { - whenever(mockStorage.confirmBatchRead(eq(batchId), any(), any())) doAnswer { - it.getArgument<(BatchConfirmation) -> Unit>(2).invoke(batchConfirmation) - } - it.getArgument<(BatchId, BatchReader) -> Unit>(1).invoke(batchId, batchReader) - } - + whenever(mockStorage.readNextBatch()).thenReturn(BatchData(batchId, batch, batchMetadata)) whenever( mockDataUploader.upload( fakeContext, @@ -474,9 +428,11 @@ internal class DataUploadRunnableTest { testedRunnable.run() // Then - verify(batchConfirmation, times(expectedBatchesHandled)).markAsRead(true) - verifyNoMoreInteractions(batchConfirmation) - verify(batchReader, times(expectedBatchesHandled)).read() + verify(mockStorage, times(expectedBatchesHandled)).confirmBatchRead( + any(), + any(), + eq(true) + ) verify(mockDataUploader, times(expectedBatchesHandled)) .upload(fakeContext, batch, batchMetadata) verify(mockThreadPoolExecutor).schedule( @@ -496,20 +452,9 @@ internal class DataUploadRunnableTest { ) { // Given val batchId = mock() - val batchReader = mock() - val batchConfirmation = mock() val batchMetadata = forge.aNullable { batchMeta.toByteArray() } - whenever(batchReader.read()) doReturn batch - whenever(batchReader.currentMetadata()) doReturn batchMetadata - - whenever(mockStorage.readNextBatch(any(), any())) doAnswer { - whenever(mockStorage.confirmBatchRead(eq(batchId), any(), any())) doAnswer { - it.getArgument<(BatchConfirmation) -> Unit>(2).invoke(batchConfirmation) - } - it.getArgument<(BatchId, 
BatchReader) -> Unit>(1).invoke(batchId, batchReader) - } - + whenever(mockStorage.readNextBatch()).thenReturn(BatchData(batchId, batch, batchMetadata)) whenever( mockDataUploader.upload( fakeContext, @@ -522,9 +467,11 @@ internal class DataUploadRunnableTest { testedRunnable.run() // Then - verify(batchConfirmation, times(expectedBatchesHandled)).markAsRead(false) - verifyNoMoreInteractions(batchConfirmation) - verify(batchReader, times(expectedBatchesHandled)).read() + verify(mockStorage, times(expectedBatchesHandled)).confirmBatchRead( + eq(batchId), + any(), + eq(false) + ) verify(mockDataUploader, times(expectedBatchesHandled)) .upload(fakeContext, batch, batchMetadata) verify(mockThreadPoolExecutor).schedule( @@ -545,19 +492,9 @@ internal class DataUploadRunnableTest { ) { // Given val batchId = mock() - val batchReader = mock() - val batchConfirmation = mock() val batchMetadata = forge.aNullable { batchMeta.toByteArray() } - whenever(batchReader.read()) doReturn batch - whenever(batchReader.currentMetadata()) doReturn batchMetadata - - whenever(mockStorage.readNextBatch(any(), any())) doAnswer { - whenever(mockStorage.confirmBatchRead(eq(batchId), any(), any())) doAnswer { - it.getArgument<(BatchConfirmation) -> Unit>(2).invoke(batchConfirmation) - } - it.getArgument<(BatchId, BatchReader) -> Unit>(1).invoke(batchId, batchReader) - } + whenever(mockStorage.readNextBatch()).thenReturn(BatchData(batchId, batch, batchMetadata)) whenever( mockDataUploader.upload( fakeContext, @@ -572,10 +509,11 @@ internal class DataUploadRunnableTest { } // Then - verify(batchConfirmation, times(runCount)) - .markAsRead(false) - verifyNoMoreInteractions(batchConfirmation) - verify(batchReader, times(runCount * expectedBatchesHandled)).read() + verify(mockStorage, times(runCount)).confirmBatchRead( + eq(batchId), + any(), + eq(false) + ) verify(mockDataUploader, times(runCount * expectedBatchesHandled)) .upload(fakeContext, batch, batchMetadata) verify(mockThreadPoolExecutor, times(runCount)).schedule( @@ -595,19 +533,9 @@ internal class DataUploadRunnableTest { ) { // Given val batchId = mock() - val batchReader = mock() - val batchConfirmation = mock() val batchMetadata = forge.aNullable { batchMeta.toByteArray() } - whenever(batchReader.read()) doReturn batch - whenever(batchReader.currentMetadata()) doReturn batchMetadata - - whenever(mockStorage.readNextBatch(any(), any())) doAnswer { - whenever(mockStorage.confirmBatchRead(eq(batchId), any(), any())) doAnswer { - it.getArgument<(BatchConfirmation) -> Unit>(2).invoke(batchConfirmation) - } - it.getArgument<(BatchId, BatchReader) -> Unit>(1).invoke(batchId, batchReader) - } + whenever(mockStorage.readNextBatch()).thenReturn(BatchData(batchId, batch, batchMetadata)) whenever( mockDataUploader.upload( fakeContext, @@ -620,9 +548,11 @@ internal class DataUploadRunnableTest { testedRunnable.run() // Then - verify(batchConfirmation, times(expectedBatchesHandled)).markAsRead(true) - verifyNoMoreInteractions(batchConfirmation) - verify(batchReader, times(expectedBatchesHandled)).read() + verify(mockStorage, times(expectedBatchesHandled)).confirmBatchRead( + eq(batchId), + any(), + eq(true) + ) verify(mockDataUploader, times(expectedBatchesHandled)) .upload(fakeContext, batch, batchMetadata) verify(mockThreadPoolExecutor).schedule( @@ -640,19 +570,8 @@ internal class DataUploadRunnableTest { ) { // Given val batchId = mock() - val batchReader = mock() - val batchConfirmation = mock() val batchMetadata = forge.aNullable { batchMeta.toByteArray() } - - 
whenever(batchReader.read()) doReturn batch - whenever(batchReader.currentMetadata()) doReturn batchMetadata - - whenever(mockStorage.readNextBatch(any(), any())) doAnswer { - whenever(mockStorage.confirmBatchRead(eq(batchId), any(), any())) doAnswer { - it.getArgument<(BatchConfirmation) -> Unit>(2).invoke(batchConfirmation) - } - it.getArgument<(BatchId, BatchReader) -> Unit>(1).invoke(batchId, batchReader) - } + whenever(mockStorage.readNextBatch()).thenReturn(BatchData(batchId, batch, batchMetadata)) whenever( mockDataUploader.upload( fakeContext, @@ -688,19 +607,9 @@ internal class DataUploadRunnableTest { ) { // Given val batchId = mock() - val batchReader = mock() - val batchConfirmation = mock() val batchMetadata = forge.aNullable { batchMeta.toByteArray() } - whenever(batchReader.read()) doReturn batch - whenever(batchReader.currentMetadata()) doReturn batchMetadata - - whenever(mockStorage.readNextBatch(any(), any())) doAnswer { - whenever(mockStorage.confirmBatchRead(eq(batchId), any(), any())) doAnswer { - it.getArgument<(BatchConfirmation) -> Unit>(2).invoke(batchConfirmation) - } - it.getArgument<(BatchId, BatchReader) -> Unit>(1).invoke(batchId, batchReader) - } + whenever(mockStorage.readNextBatch()).thenReturn(BatchData(batchId, batch, batchMetadata)) whenever( mockDataUploader.upload( fakeContext, @@ -749,7 +658,6 @@ internal class DataUploadRunnableTest { mockNetworkInfoProvider, mockSystemInfoProvider, fakeConfiguration, - TEST_BATCH_UPLOAD_WAIT_TIMEOUT_MS, mockInternalLogger ) // extra batches to make sure we are not reaching the limit as this will fall into the @@ -757,14 +665,13 @@ internal class DataUploadRunnableTest { val batches = forge.aList(size = runCount * fakeConfiguration.maxBatchesPerUploadJob + 10) { aList { getForgery() } } + val batchIds: List = batches.map { mock() } val randomFailIndex = forge.anInt(min = 0, max = batches.size) - val batchReader = mock() - val batchConfirmation = mock() val batchMetadata = forge.aList(size = batches.size) { aNullable { aString().toByteArray() } } - stubBatchReader(batchReader, batches, batchMetadata, batchConfirmation) + stubStorage(batchIds, batches, batchMetadata) batches.forEachIndexed { index, batch -> val expectedStatus = if (index == randomFailIndex) { @@ -809,10 +716,7 @@ internal class DataUploadRunnableTest { @IntForgery(16, 64) runCount: Int ) { // When - whenever(mockStorage.readNextBatch(any(), any())) doAnswer { - it.getArgument<() -> Unit>(0).invoke() - } - + whenever(mockStorage.readNextBatch()) doReturn null repeat(runCount) { testedRunnable.run() } @@ -852,12 +756,12 @@ internal class DataUploadRunnableTest { mockNetworkInfoProvider, mockSystemInfoProvider, fakeConfiguration, - TEST_BATCH_UPLOAD_WAIT_TIMEOUT_MS, mockInternalLogger ) val batches = forge.aList(size = runCount * fakeConfiguration.maxBatchesPerUploadJob) { aList { getForgery() } } + val batchIds: List = batches.map { mock() } val failIndexesSet = mutableSetOf().apply { var index = 0 repeat(runCount) { @@ -865,13 +769,11 @@ internal class DataUploadRunnableTest { index += fakeConfiguration.maxBatchesPerUploadJob } } - val batchReader = mock() - val batchConfirmation = mock() val batchMetadata = forge.aList(size = batches.size) { aNullable { aString().toByteArray() } } - stubBatchReader(batchReader, batches, batchMetadata, batchConfirmation) + stubStorage(batchIds, batches, batchMetadata) batches.forEachIndexed { index, batch -> val expectedStatus = if (index in failIndexesSet) { status @@ -910,70 +812,6 @@ internal class 
DataUploadRunnableTest { } } - // region async - - @Test - fun `𝕄 respect batch wait upload timeout 𝕎 run()`() { - // Given - whenever(mockStorage.readNextBatch(any(), any())) doAnswer { - // imitate async which never completes - } - - // When - testedRunnable.run() - - // Then - verify(mockThreadPoolExecutor).schedule( - same(testedRunnable), - any(), - eq(TimeUnit.MILLISECONDS) - ) - } - - @Test - fun `𝕄 stop waiting 𝕎 run() { exception is thrown }`( - @Forgery batch: List, - @StringForgery batchMeta: String, - forge: Forge - ) { - // Given - val batchId = mock() - val batchReader = mock() - val batchMetadata = forge.aNullable { batchMeta.toByteArray() } - - whenever(batchReader.read()) doReturn batch - whenever(batchReader.currentMetadata()) doReturn batchMetadata - - whenever(mockStorage.readNextBatch(any(), any())) doAnswer { - Thread { - it.getArgument<(BatchId, BatchReader) -> Unit>(1).invoke(batchId, batchReader) - }.start() - } - - whenever( - mockDataUploader.upload( - fakeContext, - batch, - batchMetadata - ) - ) doThrow forge.aThrowable() - - // When - val start = System.currentTimeMillis() - testedRunnable.run() - - // Then - assertThat(System.currentTimeMillis() - start) - .isLessThan(TEST_BATCH_UPLOAD_WAIT_TIMEOUT_MS) - verify(mockThreadPoolExecutor).schedule( - same(testedRunnable), - any(), - eq(TimeUnit.MILLISECONDS) - ) - } - - // endregion - // region maxBatchesPerJob @Test @@ -990,7 +828,6 @@ internal class DataUploadRunnableTest { mockNetworkInfoProvider, mockSystemInfoProvider, fakeConfiguration, - TEST_BATCH_UPLOAD_WAIT_TIMEOUT_MS, mockInternalLogger ) val batches = forge.aList( @@ -1001,10 +838,9 @@ internal class DataUploadRunnableTest { ) { aList { getForgery() } } - val batchReader = mock() - val batchConfirmation = mock() + val batchIds: List = batches.map { mock() } val batchMetadata = forge.aList(size = batches.size) { aNullable { aString().toByteArray() } } - stubBatchReader(batchReader, batches, batchMetadata, batchConfirmation) + stubStorage(batchIds, batches, batchMetadata) batches.forEachIndexed { index, batch -> whenever( mockDataUploader.upload( @@ -1019,12 +855,13 @@ internal class DataUploadRunnableTest { testedRunnable.run() // Then - verify(batchConfirmation, times(fakeConfiguration.maxBatchesPerUploadJob)) - .markAsRead(true) - verifyNoMoreInteractions(batchConfirmation) - verify(batchReader, times(fakeConfiguration.maxBatchesPerUploadJob)).read() batches.take(fakeConfiguration.maxBatchesPerUploadJob).forEachIndexed { index, batch -> verify(mockDataUploader).upload(fakeContext, batch, batchMetadata[index]) + verify(mockStorage).confirmBatchRead( + eq(batchIds[index]), + any(), + eq(true) + ) } verifyNoMoreInteractions(mockDataUploader) verify(mockThreadPoolExecutor).schedule( @@ -1048,7 +885,6 @@ internal class DataUploadRunnableTest { mockNetworkInfoProvider, mockSystemInfoProvider, fakeConfiguration, - TEST_BATCH_UPLOAD_WAIT_TIMEOUT_MS, mockInternalLogger ) val fakeBatchesCount = forge.anInt( @@ -1058,10 +894,9 @@ internal class DataUploadRunnableTest { val batches = forge.aList( size = fakeBatchesCount ) { aList { getForgery() } } - val batchReader = mock() - val batchConfirmation = mock() + val batchIds: List = batches.map { mock() } val batchMetadata = forge.aList(size = batches.size) { aNullable { aString().toByteArray() } } - stubBatchReader(batchReader, batches, batchMetadata, batchConfirmation) + stubStorage(batchIds, batches, batchMetadata) batches.forEachIndexed { index, batch -> whenever( mockDataUploader.upload( @@ -1076,12 +911,13 @@ 
internal class DataUploadRunnableTest { testedRunnable.run() // Then - val batchesCount = batches.size - verify(batchConfirmation, times(batchesCount)).markAsRead(true) - verifyNoMoreInteractions(batchConfirmation) - verify(batchReader, times(batchesCount)).read() batches.forEachIndexed { index, batch -> verify(mockDataUploader).upload(fakeContext, batch, batchMetadata[index]) + verify(mockStorage).confirmBatchRead( + eq(batchIds[index]), + any(), + eq(true) + ) } verifyNoMoreInteractions(mockDataUploader) verify(mockThreadPoolExecutor).schedule( @@ -1093,31 +929,24 @@ internal class DataUploadRunnableTest { // region Internal - private fun stubBatchReader( - mockBatchReader: BatchReader, + private fun stubStorage( + batchIds: List, batches: List>, - batchMeta: List, - batchConfirmation: BatchConfirmation + batchMeta: List ) { - whenever(mockBatchReader.read()).doReturnConsecutively(batches) - .thenReturn(null) - whenever(mockBatchReader.currentMetadata()).doReturnConsecutively(batchMeta) - .thenReturn(null) - val batchId = mock() - whenever(mockStorage.readNextBatch(any(), any())) doAnswer object : Answer { + reset(mockStorage) + whenever(mockStorage.readNextBatch()) doAnswer object : Answer { var count = 0 - override fun answer(invocation: InvocationOnMock) { - if (count >= batches.size) { - invocation.getArgument<() -> Unit>(0).invoke() + override fun answer(invocation: InvocationOnMock): BatchData? { + val data = if (count >= batches.size) { + null } else { - whenever(mockStorage.confirmBatchRead(any(), any(), any())) doAnswer { - it.getArgument<(BatchConfirmation) -> Unit>(2).invoke(batchConfirmation) - } - invocation.getArgument<(BatchId, BatchReader) -> Unit>(1) - .invoke(batchId, mockBatchReader) + val batchData = BatchData(batchIds[count], batches[count], batchMeta[count]) + batchData } count++ + return data } } } @@ -1125,7 +954,6 @@ internal class DataUploadRunnableTest { // endregion companion object { - const val TEST_BATCH_UPLOAD_WAIT_TIMEOUT_MS = 100L @JvmStatic fun retryBatchStatusValues(): List { diff --git a/dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/persistence/AbstractStorageTest.kt b/dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/persistence/AbstractStorageTest.kt index 58d42ad10b..4ae955e38d 100644 --- a/dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/persistence/AbstractStorageTest.kt +++ b/dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/persistence/AbstractStorageTest.kt @@ -378,84 +378,43 @@ internal class AbstractStorageTest { // region Storage.readNextBatch @Test - fun `𝕄 provide reader 𝕎 readNextBatch() {no batch}`() { + fun `𝕄 provide null 𝕎 readNextBatch() {no batch}`() { // Given - val mockNoBatchCallback = mock<() -> Unit>() - val mockReadBatchCallback = mock<(BatchId, BatchReader) -> Unit>() whenever(mockGrantedPersistenceStrategy.lockAndReadNext()) doReturn null - // When - testedStorage.readNextBatch(mockNoBatchCallback, mockReadBatchCallback) - // Then - verify(mockNoBatchCallback).invoke() + assertThat(testedStorage.readNextBatch()).isNull() verify(mockGrantedPersistenceStrategy).lockAndReadNext() - verifyNoInteractions(mockReadBatchCallback, mockPendingPersistenceStrategy) verifyNoMoreInteractions( - mockNoBatchCallback, mockGrantedPersistenceStrategy, mockInternalLogger ) } @Test - fun `𝕄 provide reader 𝕎 readNextBatch()+read() {with batch}`( + fun `𝕄 provide BatchData 𝕎 readNextBatch() {with batch}`( @Forgery fakeBatch: PersistenceStrategy.Batch ) { // Given 
- val mockNoBatchCallback = mock<() -> Unit>() - val mockReadBatchCallback = mock<(BatchId, BatchReader) -> Unit>() whenever(mockGrantedPersistenceStrategy.lockAndReadNext()) doReturn fakeBatch - var batchId: BatchId? = null - var result: List? = null - whenever(mockReadBatchCallback.invoke(any(), any())) doAnswer { - batchId = it.getArgument(0) as? BatchId - result = (it.getArgument(1) as? BatchReader)?.read() - } // When - testedStorage.readNextBatch(mockNoBatchCallback, mockReadBatchCallback) + val batchData = testedStorage.readNextBatch() // Then - assertThat(batchId).isEqualTo(BatchId(fakeBatch.batchId)) - assertThat(result).isEqualTo(fakeBatch.events) - verify(mockGrantedPersistenceStrategy).lockAndReadNext() - verifyNoInteractions(mockPendingPersistenceStrategy) - verifyNoMoreInteractions( - mockNoBatchCallback, - mockGrantedPersistenceStrategy, - mockInternalLogger - ) + assertThat(batchData).isNotNull + assertThat(batchData?.id).isEqualTo(BatchId(fakeBatch.batchId)) + assertThat(batchData?.data).isEqualTo(fakeBatch.events) + assertThat(batchData?.metadata).isEqualTo(fakeBatch.metadata) } @Test - fun `𝕄 provide reader 𝕎 readNextBatch()+currentMetadata() {with batch}`( - @Forgery fakeBatch: PersistenceStrategy.Batch - ) { + fun `𝕄 return null 𝕎 readNextBatch() {no batch}`() { // Given - val mockNoBatchCallback = mock<() -> Unit>() - val mockReadBatchCallback = mock<(BatchId, BatchReader) -> Unit>() - whenever(mockGrantedPersistenceStrategy.lockAndReadNext()) doReturn fakeBatch - var batchId: BatchId? = null - var result: ByteArray? = null - whenever(mockReadBatchCallback.invoke(any(), any())) doAnswer { - batchId = it.getArgument(0) as? BatchId - result = (it.getArgument(1) as? BatchReader)?.currentMetadata() - } - - // When - testedStorage.readNextBatch(mockNoBatchCallback, mockReadBatchCallback) + whenever(mockGrantedPersistenceStrategy.lockAndReadNext()) doReturn null // Then - assertThat(batchId).isEqualTo(BatchId(fakeBatch.batchId)) - assertThat(result).isEqualTo(fakeBatch.metadata) - verify(mockGrantedPersistenceStrategy).lockAndReadNext() - verifyNoInteractions(mockPendingPersistenceStrategy) - verifyNoMoreInteractions( - mockNoBatchCallback, - mockGrantedPersistenceStrategy, - mockInternalLogger - ) + assertThat(testedStorage.readNextBatch()).isNull() } // endregion @@ -463,18 +422,12 @@ internal class AbstractStorageTest { // region Storage.readNextBatch @Test - fun `𝕄 provide reader 𝕎 confirmBatchRead() {delete=true}`( + fun `𝕄 delete batch W confirmBatchRead() {delete=true}`( @StringForgery fakeBatchId: String, @Forgery fakeRemovalReason: RemovalReason ) { - // Given - val mockConfirmationCallback = mock<(BatchConfirmation) -> Unit>() - whenever(mockConfirmationCallback.invoke(any())) doAnswer { - (it.getArgument(0) as? BatchConfirmation)?.markAsRead(true) - } - // When - testedStorage.confirmBatchRead(BatchId(fakeBatchId), fakeRemovalReason, mockConfirmationCallback) + testedStorage.confirmBatchRead(BatchId(fakeBatchId), fakeRemovalReason, true) // Then verify(mockGrantedPersistenceStrategy).unlockAndDelete(fakeBatchId) @@ -486,18 +439,12 @@ internal class AbstractStorageTest { } @Test - fun `𝕄 provide reader 𝕎 confirmBatchRead() {delete=false}`( + fun `𝕄 keep batch W confirmBatchRead() {delete=false}`( @StringForgery fakeBatchId: String, @Forgery fakeRemovalReason: RemovalReason ) { - // Given - val mockConfirmationCallback = mock<(BatchConfirmation) -> Unit>() - whenever(mockConfirmationCallback.invoke(any())) doAnswer { - (it.getArgument(0) as? 
BatchConfirmation)?.markAsRead(false) - } - // When - testedStorage.confirmBatchRead(BatchId(fakeBatchId), fakeRemovalReason, mockConfirmationCallback) + testedStorage.confirmBatchRead(BatchId(fakeBatchId), fakeRemovalReason, false) // Then verify(mockGrantedPersistenceStrategy).unlockAndKeep(fakeBatchId) diff --git a/dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/persistence/BatchDataTest.kt b/dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/persistence/BatchDataTest.kt new file mode 100644 index 0000000000..64bb9dce21 --- /dev/null +++ b/dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/persistence/BatchDataTest.kt @@ -0,0 +1,34 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache License Version 2.0. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2016-Present Datadog, Inc. + */ + +package com.datadog.android.core.internal.persistence + +import com.datadog.android.utils.forge.Configurator +import com.datadog.tools.unit.ObjectTest +import fr.xgouchet.elmyr.Forge +import fr.xgouchet.elmyr.junit5.ForgeConfiguration +import fr.xgouchet.elmyr.junit5.ForgeExtension +import org.junit.jupiter.api.extension.ExtendWith +import org.junit.jupiter.api.extension.Extensions +import org.mockito.junit.jupiter.MockitoSettings +import org.mockito.quality.Strictness + +@Extensions(ExtendWith(ForgeExtension::class)) +@MockitoSettings(strictness = Strictness.LENIENT) +@ForgeConfiguration(Configurator::class) +internal class BatchDataTest : ObjectTest() { + override fun createInstance(forge: Forge): BatchData { + return forge.getForgery() + } + + override fun createEqualInstance(source: BatchData, forge: Forge): BatchData { + return BatchData(source.id, source.data, source.metadata) + } + + override fun createUnequalInstance(source: BatchData, forge: Forge): BatchData? { + return forge.getForgery() + } +} diff --git a/dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/persistence/ConsentAwareStorageTest.kt b/dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/persistence/ConsentAwareStorageTest.kt index 7421c7c6c4..5445842323 100644 --- a/dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/persistence/ConsentAwareStorageTest.kt +++ b/dd-sdk-android-core/src/test/kotlin/com/datadog/android/core/internal/persistence/ConsentAwareStorageTest.kt @@ -32,7 +32,6 @@ import org.junit.jupiter.api.BeforeEach import org.junit.jupiter.api.Test import org.junit.jupiter.api.extension.ExtendWith import org.junit.jupiter.api.extension.Extensions -import org.junit.jupiter.api.fail import org.mockito.Mock import org.mockito.junit.jupiter.MockitoExtension import org.mockito.junit.jupiter.MockitoSettings @@ -359,7 +358,7 @@ internal class ConsentAwareStorageTest { // region readNextBatch @Test - fun `𝕄 provide reader 𝕎 readNextBatch()`( + fun `𝕄 provide batchData 𝕎 readNextBatch()`( @Forgery fakeData: List, @StringForgery metadata: String, @Forgery batchFile: File @@ -377,42 +376,37 @@ internal class ConsentAwareStorageTest { whenever(mockMetaReaderWriter.readData(mockMetaFile)) doReturn mockMetadata // Whenever - var readData: List? = null - var readMetadata: ByteArray? 
= null - testedStorage.readNextBatch { _, reader -> - readMetadata = reader.currentMetadata() - readData = reader.read() - } + val batchData = testedStorage.readNextBatch() // Then - assertThat(readData).isEqualTo(fakeData) - assertThat(readMetadata).isEqualTo(mockMetadata) + assertThat(batchData).isNotNull + assertThat(batchData?.id).isNotNull + assertThat(batchData?.data).isEqualTo(fakeData) + assertThat(batchData?.metadata).isEqualTo(mockMetadata) } @Test - fun `𝕄 provide reader 𝕎 readNextBatch() { no metadata file provided }`( + fun `𝕄 provide batchData 𝕎 readNextBatch() { no metadata file provided }`( @Forgery fakeData: List, @Forgery batchFile: File ) { // Given whenever(mockGrantedOrchestrator.getReadableFile(any())) doReturn batchFile whenever(mockBatchReaderWriter.readData(batchFile)) doReturn fakeData + whenever(mockGrantedOrchestrator.getMetadataFile(batchFile)) doReturn null // Whenever - var readData: List? = null - var readMetadata: ByteArray? = null - testedStorage.readNextBatch { _, reader -> - readMetadata = reader.currentMetadata() - readData = reader.read() - } + val batchData = testedStorage.readNextBatch() // Then - assertThat(readData).isEqualTo(fakeData) - assertThat(readMetadata).isNull() + assertThat(batchData).isNotNull + assertThat(batchData?.id).isNotNull + assertThat(batchData?.data).isEqualTo(fakeData) + assertThat(batchData?.metadata).isNull() } @Test - fun `𝕄 provide reader 𝕎 readNextBatch() { metadata file doesn't exist }`( + fun `𝕄 provide batchData 𝕎 readNextBatch() { metadata file doesn't exist }`( @Forgery fakeData: List, @Forgery batchFile: File ) { @@ -427,62 +421,54 @@ internal class ConsentAwareStorageTest { whenever(mockGrantedOrchestrator.getMetadataFile(batchFile)) doReturn mockMetaFile // Whenever - var readData: List? = null - var readMetadata: ByteArray? = null - testedStorage.readNextBatch { _, reader -> - readMetadata = reader.currentMetadata() - readData = reader.read() - } + val batchData = testedStorage.readNextBatch() // Then - assertThat(readData).isEqualTo(fakeData) - assertThat(readMetadata).isNull() + assertThat(batchData).isNotNull + assertThat(batchData?.id).isNotNull + assertThat(batchData?.data).isEqualTo(fakeData) + assertThat(batchData?.metadata).isNull() } @Test - fun `𝕄 provide reader only once 𝕎 readNextBatch()`( - @Forgery fakeData: List, - @Forgery file: File - ) { + fun `𝕄 return null no batch available 𝕎 readNextBatch() {no file}`() { // Given - whenever(mockGrantedOrchestrator.getReadableFile(emptySet())) doReturn file - whenever(mockGrantedOrchestrator.getReadableFile(setOf(file))) doReturn null - whenever(mockBatchReaderWriter.readData(file)) doReturn fakeData + whenever(mockGrantedOrchestrator.getReadableFile(any())) doReturn null - // When - var readData: List? 
= null - testedStorage.readNextBatch { _, reader -> - readData = reader.read() - } - testedStorage.readNextBatch { _, _ -> - fail { "Callback should not have been called again" } - } + // Whenever + val batchData = testedStorage.readNextBatch() // Then - assertThat(readData).isEqualTo(fakeData) + assertThat(batchData).isNull() } + // endregion + + // region confirmBatchRead + @Test - fun `𝕄 notify no batch available 𝕎 readNextBatch() {no file}`() { + fun `𝕄 read batch twice if released 𝕎 readNextBatch()+confirmBatchRead() {delete=false}`( + @Forgery reason: RemovalReason, + @Forgery file: File + ) { // Given - whenever(mockGrantedOrchestrator.getReadableFile(any())) doReturn null - val mockNoBatchCallback = mock<() -> Unit>() + whenever(mockGrantedOrchestrator.getReadableFile(emptySet())) doReturn file + val mockMetaFile: File = mock() + whenever(mockMetaFile.exists()) doReturn true + whenever(mockGrantedOrchestrator.getMetadataFile(file)) doReturn mockMetaFile + whenever(mockGrantedOrchestrator.getReadableFile(setOf(file))) doReturn null // When - testedStorage.readNextBatch( - noBatchCallback = mockNoBatchCallback - ) { _, _ -> - fail { "Callback should not have been called here" } - } + val batchData1 = testedStorage.readNextBatch() + testedStorage.confirmBatchRead(batchData1!!.id, reason, false) + val batchData2 = testedStorage.readNextBatch() // Then - verify(mockNoBatchCallback).invoke() + verify(mockFileMover, never()).delete(file) + verify(mockFileMover, never()).delete(mockMetaFile) + assertThat(batchData1).isEqualTo(batchData2) } - // endregion - - // region confirmBatchRead - @Test fun `𝕄 delete batch files 𝕎 readNextBatch()+confirmBatchRead() {delete=true}`( @Forgery file: File, @@ -511,13 +497,9 @@ internal class ConsentAwareStorageTest { doReturn(true).whenever(mockFileMover).delete(mockMetaFile) // When - var batchId: BatchId? = null - testedStorage.readNextBatch { id, _ -> - batchId = id - } - testedStorage.confirmBatchRead(batchId!!, reason) { confirm -> - confirm.markAsRead(true) - } + val batchData1 = testedStorage.readNextBatch() + testedStorage.confirmBatchRead(batchData1!!.id, reason, true) + testedStorage.readNextBatch() // Then verify(mockFileMover).delete(file) @@ -525,40 +507,6 @@ internal class ConsentAwareStorageTest { verify(mockMetricsDispatcher).sendBatchDeletedMetric(eq(file), eq(reason)) } - @Test - fun `𝕄 read batch twice if released 𝕎 readNextBatch()+confirmBatchRead() {delete=false}`( - @Forgery reason: RemovalReason, - @Forgery file: File - ) { - // Given - whenever(mockGrantedOrchestrator.getReadableFile(emptySet())) doReturn file - val mockMetaFile: File = mock() - whenever(mockMetaFile.exists()) doReturn true - whenever(mockGrantedOrchestrator.getMetadataFile(file)) doReturn mockMetaFile - whenever(mockGrantedOrchestrator.getReadableFile(setOf(file))) doReturn null - - // When - var batchId1: BatchId? = null - var batchId2: BatchId? 
= null - testedStorage.readNextBatch { id, _ -> - batchId1 = id - } - testedStorage.readNextBatch { _, _ -> - fail { "Callback should not have been called here" } - } - testedStorage.confirmBatchRead(batchId1!!, reason) { confirm -> - confirm.markAsRead(false) - } - testedStorage.readNextBatch { id, _ -> - batchId2 = id - } - - // Then - verify(mockFileMover, never()).delete(file) - verify(mockFileMover, never()).delete(mockMetaFile) - assertThat(batchId1).isEqualTo(batchId2) - } - @Test fun `𝕄 keep batch file locked 𝕎 readNextBatch()+confirmBatchRead() {delete=true, != batchId}`( @Forgery file: File, @@ -572,15 +520,9 @@ internal class ConsentAwareStorageTest { whenever(mockGrantedOrchestrator.getMetadataFile(file)) doReturn mockMetaFile // When - testedStorage.readNextBatch { _, _ -> - // no-op - } - testedStorage.confirmBatchRead(BatchId.fromFile(anotherFile), reason) { confirm -> - confirm.markAsRead(true) - } - testedStorage.readNextBatch { _, _ -> - fail { "Callback should not have been called here" } - } + testedStorage.readNextBatch() + testedStorage.confirmBatchRead(BatchId.fromFile(anotherFile), reason, false) + testedStorage.readNextBatch() // Then verify(mockFileMover, never()).delete(file) @@ -598,13 +540,9 @@ internal class ConsentAwareStorageTest { whenever(mockFileMover.delete(file)) doReturn false // When - var batchId: BatchId? = null - testedStorage.readNextBatch { id, _ -> - batchId = id - } - testedStorage.confirmBatchRead(batchId!!, reason) { confirm -> - confirm.markAsRead(true) - } + val batchData1 = testedStorage.readNextBatch() + testedStorage.confirmBatchRead(batchData1!!.id, reason, true) + testedStorage.readNextBatch() // Then verify(mockFileMover).delete(file) @@ -633,13 +571,8 @@ internal class ConsentAwareStorageTest { doReturn(false).whenever(mockFileMover).delete(mockMetaFile) // When - var batchId: BatchId? = null - testedStorage.readNextBatch { id, _ -> - batchId = id - } - testedStorage.confirmBatchRead(batchId!!, reason) { confirm -> - confirm.markAsRead(true) - } + val batchData1 = testedStorage.readNextBatch() + testedStorage.confirmBatchRead(batchData1!!.id, reason, true) // Then verify(mockFileMover).delete(file) @@ -748,9 +681,7 @@ internal class ConsentAwareStorageTest { // ConcurrentModificationException is thrown only during the next check after remove, // so to make sure it is not thrown we need at least 2 locked batches repeat(files.size) { - testedStorage.readNextBatch { _, _ -> - // no-op - } + testedStorage.readNextBatch() } // When diff --git a/dd-sdk-android-core/src/test/kotlin/com/datadog/android/utils/forge/BatchDataForgeryFactory.kt b/dd-sdk-android-core/src/test/kotlin/com/datadog/android/utils/forge/BatchDataForgeryFactory.kt new file mode 100644 index 0000000000..6380528727 --- /dev/null +++ b/dd-sdk-android-core/src/test/kotlin/com/datadog/android/utils/forge/BatchDataForgeryFactory.kt @@ -0,0 +1,29 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache License Version 2.0. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2016-Present Datadog, Inc. 
+ */
+
+package com.datadog.android.utils.forge
+
+import com.datadog.android.core.internal.persistence.BatchData
+import com.datadog.android.core.internal.persistence.BatchId
+import fr.xgouchet.elmyr.Forge
+import fr.xgouchet.elmyr.ForgeryFactory
+
+internal class BatchDataForgeryFactory : ForgeryFactory<BatchData> {
+    override fun getForgery(forge: Forge): BatchData {
+        return BatchData(
+            id = BatchId(id = forge.anAlphaNumericalString()),
+            data = forge.aList { getForgery() },
+            metadata = forge.aNullable {
+                forge.aString(
+                    forge.anInt(min = 0, max = DATA_SIZE_LIMIT)
+                ).toByteArray()
+            }
+        )
+    }
+    companion object {
+        const val DATA_SIZE_LIMIT = 32
+    }
+}
diff --git a/dd-sdk-android-core/src/test/kotlin/com/datadog/android/utils/forge/Configurator.kt b/dd-sdk-android-core/src/test/kotlin/com/datadog/android/utils/forge/Configurator.kt
index 0d35916a58..363b80cfad 100644
--- a/dd-sdk-android-core/src/test/kotlin/com/datadog/android/utils/forge/Configurator.kt
+++ b/dd-sdk-android-core/src/test/kotlin/com/datadog/android/utils/forge/Configurator.kt
@@ -26,6 +26,7 @@ internal class Configurator :
         forge.addFactory(FilePersistenceConfigForgeryFactory())
         forge.addFactory(AndroidInfoProviderForgeryFactory())
         forge.addFactory(FeatureStorageConfigurationForgeryFactory())
+        forge.addFactory(BatchDataForgeryFactory())
 
         // IO
         forge.addFactory(BatchForgeryFactory())
diff --git a/detekt_custom.yml b/detekt_custom.yml
index 15fb7e4595..d4bc87ff59 100644
--- a/detekt_custom.yml
+++ b/detekt_custom.yml
@@ -800,6 +800,7 @@ datadog:
       - "kotlin.collections.List.forEachIndexed(kotlin.Function2)"
       - "kotlin.collections.List.getOrNull(kotlin.Int)"
       - "kotlin.collections.List.groupBy(kotlin.Function1)"
+      - "kotlin.collections.List.hashCode()"
       - "kotlin.collections.List.isEmpty()"
       - "kotlin.collections.List.isNotEmpty()"
       - "kotlin.collections.List.join(kotlin.ByteArray, kotlin.ByteArray, kotlin.ByteArray, com.datadog.android.api.InternalLogger)"
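For readers following the API change, the synchronous storage surface exercised by the tests above can be summed up as below. This is a minimal sketch inferred from this patch (BatchData, BatchId, RemovalReason, readNextBatch, confirmBatchRead are the names the tests use); the real declarations in dd-sdk-android-core carry additional members, and the equals/hashCode handling of BatchData (hinted at by BatchDataTest extending ObjectTest and by the new List.hashCode() detekt entry) is deliberately left out here.

// Sketch only: a plausible shape for the synchronous batch-reading API used in this patch,
// not the SDK's exact declarations.
import com.datadog.android.api.storage.RawBatchEvent
import com.datadog.android.core.internal.metrics.RemovalReason
import com.datadog.android.core.internal.persistence.BatchId

internal data class BatchData(
    val id: BatchId,
    val data: List<RawBatchEvent>,
    val metadata: ByteArray?
    // The real class presumably overrides equals()/hashCode() to compare data and metadata
    // by content, which is what the ObjectTest-based BatchDataTest verifies.
)

// Excerpt; the other members of the real Storage interface are omitted.
internal interface Storage {
    // Locks and returns the next readable batch, or null when no batch is available.
    fun readNextBatch(): BatchData?

    // Releases a previously read batch: deleteBatch = true removes its files,
    // deleteBatch = false unlocks them so the same batch can be read again later.
    fun confirmBatchRead(batchId: BatchId, removalReason: RemovalReason, deleteBatch: Boolean)
}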
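Because Configurator now registers BatchDataForgeryFactory, any test configured with this Configurator can have BatchData instances injected directly via @Forgery. The snippet below is a hypothetical usage sketch (the test class name and assertions are illustrative, not part of the patch), assuming the same ForgeExtension setup used by the other tests in this module.

import com.datadog.android.core.internal.persistence.BatchData
import com.datadog.android.utils.forge.Configurator
import fr.xgouchet.elmyr.annotation.Forgery
import fr.xgouchet.elmyr.junit5.ForgeConfiguration
import fr.xgouchet.elmyr.junit5.ForgeExtension
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.extension.ExtendWith
import org.junit.jupiter.api.extension.Extensions

@Extensions(ExtendWith(ForgeExtension::class))
@ForgeConfiguration(Configurator::class)
internal class BatchDataForgeryUsageExampleTest {

    @Test
    fun `forged BatchData has a usable id and event list`(
        @Forgery fakeBatchData: BatchData
    ) {
        // BatchId wraps the non-blank alphanumerical string produced by the factory.
        assertThat(fakeBatchData.id.id).isNotEmpty()
        // data is always a (possibly empty) list of RawBatchEvent forgeries.
        assertThat(fakeBatchData.data).isNotNull()
        // metadata is intentionally nullable (forge.aNullable), so no assertion on it here.
    }
}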