Commit

Merge branch 'feature/sdkv2' into feature/session-replay-vo
mariusc83 committed Jul 7, 2022
2 parents c37617f + d150a63 · commit 244001a
Showing 24 changed files with 2,748 additions and 399 deletions.
18 changes: 5 additions & 13 deletions dd-sdk-android/apiSurface
@@ -1505,7 +1505,7 @@ interface com.datadog.android.v2.api.FeatureScope
data class com.datadog.android.v2.api.FeatureStorageConfiguration
constructor(Int, Int, Int, Long)
data class com.datadog.android.v2.api.FeatureUploadConfiguration
constructor(String, PayloadFormat)
constructor(RequestFactory)
interface com.datadog.android.v2.api.InternalLogger
enum Level
- DEBUG
@@ -1521,6 +1521,10 @@ interface com.datadog.android.v2.api.PayloadFormat
fun prefixBytes(): ByteArray
fun suffixBytes(): ByteArray
fun separatorBytes(): ByteArray
data class com.datadog.android.v2.api.Request
constructor(String, String, String, Map<String, String>, ByteArray, String? = null)
interface com.datadog.android.v2.api.RequestFactory
fun create(com.datadog.android.v2.api.context.DatadogContext, List<ByteArray>, ByteArray?): Request
interface com.datadog.android.v2.api.SDKCore
fun registerFeature(String, FeatureStorageConfiguration, FeatureUploadConfiguration)
fun getFeature(String): FeatureScope?
@@ -1564,18 +1568,6 @@ data class com.datadog.android.v2.api.context.TimeInfo
constructor(Long, Long)
data class com.datadog.android.v2.api.context.UserInfo
constructor(String?, String?, String?, Map<String, Any>)
class com.datadog.android.v2.core.DatadogCore : com.datadog.android.v2.api.SDKCore
constructor(android.content.Context, com.datadog.android.core.configuration.Credentials, com.datadog.android.core.configuration.Configuration)
override fun registerFeature(String, com.datadog.android.v2.api.FeatureStorageConfiguration, com.datadog.android.v2.api.FeatureUploadConfiguration)
override fun getFeature(String): com.datadog.android.v2.api.FeatureScope?
override fun setVerbosity(Int)
override fun getVerbosity(): Int
override fun setTrackingConsent(com.datadog.android.privacy.TrackingConsent)
override fun setUserInfo(com.datadog.android.core.model.UserInfo)
override fun clearAllData()
override fun stop()
override fun flushStoredData()
companion object
class com.datadog.android.webview.DatadogEventBridge
constructor()
constructor(List<String>)
@@ -11,13 +11,13 @@ import androidx.annotation.WorkerThread
import androidx.work.Worker
import androidx.work.WorkerParameters
import com.datadog.android.Datadog
import com.datadog.android.core.internal.net.DataUploader
import com.datadog.android.core.internal.net.UploadStatus
import com.datadog.android.core.internal.persistence.Batch
import com.datadog.android.core.internal.persistence.DataReader
import com.datadog.android.core.internal.utils.devLogger
import com.datadog.android.core.internal.utils.sdkLogger
import com.datadog.android.v2.api.context.DatadogContext
import com.datadog.android.v2.core.DatadogCore
import com.datadog.android.v2.core.DatadogFeature
import com.datadog.android.v2.core.internal.net.DataUploader
import com.datadog.android.v2.core.internal.storage.Storage

internal class UploadWorker(
appContext: Context,
@@ -33,46 +33,49 @@ internal class UploadWorker(
return Result.success()
}

// Upload Crash reports
val globalSDKCore: DatadogCore? = (Datadog.globalSDKCore as? DatadogCore)
listOfNotNull(
globalSDKCore?.crashReportsFeature,
globalSDKCore?.logsFeature,
globalSDKCore?.tracingFeature,
globalSDKCore?.rumFeature,
globalSDKCore?.webViewLogsFeature,
globalSDKCore?.webViewRumFeature
)
.forEach {
uploadAllBatches(
it.persistenceStrategy.getReader(),

if (globalSDKCore != null) {
val features =
globalSDKCore.getAllFeatures().mapNotNull { it as? DatadogFeature }

features.forEach {
// TODO RUMM-2296 do interleaving/randomization for the upload sequence, because
// if some feature upload is slow, all other feature uploads will wait until
// feature completes with all its batches
uploadNextBatch(
globalSDKCore,
it.storage,
it.uploader
)
}
}

return Result.success()
}

@WorkerThread
private fun uploadAllBatches(
reader: DataReader,
private fun uploadNextBatch(
datadogCore: DatadogCore,
storage: Storage,
uploader: DataUploader
) {
val failedBatches = mutableListOf<Batch>()
var batch: Batch?
do {
batch = reader.lockAndReadNext()
if (batch != null) {
if (consumeBatch(batch, uploader)) {
reader.drop(batch)
} else {
failedBatches.add(batch)
}
// context is unique for each batch query instead of using the same one for all the batches
// which will be uploaded, because it can change by the time the upload of the next batch
// is requested.
val context = datadogCore.context ?: return

storage.readNextBatch(context) { batchId, reader ->
val batch = reader.read()
val batchMeta = reader.currentMetadata()

val success = consumeBatch(context, batch, batchMeta, uploader)
storage.confirmBatchRead(batchId) { confirmation ->
confirmation.markAsRead(deleteBatch = success)
}
} while (batch != null)

failedBatches.forEach {
reader.release(it)
// TODO RUMM-2295 stack overflow protection?
uploadNextBatch(datadogCore, storage, uploader)
}
}

@@ -81,30 +84,14 @@
// region Internal

private fun consumeBatch(
batch: Batch,
context: DatadogContext,
batch: List<ByteArray>,
batchMeta: ByteArray?,
uploader: DataUploader
): Boolean {
val status = uploader.upload(batch.data)
status.logStatus(
uploader.javaClass.simpleName,
batch.data.size,
devLogger,
ignoreInfo = false,
sendToTelemetry = false
)
status.logStatus(
uploader.javaClass.simpleName,
batch.data.size,
sdkLogger,
ignoreInfo = true,
sendToTelemetry = true
)
val status = uploader.upload(context, batch, batchMeta)
return status == UploadStatus.SUCCESS
}

// endregion

companion object {
private const val TAG = "UploadWorker"
}
}
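
The control flow above is easier to read outside the diff: instead of draining a DataReader in a do/while loop, the worker now asks Storage for a single batch, uploads it, confirms the read (deleting the batch only on success), and recurses until no batch is handed back. Below is a minimal sketch of that contract; the Storage, reader and confirmation shapes are simplified assumptions inferred from the calls above, not the SDK's actual interfaces in com.datadog.android.v2.core.internal.storage.

// Hypothetical, simplified stand-in for the v2 Storage contract, for illustration only.
class InMemoryStorage(
    private val pending: ArrayDeque<Pair<String, List<ByteArray>>>
) {
    // Batches handed out but not yet deleted; a real implementation would retry them later.
    private val kept = mutableListOf<Pair<String, List<ByteArray>>>()

    // Mirrors storage.readNextBatch(context) { batchId, reader -> ... }: if a batch is
    // pending, hand it to the callback; otherwise do nothing, which ends the recursion.
    fun readNextBatch(callback: (batchId: String, data: List<ByteArray>, meta: ByteArray?) -> Unit) {
        val next = pending.removeFirstOrNull() ?: return
        kept.add(next)
        callback(next.first, next.second, null)
    }

    // Mirrors confirmation.markAsRead(deleteBatch = success): a successful upload deletes
    // the batch, a failed one keeps it for a future retry.
    fun confirmBatchRead(batchId: String, deleteBatch: Boolean) {
        if (deleteBatch) kept.removeAll { it.first == batchId }
    }
}

// The worker's new loop reduced to its essence: read -> upload -> confirm -> recurse.
// The recursion depth grows with the number of pending batches, which is what TODO RUMM-2295 is about.
fun uploadAll(storage: InMemoryStorage, upload: (List<ByteArray>) -> Boolean) {
    storage.readNextBatch { batchId, data, _ ->
        val success = upload(data)
        storage.confirmBatchRead(batchId, deleteBatch = success)
        uploadAll(storage, upload)
    }
}
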
@@ -9,10 +9,8 @@ package com.datadog.android.v2.api
/**
* Contains the upload configuration for a [FeatureScope] instance.
*
* @property endpointUrl the url endpoint data should be uploaded to
* @property payloadFormat the expected format of the payload
* @property requestFactory creates a request from a given batch and its metadata
*/
data class FeatureUploadConfiguration(
val endpointUrl: String,
val payloadFormat: PayloadFormat
val requestFactory: RequestFactory
)
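
A hedged illustration of building the new configuration: since RequestFactory is a fun interface (defined below), a lambda is enough. The description, endpoint URL, header and payload assembly here are placeholders made up for the example, not the values any real feature uses.

import com.datadog.android.v2.api.FeatureUploadConfiguration
import com.datadog.android.v2.api.Request
import com.datadog.android.v2.api.RequestFactory
import java.util.UUID

// Illustrative wiring only: real features provide their own intake URL, headers and payload format.
val uploadConfiguration = FeatureUploadConfiguration(
    requestFactory = RequestFactory { _, batchData, _ ->
        Request(
            id = UUID.randomUUID().toString(),
            description = "Logs request", // placeholder description
            url = "https://logs.example.com/api/v2/logs", // placeholder endpoint
            headers = mapOf("DD-API-KEY" to "<client-token>"), // placeholder header
            // naive payload assembly: newline-join the raw events of the batch
            body = batchData.joinToString("\n") { String(it) }.toByteArray(),
            contentType = "application/json"
        )
    }
)
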
@@ -0,0 +1,28 @@
/*
* Unless explicitly stated otherwise all files in this repository are licensed under the Apache License Version 2.0.
* This product includes software developed at Datadog (https://www.datadoghq.com/).
* Copyright 2016-Present Datadog, Inc.
*/

package com.datadog.android.v2.api

/**
* Request object holding the data to be sent.
*
* @property id Unique identifier of the request.
* @property description Description of the request (ex. "RUM request", "Logs request", etc.).
* @property url URL to call.
* @property headers Request headers. Note that User Agent header will be ignored.
* @property body Request payload.
* @property contentType Content type of the request, if needed.
*/
data class Request(
val id: String,
val description: String,
val url: String,
val headers: Map<String, String>,
// won't generate custom equals/hashcode, because ID field is enough to identify the request
// and we don't want to have array content comparison
@Suppress("ArrayInDataClass") val body: ByteArray,
val contentType: String? = null
)
@@ -0,0 +1,28 @@
/*
* Unless explicitly stated otherwise all files in this repository are licensed under the Apache License Version 2.0.
* This product includes software developed at Datadog (https://www.datadoghq.com/).
* Copyright 2016-Present Datadog, Inc.
*/

package com.datadog.android.v2.api

import com.datadog.android.v2.api.context.DatadogContext

/**
* Factory used to build requests from the batches stored.
*/
fun interface RequestFactory {

// TODO RUMM-2298 Support 1:many relationship between batch and requests
/**
* Creates a request for the given batch.
* @param context Datadog SDK context.
* @param batchData Raw data of the batch.
* @param batchMetadata Raw metadata of the batch.
*/
fun create(
context: DatadogContext,
batchData: List<ByteArray>,
batchMetadata: ByteArray?
): Request
}
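
On the consumer side of this contract, the upload machinery is expected to ask the factory for a Request and execute it. The v2 DataUploader implementation is not part of this diff, so the following is only a sketch of that flow using OkHttp; the function name and error handling are assumptions for illustration.

import com.datadog.android.v2.api.RequestFactory
import com.datadog.android.v2.api.context.DatadogContext
import okhttp3.MediaType.Companion.toMediaTypeOrNull
import okhttp3.OkHttpClient
import okhttp3.RequestBody.Companion.toRequestBody

// Hypothetical consumer: builds a Request via the factory and executes it synchronously,
// returning true on an HTTP 2xx so the caller can decide whether to delete the batch.
fun uploadBatch(
    client: OkHttpClient,
    factory: RequestFactory,
    context: DatadogContext,
    batchData: List<ByteArray>,
    batchMetadata: ByteArray?
): Boolean {
    val request = factory.create(context, batchData, batchMetadata)
    val httpRequest = okhttp3.Request.Builder()
        .url(request.url)
        .apply { request.headers.forEach { (name, value) -> header(name, value) } }
        .post(request.body.toRequestBody(request.contentType?.toMediaTypeOrNull()))
        .build()
    return client.newCall(httpRequest).execute().use { it.isSuccessful }
}
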
@@ -33,6 +33,7 @@ import com.datadog.android.v2.api.FeatureScope
import com.datadog.android.v2.api.FeatureStorageConfiguration
import com.datadog.android.v2.api.FeatureUploadConfiguration
import com.datadog.android.v2.api.SDKCore
import com.datadog.android.v2.api.context.DatadogContext
import com.datadog.android.webview.internal.log.WebViewLogsFeature
import com.datadog.android.webview.internal.rum.WebViewRumFeature
import com.datadog.opentracing.DDSpan
@@ -42,7 +43,7 @@ import com.google.gson.JsonObject
* Internal implementation of the [SDKCore] interface.
* @param credentials the Datadog credentials for this instance
*/
class DatadogCore(
internal class DatadogCore(
context: Context,
internal val credentials: Credentials,
configuration: Configuration
@@ -61,11 +62,15 @@ class DatadogCore(
internal var webViewLogsFeature: SdkFeature<JsonObject, Configuration.Feature.Logs>? = null
internal var webViewRumFeature: SdkFeature<Any, Configuration.Feature.RUM>? = null

// TODO RUMM-0000 handle context
internal var context: DatadogContext? = null

init {
val isDebug = isAppDebuggable(context)
if (isEnvironmentNameValid(credentials.envName)) {
initialize(context, credentials, configuration, isDebug)
} else {
@Suppress("ThrowingInternalException")
throw IllegalArgumentException(MESSAGE_ENV_NAME_NOT_VALID)
}
}
@@ -143,6 +148,15 @@ class DatadogCore(
webViewRumFeature?.flushStoredData()
}

/**
* Returns all registered features.
*/
fun getAllFeatures(): List<FeatureScope> {
// TODO-2138
// should it be a part of SDKCore?
return emptyList()
}

// endregion

// region Internal Initialization
@@ -0,0 +1,16 @@
/*
* Unless explicitly stated otherwise all files in this repository are licensed under the Apache License Version 2.0.
* This product includes software developed at Datadog (https://www.datadoghq.com/).
* Copyright 2016-Present Datadog, Inc.
*/

package com.datadog.android.v2.core

import com.datadog.android.v2.api.FeatureScope
import com.datadog.android.v2.core.internal.net.DataUploader
import com.datadog.android.v2.core.internal.storage.Storage

internal abstract class DatadogFeature(
val storage: Storage,
val uploader: DataUploader
) : FeatureScope
@@ -0,0 +1,13 @@
/*
* Unless explicitly stated otherwise all files in this repository are licensed under the Apache License Version 2.0.
* This product includes software developed at Datadog (https://www.datadoghq.com/).
* Copyright 2016-Present Datadog, Inc.
*/

package com.datadog.android.v2.core.internal

import com.datadog.android.v2.api.context.DatadogContext

internal interface ContextProvider {
val context: DatadogContext?
}
@@ -0,0 +1,13 @@
/*
* Unless explicitly stated otherwise all files in this repository are licensed under the Apache License Version 2.0.
* This product includes software developed at Datadog (https://www.datadoghq.com/).
* Copyright 2016-Present Datadog, Inc.
*/

package com.datadog.android.v2.core.internal

import com.datadog.android.v2.api.context.DatadogContext

internal class NoOpContextProvider : ContextProvider {
override val context: DatadogContext? = null
}
@@ -0,0 +1,44 @@
/*
* Unless explicitly stated otherwise all files in this repository are licensed under the Apache License Version 2.0.
* This product includes software developed at Datadog (https://www.datadoghq.com/).
* Copyright 2016-Present Datadog, Inc.
*/

package com.datadog.android.v2.core.internal.data.upload

import androidx.annotation.WorkerThread
import com.datadog.android.core.internal.persistence.file.FileMover
import com.datadog.android.core.internal.persistence.file.FileOrchestrator
import com.datadog.android.core.internal.persistence.file.FileReader
import com.datadog.android.core.internal.persistence.file.batch.BatchFileReader
import com.datadog.android.core.internal.persistence.file.existsSafe
import com.datadog.android.v2.core.internal.ContextProvider
import com.datadog.android.v2.core.internal.net.DataUploader

// TODO RUMM-0000 Should replace com.datadog.android.core.internal.net.DataFlusher once
// features are configured as V2
internal class DataFlusher(
internal val contextProvider: ContextProvider,
internal val fileOrchestrator: FileOrchestrator,
internal val fileReader: BatchFileReader,
internal val metadataFileReader: FileReader,
internal val fileMover: FileMover
) : Flusher {

@WorkerThread
override fun flush(uploader: DataUploader) {
val context = contextProvider.context ?: return

val toUploadFiles = fileOrchestrator.getFlushableFiles()
toUploadFiles.forEach {
val batch = fileReader.readData(it)
val metaFile = fileOrchestrator.getMetadataFile(it)
val meta = if (metaFile != null) metadataFileReader.readData(metaFile) else null
uploader.upload(context, batch, meta)
fileMover.delete(it)
if (metaFile?.existsSafe() == true) {
fileMover.delete(metaFile)
}
}
}
}
(Remaining file diffs were not loaded in this view.)
