diff --git a/airbyte-cdk/bulk/core/base/src/main/resources/application.yaml b/airbyte-cdk/bulk/core/base/src/main/resources/application.yaml index 6174bb320390..7bad9b3fce29 100644 --- a/airbyte-cdk/bulk/core/base/src/main/resources/application.yaml +++ b/airbyte-cdk/bulk/core/base/src/main/resources/application.yaml @@ -9,4 +9,4 @@ airbyte: rate-ms: 900000 # 15 minutes window-ms: 900000 # 15 minutes destination: - record-batch-size: ${AIRBYTE_DESTINATION_RECORD_BATCH_SIZE:209715200} + record-batch-size-override: ${AIRBYTE_DESTINATION_RECORD_BATCH_SIZE_OVERRIDE:null} diff --git a/airbyte-cdk/bulk/core/extract/src/testFixtures/kotlin/io/airbyte/cdk/read/DynamicDatatypeTestFactory.kt b/airbyte-cdk/bulk/core/extract/src/testFixtures/kotlin/io/airbyte/cdk/read/DynamicDatatypeTestFactory.kt new file mode 100644 index 000000000000..5f547521b2db --- /dev/null +++ b/airbyte-cdk/bulk/core/extract/src/testFixtures/kotlin/io/airbyte/cdk/read/DynamicDatatypeTestFactory.kt @@ -0,0 +1,306 @@ +/* Copyright (c) 2024 Airbyte, Inc., all rights reserved. */ +package io.airbyte.cdk.read + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ObjectNode +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings +import io.airbyte.cdk.ClockFactory +import io.airbyte.cdk.command.CliRunner +import io.airbyte.cdk.command.ConfigurationSpecification +import io.airbyte.cdk.command.SourceConfiguration +import io.airbyte.cdk.command.SourceConfigurationFactory +import io.airbyte.cdk.data.AirbyteSchemaType +import io.airbyte.cdk.discover.MetaField +import io.airbyte.cdk.output.BufferingOutputConsumer +import io.airbyte.cdk.util.Jsons +import io.airbyte.protocol.models.v0.AirbyteLogMessage +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.AirbyteStream +import io.airbyte.protocol.models.v0.AirbyteTraceMessage +import io.airbyte.protocol.models.v0.CatalogHelpers +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import io.airbyte.protocol.models.v0.SyncMode +import io.github.oshai.kotlinlogging.KotlinLogging +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.DynamicContainer +import org.junit.jupiter.api.DynamicNode +import org.junit.jupiter.api.DynamicTest +import org.junit.jupiter.api.function.Executable +import org.testcontainers.containers.GenericContainer + +class DynamicDatatypeTestFactory< + DB : GenericContainer<*>, + CS : ConfigurationSpecification, + C : SourceConfiguration, + F : SourceConfigurationFactory, + T : DatatypeTestCase, +>( + val ops: DatatypeTestOperations, +) { + private val log = KotlinLogging.logger {} + + fun build(dbContainer: DB): Iterable { + val actual = DiscoverAndReadAll(ops) { dbContainer } + val discoverAndReadAllTest: DynamicNode = + DynamicTest.dynamicTest("discover-and-read-all", actual) + val testCases: List = + ops.testCases.map { (id: String, testCase: T) -> + DynamicContainer.dynamicContainer(id, dynamicTests(actual, testCase)) + } + return listOf(discoverAndReadAllTest) + testCases + } + + private fun dynamicTests( + actual: DiscoverAndReadAll, + testCase: T + ): List { + val streamTests: List = + if (!testCase.isStream) emptyList() + else + listOf( + DynamicTest.dynamicTest("discover-stream") { + discover(testCase, actual.streamCatalog[testCase.id]) + }, + DynamicTest.dynamicTest("records-stream") { + records(testCase, actual.streamMessagesByStream[testCase.id]) + }, + ) + val globalTests: List = + if (!testCase.isGlobal) emptyList() + else + listOf( + DynamicTest.dynamicTest("discover-global") { + discover(testCase, actual.globalCatalog[testCase.id]) + }, + DynamicTest.dynamicTest("records-global") { + records(testCase, actual.globalMessagesByStream[testCase.id]) + }, + ) + return streamTests + globalTests + } + + private fun discover(testCase: T, actualStream: AirbyteStream?) { + Assertions.assertNotNull(actualStream) + log.info { + val streamJson: JsonNode = Jsons.valueToTree(actualStream) + "test case ${testCase.id}: discovered stream $streamJson" + } + val jsonSchema: JsonNode = actualStream!!.jsonSchema?.get("properties")!! + val actualSchema: JsonNode? = jsonSchema[testCase.fieldName] + Assertions.assertNotNull(actualSchema) + val expectedSchema: JsonNode = testCase.expectedAirbyteSchemaType.asJsonSchema() + Assertions.assertEquals(expectedSchema, actualSchema) + } + + private fun records(testCase: T, actualRead: BufferingOutputConsumer?) { + Assertions.assertNotNull(actualRead) + val actualRecords: List = actualRead?.records() ?: emptyList() + val actualRecordData: List = + actualRecords.mapNotNull { actualFieldData(testCase, it) } + val actual: JsonNode = sortedRecordData(actualRecordData) + log.info { "test case ${testCase.id}: emitted records $actual" } + val expected: JsonNode = sortedRecordData(testCase.expectedData) + Assertions.assertEquals(expected, actual) + } + + private fun sortedRecordData(data: List): JsonNode = + Jsons.createArrayNode().apply { addAll(data.sortedBy { it.toString() }) } + + private fun actualFieldData(testCase: T, record: AirbyteRecordMessage): JsonNode? { + val data: ObjectNode = record.data as? ObjectNode ?: return null + val fieldName: String = + data.fieldNames().asSequence().find { it.equals(testCase.fieldName, ignoreCase = true) } + ?: return null + return data[fieldName]?.deepCopy() + } +} + +interface DatatypeTestOperations< + DB : GenericContainer<*>, + CS : ConfigurationSpecification, + C : SourceConfiguration, + F : SourceConfigurationFactory, + T : DatatypeTestCase, +> { + val withGlobal: Boolean + val globalCursorMetaField: MetaField + fun streamConfigSpec(container: DB): CS + fun globalConfigSpec(container: DB): CS + val configFactory: F + val testCases: Map + fun createStreams(config: C) + fun populateStreams(config: C) +} + +interface DatatypeTestCase { + val id: String + val fieldName: String + val isGlobal: Boolean + val isStream: Boolean + val expectedAirbyteSchemaType: AirbyteSchemaType + val expectedData: List +} + +@SuppressFBWarnings(value = ["NP_NONNULL_RETURN_VIOLATION"], justification = "control flow") +class DiscoverAndReadAll< + DB : GenericContainer<*>, + CS : ConfigurationSpecification, + C : SourceConfiguration, + F : SourceConfigurationFactory, + T : DatatypeTestCase, +>( + val ops: DatatypeTestOperations, + dbContainerSupplier: () -> DB, +) : Executable { + private val log = KotlinLogging.logger {} + private val dbContainer: DB by lazy { dbContainerSupplier() } + + // CDC DISCOVER and READ intermediate values and final results. + // Intermediate values are present here as `lateinit var` instead of local variables + // to make debugging of individual test cases easier. + lateinit var globalConfigSpec: CS + lateinit var globalConfig: C + lateinit var globalCatalog: Map + lateinit var globalConfiguredCatalog: ConfiguredAirbyteCatalog + lateinit var globalInitialReadOutput: BufferingOutputConsumer + lateinit var globalCheckpoint: AirbyteStateMessage + lateinit var globalSubsequentReadOutput: BufferingOutputConsumer + lateinit var globalMessages: List + lateinit var globalMessagesByStream: Map + // Same as above but for JDBC. + lateinit var streamConfigSpec: CS + lateinit var streamConfig: C + lateinit var streamCatalog: Map + lateinit var streamConfiguredCatalog: ConfiguredAirbyteCatalog + lateinit var streamReadOutput: BufferingOutputConsumer + lateinit var streamMessages: List + lateinit var streamMessagesByStream: Map + + override fun execute() { + log.info { "Generating stream-sync config." } + streamConfigSpec = ops.streamConfigSpec(dbContainer) + streamConfig = ops.configFactory.make(streamConfigSpec) + log.info { "Creating empty datatype streams in source." } + ops.createStreams(streamConfig) + log.info { "Executing DISCOVER operation with stream-sync config." } + streamCatalog = discover(streamConfigSpec) + streamConfiguredCatalog = + configuredCatalog(streamCatalog.filterKeys { ops.testCases[it]?.isStream == true }) + if (ops.withGlobal) { + log.info { "Generating global-sync config." } + globalConfigSpec = ops.globalConfigSpec(dbContainer) + globalConfig = ops.configFactory.make(globalConfigSpec) + log.info { "Executing DISCOVER operation with global-sync config." } + globalCatalog = discover(globalConfigSpec) + globalConfiguredCatalog = + configuredCatalog(globalCatalog.filterKeys { ops.testCases[it]?.isGlobal == true }) + log.info { "Running initial global-sync READ operation." } + globalInitialReadOutput = + CliRunner.source("read", globalConfigSpec, globalConfiguredCatalog).run() + Assertions.assertNotEquals( + emptyList(), + globalInitialReadOutput.states() + ) + globalCheckpoint = globalInitialReadOutput.states().last() + Assertions.assertEquals( + emptyList(), + globalInitialReadOutput.records() + ) + Assertions.assertEquals(emptyList(), globalInitialReadOutput.logs()) + } + log.info { "Populating datatype streams in source." } + ops.populateStreams(streamConfig) + if (ops.withGlobal) { + log.info { "Running subsequent global-sync READ operation." } + globalSubsequentReadOutput = + CliRunner.source( + "read", + globalConfigSpec, + globalConfiguredCatalog, + listOf(globalCheckpoint) + ) + .run() + Assertions.assertNotEquals( + emptyList(), + globalSubsequentReadOutput.states() + ) + Assertions.assertNotEquals( + emptyList(), + globalSubsequentReadOutput.records() + ) + Assertions.assertEquals( + emptyList(), + globalSubsequentReadOutput.logs() + ) + globalMessages = globalSubsequentReadOutput.messages() + globalMessagesByStream = byStream(globalConfiguredCatalog, globalMessages) + } + log.info { "Running stream-sync READ operation." } + streamReadOutput = CliRunner.source("read", streamConfigSpec, streamConfiguredCatalog).run() + Assertions.assertNotEquals(emptyList(), streamReadOutput.states()) + Assertions.assertNotEquals(emptyList(), streamReadOutput.records()) + Assertions.assertEquals(emptyList(), streamReadOutput.logs()) + streamMessages = streamReadOutput.messages() + streamMessagesByStream = byStream(streamConfiguredCatalog, streamMessages) + log.info { "Done." } + } + + private fun discover(configSpec: CS): Map { + val output: BufferingOutputConsumer = CliRunner.source("discover", configSpec).run() + val streams: Map = + output.catalogs().firstOrNull()?.streams?.filterNotNull()?.associateBy { it.name } + ?: mapOf() + Assertions.assertFalse(streams.isEmpty()) + return streams + } + + private fun configuredCatalog(streams: Map): ConfiguredAirbyteCatalog { + val configuredStreams: List = + streams.values.map(CatalogHelpers::toDefaultConfiguredStream) + for (configuredStream in configuredStreams) { + if ( + configuredStream.stream.supportedSyncModes.contains(SyncMode.INCREMENTAL) && + configuredStream.stream.sourceDefinedCursor == true + ) { + configuredStream.syncMode = SyncMode.INCREMENTAL + configuredStream.cursorField = listOf(ops.globalCursorMetaField.id) + } else { + configuredStream.syncMode = SyncMode.FULL_REFRESH + } + } + return ConfiguredAirbyteCatalog().withStreams(configuredStreams) + } + + private fun byStream( + configuredCatalog: ConfiguredAirbyteCatalog, + messages: List + ): Map { + val result: Map = + configuredCatalog.streams.associate { + it.stream.name to BufferingOutputConsumer(ClockFactory().fixed()) + } + for (msg in messages) { + result[streamName(msg) ?: continue]?.accept(msg) + } + return result + } + + private fun streamName(msg: AirbyteMessage): String? = + when (msg.type) { + AirbyteMessage.Type.RECORD -> msg.record?.stream + AirbyteMessage.Type.STATE -> msg.state?.stream?.streamDescriptor?.name + AirbyteMessage.Type.TRACE -> + when (msg.trace?.type) { + AirbyteTraceMessage.Type.ERROR -> msg.trace?.error?.streamDescriptor?.name + AirbyteTraceMessage.Type.ESTIMATE -> msg.trace?.estimate?.name + AirbyteTraceMessage.Type.STREAM_STATUS -> + msg.trace?.streamStatus?.streamDescriptor?.name + AirbyteTraceMessage.Type.ANALYTICS -> null + null -> null + } + else -> null + } +} diff --git a/airbyte-cdk/bulk/core/load/src/integrationTest/kotlin/io/airbyte/cdk/load/mock_integration_test/MockDestinationWriter.kt b/airbyte-cdk/bulk/core/load/src/integrationTest/kotlin/io/airbyte/cdk/load/mock_integration_test/MockDestinationWriter.kt index 04ea591d91b1..9d31e52cc4b7 100644 --- a/airbyte-cdk/bulk/core/load/src/integrationTest/kotlin/io/airbyte/cdk/load/mock_integration_test/MockDestinationWriter.kt +++ b/airbyte-cdk/bulk/core/load/src/integrationTest/kotlin/io/airbyte/cdk/load/mock_integration_test/MockDestinationWriter.kt @@ -4,6 +4,7 @@ package io.airbyte.cdk.load.mock_integration_test +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings import io.airbyte.cdk.load.command.Append import io.airbyte.cdk.load.command.Dedupe import io.airbyte.cdk.load.command.DestinationStream @@ -16,9 +17,11 @@ import io.airbyte.cdk.load.state.StreamProcessingFailed import io.airbyte.cdk.load.test.util.OutputRecord import io.airbyte.cdk.load.write.DestinationWriter import io.airbyte.cdk.load.write.StreamLoader +import io.github.oshai.kotlinlogging.KotlinLogging import java.time.Instant import java.util.UUID import javax.inject.Singleton +import kotlinx.coroutines.delay @Singleton class MockDestinationWriter : DestinationWriter { @@ -27,7 +30,10 @@ class MockDestinationWriter : DestinationWriter { } } +@SuppressFBWarnings("NP_NONNULL_PARAM_VIOLATION", justification = "Kotlin async continuation") class MockStreamLoader(override val stream: DestinationStream) : StreamLoader { + private val log = KotlinLogging.logger {} + abstract class MockBatch : Batch { override val groupId: String? = null } @@ -38,9 +44,6 @@ class MockStreamLoader(override val stream: DestinationStream) : StreamLoader { data class LocalFileBatch(val file: DestinationFile) : MockBatch() { override val state = Batch.State.LOCAL } - data class PersistedBatch(val records: List) : MockBatch() { - override val state = Batch.State.PERSISTED - } override suspend fun close(streamFailure: StreamProcessingFailed?) { if (streamFailure == null) { @@ -70,7 +73,8 @@ class MockStreamLoader(override val stream: DestinationStream) : StreamLoader { override suspend fun processRecords( records: Iterator, - totalSizeBytes: Long + totalSizeBytes: Long, + endOfStream: Boolean ): Batch { return LocalBatch(records.asSequence().toList()) } @@ -82,6 +86,7 @@ class MockStreamLoader(override val stream: DestinationStream) : StreamLoader { override suspend fun processBatch(batch: Batch): Batch { return when (batch) { is LocalBatch -> { + log.info { "Persisting ${batch.records.size} records for ${stream.descriptor}" } batch.records.forEach { val filename = getFilename(it.stream, staging = true) val record = @@ -99,9 +104,14 @@ class MockStreamLoader(override val stream: DestinationStream) : StreamLoader { // blind insert into the staging area. We'll dedupe on commit. MockDestinationBackend.insert(filename, record) } - PersistedBatch(batch.records) + // HACK: This destination is too fast and causes a race + // condition between consuming and flushing state messages + // that causes the test to fail. This would not be an issue + // in a real sync, because we would always either get more + // data or an end-of-stream that would force a final flush. + delay(100L) + SimpleBatch(state = Batch.State.COMPLETE) } - is PersistedBatch -> SimpleBatch(state = Batch.State.COMPLETE) else -> throw IllegalStateException("Unexpected batch type: $batch") } } diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/command/DestinationConfiguration.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/command/DestinationConfiguration.kt index c5ed5fd8dd54..035d31159a6c 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/command/DestinationConfiguration.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/command/DestinationConfiguration.kt @@ -61,7 +61,8 @@ import java.nio.file.Path * ``` */ abstract class DestinationConfiguration : Configuration { - open val recordBatchSizeBytes: Long = 200L * 1024L * 1024L + open val recordBatchSizeBytes: Long = DEFAULT_RECORD_BATCH_SIZE_BYTES + open val processEmptyFiles: Boolean = false open val tmpFileDirectory: Path = Path.of("airbyte-cdk-load") /** Memory queue settings */ @@ -85,6 +86,12 @@ abstract class DestinationConfiguration : Configuration { open val gracefulCancellationTimeoutMs: Long = 60 * 1000L // 1 minutes open val numProcessRecordsWorkers: Int = 2 + open val numProcessBatchWorkers: Int = 5 + open val batchQueueDepth: Int = 10 + + companion object { + const val DEFAULT_RECORD_BATCH_SIZE_BYTES = 200L * 1024L * 1024L + } /** * Micronaut factory which glues [ConfigurationSpecificationSupplier] and diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/config/SyncBeanFactory.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/config/SyncBeanFactory.kt index 488682f6b24c..380c7143baea 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/config/SyncBeanFactory.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/config/SyncBeanFactory.kt @@ -6,6 +6,7 @@ package io.airbyte.cdk.load.config import io.airbyte.cdk.load.command.DestinationCatalog import io.airbyte.cdk.load.command.DestinationConfiguration +import io.airbyte.cdk.load.message.BatchEnvelope import io.airbyte.cdk.load.message.MultiProducerChannel import io.airbyte.cdk.load.state.ReservationManager import io.airbyte.cdk.load.task.implementor.FileAggregateMessage @@ -67,6 +68,15 @@ class SyncBeanFactory { val capacity = min(maxBatchesMinusUploadOverhead, idealDepth) log.info { "Creating file aggregate queue with limit $capacity" } val channel = Channel(capacity) - return MultiProducerChannel(streamCount.toLong(), channel) + return MultiProducerChannel(streamCount.toLong(), channel, "fileAggregateQueue") + } + + @Singleton + @Named("batchQueue") + fun batchQueue( + config: DestinationConfiguration, + ): MultiProducerChannel> { + val channel = Channel>(config.batchQueueDepth) + return MultiProducerChannel(config.numProcessRecordsWorkers.toLong(), channel, "batchQueue") } } diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/data/TimeStringToInteger.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/data/TimeStringToInteger.kt index a422d58cef57..a09419f7daf0 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/data/TimeStringToInteger.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/data/TimeStringToInteger.kt @@ -19,11 +19,11 @@ import java.time.temporal.ChronoUnit */ class TimeStringToInteger : AirbyteValueIdentityMapper() { companion object { - private val DATE_TIME_FORMATTER: DateTimeFormatter = + val DATE_TIME_FORMATTER: DateTimeFormatter = DateTimeFormatter.ofPattern( "[yyyy][yy]['-']['/']['.'][' '][MMM][MM][M]['-']['/']['.'][' '][dd][d][[' '][G]][[' ']['T']HH:mm[':'ss[.][SSSSSS][SSSSS][SSSS][SSS][' '][z][zzz][Z][O][x][XXX][XX][X][[' '][G]]]]" ) - private val TIME_FORMATTER: DateTimeFormatter = + val TIME_FORMATTER: DateTimeFormatter = DateTimeFormatter.ofPattern( "HH:mm[':'ss[.][SSSSSS][SSSSS][SSSS][SSS][' '][z][zzz][Z][O][x][XXX][XX][X]]" ) diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/file/SpillFileProvider.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/file/SpillFileProvider.kt index aba15226d7ca..0d5607099f01 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/file/SpillFileProvider.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/file/SpillFileProvider.kt @@ -20,6 +20,6 @@ class DefaultSpillFileProvider(val config: DestinationConfiguration) : SpillFile override fun createTempFile(): Path { val directory = config.tmpFileDirectory Files.createDirectories(directory) - return Files.createTempFile(directory, "staged-raw-records", "jsonl") + return Files.createTempFile(directory, "staged-raw-records", ".jsonl") } } diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/message/Batch.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/message/Batch.kt index fd86cfe4acde..f0c284e9433a 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/message/Batch.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/message/Batch.kt @@ -7,6 +7,7 @@ package io.airbyte.cdk.load.message import com.google.common.collect.Range import com.google.common.collect.RangeSet import com.google.common.collect.TreeRangeSet +import io.airbyte.cdk.load.command.DestinationStream /** * Represents an accumulated batch of records in some stage of processing. @@ -53,6 +54,7 @@ interface Batch { val groupId: String? enum class State { + PROCESSED, LOCAL, PERSISTED, COMPLETE @@ -66,6 +68,13 @@ interface Batch { } val state: State + + /** + * If a [Batch] is [State.COMPLETE], there's nothing further to do. If it is part of a group, + * then its state will be updated by the next batch in the group that advances. + */ + val requiresProcessing: Boolean + get() = state != State.COMPLETE && groupId == null } /** Simple batch: use if you need no other metadata for processing. */ @@ -80,14 +89,20 @@ data class SimpleBatch( */ data class BatchEnvelope( val batch: B, - val ranges: RangeSet = TreeRangeSet.create() + val ranges: RangeSet = TreeRangeSet.create(), + val streamDescriptor: DestinationStream.Descriptor ) { constructor( batch: B, - range: Range - ) : this(batch = batch, ranges = TreeRangeSet.create(listOf(range))) + range: Range?, + streamDescriptor: DestinationStream.Descriptor + ) : this( + batch = batch, + ranges = range?.let { TreeRangeSet.create(listOf(range)) } ?: TreeRangeSet.create(), + streamDescriptor = streamDescriptor + ) fun withBatch(newBatch: C): BatchEnvelope { - return BatchEnvelope(newBatch, ranges) + return BatchEnvelope(newBatch, ranges, streamDescriptor) } } diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/message/MultiProducerChannel.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/message/MultiProducerChannel.kt index db46835ab87b..c369e8b47b8c 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/message/MultiProducerChannel.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/message/MultiProducerChannel.kt @@ -15,6 +15,7 @@ import kotlinx.coroutines.channels.Channel class MultiProducerChannel( producerCount: Long, override val channel: Channel, + private val name: String, ) : ChannelMessageQueue() { private val log = KotlinLogging.logger {} private val initializedProducerCount = producerCount @@ -23,7 +24,7 @@ class MultiProducerChannel( override suspend fun close() { val count = producerCount.decrementAndGet() log.info { - "Closing producer (active count=$count, initialized count: $initializedProducerCount)" + "Closing producer $name (active count=$count, initialized count: $initializedProducerCount)" } if (count == 0L) { log.info { "Closing underlying queue" } diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/state/StreamManager.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/state/StreamManager.kt index 491b86fa808a..060c09cdb2f1 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/state/StreamManager.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/state/StreamManager.kt @@ -150,7 +150,6 @@ class DefaultStreamManager( } override fun updateBatchState(batch: BatchEnvelope) { - rangesState[batch.batch.state] ?: throw IllegalArgumentException("Invalid batch state: ${batch.batch.state}") @@ -158,10 +157,6 @@ class DefaultStreamManager( // to the most advanced state. Otherwise, just use the ranges provided. val cachedRangesMaybe = batch.batch.groupId?.let { cachedRangesById[batch.batch.groupId] } - log.info { - "Updating state for stream ${stream.descriptor} with batch $batch using cached ranges $cachedRangesMaybe" - } - val stateToSet = cachedRangesMaybe?.state?.let { maxOf(it, batch.batch.state) } ?: batch.batch.state val rangesToUpdate = TreeRangeSet.create(batch.ranges) @@ -178,24 +173,37 @@ class DefaultStreamManager( rangesToUpdate.asRanges().map { it.span(Range.singleton(it.upperEndpoint() + 1)) } when (stateToSet) { - Batch.State.PERSISTED -> { - rangesState[Batch.State.PERSISTED]?.addAll(expanded) - } Batch.State.COMPLETE -> { // A COMPLETED state implies PERSISTED, so also mark PERSISTED. rangesState[Batch.State.PERSISTED]?.addAll(expanded) rangesState[Batch.State.COMPLETE]?.addAll(expanded) } - else -> Unit - } - - log.info { - "Updated ranges for ${stream.descriptor}[${batch.batch.state}]: $expanded. PERSISTED is also updated on COMPLETE." + else -> { + // For all other states, just mark the state. + rangesState[stateToSet]?.addAll(expanded) + } } batch.batch.groupId?.also { cachedRangesById[it] = CachedRanges(stateToSet, rangesToUpdate) } + + log.info { + val groupLineMaybe = + if (cachedRangesMaybe != null) { + "\n (from group: ${cachedRangesMaybe.state}->${cachedRangesMaybe.ranges})\n" + } else { + "" + } + """ For stream ${stream.descriptor.namespace}.${stream.descriptor.name} + From batch ${batch.batch.state}->${batch.ranges} (groupId ${batch.batch.groupId})$groupLineMaybe + Added $stateToSet->$rangesToUpdate to ${stream.descriptor.namespace}.${stream.descriptor.name} + PROCESSED: ${rangesState[Batch.State.PROCESSED]} + LOCAL: ${rangesState[Batch.State.LOCAL]} + PERSISTED: ${rangesState[Batch.State.PERSISTED]} + COMPLETE: ${rangesState[Batch.State.COMPLETE]} + """.trimIndent() + } } /** True if all records in `[0, index)` have reached the given state. */ diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/DestinationTaskLauncher.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/DestinationTaskLauncher.kt index 3b0da20da844..5e4fa1389bc7 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/DestinationTaskLauncher.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/DestinationTaskLauncher.kt @@ -8,7 +8,6 @@ import edu.umd.cs.findbugs.annotations.SuppressFBWarnings import io.airbyte.cdk.load.command.DestinationCatalog import io.airbyte.cdk.load.command.DestinationConfiguration import io.airbyte.cdk.load.command.DestinationStream -import io.airbyte.cdk.load.message.Batch import io.airbyte.cdk.load.message.BatchEnvelope import io.airbyte.cdk.load.message.CheckpointMessageWrapped import io.airbyte.cdk.load.message.DestinationFile @@ -153,6 +152,10 @@ class DefaultDestinationTaskLauncher( handleException(e) } } + + override fun toString(): String { + return "TaskWrapper($innerTask)" + } } inner class NoopWrapper( @@ -186,6 +189,7 @@ class DefaultDestinationTaskLauncher( val setupTask = setupTaskFactory.make(this) enqueue(setupTask) + // TODO: pluggable file transfer if (!fileTransferEnabled) { // Start a spill-to-disk task for each record stream catalog.streams.forEach { stream -> @@ -199,6 +203,12 @@ class DefaultDestinationTaskLauncher( val task = processRecordsTaskFactory.make(this) enqueue(task) } + + repeat(config.numProcessBatchWorkers) { + log.info { "Launching process batch task $it" } + val task = processBatchTaskFactory.make(this) + enqueue(task) + } } // Start flush task @@ -224,8 +234,8 @@ class DefaultDestinationTaskLauncher( override suspend fun handleSetupComplete() { catalog.streams.forEach { log.info { "Starting open stream task for $it" } - val openStreamTask = openStreamTaskFactory.make(this, it) - enqueue(openStreamTask) + val task = openStreamTaskFactory.make(this, it) + enqueue(task) } } @@ -248,27 +258,19 @@ class DefaultDestinationTaskLauncher( streamManager.updateBatchState(wrapped) if (wrapped.batch.isPersisted()) { - enqueue(flushCheckpointsTaskFactory.make()) - } - - if (wrapped.batch.state != Batch.State.COMPLETE) { log.info { - "Batch not complete: Starting process batch task for ${stream}, batch $wrapped" + "Batch $wrapped is persisted: Starting flush checkpoints task for $stream" } + enqueue(flushCheckpointsTaskFactory.make()) + } - val task = processBatchTaskFactory.make(this, stream, wrapped) - enqueue(task) - } else if (streamManager.isBatchProcessingComplete()) { - log.info { - "Batch $wrapped complete and batch processing complete: Starting close stream task for $stream" - } + if (streamManager.isBatchProcessingComplete()) { + log.info { "Batch processing complete: Starting close stream task for $stream" } val task = closeStreamTaskFactory.make(this, stream) enqueue(task) } else { - log.info { - "Batch $wrapped complete, but batch processing not complete: nothing else to do." - } + log.info { "Batch processing not complete: nothing else to do." } } } } @@ -291,12 +293,9 @@ class DefaultDestinationTaskLauncher( } override suspend fun handleException(e: Exception) { - catalog.streams.forEach { - enqueue( - failStreamTaskFactory.make(this, e, it.descriptor), - withExceptionHandling = false - ) - } + catalog.streams + .map { failStreamTaskFactory.make(this, e, it.descriptor) } + .forEach { enqueue(it, withExceptionHandling = false) } } override suspend fun handleFailStreamComplete( diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/implementor/ProcessBatchTask.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/implementor/ProcessBatchTask.kt index 4f276cd77682..34b4d94a88a1 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/implementor/ProcessBatchTask.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/implementor/ProcessBatchTask.kt @@ -4,49 +4,50 @@ package io.airbyte.cdk.load.task.implementor -import io.airbyte.cdk.load.command.DestinationStream import io.airbyte.cdk.load.message.BatchEnvelope +import io.airbyte.cdk.load.message.MultiProducerChannel import io.airbyte.cdk.load.state.SyncManager import io.airbyte.cdk.load.task.DestinationTaskLauncher -import io.airbyte.cdk.load.task.ImplementorScope +import io.airbyte.cdk.load.task.KillableScope import io.airbyte.cdk.load.write.StreamLoader import io.micronaut.context.annotation.Secondary +import jakarta.inject.Named import jakarta.inject.Singleton -interface ProcessBatchTask : ImplementorScope +interface ProcessBatchTask : KillableScope /** Wraps @[StreamLoader.processBatch] and handles the resulting batch. */ class DefaultProcessBatchTask( private val syncManager: SyncManager, - private val batchEnvelope: BatchEnvelope<*>, - private val streamDescriptor: DestinationStream.Descriptor, + private val batchQueue: MultiProducerChannel>, private val taskLauncher: DestinationTaskLauncher ) : ProcessBatchTask { + override suspend fun execute() { - val streamLoader = syncManager.getOrAwaitStreamLoader(streamDescriptor) - val nextBatch = streamLoader.processBatch(batchEnvelope.batch) - val nextWrapped = batchEnvelope.withBatch(nextBatch) - taskLauncher.handleNewBatch(streamDescriptor, nextWrapped) + batchQueue.consume().collect { batchEnvelope -> + val streamLoader = syncManager.getOrAwaitStreamLoader(batchEnvelope.streamDescriptor) + val nextBatch = streamLoader.processBatch(batchEnvelope.batch) + val nextWrapped = batchEnvelope.withBatch(nextBatch) + taskLauncher.handleNewBatch(nextWrapped.streamDescriptor, nextWrapped) + } } } interface ProcessBatchTaskFactory { fun make( taskLauncher: DestinationTaskLauncher, - stream: DestinationStream.Descriptor, - batchEnvelope: BatchEnvelope<*> ): ProcessBatchTask } @Singleton @Secondary -class DefaultProcessBatchTaskFactory(private val syncManager: SyncManager) : - ProcessBatchTaskFactory { +class DefaultProcessBatchTaskFactory( + private val syncManager: SyncManager, + @Named("batchQueue") private val batchQueue: MultiProducerChannel> +) : ProcessBatchTaskFactory { override fun make( taskLauncher: DestinationTaskLauncher, - stream: DestinationStream.Descriptor, - batchEnvelope: BatchEnvelope<*> ): ProcessBatchTask { - return DefaultProcessBatchTask(syncManager, batchEnvelope, stream, taskLauncher) + return DefaultProcessBatchTask(syncManager, batchQueue, taskLauncher) } } diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/implementor/ProcessFileTask.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/implementor/ProcessFileTask.kt index 73fd34683bc5..36801ce82156 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/implementor/ProcessFileTask.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/implementor/ProcessFileTask.kt @@ -31,7 +31,7 @@ class DefaultProcessFileTask( val batch = streamLoader.processFile(file) - val wrapped = BatchEnvelope(batch, Range.singleton(index)) + val wrapped = BatchEnvelope(batch, Range.singleton(index), streamDescriptor) taskLauncher.handleNewBatch(streamDescriptor, wrapped) } } diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/implementor/ProcessRecordsTask.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/implementor/ProcessRecordsTask.kt index 7e8735e3f27a..23ebe1e0146f 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/implementor/ProcessRecordsTask.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/implementor/ProcessRecordsTask.kt @@ -4,7 +4,10 @@ package io.airbyte.cdk.load.task.implementor +import com.google.common.collect.Range +import io.airbyte.cdk.load.command.DestinationConfiguration import io.airbyte.cdk.load.command.DestinationStream +import io.airbyte.cdk.load.message.Batch import io.airbyte.cdk.load.message.BatchEnvelope import io.airbyte.cdk.load.message.Deserializer import io.airbyte.cdk.load.message.DestinationMessage @@ -13,20 +16,25 @@ import io.airbyte.cdk.load.message.DestinationRecordStreamComplete import io.airbyte.cdk.load.message.DestinationRecordStreamIncomplete import io.airbyte.cdk.load.message.DestinationStreamAffinedMessage import io.airbyte.cdk.load.message.MessageQueue +import io.airbyte.cdk.load.message.MultiProducerChannel import io.airbyte.cdk.load.state.ReservationManager import io.airbyte.cdk.load.state.SyncManager import io.airbyte.cdk.load.task.DestinationTaskLauncher -import io.airbyte.cdk.load.task.ImplementorScope +import io.airbyte.cdk.load.task.KillableScope import io.airbyte.cdk.load.task.internal.SpilledRawMessagesLocalFile import io.airbyte.cdk.load.util.lineSequence +import io.airbyte.cdk.load.util.use +import io.airbyte.cdk.load.write.BatchAccumulator import io.airbyte.cdk.load.write.StreamLoader import io.github.oshai.kotlinlogging.KotlinLogging import io.micronaut.context.annotation.Secondary import jakarta.inject.Named import jakarta.inject.Singleton +import java.io.InputStream +import java.util.concurrent.ConcurrentHashMap import kotlin.io.path.inputStream -interface ProcessRecordsTask : ImplementorScope +interface ProcessRecordsTask : KillableScope /** * Wraps @[StreamLoader.processRecords] and feeds it a lazy iterator over the last batch of spooled @@ -37,54 +45,91 @@ interface ProcessRecordsTask : ImplementorScope * moved to the task launcher. */ class DefaultProcessRecordsTask( + private val config: DestinationConfiguration, private val taskLauncher: DestinationTaskLauncher, private val deserializer: Deserializer, private val syncManager: SyncManager, private val diskManager: ReservationManager, private val inputQueue: MessageQueue, + private val outputQueue: MultiProducerChannel>, ) : ProcessRecordsTask { private val log = KotlinLogging.logger {} + private val accumulators = ConcurrentHashMap() override suspend fun execute() { - inputQueue.consume().collect { (streamDescriptor, file) -> - log.info { "Fetching stream loader for $streamDescriptor" } - val streamLoader = syncManager.getOrAwaitStreamLoader(streamDescriptor) - log.info { "Processing records from $file for stream $streamDescriptor" } - val batch = - try { - file.localFile.inputStream().use { inputStream -> - val records = - inputStream - .lineSequence() - .map { - when (val message = deserializer.deserialize(it)) { - is DestinationStreamAffinedMessage -> message - else -> - throw IllegalStateException( - "Expected record message, got ${message::class}" - ) - } - } - .takeWhile { - it !is DestinationRecordStreamComplete && - it !is DestinationRecordStreamIncomplete + outputQueue.use { + inputQueue.consume().collect { (streamDescriptor, file) -> + log.info { "Fetching stream loader for $streamDescriptor" } + val streamLoader = syncManager.getOrAwaitStreamLoader(streamDescriptor) + val acc = + accumulators.getOrPut(streamDescriptor) { + streamLoader.createBatchAccumulator() + } + log.info { "Processing records from $file for stream $streamDescriptor" } + val batch = + try { + file.localFile.inputStream().use { + val records = + if (file.isEmpty) { + emptyList().listIterator() + } else { + it.toRecordIterator() } - .map { it as DestinationRecord } - .iterator() - val batch = streamLoader.processRecords(records, file.totalSizeBytes) - log.info { "Finished processing $file" } - batch + val batch = + acc.processRecords(records, file.totalSizeBytes, file.endOfStream) + log.info { "Finished processing $file" } + batch + } + } finally { + log.info { "Processing completed, deleting $file" } + file.localFile.toFile().delete() + diskManager.release(file.totalSizeBytes) } - } finally { - log.info { "Processing completed, deleting $file" } - file.localFile.toFile().delete() - diskManager.release(file.totalSizeBytes) + handleBatch(streamDescriptor, batch, file.indexRange) + } + if (config.processEmptyFiles) { + // TODO: Get rid of the need to handle empty files please + log.info { "Forcing finalization of all accumulators." } + accumulators.forEach { (streamDescriptor, acc) -> + val finalBatch = + acc.processRecords(emptyList().listIterator(), 0, true) + handleBatch(streamDescriptor, finalBatch, null) } + } + } + } - val wrapped = BatchEnvelope(batch, file.indexRange) - log.info { "Updating batch $wrapped for $streamDescriptor" } - taskLauncher.handleNewBatch(streamDescriptor, wrapped) + private suspend fun handleBatch( + streamDescriptor: DestinationStream.Descriptor, + batch: Batch, + indexRange: Range? + ) { + val wrapped = BatchEnvelope(batch, indexRange, streamDescriptor) + taskLauncher.handleNewBatch(streamDescriptor, wrapped) + log.info { "Updating batch $wrapped for $streamDescriptor" } + if (batch.requiresProcessing) { + outputQueue.publish(wrapped) + } else { + log.info { "Batch $wrapped requires no further processing." } } } + + private fun InputStream.toRecordIterator(): Iterator { + return lineSequence() + .map { + when (val message = deserializer.deserialize(it)) { + is DestinationStreamAffinedMessage -> message + else -> + throw IllegalStateException( + "Expected record message, got ${message::class}" + ) + } + } + .takeWhile { + it !is DestinationRecordStreamComplete && it !is DestinationRecordStreamIncomplete + } + .map { it as DestinationRecord } + .iterator() + } } interface ProcessRecordsTaskFactory { @@ -101,20 +146,25 @@ data class FileAggregateMessage( @Singleton @Secondary class DefaultProcessRecordsTaskFactory( + private val config: DestinationConfiguration, private val deserializer: Deserializer, private val syncManager: SyncManager, @Named("diskManager") private val diskManager: ReservationManager, - @Named("fileAggregateQueue") private val inputQueue: MessageQueue + @Named("fileAggregateQueue") private val inputQueue: MessageQueue, + @Named("batchQueue") private val outputQueue: MultiProducerChannel>, ) : ProcessRecordsTaskFactory { + override fun make( taskLauncher: DestinationTaskLauncher, ): ProcessRecordsTask { return DefaultProcessRecordsTask( + config, taskLauncher, deserializer, syncManager, diskManager, inputQueue, + outputQueue, ) } } diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/internal/InputConsumerTask.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/internal/InputConsumerTask.kt index 3bb0bb716545..66da0212e0fe 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/internal/InputConsumerTask.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/internal/InputConsumerTask.kt @@ -84,12 +84,14 @@ class DefaultInputConsumerTask( is DestinationRecordStreamComplete -> { reserved.release() // safe because multiple calls conflate val wrapped = StreamEndEvent(index = manager.markEndOfStream(true)) + log.info { "Read COMPLETE for stream $stream" } recordQueue.publish(reserved.replace(wrapped)) recordQueue.close() } is DestinationRecordStreamIncomplete -> { reserved.release() // safe because multiple calls conflate val wrapped = StreamEndEvent(index = manager.markEndOfStream(false)) + log.info { "Read INCOMPLETE for stream $stream" } recordQueue.publish(reserved.replace(wrapped)) recordQueue.close() } @@ -100,7 +102,11 @@ class DefaultInputConsumerTask( is DestinationFileStreamComplete -> { reserved.release() // safe because multiple calls conflate manager.markEndOfStream(true) - val envelope = BatchEnvelope(SimpleBatch(Batch.State.COMPLETE)) + val envelope = + BatchEnvelope( + SimpleBatch(Batch.State.COMPLETE), + streamDescriptor = message.stream + ) destinationTaskLauncher.handleNewBatch(stream, envelope) } is DestinationFileStreamIncomplete -> diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/internal/SpillToDiskTask.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/internal/SpillToDiskTask.kt index ea8965002ea8..bf3a9df7ccc6 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/internal/SpillToDiskTask.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/task/internal/SpillToDiskTask.kt @@ -6,6 +6,7 @@ package io.airbyte.cdk.load.task.internal import com.google.common.collect.Range import com.google.common.collect.TreeRangeSet +import io.airbyte.cdk.load.command.DestinationConfiguration import io.airbyte.cdk.load.command.DestinationStream import io.airbyte.cdk.load.file.SpillFileProvider import io.airbyte.cdk.load.message.Batch @@ -23,7 +24,7 @@ import io.airbyte.cdk.load.state.ReservationManager import io.airbyte.cdk.load.state.Reserved import io.airbyte.cdk.load.state.TimeWindowTrigger import io.airbyte.cdk.load.task.DestinationTaskLauncher -import io.airbyte.cdk.load.task.InternalScope +import io.airbyte.cdk.load.task.KillableScope import io.airbyte.cdk.load.task.implementor.FileAggregateMessage import io.airbyte.cdk.load.util.use import io.airbyte.cdk.load.util.withNextAdjacentValue @@ -39,7 +40,7 @@ import kotlin.io.path.deleteExisting import kotlin.io.path.outputStream import kotlinx.coroutines.flow.fold -interface SpillToDiskTask : InternalScope +interface SpillToDiskTask : KillableScope /** * Reads records from the message queue and writes them to disk. Completes once the upstream @@ -54,7 +55,8 @@ class DefaultSpillToDiskTask( private val flushStrategy: FlushStrategy, val streamDescriptor: DestinationStream.Descriptor, private val diskManager: ReservationManager, - private val taskLauncher: DestinationTaskLauncher + private val taskLauncher: DestinationTaskLauncher, + private val processEmptyFiles: Boolean, ) : SpillToDiskTask { private val log = KotlinLogging.logger {} @@ -124,7 +126,7 @@ class DefaultSpillToDiskTask( event: StreamEndEvent, ): FileAccumulator { val (spillFile, outputStream, timeWindow, range, sizeBytes) = acc - if (sizeBytes == 0L) { + if (sizeBytes == 0L && !processEmptyFiles) { log.info { "Skipping empty file $spillFile" } // Cleanup empty file spillFile.deleteExisting() @@ -134,10 +136,16 @@ class DefaultSpillToDiskTask( BatchEnvelope( SimpleBatch(Batch.State.COMPLETE), TreeRangeSet.create(), + streamDescriptor ) taskLauncher.handleNewBatch(streamDescriptor, empty) } else { - val nextRange = range.withNextAdjacentValue(event.index) + val nextRange = + if (sizeBytes == 0L) { + null + } else { + range.withNextAdjacentValue(event.index) + } val file = SpilledRawMessagesLocalFile( spillFile, @@ -202,6 +210,7 @@ interface SpillToDiskTaskFactory { @Singleton class DefaultSpillToDiskTaskFactory( + private val config: DestinationConfiguration, private val fileAccFactory: FileAccumulatorFactory, private val queueSupplier: MessageQueueSupplier>, @@ -223,6 +232,7 @@ class DefaultSpillToDiskTaskFactory( stream, diskManager, taskLauncher, + config.processEmptyFiles, ) } } @@ -254,6 +264,9 @@ data class FileAccumulator( data class SpilledRawMessagesLocalFile( val localFile: Path, val totalSizeBytes: Long, - val indexRange: Range, + val indexRange: Range?, val endOfStream: Boolean = false -) +) { + val isEmpty + get() = totalSizeBytes == 0L +} diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/write/StreamLoader.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/write/StreamLoader.kt index ce0a21404e3b..3fe495067434 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/write/StreamLoader.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/load/write/StreamLoader.kt @@ -12,32 +12,55 @@ import io.airbyte.cdk.load.message.SimpleBatch import io.airbyte.cdk.load.state.StreamProcessingFailed /** - * Implementor interface. The framework calls open and close once per stream at the beginning and - * end of processing. The framework calls processRecords once per batch of records as batches of the - * configured size become available. (Specified in @ - * [io.airbyte.cdk.command.WriteConfiguration.recordBatchSizeBytes]) + * Implementor interface. * * [start] is called once before any records are processed. * - * [processRecords] is called whenever a batch of records is available for processing, and only - * after [start] has returned successfully. The return value is a client-defined implementation of @ - * [Batch] that the framework may pass to [processBatch] and/or [finalize]. (See @[Batch] for more - * details.) + * [processRecords] is called whenever a batch of records is available for processing (of the size + * configured in [io.airbyte.cdk.load.command.DestinationConfiguration.recordBatchSizeBytes]) and + * only after [start] has returned successfully. The return value is a client-defined implementation + * of @ [Batch] that the framework may pass to [processBatch]. (See @[Batch] for more details.) + * + * [processRecords] may be called concurrently by multiple workers, so it should be thread-safe if + * [io.airbyte.cdk.load.command.DestinationConfiguration.numProcessRecordsWorkers] > 1. For a + * non-thread-safe alternative, use [createBatchAccumulator]. + * + * [createBatchAccumulator] returns an optional new instance of a [BatchAccumulator] to use for + * record processing instead of this stream loader. By default, it returns a reference to the stream + * loader itself. Use this interface if you want each record processing worker to use a separate + * instance (with its own state, etc). * * [processBatch] is called once per incomplete batch returned by either [processRecords] or - * [processBatch] itself. + * [processBatch] itself. It must be thread-safe if + * [io.airbyte.cdk.load.command.DestinationConfiguration.numProcessBatchWorkers] > 1. If + * [processRecords] never returns a non-[Batch.State.COMPLETE] batch, [processBatch] will never be + * called. * - * [finalize] is called once after all records and batches have been processed successfully. + * NOTE: even if [processBatch] returns a not-[Batch.State.COMPLETE] batch, it will be called again. + * TODO: allow the client to specify subsequent processing stages instead. * - * [close] is called once after all records have been processed, regardless of success or failure. - * If there are failed batches, they are passed in as an argument. + * [close] is called once after all records have been processed, regardless of success or failure, + * but only if [start] returned successfully. If any exception was thrown during processing, it is + * passed as an argument to [close]. */ -interface StreamLoader { +interface StreamLoader : BatchAccumulator { val stream: DestinationStream suspend fun start() {} - suspend fun processRecords(records: Iterator, totalSizeBytes: Long): Batch + suspend fun createBatchAccumulator(): BatchAccumulator = this + suspend fun processFile(file: DestinationFile): Batch suspend fun processBatch(batch: Batch): Batch = SimpleBatch(Batch.State.COMPLETE) suspend fun close(streamFailure: StreamProcessingFailed? = null) {} } + +interface BatchAccumulator { + suspend fun processRecords( + records: Iterator, + totalSizeBytes: Long, + endOfStream: Boolean = false + ): Batch = + throw NotImplementedError( + "processRecords must be implemented if createBatchAccumulator is overridden" + ) +} diff --git a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/message/MultiProducerChannelTest.kt b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/message/MultiProducerChannelTest.kt index f301d585fe1a..4156c9a220f9 100644 --- a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/message/MultiProducerChannelTest.kt +++ b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/message/MultiProducerChannelTest.kt @@ -23,7 +23,7 @@ class MultiProducerChannelTest { @BeforeEach fun setup() { - channel = MultiProducerChannel(size, wrapped) + channel = MultiProducerChannel(size, wrapped, "test") } @Test diff --git a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/state/CheckpointManagerTest.kt b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/state/CheckpointManagerTest.kt index 246f96a8120d..3a6aabdaa86f 100644 --- a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/state/CheckpointManagerTest.kt +++ b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/state/CheckpointManagerTest.kt @@ -446,7 +446,12 @@ class CheckpointManagerTest { it.persistedRanges.forEach { (stream, ranges) -> val mockBatch = SimpleBatch(state = Batch.State.PERSISTED) val rangeSet = TreeRangeSet.create(ranges) - val mockBatchEnvelope = BatchEnvelope(batch = mockBatch, ranges = rangeSet) + val mockBatchEnvelope = + BatchEnvelope( + batch = mockBatch, + ranges = rangeSet, + streamDescriptor = stream.descriptor + ) syncManager .getStreamManager(stream.descriptor) .updateBatchState(mockBatchEnvelope) diff --git a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/state/StreamManagerTest.kt b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/state/StreamManagerTest.kt index 1997473e4aed..31d7628400ed 100644 --- a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/state/StreamManagerTest.kt +++ b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/state/StreamManagerTest.kt @@ -266,14 +266,16 @@ class StreamManagerTest { manager.updateBatchState( BatchEnvelope( SimpleBatch(Batch.State.PERSISTED), - Range.closed(event.firstIndex, event.lastIndex) + Range.closed(event.firstIndex, event.lastIndex), + stream.descriptor ) ) is AddComplete -> manager.updateBatchState( BatchEnvelope( SimpleBatch(Batch.State.COMPLETE), - Range.closed(event.firstIndex, event.lastIndex) + Range.closed(event.firstIndex, event.lastIndex), + stream.descriptor ) ) is ExpectPersistedUntil -> @@ -323,10 +325,20 @@ class StreamManagerTest { fun `ranges with the same id conflate to latest state`() { val manager = DefaultStreamManager(stream1) val range1 = Range.closed(0L, 9L) - val batch1 = BatchEnvelope(SimpleBatch(Batch.State.LOCAL, groupId = "foo"), range1) + val batch1 = + BatchEnvelope( + SimpleBatch(Batch.State.LOCAL, groupId = "foo"), + range1, + stream1.descriptor + ) val range2 = Range.closed(10, 19L) - val batch2 = BatchEnvelope(SimpleBatch(Batch.State.PERSISTED, groupId = "foo"), range2) + val batch2 = + BatchEnvelope( + SimpleBatch(Batch.State.PERSISTED, groupId = "foo"), + range2, + stream1.descriptor + ) manager.updateBatchState(batch1) Assertions.assertFalse(manager.areRecordsPersistedUntil(10L), "local < persisted") @@ -338,10 +350,20 @@ class StreamManagerTest { fun `ranges with a different id conflate to latest state`() { val manager = DefaultStreamManager(stream1) val range1 = Range.closed(0L, 9L) - val batch1 = BatchEnvelope(SimpleBatch(Batch.State.LOCAL, groupId = "foo"), range1) + val batch1 = + BatchEnvelope( + SimpleBatch(Batch.State.LOCAL, groupId = "foo"), + range1, + stream1.descriptor + ) val range2 = Range.closed(10, 19L) - val batch2 = BatchEnvelope(SimpleBatch(Batch.State.PERSISTED, groupId = "bar"), range2) + val batch2 = + BatchEnvelope( + SimpleBatch(Batch.State.PERSISTED, groupId = "bar"), + range2, + stream1.descriptor + ) manager.updateBatchState(batch1) Assertions.assertFalse(manager.areRecordsPersistedUntil(10L), "local < persisted") @@ -356,10 +378,20 @@ class StreamManagerTest { fun `state does not conflate between id and no id`() { val manager = DefaultStreamManager(stream1) val range1 = Range.closed(0L, 9L) - val batch1 = BatchEnvelope(SimpleBatch(Batch.State.LOCAL, groupId = null), range1) + val batch1 = + BatchEnvelope( + SimpleBatch(Batch.State.LOCAL, groupId = null), + range1, + stream1.descriptor + ) val range2 = Range.closed(10, 19L) - val batch2 = BatchEnvelope(SimpleBatch(Batch.State.PERSISTED, groupId = "bar"), range2) + val batch2 = + BatchEnvelope( + SimpleBatch(Batch.State.PERSISTED, groupId = "bar"), + range2, + stream1.descriptor + ) manager.updateBatchState(batch1) Assertions.assertFalse(manager.areRecordsPersistedUntil(10L), "local < persisted") @@ -374,10 +406,20 @@ class StreamManagerTest { fun `max of newer and older state is always used`() { val manager = DefaultStreamManager(stream1) val range1 = Range.closed(0L, 9L) - val batch1 = BatchEnvelope(SimpleBatch(Batch.State.PERSISTED, groupId = "foo"), range1) + val batch1 = + BatchEnvelope( + SimpleBatch(Batch.State.PERSISTED, groupId = "foo"), + range1, + stream1.descriptor + ) val range2 = Range.closed(10, 19L) - val batch2 = BatchEnvelope(SimpleBatch(Batch.State.LOCAL, groupId = "foo"), range2) + val batch2 = + BatchEnvelope( + SimpleBatch(Batch.State.LOCAL, groupId = "foo"), + range2, + stream1.descriptor + ) manager.updateBatchState(batch1) Assertions.assertFalse(manager.areRecordsPersistedUntil(20L), "local < persisted") @@ -392,10 +434,20 @@ class StreamManagerTest { fun `max of older and newer state is always used`() { val manager = DefaultStreamManager(stream1) val range1 = Range.closed(0L, 9L) - val batch1 = BatchEnvelope(SimpleBatch(Batch.State.COMPLETE, groupId = "foo"), range1) + val batch1 = + BatchEnvelope( + SimpleBatch(Batch.State.COMPLETE, groupId = "foo"), + range1, + stream1.descriptor + ) val range2 = Range.closed(10, 19L) - val batch2 = BatchEnvelope(SimpleBatch(Batch.State.PERSISTED, groupId = "foo"), range2) + val batch2 = + BatchEnvelope( + SimpleBatch(Batch.State.PERSISTED, groupId = "foo"), + range2, + stream1.descriptor + ) manager.markEndOfStream(true) manager.updateBatchState(batch2) diff --git a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/state/SyncManagerUtils.kt b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/state/SyncManagerUtils.kt index 28a24a2b8362..283cbdbabefb 100644 --- a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/state/SyncManagerUtils.kt +++ b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/state/SyncManagerUtils.kt @@ -19,5 +19,7 @@ import io.airbyte.cdk.load.message.SimpleBatch */ fun SyncManager.markPersisted(stream: DestinationStream, range: Range) { this.getStreamManager(stream.descriptor) - .updateBatchState(BatchEnvelope(SimpleBatch(Batch.State.PERSISTED), range)) + .updateBatchState( + BatchEnvelope(SimpleBatch(Batch.State.PERSISTED), range, stream.descriptor) + ) } diff --git a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/task/DestinationTaskLauncherTest.kt b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/task/DestinationTaskLauncherTest.kt index ebdead74a42f..cb27a8dcff92 100644 --- a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/task/DestinationTaskLauncherTest.kt +++ b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/task/DestinationTaskLauncherTest.kt @@ -24,8 +24,6 @@ import io.airbyte.cdk.load.task.implementor.CloseStreamTask import io.airbyte.cdk.load.task.implementor.CloseStreamTaskFactory import io.airbyte.cdk.load.task.implementor.DefaultCloseStreamTaskFactory import io.airbyte.cdk.load.task.implementor.DefaultOpenStreamTaskFactory -import io.airbyte.cdk.load.task.implementor.DefaultProcessBatchTaskFactory -import io.airbyte.cdk.load.task.implementor.DefaultProcessRecordsTaskFactory import io.airbyte.cdk.load.task.implementor.DefaultSetupTaskFactory import io.airbyte.cdk.load.task.implementor.DefaultTeardownTaskFactory import io.airbyte.cdk.load.task.implementor.FailStreamTask @@ -34,9 +32,7 @@ import io.airbyte.cdk.load.task.implementor.FailSyncTask import io.airbyte.cdk.load.task.implementor.FailSyncTaskFactory import io.airbyte.cdk.load.task.implementor.OpenStreamTask import io.airbyte.cdk.load.task.implementor.OpenStreamTaskFactory -import io.airbyte.cdk.load.task.implementor.ProcessBatchTask import io.airbyte.cdk.load.task.implementor.ProcessBatchTaskFactory -import io.airbyte.cdk.load.task.implementor.ProcessRecordsTask import io.airbyte.cdk.load.task.implementor.ProcessRecordsTaskFactory import io.airbyte.cdk.load.task.implementor.SetupTask import io.airbyte.cdk.load.task.implementor.SetupTaskFactory @@ -91,7 +87,7 @@ class DestinationTaskLauncherTest { @Inject lateinit var mockSpillToDiskTaskFactory: MockSpillToDiskTaskFactory @Inject lateinit var mockOpenStreamTaskFactory: MockOpenStreamTaskFactory @Inject lateinit var processRecordsTaskFactory: ProcessRecordsTaskFactory - @Inject lateinit var processBatchTaskFactory: MockProcessBatchTaskFactory + @Inject lateinit var processBatchTaskFactory: ProcessBatchTaskFactory @Inject lateinit var closeStreamTaskFactory: MockCloseStreamTaskFactory @Inject lateinit var teardownTaskFactory: MockTeardownTaskFactory @Inject lateinit var flushCheckpointsTaskFactory: MockFlushCheckpointsTaskFactory @@ -115,6 +111,11 @@ class DestinationTaskLauncherTest { @Requires(env = ["DestinationTaskLauncherTest"]) fun processRecordsTaskFactory(): ProcessRecordsTaskFactory = mockk(relaxed = true) + @Singleton + @Primary + @Requires(env = ["DestinationTaskLauncherTest"]) + fun processBatchTaskFactory(): ProcessBatchTaskFactory = mockk(relaxed = true) + @Singleton @Primary @Requires(env = ["DestinationTaskLauncherTest"]) @@ -233,42 +234,6 @@ class DestinationTaskLauncherTest { } } - @Singleton - @Replaces(DefaultProcessRecordsTaskFactory::class) - @Requires(env = ["DestinationTaskLauncherTest"]) - class MockProcessRecordsTaskFactory : ProcessRecordsTaskFactory { - val hasRun: Channel = Channel(Channel.UNLIMITED) - - override fun make( - taskLauncher: DestinationTaskLauncher, - ): ProcessRecordsTask { - return object : ProcessRecordsTask { - override suspend fun execute() { - hasRun.send(Unit) - } - } - } - } - - @Singleton - @Replaces(DefaultProcessBatchTaskFactory::class) - @Requires(env = ["DestinationTaskLauncherTest"]) - class MockProcessBatchTaskFactory : ProcessBatchTaskFactory { - val hasRun: Channel> = Channel(Channel.UNLIMITED) - - override fun make( - taskLauncher: DestinationTaskLauncher, - stream: DestinationStream.Descriptor, - batchEnvelope: BatchEnvelope<*> - ): ProcessBatchTask { - return object : ProcessBatchTask { - override suspend fun execute() { - hasRun.send(batchEnvelope) - } - } - } - } - @Singleton @Replaces(DefaultCloseStreamTaskFactory::class) @Requires(env = ["DestinationTaskLauncherTest"]) @@ -394,6 +359,8 @@ class DestinationTaskLauncherTest { processRecordsTaskFactory.make(any()) } + coVerify(exactly = config.numProcessBatchWorkers) { processBatchTaskFactory.make(any()) } + // Verify that we kicked off the timed force flush w/o a specific delay Assertions.assertTrue(mockForceFlushTask.didRun.receive()) @@ -415,26 +382,25 @@ class DestinationTaskLauncherTest { @Test fun testHandleNewBatch() = runTest { val range = TreeRangeSet.create(listOf(Range.closed(0L, 100L))) - val streamManager = - syncManager.getStreamManager(MockDestinationCatalogFactory.stream1.descriptor) + val stream1 = MockDestinationCatalogFactory.stream1 + val streamManager = syncManager.getStreamManager(stream1.descriptor) repeat(100) { streamManager.countRecordIn() } streamManager.markEndOfStream(true) // Verify incomplete batch triggers process batch - val incompleteBatch = BatchEnvelope(MockBatch(Batch.State.LOCAL), range) + val incompleteBatch = BatchEnvelope(MockBatch(Batch.State.LOCAL), range, stream1.descriptor) taskLauncher.handleNewBatch( MockDestinationCatalogFactory.stream1.descriptor, incompleteBatch ) Assertions.assertFalse(streamManager.areRecordsPersistedUntil(100L)) - val batchReceived = processBatchTaskFactory.hasRun.receive() - Assertions.assertEquals(incompleteBatch, batchReceived) delay(500) Assertions.assertTrue(flushCheckpointsTaskFactory.hasRun.tryReceive().isFailure) - val persistedBatch = BatchEnvelope(MockBatch(Batch.State.PERSISTED), range) + val persistedBatch = + BatchEnvelope(MockBatch(Batch.State.PERSISTED), range, stream1.descriptor) taskLauncher.handleNewBatch( MockDestinationCatalogFactory.stream1.descriptor, persistedBatch @@ -444,7 +410,8 @@ class DestinationTaskLauncherTest { // Verify complete batch w/o batch processing complete does nothing val halfRange = TreeRangeSet.create(listOf(Range.closed(0L, 50L))) - val completeBatchHalf = BatchEnvelope(MockBatch(Batch.State.COMPLETE), halfRange) + val completeBatchHalf = + BatchEnvelope(MockBatch(Batch.State.COMPLETE), halfRange, stream1.descriptor) taskLauncher.handleNewBatch( MockDestinationCatalogFactory.stream1.descriptor, completeBatchHalf @@ -454,7 +421,8 @@ class DestinationTaskLauncherTest { // Verify complete batch w/ batch processing complete triggers close stream val secondHalf = TreeRangeSet.create(listOf(Range.closed(51L, 100L))) - val completingBatch = BatchEnvelope(MockBatch(Batch.State.COMPLETE), secondHalf) + val completingBatch = + BatchEnvelope(MockBatch(Batch.State.COMPLETE), secondHalf, stream1.descriptor) taskLauncher.handleNewBatch( MockDestinationCatalogFactory.stream1.descriptor, completingBatch @@ -466,11 +434,11 @@ class DestinationTaskLauncherTest { @Test fun handleEmptyBatch() = runTest { val range = TreeRangeSet.create(listOf(Range.closed(0L, 0L))) - val streamManager = - syncManager.getStreamManager(MockDestinationCatalogFactory.stream1.descriptor) + val stream1 = MockDestinationCatalogFactory.stream1 + val streamManager = syncManager.getStreamManager(stream1.descriptor) streamManager.markEndOfStream(true) - val emptyBatch = BatchEnvelope(MockBatch(Batch.State.COMPLETE), range) + val emptyBatch = BatchEnvelope(MockBatch(Batch.State.COMPLETE), range, stream1.descriptor) taskLauncher.handleNewBatch(MockDestinationCatalogFactory.stream1.descriptor, emptyBatch) closeStreamTaskFactory.hasRun.receive() } diff --git a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/task/ProcessBatchTaskTest.kt b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/task/ProcessBatchTaskTest.kt new file mode 100644 index 000000000000..e9c24316ae8a --- /dev/null +++ b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/task/ProcessBatchTaskTest.kt @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.load.task + +import io.airbyte.cdk.load.command.DestinationStream +import io.airbyte.cdk.load.message.Batch +import io.airbyte.cdk.load.message.BatchEnvelope +import io.airbyte.cdk.load.message.MultiProducerChannel +import io.airbyte.cdk.load.message.SimpleBatch +import io.airbyte.cdk.load.state.SyncManager +import io.airbyte.cdk.load.task.implementor.DefaultProcessBatchTask +import io.airbyte.cdk.load.write.StreamLoader +import io.mockk.coEvery +import io.mockk.coVerify +import io.mockk.mockk +import kotlinx.coroutines.flow.asFlow +import kotlinx.coroutines.test.runTest +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test + +class ProcessBatchTaskTest { + private lateinit var syncManager: SyncManager + private lateinit var streamLoaders: Map + private lateinit var batchQueue: MultiProducerChannel> + private lateinit var taskLauncher: DestinationTaskLauncher + + @BeforeEach + fun setup() { + val streams = + (0 until 3).map { DestinationStream.Descriptor(namespace = "test", name = "stream$it") } + syncManager = mockk(relaxed = true) + streamLoaders = streams.associateWith { mockk(relaxed = true) } + streamLoaders.values.forEach { + coEvery { it.processBatch(any()) } returns SimpleBatch(Batch.State.COMPLETE) + } + coEvery { syncManager.getOrAwaitStreamLoader(any()) } answers + { + streamLoaders[firstArg()]!! + } + batchQueue = mockk(relaxed = true) + taskLauncher = mockk(relaxed = true) + } + + @Test + fun `test each enqueued batch passes through the associated processBatch`() = runTest { + val task = DefaultProcessBatchTask(syncManager, batchQueue, taskLauncher) + coEvery { batchQueue.consume() } returns + streamLoaders.keys + .map { + BatchEnvelope(streamDescriptor = it, batch = SimpleBatch(Batch.State.LOCAL)) + } + .asFlow() + + task.execute() + + streamLoaders.forEach { (descriptor, loader) -> + coVerify { loader.processBatch(match { it.state == Batch.State.LOCAL }) } + coVerify { + taskLauncher.handleNewBatch( + descriptor, + match { + it.streamDescriptor == descriptor && it.batch.state == Batch.State.COMPLETE + } + ) + } + } + } +} diff --git a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/task/implementor/ProcessFileTaskTest.kt b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/task/implementor/ProcessFileTaskTest.kt index cb04200282c4..0c550b927c1e 100644 --- a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/task/implementor/ProcessFileTaskTest.kt +++ b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/task/implementor/ProcessFileTaskTest.kt @@ -40,7 +40,10 @@ class ProcessFileTaskTest { defaultProcessFileTask.execute() coVerify { - taskLauncher.handleNewBatch(stream, BatchEnvelope(batch, Range.singleton(index))) + taskLauncher.handleNewBatch( + stream, + BatchEnvelope(batch, Range.singleton(index), stream) + ) } } } diff --git a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/task/implementor/ProcessRecordsTaskTest.kt b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/task/implementor/ProcessRecordsTaskTest.kt index a16359da0fda..b3299a0c4eab 100644 --- a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/task/implementor/ProcessRecordsTaskTest.kt +++ b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/task/implementor/ProcessRecordsTaskTest.kt @@ -5,159 +5,176 @@ package io.airbyte.cdk.load.task.implementor import com.google.common.collect.Range -import io.airbyte.cdk.load.command.DestinationStream +import io.airbyte.cdk.load.command.DestinationConfiguration import io.airbyte.cdk.load.command.MockDestinationCatalogFactory import io.airbyte.cdk.load.data.IntegerValue import io.airbyte.cdk.load.message.Batch +import io.airbyte.cdk.load.message.BatchEnvelope import io.airbyte.cdk.load.message.Deserializer -import io.airbyte.cdk.load.message.DestinationFile import io.airbyte.cdk.load.message.DestinationMessage import io.airbyte.cdk.load.message.DestinationRecord import io.airbyte.cdk.load.message.MessageQueue +import io.airbyte.cdk.load.message.MultiProducerChannel import io.airbyte.cdk.load.state.ReservationManager import io.airbyte.cdk.load.state.SyncManager -import io.airbyte.cdk.load.task.MockTaskLauncher +import io.airbyte.cdk.load.task.DefaultDestinationTaskLauncher import io.airbyte.cdk.load.task.internal.SpilledRawMessagesLocalFile import io.airbyte.cdk.load.util.write +import io.airbyte.cdk.load.write.BatchAccumulator import io.airbyte.cdk.load.write.StreamLoader -import io.micronaut.test.extensions.junit5.annotation.MicronautTest import io.mockk.coEvery import io.mockk.coVerify +import io.mockk.coVerifySequence import io.mockk.mockk -import jakarta.inject.Inject import java.nio.file.Files import kotlin.io.path.outputStream -import kotlinx.coroutines.flow.flowOf +import kotlinx.coroutines.flow.asFlow import kotlinx.coroutines.test.runTest import org.junit.jupiter.api.Assertions import org.junit.jupiter.api.BeforeEach import org.junit.jupiter.api.Test -@MicronautTest( - environments = - [ - "MockDestinationCatalog", - ] -) class ProcessRecordsTaskTest { + private lateinit var config: DestinationConfiguration private lateinit var diskManager: ReservationManager - private lateinit var fileAggregateQueue: MessageQueue + private lateinit var deserializer: Deserializer + private lateinit var streamLoader: StreamLoader + private lateinit var batchAccumulator: BatchAccumulator + private lateinit var inputQueue: MessageQueue private lateinit var processRecordsTaskFactory: DefaultProcessRecordsTaskFactory - private lateinit var launcher: MockTaskLauncher - @Inject lateinit var syncManager: SyncManager + private lateinit var launcher: DefaultDestinationTaskLauncher + private lateinit var outputQueue: MultiProducerChannel> + private lateinit var syncManager: SyncManager @BeforeEach fun setup() { + config = mockk(relaxed = true) diskManager = mockk(relaxed = true) - fileAggregateQueue = mockk(relaxed = true) - launcher = MockTaskLauncher() + inputQueue = mockk(relaxed = true) + outputQueue = mockk(relaxed = true) + syncManager = mockk(relaxed = true) + streamLoader = mockk(relaxed = true) + batchAccumulator = mockk(relaxed = true) + coEvery { config.processEmptyFiles } returns false + coEvery { syncManager.getOrAwaitStreamLoader(any()) } returns streamLoader + coEvery { streamLoader.createBatchAccumulator() } returns batchAccumulator + launcher = mockk(relaxed = true) + deserializer = mockk(relaxed = true) + coEvery { deserializer.deserialize(any()) } answers + { + DestinationRecord( + stream = MockDestinationCatalogFactory.stream1.descriptor, + data = IntegerValue(firstArg().toLong()), + emittedAtMs = 0L, + meta = null, + serialized = firstArg(), + ) + } processRecordsTaskFactory = DefaultProcessRecordsTaskFactory( - MockDeserializer(), + config, + deserializer, syncManager, diskManager, - fileAggregateQueue, + inputQueue, + outputQueue, ) } class MockBatch( + override val groupId: String?, override val state: Batch.State, - val reportedByteSize: Long, - val recordCount: Long, - val pmChecksum: Long, - override val groupId: String? = null - ) : Batch - - class MockStreamLoader : StreamLoader { - override val stream: DestinationStream = MockDestinationCatalogFactory.stream1 - - data class SumAndCount(val sum: Long = 0, val count: Long = 0) - - override suspend fun processRecords( - records: Iterator, - totalSizeBytes: Long - ): Batch { - // Do a simple sum of the record values and count - // To demonstrate that the primed data was actually processed - val (sum, count) = - records.asSequence().fold(SumAndCount()) { acc, record -> - SumAndCount( - acc.sum + (record.data as IntegerValue).value.toLong(), - acc.count + 1 - ) - } - return MockBatch( - state = Batch.State.COMPLETE, - reportedByteSize = totalSizeBytes, - recordCount = count, - pmChecksum = sum - ) - } - - override suspend fun processFile(file: DestinationFile): Batch { - return MockBatch( - state = Batch.State.COMPLETE, - reportedByteSize = file.fileMessage.bytes ?: 0, - recordCount = 1, - pmChecksum = 1 - ) - } + recordIterator: Iterator + ) : Batch { + val records = recordIterator.asSequence().toList() } - class MockDeserializer : Deserializer { - override fun deserialize(serialized: String): DestinationMessage { - return DestinationRecord( - stream = MockDestinationCatalogFactory.stream1.descriptor, - data = IntegerValue(serialized.toLong()), - emittedAtMs = 0L, - meta = null, - serialized = serialized, - ) + private val recordCount = 1024 + private val serializedRecords = (0 until 1024).map { "$it" } + private fun makeFile(index: Int): SpilledRawMessagesLocalFile { + val mockFile = Files.createTempFile("test_$index", ".jsonl") + mockFile.outputStream().use { outputStream -> + serializedRecords.map { "$it\n" }.forEach { outputStream.write(it) } } + return SpilledRawMessagesLocalFile( + localFile = mockFile, + totalSizeBytes = 999L, + indexRange = Range.closed(0, recordCount.toLong()) + ) } @Test - fun testProcessRecordsTask() = runTest { - val stream1 = MockDestinationCatalogFactory.stream1 + fun `test standard workflow`() = runTest { val byteSize = 999L val recordCount = 1024L + val descriptor = MockDestinationCatalogFactory.stream1.descriptor - val mockFile = Files.createTempFile("test", ".jsonl") - val file = - SpilledRawMessagesLocalFile( - localFile = mockFile, - totalSizeBytes = byteSize, - indexRange = Range.closed(0, recordCount) - ) - mockFile.outputStream().use { outputStream -> - repeat(recordCount.toInt()) { outputStream.write("$it\n") } - } - coEvery { fileAggregateQueue.consume() } returns - flowOf( - FileAggregateMessage( - MockDestinationCatalogFactory.stream1.descriptor, - file, + // Put three files on the flow. + val files = (0 until 3).map { makeFile(it) } + coEvery { inputQueue.consume() } returns + files.map { FileAggregateMessage(descriptor, it) }.asFlow() + + // Process records returns batches in 3 states. + coEvery { batchAccumulator.processRecords(any(), any()) } answers + { + MockBatch( + groupId = null, + state = Batch.State.PERSISTED, + recordIterator = firstArg() ) - ) + } andThenAnswer + { + MockBatch(groupId = null, state = Batch.State.COMPLETE, recordIterator = firstArg()) + } andThenAnswer + { + MockBatch( + groupId = "foo", + state = Batch.State.PERSISTED, + recordIterator = firstArg() + ) + } + // Run the task. val task = processRecordsTaskFactory.make( taskLauncher = launcher, ) - syncManager.registerStartedStreamLoader( - stream1.descriptor, - Result.success(MockStreamLoader()) - ) task.execute() - Assertions.assertEquals(1, launcher.batchEnvelopes.size) - val batch = launcher.batchEnvelopes[0].batch as MockBatch - Assertions.assertEquals(Batch.State.COMPLETE, batch.state) - Assertions.assertEquals(999, batch.reportedByteSize) - Assertions.assertEquals(recordCount, batch.recordCount) - Assertions.assertEquals((0 until recordCount).sum(), batch.pmChecksum) - Assertions.assertFalse(Files.exists(mockFile), "ensure task deleted file") - coVerify { diskManager.release(byteSize) } + fun batchMatcher(groupId: String?, state: Batch.State): (BatchEnvelope<*>) -> Boolean = { + it.ranges.encloses(Range.closed(0, recordCount)) && + it.streamDescriptor == descriptor && + it.batch.groupId == groupId && + it.batch.state == state && + it.batch is MockBatch && + (it.batch as MockBatch).records.map { record -> record.serialized }.toSet() == + serializedRecords.toSet() + } + + // Verify the batch was *handled* 3 times but *published* ONLY when it is not complete AND + // group id is null. + coVerify(exactly = 1) { + outputQueue.publish(match { batchMatcher(null, Batch.State.PERSISTED)(it) }) + } + coVerifySequence { + launcher.handleNewBatch( + MockDestinationCatalogFactory.stream1.descriptor, + match { batchMatcher(null, Batch.State.PERSISTED)(it) } + ) + launcher.handleNewBatch( + MockDestinationCatalogFactory.stream1.descriptor, + match { batchMatcher(null, Batch.State.COMPLETE)(it) } + ) + launcher.handleNewBatch( + MockDestinationCatalogFactory.stream1.descriptor, + match { batchMatcher("foo", Batch.State.PERSISTED)(it) } + ) + } + + files.forEach { + Assertions.assertFalse(Files.exists(it.localFile), "ensure task deleted file $it") + } + coVerify(exactly = 3) { diskManager.release(byteSize) } } } diff --git a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/task/internal/SpillToDiskTaskTest.kt b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/task/internal/SpillToDiskTaskTest.kt index 8dec6cfc48c4..5220ae432507 100644 --- a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/task/internal/SpillToDiskTaskTest.kt +++ b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/load/task/internal/SpillToDiskTaskTest.kt @@ -83,6 +83,7 @@ class SpillToDiskTaskTest { MockDestinationCatalogFactory.stream1.descriptor, diskManager, taskLauncher, + false, ) } @@ -183,6 +184,7 @@ class SpillToDiskTaskTest { diskManager = ReservationManager(Fixtures.INITIAL_DISK_CAPACITY) spillToDiskTaskFactory = DefaultSpillToDiskTaskFactory( + MockDestinationConfiguration(), fileAccumulatorFactory, queueSupplier, MockFlushStrategy(), diff --git a/airbyte-cdk/bulk/core/load/src/testFixtures/kotlin/io/airbyte/cdk/load/test/util/destination_process/DockerizedDestination.kt b/airbyte-cdk/bulk/core/load/src/testFixtures/kotlin/io/airbyte/cdk/load/test/util/destination_process/DockerizedDestination.kt index 1d8744afd915..1da2fc8d7892 100644 --- a/airbyte-cdk/bulk/core/load/src/testFixtures/kotlin/io/airbyte/cdk/load/test/util/destination_process/DockerizedDestination.kt +++ b/airbyte-cdk/bulk/core/load/src/testFixtures/kotlin/io/airbyte/cdk/load/test/util/destination_process/DockerizedDestination.kt @@ -116,7 +116,7 @@ class DockerizedDestination( "-v", "$fileTransferMountSource:/tmp", "-e", - "AIRBYTE_DESTINATION_RECORD_BATCH_SIZE=1", + "AIRBYTE_DESTINATION_RECORD_BATCH_SIZE_OVERRIDE=1", "-e", "USE_FILE_TRANSFER=$useFileTransfer", ) + diff --git a/airbyte-cdk/bulk/core/load/src/testFixtures/resources/application.yaml b/airbyte-cdk/bulk/core/load/src/testFixtures/resources/application.yaml index b404727063d3..eaf8e065262f 100644 --- a/airbyte-cdk/bulk/core/load/src/testFixtures/resources/application.yaml +++ b/airbyte-cdk/bulk/core/load/src/testFixtures/resources/application.yaml @@ -9,4 +9,4 @@ airbyte: rate-ms: 900000 # 15 minutes window-ms: 900000 # 15 minutes destination: - record-batch-size: 1 # 1 byte for testing; 1 record => 1 upload + record-batch-size-override: 1 # 1 byte for testing; 1 record => 1 upload diff --git a/airbyte-cdk/bulk/toolkits/load-iceberg-parquet/src/main/kotlin/io/airbyte/cdk/load/data/iceberg/parquet/AirbyteValueToIcebergRecord.kt b/airbyte-cdk/bulk/toolkits/load-iceberg-parquet/src/main/kotlin/io/airbyte/cdk/load/data/iceberg/parquet/AirbyteValueToIcebergRecord.kt index 8227c8eb59ba..44cb3f3d4329 100644 --- a/airbyte-cdk/bulk/toolkits/load-iceberg-parquet/src/main/kotlin/io/airbyte/cdk/load/data/iceberg/parquet/AirbyteValueToIcebergRecord.kt +++ b/airbyte-cdk/bulk/toolkits/load-iceberg-parquet/src/main/kotlin/io/airbyte/cdk/load/data/iceberg/parquet/AirbyteValueToIcebergRecord.kt @@ -58,16 +58,27 @@ class AirbyteValueToIcebergRecord { return array } is BooleanValue -> return airbyteValue.value - is DateValue -> - throw IllegalArgumentException("String-based date types are not supported") + is DateValue -> return TimeStringUtility.toLocalDate(airbyteValue.value) is IntegerValue -> return airbyteValue.value.toLong() is NullValue -> return null is NumberValue -> return airbyteValue.value.toDouble() is StringValue -> return airbyteValue.value is TimeValue -> - throw IllegalArgumentException("String-based time types are not supported") + return when (type.typeId()) { + Type.TypeID.TIME -> TimeStringUtility.toOffset(airbyteValue.value) + else -> + throw IllegalArgumentException( + "${type.typeId()} type is not allowed for TimeValue" + ) + } is TimestampValue -> - throw IllegalArgumentException("String-based timestamp types are not supported") + return when (type.typeId()) { + Type.TypeID.TIMESTAMP -> TimeStringUtility.toOffsetDateTime(airbyteValue.value) + else -> + throw IllegalArgumentException( + "${type.typeId()} type is not allowed for TimestampValue" + ) + } is UnknownValue -> throw IllegalArgumentException("Unknown type is not supported") } } diff --git a/airbyte-cdk/bulk/toolkits/load-iceberg-parquet/src/main/kotlin/io/airbyte/cdk/load/data/iceberg/parquet/IcebergParquetPipelineFactory.kt b/airbyte-cdk/bulk/toolkits/load-iceberg-parquet/src/main/kotlin/io/airbyte/cdk/load/data/iceberg/parquet/IcebergParquetPipelineFactory.kt new file mode 100644 index 000000000000..bbd42ce3c73a --- /dev/null +++ b/airbyte-cdk/bulk/toolkits/load-iceberg-parquet/src/main/kotlin/io/airbyte/cdk/load/data/iceberg/parquet/IcebergParquetPipelineFactory.kt @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.load.data.iceberg.parquet + +import io.airbyte.cdk.load.command.DestinationStream +import io.airbyte.cdk.load.data.AirbyteSchemaNoopMapper +import io.airbyte.cdk.load.data.AirbyteValueNoopMapper +import io.airbyte.cdk.load.data.MapperPipeline +import io.airbyte.cdk.load.data.MapperPipelineFactory +import io.airbyte.cdk.load.data.MergeUnions +import io.airbyte.cdk.load.data.NullOutOfRangeIntegers +import io.airbyte.cdk.load.data.SchemalessValuesToJsonString +import io.airbyte.cdk.load.data.UnionTypeToDisjointRecord +import io.airbyte.cdk.load.data.UnionValueToDisjointRecord + +class IcebergParquetPipelineFactory : MapperPipelineFactory { + override fun create(stream: DestinationStream): MapperPipeline = + MapperPipeline( + stream.schema, + listOf( + AirbyteSchemaNoopMapper() to SchemalessValuesToJsonString(), + AirbyteSchemaNoopMapper() to NullOutOfRangeIntegers(), + MergeUnions() to AirbyteValueNoopMapper(), + UnionTypeToDisjointRecord() to UnionValueToDisjointRecord(), + ), + ) +} diff --git a/airbyte-cdk/bulk/toolkits/load-iceberg-parquet/src/main/kotlin/io/airbyte/cdk/load/data/iceberg/parquet/TimeStringUtility.kt b/airbyte-cdk/bulk/toolkits/load-iceberg-parquet/src/main/kotlin/io/airbyte/cdk/load/data/iceberg/parquet/TimeStringUtility.kt new file mode 100644 index 000000000000..2c41fa720a62 --- /dev/null +++ b/airbyte-cdk/bulk/toolkits/load-iceberg-parquet/src/main/kotlin/io/airbyte/cdk/load/data/iceberg/parquet/TimeStringUtility.kt @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.load.data.iceberg.parquet + +import io.airbyte.cdk.load.data.TimeStringToInteger +import java.time.LocalDate +import java.time.LocalDateTime +import java.time.LocalTime +import java.time.OffsetDateTime +import java.time.OffsetTime +import java.time.ZoneOffset +import java.time.ZonedDateTime + +object TimeStringUtility { + + fun toLocalDate(dateString: String): LocalDate { + return LocalDate.parse(dateString, TimeStringToInteger.DATE_TIME_FORMATTER) + } + + fun toOffset(timeString: String): LocalTime { + return try { + toMicrosOfDayWithTimezone(timeString) + } catch (e: Exception) { + toMicrosOfDayWithoutTimezone(timeString) + } + } + + private fun toMicrosOfDayWithTimezone(timeString: String): LocalTime { + return OffsetTime.parse(timeString, TimeStringToInteger.TIME_FORMATTER).toLocalTime() + } + + private fun toMicrosOfDayWithoutTimezone(timeString: String): LocalTime { + return LocalTime.parse(timeString, TimeStringToInteger.TIME_FORMATTER) + } + + fun toOffsetDateTime(timestampString: String): OffsetDateTime { + return try { + toOffsetDateTimeWithTimezone(timestampString) + } catch (e: Exception) { + toOffsetDateTimeWithoutTimezone(timestampString) + } + } + + private fun toOffsetDateTimeWithTimezone(timestampString: String): OffsetDateTime { + return ZonedDateTime.parse(timestampString, TimeStringToInteger.DATE_TIME_FORMATTER) + .toOffsetDateTime() + } + + private fun toOffsetDateTimeWithoutTimezone(timestampString: String): OffsetDateTime { + return LocalDateTime.parse(timestampString, TimeStringToInteger.DATE_TIME_FORMATTER) + .atOffset(ZoneOffset.UTC) + } +} diff --git a/airbyte-cdk/bulk/toolkits/load-iceberg-parquet/src/testFixtures/kotlin/io/airbyte/cdk/load/data/icerberg/parquet/TimeStringUtilityTest.kt b/airbyte-cdk/bulk/toolkits/load-iceberg-parquet/src/testFixtures/kotlin/io/airbyte/cdk/load/data/icerberg/parquet/TimeStringUtilityTest.kt new file mode 100644 index 000000000000..72ae318a1697 --- /dev/null +++ b/airbyte-cdk/bulk/toolkits/load-iceberg-parquet/src/testFixtures/kotlin/io/airbyte/cdk/load/data/icerberg/parquet/TimeStringUtilityTest.kt @@ -0,0 +1,72 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.load.data.icerberg.parquet + +import io.airbyte.cdk.load.data.iceberg.parquet.TimeStringUtility +import java.time.LocalDate +import java.time.LocalDateTime +import java.time.LocalTime +import java.time.OffsetDateTime +import java.time.ZoneOffset +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertThrows +import org.junit.jupiter.api.Test + +class TimeStringUtilityTest { + + @Test + fun `toLocalDate should parse a valid date string`() { + val dateStr = "2024-12-16T00:00:00" + val date = TimeStringUtility.toLocalDate(dateStr) + assertEquals(LocalDate.of(2024, 12, 16), date) + } + + @Test + fun `toLocalDate should throw exception for invalid date string`() { + val invalidDateStr = "invalid-date" + assertThrows(java.time.format.DateTimeParseException::class.java) { + TimeStringUtility.toLocalDate(invalidDateStr) + } + } + + @Test + fun `toOffset should parse time with timezone`() { + val timeStrWithOffset = "12:34:56+02:00" + val localTime = TimeStringUtility.toOffset(timeStrWithOffset) + assertEquals(LocalTime.of(12, 34, 56), localTime) + } + + @Test + fun `toOffset should parse time without timezone`() { + val timeStrWithoutOffset = "12:34:56" + val localTime = TimeStringUtility.toOffset(timeStrWithoutOffset) + assertEquals(LocalTime.of(12, 34, 56), localTime) + } + + @Test + fun `toOffsetDateTime should parse datetime with timezone`() { + val dateTimeWithTz = "2024-12-16T12:34:56-05:00" + val odt = TimeStringUtility.toOffsetDateTime(dateTimeWithTz) + assertEquals(OffsetDateTime.of(2024, 12, 16, 12, 34, 56, 0, ZoneOffset.of("-05:00")), odt) + } + + @Test + fun `toOffsetDateTime should parse datetime without timezone as UTC`() { + val dateTimeWithoutTz = "2024-12-16T12:34:56" + val odt = TimeStringUtility.toOffsetDateTime(dateTimeWithoutTz) + assertEquals( + OffsetDateTime.of(LocalDateTime.of(2024, 12, 16, 12, 34, 56), ZoneOffset.UTC), + odt + ) + } + + @Test + fun `toOffsetDateTime should throw exception for invalid format`() { + val invalidDateTime = "invalid-datetime" + assertThrows(java.time.format.DateTimeParseException::class.java) { + TimeStringUtility.toOffsetDateTime(invalidDateTime) + } + } +} diff --git a/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/command/object_storage/ObjectStorageUploadConfiguration.kt b/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/command/object_storage/ObjectStorageUploadConfiguration.kt index 9f05d43bcff7..4d17e736239e 100644 --- a/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/command/object_storage/ObjectStorageUploadConfiguration.kt +++ b/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/command/object_storage/ObjectStorageUploadConfiguration.kt @@ -5,10 +5,12 @@ package io.airbyte.cdk.load.command.object_storage data class ObjectStorageUploadConfiguration( - val streamingUploadPartSize: Long = DEFAULT_STREAMING_UPLOAD_PART_SIZE, + val fileSizeBytes: Long = DEFAULT_FILE_SIZE_BYTES, + val uploadPartSizeBytes: Long = DEFAULT_PART_SIZE_BYTES, ) { companion object { - const val DEFAULT_STREAMING_UPLOAD_PART_SIZE = 5L * 1024L * 1024L + const val DEFAULT_PART_SIZE_BYTES: Long = 10 * 1024 * 1024 // File xfer is still using it + const val DEFAULT_FILE_SIZE_BYTES: Long = 200 * 1024 * 1024 } } diff --git a/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/file/object_storage/ObjectStorageClient.kt b/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/file/object_storage/ObjectStorageClient.kt index 313bd1602bc5..633691bea678 100644 --- a/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/file/object_storage/ObjectStorageClient.kt +++ b/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/file/object_storage/ObjectStorageClient.kt @@ -34,10 +34,26 @@ interface ObjectStorageClient> { ): T /** Experimental sane replacement interface */ - suspend fun startStreamingUpload(key: String, metadata: Map): StreamingUpload + suspend fun startStreamingUpload( + key: String, + metadata: Map = emptyMap() + ): StreamingUpload } interface StreamingUpload> { - suspend fun uploadPart(part: ByteArray) + /** + * Uploads a part of the object. Each part must have a unique index. The parts do not need to be + * uploaded in order. The index is 1-based. + */ + suspend fun uploadPart(part: ByteArray, index: Int) + + /** + * Completes a multipart upload. All parts must be uploaded before completing the upload, and + * there cannot be gaps in the indexes. Idempotent, Multiple calls will return the same object, + * but only the first call will have side effects. + * + * NOTE: If no parts were uploaded, it will skip the complete call but still return the object. + * This is a temporary hack to support empty files. + */ suspend fun complete(): T } diff --git a/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/file/object_storage/ObjectStorageFormattingWriter.kt b/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/file/object_storage/ObjectStorageFormattingWriter.kt index e2637dc5181f..c5417306a47b 100644 --- a/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/file/object_storage/ObjectStorageFormattingWriter.kt +++ b/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/file/object_storage/ObjectStorageFormattingWriter.kt @@ -38,6 +38,7 @@ import org.apache.avro.Schema interface ObjectStorageFormattingWriter : Closeable { fun accept(record: DestinationRecord) + fun flush() } @Singleton @@ -86,6 +87,10 @@ class JsonFormattingWriter( outputStream.write("\n") } + override fun flush() { + outputStream.flush() + } + override fun close() { outputStream.close() } @@ -105,6 +110,10 @@ class CSVFormattingWriter( ) } + override fun flush() { + printer.flush() + } + override fun close() { printer.close() } @@ -134,6 +143,10 @@ class AvroFormattingWriter( writer.write(withMeta.toAvroRecord(mappedSchema, avroSchema)) } + override fun flush() { + writer.flush() + } + override fun close() { writer.close() } @@ -163,6 +176,10 @@ class ParquetFormattingWriter( writer.write(withMeta.toAvroRecord(mappedSchema, avroSchema)) } + override fun flush() { + // Parquet writer does not support flushing + } + override fun close() { writer.close() } @@ -197,14 +214,19 @@ class BufferedFormattingWriter( writer.accept(record) } - fun takeBytes(): ByteArray { + fun takeBytes(): ByteArray? { wrappingBuffer.flush() + if (buffer.size() == 0) { + return null + } + val bytes = buffer.toByteArray() buffer.reset() return bytes } fun finish(): ByteArray? { + writer.flush() writer.close() streamProcessor.partFinisher.invoke(wrappingBuffer) return if (buffer.size() > 0) { @@ -214,6 +236,11 @@ class BufferedFormattingWriter( } } + override fun flush() { + writer.flush() + wrappingBuffer.flush() + } + override fun close() { writer.close() } diff --git a/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/file/object_storage/PartFactory.kt b/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/file/object_storage/PartFactory.kt new file mode 100644 index 000000000000..e8c09b56f0ab --- /dev/null +++ b/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/file/object_storage/PartFactory.kt @@ -0,0 +1,103 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.load.file.object_storage + +import java.util.concurrent.atomic.AtomicReference +import org.apache.mina.util.ConcurrentHashSet + +/** + * Generates part w/ metadata for a multi-part upload for a given key and file no. parts are + * 1-indexed. For convenience, empty parts are tolerated but not counted by the assembler. + * + * Not thread-safe. It is expected that the parts are generated in order. + */ +class PartFactory( + val key: String, + val fileNumber: Long, +) { + var totalSize: Long = 0 + private var nextIndex: Int = 0 + private var finalProduced = false + + fun nextPart(bytes: ByteArray?, isFinal: Boolean = false): Part { + if (finalProduced) { + throw IllegalStateException("Final part already produced") + } + finalProduced = isFinal + + totalSize += bytes?.size?.toLong() ?: 0 + // Only advance the index if the part isn't empty. + // This way empty parts can be ignored, but empty final parts + // can still convey the final index. + if (bytes != null) { + nextIndex++ // pre-increment as parts are 1-indexed + } + return Part( + key = key, + fileNumber = fileNumber, + partIndex = nextIndex, + bytes = bytes, + isFinal = isFinal + ) + } +} + +/** + * Reassembles part metadata into a view of the upload state. + * + * Usage: add the parts created by the factory. + * + * [PartBookkeeper.isComplete] will be true when all the parts AND the final part have been seen, + * regardless of the order in which they were added. + * + * Thread-safe: parts can be added by multiple threads in any order. + */ +data class Part( + val key: String, + val fileNumber: Long, + val partIndex: Int, + val bytes: ByteArray?, + val isFinal: Boolean, +) { + val isEmpty: Boolean + get() = bytes == null +} + +class PartBookkeeper { + private val partIndexes = ConcurrentHashSet() + private var finalIndex = AtomicReference(null) + + val isEmpty: Boolean + get() = partIndexes.isEmpty() + + fun add(part: Part) { + // Only add non-empty parts + if (part.bytes != null) { + if (part.partIndex in partIndexes) { + throw IllegalStateException( + "Part index ${part.partIndex} already seen for ${part.key}" + ) + } + partIndexes.add(part.partIndex) + } + + // The final part conveys the last + // index even if it is empty. + if (part.isFinal) { + if (!finalIndex.compareAndSet(null, part.partIndex)) { + throw IllegalStateException("Final part already seen for ${part.key}") + } + } + } + + /** + * Complete + * 1. we have seen a final part + * 2. there are no gaps in the part indices + * 3. the last index is the final index + */ + val isComplete: Boolean + get() = finalIndex.get()?.let { it == partIndexes.size } ?: false +} diff --git a/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/message/object_storage/ObjectStorageBatch.kt b/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/message/object_storage/ObjectStorageBatch.kt new file mode 100644 index 000000000000..47c5bd590b98 --- /dev/null +++ b/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/message/object_storage/ObjectStorageBatch.kt @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.load.message.object_storage + +import io.airbyte.cdk.load.file.object_storage.Part +import io.airbyte.cdk.load.file.object_storage.RemoteObject +import io.airbyte.cdk.load.message.Batch + +sealed interface ObjectStorageBatch : Batch + +// An indexed bytearray containing an uploadable chunk of a file. +// Returned by the batch accumulator after processing records. +data class LoadablePart(val part: Part) : ObjectStorageBatch { + override val groupId = null + override val state = Batch.State.PROCESSED + + // Hide the data from the logs + override fun toString(): String { + return "LoadablePart(partIndex=${part.partIndex}, key=${part.key}, fileNumber=${part.fileNumber}, empty=${part.isEmpty})" + } +} + +// An UploadablePart that has been uploaded to an incomplete object. +// Returned by processBatch +data class IncompletePartialUpload(val key: String) : ObjectStorageBatch { + override val state: Batch.State = Batch.State.LOCAL + override val groupId: String = key +} + +// An UploadablePart that has triggered a completed upload. +data class LoadedObject>( + val remoteObject: T, + val fileNumber: Long, +) : ObjectStorageBatch { + override val state: Batch.State = Batch.State.COMPLETE + override val groupId = remoteObject.key +} diff --git a/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/state/object_storage/ObjectStorageDestinationStateManager.kt b/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/state/object_storage/ObjectStorageDestinationStateManager.kt index 0488d55c4433..54a649c1517a 100644 --- a/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/state/object_storage/ObjectStorageDestinationStateManager.kt +++ b/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/state/object_storage/ObjectStorageDestinationStateManager.kt @@ -84,9 +84,8 @@ class ObjectStorageDestinationState( val partNumber: Long, ) - @get:JsonIgnore - val generations: Sequence - get() = + suspend fun getGenerations(): Sequence = + accessLock.withLock { generationMap.entries .asSequence() .map { (state, gens) -> @@ -100,14 +99,16 @@ class ObjectStorageDestinationState( } } .flatten() + } - @get:JsonIgnore - val nextPartNumber: Long - get() = generations.flatMap { it.objects }.map { it.partNumber }.maxOrNull()?.plus(1) ?: 0L + suspend fun getNextPartNumber(): Long = + getGenerations().flatMap { it.objects }.map { it.partNumber }.maxOrNull()?.plus(1) ?: 0L /** Returns generationId -> objectAndPart for all staged objects that should be kept. */ - fun getStagedObjectsToFinalize(minimumGenerationId: Long): Sequence> = - generations + suspend fun getStagedObjectsToFinalize( + minimumGenerationId: Long + ): Sequence> = + getGenerations() .filter { it.isStaging && it.generationId >= minimumGenerationId } .flatMap { it.objects.map { obj -> it.generationId to obj } } @@ -115,8 +116,8 @@ class ObjectStorageDestinationState( * Returns generationId -> objectAndPart for all objects (staged and unstaged) that should be * cleaned up. */ - fun getObjectsToDelete(minimumGenerationId: Long): Sequence> { - val (toKeep, toDrop) = generations.partition { it.generationId >= minimumGenerationId } + suspend fun getObjectsToDelete(minimumGenerationId: Long): Sequence> { + val (toKeep, toDrop) = getGenerations().partition { it.generationId >= minimumGenerationId } val keepKeys = toKeep.flatMap { it.objects.map { obj -> obj.key } }.toSet() return toDrop.asSequence().flatMap { it.objects.filter { obj -> obj.key !in keepKeys }.map { obj -> it.generationId to obj } diff --git a/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/write/object_storage/ObjectStorageStreamLoaderFactory.kt b/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/write/object_storage/ObjectStorageStreamLoaderFactory.kt index c58193d311dc..5f362fcffa49 100644 --- a/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/write/object_storage/ObjectStorageStreamLoaderFactory.kt +++ b/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/write/object_storage/ObjectStorageStreamLoaderFactory.kt @@ -16,10 +16,12 @@ import io.airbyte.cdk.load.file.object_storage.ObjectStoragePathFactory import io.airbyte.cdk.load.file.object_storage.RemoteObject import io.airbyte.cdk.load.message.Batch import io.airbyte.cdk.load.message.DestinationFile -import io.airbyte.cdk.load.message.DestinationRecord +import io.airbyte.cdk.load.message.object_storage.LoadedObject +import io.airbyte.cdk.load.message.object_storage.ObjectStorageBatch import io.airbyte.cdk.load.state.DestinationStateManager import io.airbyte.cdk.load.state.StreamProcessingFailed import io.airbyte.cdk.load.state.object_storage.ObjectStorageDestinationState +import io.airbyte.cdk.load.write.BatchAccumulator import io.airbyte.cdk.load.write.StreamLoader import io.github.oshai.kotlinlogging.KotlinLogging import io.micronaut.context.annotation.Secondary @@ -38,8 +40,8 @@ class ObjectStorageStreamLoaderFactory, U : OutputStream>( private val compressionConfigurationProvider: ObjectStorageCompressionConfigurationProvider? = null, - private val destinationStateManager: DestinationStateManager, private val uploadConfigurationProvider: ObjectStorageUploadConfigurationProvider, + private val destinationStateManager: DestinationStateManager, ) { fun create(stream: DestinationStream): StreamLoader { return ObjectStorageStreamLoader( @@ -49,7 +51,8 @@ class ObjectStorageStreamLoaderFactory, U : OutputStream>( pathFactory, bufferedWriterFactory, destinationStateManager, - uploadConfigurationProvider.objectStorageUploadConfiguration.streamingUploadPartSize, + uploadConfigurationProvider.objectStorageUploadConfiguration.uploadPartSizeBytes, + uploadConfigurationProvider.objectStorageUploadConfiguration.fileSizeBytes ) } } @@ -65,60 +68,33 @@ class ObjectStorageStreamLoader, U : OutputStream>( private val pathFactory: ObjectStoragePathFactory, private val bufferedWriterFactory: BufferedFormattingWriterFactory, private val destinationStateManager: DestinationStateManager, - private val partSize: Long, + private val partSizeBytes: Long, + private val fileSizeBytes: Long, ) : StreamLoader { private val log = KotlinLogging.logger {} - sealed interface ObjectStorageBatch : Batch - data class RemoteObject( - override val state: Batch.State = Batch.State.COMPLETE, - val remoteObject: T, - val partNumber: Long, - override val groupId: String? = null - ) : ObjectStorageBatch - - private val partNumber = AtomicLong(0L) + // Used for naming files. Distinct from part index, which is used to track uploads. + private val fileNumber = AtomicLong(0L) + private val objectAccumulator = PartToObjectAccumulator(stream, client) override suspend fun start() { val state = destinationStateManager.getState(stream) - val nextPartNumber = state.nextPartNumber - log.info { "Got next part number from destination state: $nextPartNumber" } - partNumber.set(nextPartNumber) + // This is the number used to populate {part_number} on the object path. + // We'll call it file number here to avoid confusion with the part index used for uploads. + val fileNumber = state.getNextPartNumber() + log.info { "Got next file number from destination state: $fileNumber" } + this.fileNumber.set(fileNumber) } - override suspend fun processRecords( - records: Iterator, - totalSizeBytes: Long - ): Batch { - val partNumber = partNumber.getAndIncrement() - val key = - pathFactory.getPathToFile(stream, partNumber, isStaging = pathFactory.supportsStaging) - - log.info { "Writing records to $key" } - val state = destinationStateManager.getState(stream) - state.addObject( - stream.generationId, - key, - partNumber, - isStaging = pathFactory.supportsStaging + override suspend fun createBatchAccumulator(): BatchAccumulator { + return RecordToPartAccumulator( + pathFactory, + bufferedWriterFactory, + partSizeBytes = partSizeBytes, + fileSizeBytes = fileSizeBytes, + stream, + fileNumber ) - - val metadata = ObjectStorageDestinationState.metadataFor(stream) - val upload = client.startStreamingUpload(key, metadata) - bufferedWriterFactory.create(stream).use { writer -> - records.forEach { - writer.accept(it) - if (writer.bufferSize >= partSize) { - upload.uploadPart(writer.takeBytes()) - } - } - writer.finish()?.let { upload.uploadPart(it) } - } - val obj = upload.complete() - - log.info { "Finished writing records to $key, persisting state" } - destinationStateManager.persistState(stream) - return RemoteObject(remoteObject = obj, partNumber = partNumber) } override suspend fun processFile(file: DestinationFile): Batch { @@ -149,15 +125,32 @@ class ObjectStorageStreamLoader, U : OutputStream>( } val localFile = createFile(fileUrl) localFile.delete() - return RemoteObject(remoteObject = obj, partNumber = 0) + return LoadedObject(remoteObject = obj, fileNumber = 0) } @VisibleForTesting fun createFile(url: String) = File(url) override suspend fun processBatch(batch: Batch): Batch { - throw NotImplementedError( - "All post-processing occurs in the close method; this should not be called" - ) + val nextBatch = objectAccumulator.processBatch(batch) as ObjectStorageBatch + when (nextBatch) { + is LoadedObject<*> -> { + // Mark that we've completed the upload and persist the state before returning the + // persisted batch. + // Otherwise, we might lose track of the upload if the process crashes before + // persisting. + // TODO: Migrate all state bookkeeping to the CDK if possible + val state = destinationStateManager.getState(stream) + state.addObject( + stream.generationId, + nextBatch.remoteObject.key, + nextBatch.fileNumber, + isStaging = pathFactory.supportsStaging + ) + destinationStateManager.persistState(stream) + } + else -> {} // Do nothing + } + return nextBatch } override suspend fun close(streamFailure: StreamProcessingFailed?) { diff --git a/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/write/object_storage/PartToObjectAccumulator.kt b/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/write/object_storage/PartToObjectAccumulator.kt new file mode 100644 index 000000000000..1dba37ab3ee4 --- /dev/null +++ b/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/write/object_storage/PartToObjectAccumulator.kt @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.load.write.object_storage + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings +import io.airbyte.cdk.load.command.DestinationStream +import io.airbyte.cdk.load.file.object_storage.ObjectStorageClient +import io.airbyte.cdk.load.file.object_storage.PartBookkeeper +import io.airbyte.cdk.load.file.object_storage.RemoteObject +import io.airbyte.cdk.load.file.object_storage.StreamingUpload +import io.airbyte.cdk.load.message.Batch +import io.airbyte.cdk.load.message.object_storage.IncompletePartialUpload +import io.airbyte.cdk.load.message.object_storage.LoadablePart +import io.airbyte.cdk.load.message.object_storage.LoadedObject +import io.airbyte.cdk.load.state.object_storage.ObjectStorageDestinationState +import io.airbyte.cdk.load.util.setOnce +import io.github.oshai.kotlinlogging.KotlinLogging +import java.util.concurrent.ConcurrentHashMap +import java.util.concurrent.atomic.AtomicBoolean +import kotlinx.coroutines.CompletableDeferred + +@SuppressFBWarnings("NP_NONNULL_PARAM_VIOLATION", justification = "Kotlin async continuation") +class PartToObjectAccumulator>( + private val stream: DestinationStream, + private val client: ObjectStorageClient, +) { + private val log = KotlinLogging.logger {} + + data class UploadInProgress>( + val streamingUpload: CompletableDeferred> = CompletableDeferred(), + val partBookkeeper: PartBookkeeper = PartBookkeeper(), + val hasStarted: AtomicBoolean = AtomicBoolean(false), + ) + private val uploadsInProgress = ConcurrentHashMap>() + + suspend fun processBatch(batch: Batch): Batch { + batch as LoadablePart + val upload = uploadsInProgress.getOrPut(batch.part.key) { UploadInProgress() } + if (upload.hasStarted.setOnce()) { + // Start the upload if we haven't already. Note that the `complete` + // here refers to the completable deferred, not the streaming upload. + val metadata = ObjectStorageDestinationState.metadataFor(stream) + val streamingUpload = client.startStreamingUpload(batch.part.key, metadata) + upload.streamingUpload.complete(streamingUpload) + } + val streamingUpload = upload.streamingUpload.await() + + log.info { + "Processing loadable part ${batch.part.partIndex} of ${batch.part.key} (empty=${batch.part.isEmpty}; final=${batch.part.isFinal})" + } + + // Upload provided bytes and update indexes. + if (batch.part.bytes != null) { + streamingUpload.uploadPart(batch.part.bytes, batch.part.partIndex) + } + upload.partBookkeeper.add(batch.part) + if (upload.partBookkeeper.isComplete) { + val obj = streamingUpload.complete() + uploadsInProgress.remove(batch.part.key) + + log.info { "Completed upload of ${obj.key}" } + return LoadedObject(remoteObject = obj, fileNumber = batch.part.fileNumber) + } else { + return IncompletePartialUpload(batch.part.key) + } + } +} diff --git a/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/write/object_storage/RecordToPartAccumulator.kt b/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/write/object_storage/RecordToPartAccumulator.kt new file mode 100644 index 000000000000..0a871b74188d --- /dev/null +++ b/airbyte-cdk/bulk/toolkits/load-object-storage/src/main/kotlin/io/airbyte/cdk/load/write/object_storage/RecordToPartAccumulator.kt @@ -0,0 +1,106 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.load.write.object_storage + +import io.airbyte.cdk.load.command.DestinationStream +import io.airbyte.cdk.load.file.object_storage.BufferedFormattingWriter +import io.airbyte.cdk.load.file.object_storage.BufferedFormattingWriterFactory +import io.airbyte.cdk.load.file.object_storage.ObjectStoragePathFactory +import io.airbyte.cdk.load.file.object_storage.PartFactory +import io.airbyte.cdk.load.message.Batch +import io.airbyte.cdk.load.message.DestinationRecord +import io.airbyte.cdk.load.message.object_storage.* +import io.airbyte.cdk.load.write.BatchAccumulator +import io.github.oshai.kotlinlogging.KotlinLogging +import java.io.OutputStream +import java.util.concurrent.ConcurrentHashMap +import java.util.concurrent.atomic.AtomicLong + +data class ObjectInProgress( + val partFactory: PartFactory, + val writer: BufferedFormattingWriter, +) + +class RecordToPartAccumulator( + private val pathFactory: ObjectStoragePathFactory, + private val bufferedWriterFactory: BufferedFormattingWriterFactory, + private val partSizeBytes: Long, + private val fileSizeBytes: Long, + private val stream: DestinationStream, + private val fileNumber: AtomicLong, +) : BatchAccumulator { + private val log = KotlinLogging.logger {} + + // Hack because AtomicReference doesn't support lazily evaluated blocks. + private val key = "key" + private val currentObject = ConcurrentHashMap>() + + override suspend fun processRecords( + records: Iterator, + totalSizeBytes: Long, + endOfStream: Boolean + ): Batch { + // Start a new object if there is not one in progress. + val partialUpload = + currentObject.getOrPut(key) { + val fileNo = fileNumber.getAndIncrement() + ObjectInProgress( + partFactory = + PartFactory( + key = + pathFactory.getPathToFile( + stream, + fileNo, + isStaging = pathFactory.supportsStaging + ), + fileNumber = fileNo + ), + writer = bufferedWriterFactory.create(stream), + ) + } + + // Add all the records to the formatting writer. + log.info { "Accumulating ${totalSizeBytes}b records for ${partialUpload.partFactory.key}" } + records.forEach { partialUpload.writer.accept(it) } + partialUpload.writer.flush() + + // Check if we have reached the target size. + val bufferSize = partialUpload.writer.bufferSize + val newSize = partialUpload.partFactory.totalSize + bufferSize + if (newSize >= fileSizeBytes || endOfStream) { + + // If we have reached target file size, clear the object and yield a final part. + val bytes = partialUpload.writer.finish() + partialUpload.writer.close() + val part = partialUpload.partFactory.nextPart(bytes, isFinal = true) + + log.info { + val reason = if (endOfStream) "end of stream" else "file size ${fileSizeBytes}b" + "${partialUpload.partFactory.key}: buffer ${bufferSize}b; total: ${newSize}b; $reason reached, yielding final part ${part.partIndex} (size=${bytes?.size}b)" + } + + currentObject.remove(key) + return LoadablePart(part) + } else if (bufferSize >= partSizeBytes) { + // If we have not reached file size, but have reached part size, yield a non-final part. + val bytes = partialUpload.writer.takeBytes() + val part = partialUpload.partFactory.nextPart(bytes) + log.info { + "${partialUpload.partFactory.key}: buffer ${bufferSize}b; total ${newSize}b; part size ${partSizeBytes}b reached, yielding part ${part.partIndex}" + } + + return LoadablePart(part) + } else { + // If we have not reached either the file or part size, yield a null part. + // TODO: Change this to a generator interface so we never have to do this. + val part = partialUpload.partFactory.nextPart(null) + log.info { + "${partialUpload.partFactory.key}: buffer ${bufferSize}b; total ${newSize}b; part size ${partSizeBytes}b not reached, yielding null part ${part.partIndex}" + } + + return LoadablePart(part) + } + } +} diff --git a/airbyte-cdk/bulk/toolkits/load-object-storage/src/test/kotlin/io/airbyte/cdk/load/file/object_storage/PartFactoryTest.kt b/airbyte-cdk/bulk/toolkits/load-object-storage/src/test/kotlin/io/airbyte/cdk/load/file/object_storage/PartFactoryTest.kt new file mode 100644 index 000000000000..d54e3d1ce0c8 --- /dev/null +++ b/airbyte-cdk/bulk/toolkits/load-object-storage/src/test/kotlin/io/airbyte/cdk/load/file/object_storage/PartFactoryTest.kt @@ -0,0 +1,140 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.load.file.object_storage + +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.Job +import kotlinx.coroutines.launch +import kotlinx.coroutines.test.runTest +import kotlinx.coroutines.withContext +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertThrows + +class PartFactoryTest { + @Test + fun `parts are generated in order and empty parts are skipped (empty final)`() { + val factory = PartFactory("key", 1) + val part1 = factory.nextPart(byteArrayOf(1)) + val part2 = factory.nextPart(null) + val part3 = factory.nextPart(byteArrayOf(2)) + val part4 = factory.nextPart(null, isFinal = true) + + assert(part1.partIndex == 1) + assert(!part1.isFinal) + assert(!part1.isEmpty) + + assert(part2.partIndex == 1) + assert(!part2.isFinal) + assert(part2.isEmpty) + + assert(part3.partIndex == 2) + assert(!part3.isFinal) + assert(!part3.isEmpty) + + assert(part4.partIndex == 2) + assert(part4.isFinal) + assert(part4.isEmpty) + + // No more parts can be produced after the final part. + assertThrows { factory.nextPart(byteArrayOf(3)) } + } + + @Test + fun `parts are generated in order and empty parts are skipped (non-empty final)`() { + val factory = PartFactory("key", 1) + val part1 = factory.nextPart(byteArrayOf(1)) + val part2 = factory.nextPart(null) + val part3 = factory.nextPart(byteArrayOf(2)) + val part4 = factory.nextPart(byteArrayOf(3), isFinal = true) + + assert(part1.partIndex == 1) + assert(part2.partIndex == 1) + assert(part3.partIndex == 2) + + assert(part4.partIndex == 3) + assert(part4.isFinal) + assert(!part4.isEmpty) + } + + @Test + fun `total size is calculated correctly`() { + val factory = PartFactory("key", 1) + factory.nextPart(byteArrayOf(1)) + factory.nextPart(null) + factory.nextPart(byteArrayOf(2, 2)) + factory.nextPart(byteArrayOf(3, 3, 3), isFinal = true) + + assert(factory.totalSize == 6L) + } + + @Test + fun `test that assembler is not complete until all parts are seen`() { + val factory = PartFactory("key", 1) + val assembler = PartBookkeeper() + + repeat(10) { + val part = factory.nextPart(byteArrayOf(it.toByte()), it == 9) + assert(!assembler.isComplete) + assembler.add(part) + } + + assert(assembler.isComplete) + } + + @Test + fun `test assembler not complete until all are seen (out-of-order, gaps, and null final)`() { + val factory = PartFactory("key", 1) + val assembler = PartBookkeeper() + + val sortOrder = listOf(2, 1, 0, 9, 8, 7, 6, 4, 5, 3) + val parts = + (0 until 10).map { + // Make a gap every 3rd part + val bytes = + if (it % 3 == 0) { + null + } else { + byteArrayOf(it.toByte()) + } + + // Last in list must be final + factory.nextPart(bytes, it == 9) + } + + val partsSorted = parts.zip(sortOrder).sortedBy { it.second } + partsSorted.forEach { (part, sortIndex) -> + if (sortIndex == 9) { + // Because the last part was null, and the assembler already saw the final part + // it *should* think it is complete. + assert(assembler.isComplete) + } else { + assert(!assembler.isComplete) + } + assembler.add(part) + } + + assert(assembler.isComplete) + } + + @Test + fun `test adding parts asynchronously`() = runTest { + val factory = PartFactory("key", 1) + val parts = (0 until 100000).map { factory.nextPart(byteArrayOf(it.toByte()), it == 99999) } + val assembler = PartBookkeeper() + val jobs = mutableListOf() + withContext(Dispatchers.IO) { + parts.shuffled(random = java.util.Random(0)).forEach { + jobs.add( + launch { + assert(!assembler.isComplete) + assembler.add(it) + } + ) + } + jobs.forEach { it.join() } + } + assert(assembler.isComplete) + } +} diff --git a/airbyte-cdk/bulk/toolkits/load-object-storage/src/test/kotlin/io/airbyte/cdk/load/state/object_storage/ObjectStorageDestinationStateTest.kt b/airbyte-cdk/bulk/toolkits/load-object-storage/src/test/kotlin/io/airbyte/cdk/load/state/object_storage/ObjectStorageDestinationStateTest.kt index f3fa5030ef39..b8f244746b7c 100644 --- a/airbyte-cdk/bulk/toolkits/load-object-storage/src/test/kotlin/io/airbyte/cdk/load/state/object_storage/ObjectStorageDestinationStateTest.kt +++ b/airbyte-cdk/bulk/toolkits/load-object-storage/src/test/kotlin/io/airbyte/cdk/load/state/object_storage/ObjectStorageDestinationStateTest.kt @@ -68,7 +68,7 @@ class ObjectStorageDestinationStateTest { val state = d.stateManager.getState(stream1) Assertions.assertEquals( emptyList(), - state.generations.toList(), + state.getGenerations().toList(), "state should initially be empty" ) state.addObject(0, "key1", 0) @@ -77,7 +77,7 @@ class ObjectStorageDestinationStateTest { state.addObject(1, "key4", 1) Assertions.assertEquals( 4, - state.generations.flatMap { it.objects }.toList().size, + state.getGenerations().flatMap { it.objects }.toList().size, "state should contain 4 objects" ) @@ -96,14 +96,14 @@ class ObjectStorageDestinationStateTest { state.removeObject(1, "key4") Assertions.assertEquals( emptyList(), - state.generations.flatMap { it.objects }.toList(), + state.getGenerations().flatMap { it.objects }.toList(), "objects should be removed" ) val fetchedState = d.stateManager.getState(stream1) Assertions.assertEquals( 0, - fetchedState.generations.flatMap { it.objects }.toList().size, + fetchedState.getGenerations().flatMap { it.objects }.toList().size, "state should still contain 0 objects (managed state is in cache)" ) } @@ -137,11 +137,11 @@ class ObjectStorageDestinationStateTest { ) ) ), - state.generations.toList(), + state.getGenerations().toList(), "state should be loaded from storage" ) - Assertions.assertEquals(2L, state.nextPartNumber) + Assertions.assertEquals(2L, state.getNextPartNumber()) } @Test @@ -150,7 +150,7 @@ class ObjectStorageDestinationStateTest { ObjectStorageDestinationStateTestWithoutStaging().loadMetadata(d, stream1) val state = d.stateManager.getState(stream1) ObjectStorageDestinationStateTestWithoutStaging().validateMetadata(state, generations) - Assertions.assertEquals(2L, state.nextPartNumber) + Assertions.assertEquals(2L, state.getNextPartNumber()) } @Test @@ -231,7 +231,7 @@ class ObjectStorageDestinationStateTest { fun validateMetadata( state: ObjectStorageDestinationState, generations: List> - ) { + ) = runTest { Assertions.assertEquals( generations .groupBy { it.first } @@ -250,7 +250,7 @@ class ObjectStorageDestinationStateTest { .toMutableList() ) }, - state.generations.toList().sortedBy { it.generationId }, + state.getGenerations().toList().sortedBy { it.generationId }, "state should be recovered from metadata" ) } @@ -260,7 +260,7 @@ class ObjectStorageDestinationStateTest { val generations = loadMetadata(d, stream1) val state = d.stateManager.getState(stream1) validateMetadata(state, generations) - Assertions.assertEquals(2L, state.nextPartNumber) + Assertions.assertEquals(2L, state.getNextPartNumber()) } } } diff --git a/airbyte-cdk/bulk/toolkits/load-object-storage/src/test/kotlin/io/airbyte/cdk/load/write/object_storage/ObjectStorageStreamLoaderTest.kt b/airbyte-cdk/bulk/toolkits/load-object-storage/src/test/kotlin/io/airbyte/cdk/load/write/object_storage/ObjectStorageStreamLoaderTest.kt index 140131b57426..47631dd552c2 100644 --- a/airbyte-cdk/bulk/toolkits/load-object-storage/src/test/kotlin/io/airbyte/cdk/load/write/object_storage/ObjectStorageStreamLoaderTest.kt +++ b/airbyte-cdk/bulk/toolkits/load-object-storage/src/test/kotlin/io/airbyte/cdk/load/write/object_storage/ObjectStorageStreamLoaderTest.kt @@ -11,6 +11,7 @@ import io.airbyte.cdk.load.file.object_storage.ObjectStorageClient import io.airbyte.cdk.load.file.object_storage.ObjectStoragePathFactory import io.airbyte.cdk.load.file.object_storage.RemoteObject import io.airbyte.cdk.load.message.DestinationFile +import io.airbyte.cdk.load.message.object_storage.* import io.airbyte.cdk.load.state.DestinationStateManager import io.airbyte.cdk.load.state.object_storage.ObjectStorageDestinationState import io.mockk.coEvery @@ -36,6 +37,7 @@ class ObjectStorageStreamLoaderTest { mockk(relaxed = true) private val destinationStateManager: DestinationStateManager = mockk(relaxed = true) + private val fileSize: Long = 2 private val partSize: Long = 1 private val objectStorageStreamLoader = @@ -47,7 +49,8 @@ class ObjectStorageStreamLoaderTest { pathFactory, writerFactory, destinationStateManager, - partSize + partSizeBytes = partSize, + fileSizeBytes = fileSize ) ) @@ -68,7 +71,7 @@ class ObjectStorageStreamLoaderTest { val mockedFile = mockk(relaxed = true) every { objectStorageStreamLoader.createFile(any()) } returns mockedFile - val expectedKey = Path.of(stagingDirectory.toString(), fileUrl).toString() + val expectedKey = Path.of(stagingDirectory, fileUrl).toString() val metadata = mapOf( ObjectStorageDestinationState.METADATA_GENERATION_ID_KEY to generationId.toString() @@ -80,10 +83,7 @@ class ObjectStorageStreamLoaderTest { coVerify { mockedStateStorage.addObject(generationId, expectedKey, 0, false) } coVerify { client.streamingUpload(expectedKey, metadata, compressor, any()) } - assertEquals( - mockRemoteObject, - (result as ObjectStorageStreamLoader.RemoteObject<*>).remoteObject - ) + assertEquals(mockRemoteObject, (result as LoadedObject<*>).remoteObject) verify { mockedFile.delete() } Files.deleteIfExists(Path.of(fileUrl)) } diff --git a/airbyte-cdk/bulk/toolkits/load-object-storage/src/test/kotlin/io/airbyte/cdk/load/write/object_storage/PartToObjectAccumulatorTest.kt b/airbyte-cdk/bulk/toolkits/load-object-storage/src/test/kotlin/io/airbyte/cdk/load/write/object_storage/PartToObjectAccumulatorTest.kt new file mode 100644 index 000000000000..083da1bd193b --- /dev/null +++ b/airbyte-cdk/bulk/toolkits/load-object-storage/src/test/kotlin/io/airbyte/cdk/load/write/object_storage/PartToObjectAccumulatorTest.kt @@ -0,0 +1,121 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.load.write.object_storage + +import io.airbyte.cdk.load.command.DestinationStream +import io.airbyte.cdk.load.file.object_storage.ObjectStorageClient +import io.airbyte.cdk.load.file.object_storage.Part +import io.airbyte.cdk.load.file.object_storage.StreamingUpload +import io.airbyte.cdk.load.message.object_storage.LoadablePart +import io.airbyte.cdk.load.state.object_storage.ObjectStorageDestinationState +import io.mockk.coEvery +import io.mockk.coVerify +import io.mockk.mockk +import kotlinx.coroutines.test.runTest +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test + +class PartToObjectAccumulatorTest { + private val streamDescriptor = DestinationStream.Descriptor("test", "stream") + + private lateinit var stream: DestinationStream + private lateinit var client: ObjectStorageClient<*> + private lateinit var streamingUpload: StreamingUpload<*> + private lateinit var metadata: Map + + @BeforeEach + fun setup() { + stream = mockk(relaxed = true) + client = mockk(relaxed = true) + streamingUpload = mockk(relaxed = true) + coEvery { stream.descriptor } returns streamDescriptor + metadata = ObjectStorageDestinationState.metadataFor(stream) + coEvery { client.startStreamingUpload(any(), any()) } returns streamingUpload + coEvery { streamingUpload.uploadPart(any(), any()) } returns Unit + coEvery { streamingUpload.complete() } returns mockk(relaxed = true) + } + + private fun makePart( + fileNumber: Int, + index: Int, + isFinal: Boolean = false, + empty: Boolean = false + ): LoadablePart = + LoadablePart( + Part( + "key$fileNumber", + fileNumber.toLong(), + index, + if (empty) { + null + } else { + ByteArray(0) + }, + isFinal + ) + ) + + @Test + fun `test part accumulation`() = runTest { + val acc = PartToObjectAccumulator(stream, client) + + // First part triggers starting the upload + val firstPartFile1 = makePart(1, 1) + acc.processBatch(firstPartFile1) + coVerify { client.startStreamingUpload(firstPartFile1.part.key, metadata) } + coVerify { + streamingUpload.uploadPart(firstPartFile1.part.bytes!!, firstPartFile1.part.partIndex) + } + + // All nonempty parts are added + (2 until 4).forEach { + val nonEmptyPart = makePart(1, it) + acc.processBatch(nonEmptyPart) + coVerify { + streamingUpload.uploadPart(nonEmptyPart.part.bytes!!, nonEmptyPart.part.partIndex) + } + } + + // New key starts new upload + val firstPartFile2 = makePart(2, 1) + acc.processBatch(firstPartFile2) + coVerify { client.startStreamingUpload(firstPartFile2.part.key, metadata) } + + // All empty parts are not added + repeat(2) { + val emptyPartFile1 = makePart(2, it + 2, empty = true) + acc.processBatch(emptyPartFile1) + // Ie, no more calls. + coVerify(exactly = 1) { + streamingUpload.uploadPart(any(), emptyPartFile1.part.partIndex) + } + } + + // The final part will trigger an upload + val finalPartFile1 = makePart(1, 4, isFinal = true) + acc.processBatch(finalPartFile1) + coVerify(exactly = 1) { streamingUpload.complete() } + + // The final part can be empty and/or added at any time and will still count for data + // sufficiency + val emptyFinalPartFile2 = makePart(2, 2, isFinal = true, empty = true) + acc.processBatch(emptyFinalPartFile2) + // empty part won't be uploaded + coVerify(exactly = 1) { + streamingUpload.uploadPart(any(), emptyFinalPartFile2.part.partIndex) + } + + // The missing part, even tho it isn't final, will trigger the completion + val nonEmptyPenultimatePartFile2 = makePart(2, 2) + acc.processBatch(nonEmptyPenultimatePartFile2) + coVerify { + streamingUpload.uploadPart( + nonEmptyPenultimatePartFile2.part.bytes!!, + nonEmptyPenultimatePartFile2.part.partIndex + ) + } + coVerify(exactly = 2) { streamingUpload.complete() } + } +} diff --git a/airbyte-cdk/bulk/toolkits/load-object-storage/src/test/kotlin/io/airbyte/cdk/load/write/object_storage/RecordToPartAccumulatorTest.kt b/airbyte-cdk/bulk/toolkits/load-object-storage/src/test/kotlin/io/airbyte/cdk/load/write/object_storage/RecordToPartAccumulatorTest.kt new file mode 100644 index 000000000000..527651bbc476 --- /dev/null +++ b/airbyte-cdk/bulk/toolkits/load-object-storage/src/test/kotlin/io/airbyte/cdk/load/write/object_storage/RecordToPartAccumulatorTest.kt @@ -0,0 +1,186 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.load.write.object_storage + +import io.airbyte.cdk.load.command.DestinationStream +import io.airbyte.cdk.load.data.ObjectValue +import io.airbyte.cdk.load.file.object_storage.BufferedFormattingWriter +import io.airbyte.cdk.load.file.object_storage.BufferedFormattingWriterFactory +import io.airbyte.cdk.load.file.object_storage.ObjectStoragePathFactory +import io.airbyte.cdk.load.message.DestinationRecord +import io.airbyte.cdk.load.message.object_storage.* +import io.mockk.coEvery +import io.mockk.coVerify +import io.mockk.mockk +import java.io.OutputStream +import java.util.concurrent.atomic.AtomicLong +import kotlinx.coroutines.test.runTest +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test + +class RecordToPartAccumulatorTest { + private val partSizeBytes: Long = 2L + private val fileSizeBytes: Long = 4L + + private lateinit var pathFactory: ObjectStoragePathFactory + private lateinit var bufferedWriterFactory: BufferedFormattingWriterFactory + private lateinit var bufferedWriter: BufferedFormattingWriter + private lateinit var stream: DestinationStream + + @BeforeEach + fun setup() { + pathFactory = mockk() + bufferedWriterFactory = mockk() + stream = mockk() + bufferedWriter = mockk() + coEvery { bufferedWriterFactory.create(any()) } returns bufferedWriter + coEvery { bufferedWriter.flush() } returns Unit + coEvery { bufferedWriter.close() } returns Unit + } + + private fun makeRecord(): DestinationRecord = + DestinationRecord( + DestinationStream.Descriptor("test", "stream"), + ObjectValue(linkedMapOf()), + 0L, + null, + "" + ) + + private fun makeRecords(n: Int): Iterator = + (0 until n).map { makeRecord() }.listIterator() + + private fun makeBytes(n: Int): ByteArray? = + if (n == 0) { + null + } else (0 until n).map { it.toByte() }.toByteArray() + + @Test + fun `test parts are emitted correctly`() = runTest { + val fileNumber = AtomicLong(111L) + val acc = + RecordToPartAccumulator( + pathFactory = pathFactory, + bufferedWriterFactory = bufferedWriterFactory, + partSizeBytes = partSizeBytes, + fileSizeBytes = fileSizeBytes, + stream = stream, + fileNumber = fileNumber + ) + + val bufferSize = AtomicLong(0L) + coEvery { bufferedWriter.accept(any()) } answers + { + bufferSize.getAndIncrement() + Unit + } + coEvery { bufferedWriter.bufferSize } answers { bufferSize.get().toInt() } + coEvery { bufferedWriter.takeBytes() } answers + { + val bytes = makeBytes(bufferSize.get().toInt()) + bufferSize.set(0) + bytes + } + coEvery { bufferedWriter.finish() } answers + { + val bytes = makeBytes(bufferSize.get().toInt()) + bufferSize.set(0) + bytes + } + + coEvery { pathFactory.getPathToFile(any(), any()) } answers { "path.${secondArg()}" } + coEvery { pathFactory.supportsStaging } returns false + + // Object 1 + + // part 0->1/2b of 4b total => not data sufficient, should be first and empty + when (val batch = acc.processRecords(makeRecords(1), 0L, false) as ObjectStorageBatch) { + is LoadablePart -> { + assert(batch.part.isEmpty) + assert(batch.part.partIndex == 0) + assert(batch.part.fileNumber == 111L) + assert(!batch.isPersisted()) + assert(!batch.part.isFinal) + assert(batch.part.key == "path.111") + } + else -> assert(false) + } + + // empty iterator, should be still first, empty, and nonfinal + when (val batch = acc.processRecords(makeRecords(0), 0L, false) as ObjectStorageBatch) { + is LoadablePart -> { + assert(batch.part.isEmpty) + assert(batch.part.partIndex == 0) + assert(batch.part.fileNumber == 111L) + assert(!batch.isPersisted()) + assert(!batch.part.isFinal) + assert(batch.part.key == "path.111") + } + else -> assert(false) + } + + // part 1->3/2b of 4b total => data sufficient for part, should be first part and nonfinal + when (val batch = acc.processRecords(makeRecords(2), 0L, false) as ObjectStorageBatch) { + is LoadablePart -> { + assert(batch.part.bytes.contentEquals(makeBytes(3))) + assert(batch.part.partIndex == 1) + assert(batch.part.fileNumber == 111L) + assert(!batch.isPersisted()) + assert(!batch.part.isFinal) + assert(batch.part.key == "path.111") + } + else -> assert(false) + } + + // part 3->4/2b of 4b total => data sufficient for file (but not part! this is expected!), + // should be second part and final (and not empty) + when (val batch = acc.processRecords(makeRecords(1), 0L, false) as ObjectStorageBatch) { + is LoadablePart -> { + println(batch.part.bytes.contentToString()) + assert(batch.part.bytes.contentEquals(makeBytes(1))) + assert(batch.part.partIndex == 2) + assert(batch.part.fileNumber == 111L) + assert(!batch.isPersisted()) + assert(batch.part.isFinal) + assert(batch.part.key == "path.111") + } + else -> assert(false) + } + + // Object 2 + + // Next part 10/4b => data sufficient, should be first and final + when (val batch = acc.processRecords(makeRecords(10), 0L, false) as ObjectStorageBatch) { + is LoadablePart -> { + assert(batch.part.bytes.contentEquals(makeBytes(10))) + assert(batch.part.partIndex == 1) + assert(batch.part.fileNumber == 112L) + assert(!batch.isPersisted()) + assert(batch.part.isFinal) + assert(batch.part.key == "path.112") + } + else -> assert(false) + } + + // Object 3: empty eos, should be final and empty + + when (val batch = acc.processRecords(makeRecords(0), 0L, true) as ObjectStorageBatch) { + is LoadablePart -> { + assert(batch.part.isEmpty) + assert(batch.part.partIndex == 0) + assert(batch.part.fileNumber == 113L) + assert(!batch.isPersisted()) + assert(batch.part.isFinal) + assert(batch.part.key == "path.113") + } + else -> assert(false) + } + + // One flush per call, one create/close per finished object + coVerify(exactly = 3) { bufferedWriterFactory.create(any()) } + coVerify(exactly = 6) { bufferedWriter.flush() } + coVerify(exactly = 3) { bufferedWriter.close() } + } +} diff --git a/airbyte-cdk/bulk/toolkits/load-parquet/src/main/kotlin/io/airbyte/cdk/load/file/parquet/ParquetWriter.kt b/airbyte-cdk/bulk/toolkits/load-parquet/src/main/kotlin/io/airbyte/cdk/load/file/parquet/ParquetWriter.kt index d5e5bdc881ae..588d31a07803 100644 --- a/airbyte-cdk/bulk/toolkits/load-parquet/src/main/kotlin/io/airbyte/cdk/load/file/parquet/ParquetWriter.kt +++ b/airbyte-cdk/bulk/toolkits/load-parquet/src/main/kotlin/io/airbyte/cdk/load/file/parquet/ParquetWriter.kt @@ -45,7 +45,7 @@ fun OutputStream.toParquetWriter( } override fun createOrOverwrite(blockSizeHint: Long) = create(blockSizeHint) - override fun supportsBlockSize() = false + override fun supportsBlockSize() = true override fun defaultBlockSize() = 0L } @@ -61,6 +61,7 @@ fun OutputStream.toParquetWriter( .withDictionaryPageSize(config.dictionaryPageSizeKb * 1024) .withDictionaryEncoding(config.dictionaryEncoding) .withMaxPaddingSize(config.maxPaddingSizeMb * 1024 * 1024) + .withRowGroupSize(5 * 1024L * 1024L) .build() return ParquetWriter(writer) diff --git a/airbyte-cdk/bulk/toolkits/load-s3/src/main/kotlin/io/airbyte/cdk/load/file/s3/S3MultipartUpload.kt b/airbyte-cdk/bulk/toolkits/load-s3/src/main/kotlin/io/airbyte/cdk/load/file/s3/S3MultipartUpload.kt index f2e746fa3aa2..b12dab4b4c52 100644 --- a/airbyte-cdk/bulk/toolkits/load-s3/src/main/kotlin/io/airbyte/cdk/load/file/s3/S3MultipartUpload.kt +++ b/airbyte-cdk/bulk/toolkits/load-s3/src/main/kotlin/io/airbyte/cdk/load/file/s3/S3MultipartUpload.kt @@ -19,12 +19,12 @@ import io.airbyte.cdk.load.util.setOnce import io.github.oshai.kotlinlogging.KotlinLogging import java.io.ByteArrayOutputStream import java.io.OutputStream -import java.util.concurrent.ConcurrentLinkedQueue import java.util.concurrent.atomic.AtomicBoolean import kotlinx.coroutines.channels.Channel import kotlinx.coroutines.coroutineScope import kotlinx.coroutines.launch import kotlinx.coroutines.runBlocking +import org.apache.mina.util.ConcurrentHashSet /** * An S3MultipartUpload that provides an [OutputStream] abstraction for writing data. This should @@ -46,7 +46,7 @@ class S3MultipartUpload( ) { private val log = KotlinLogging.logger {} private val partSize = - uploadConfig?.streamingUploadPartSize + uploadConfig?.uploadPartSizeBytes ?: throw IllegalStateException("Streaming upload part size is not configured") private val wrappingBuffer = streamProcessor.wrapper(underlyingBuffer) private val partQueue = Channel(Channel.UNLIMITED) @@ -157,36 +157,69 @@ class S3StreamingUpload( private val response: CreateMultipartUploadResponse, ) : StreamingUpload { private val log = KotlinLogging.logger {} - private val uploadedParts = ConcurrentLinkedQueue() + private val uploadedParts = ConcurrentHashSet() + private val isComplete = AtomicBoolean(false) - override suspend fun uploadPart(part: ByteArray) { - val partNumber = uploadedParts.size + 1 - val request = UploadPartRequest { - uploadId = response.uploadId - bucket = response.bucket - key = response.key - body = ByteStream.fromBytes(part) - this.partNumber = partNumber - } - val uploadResponse = client.uploadPart(request) - uploadedParts.add( - CompletedPart { - this.partNumber = partNumber - this.eTag = uploadResponse.eTag + override suspend fun uploadPart(part: ByteArray, index: Int) { + log.info { "Uploading part $index to ${response.key} (uploadId=${response.uploadId}" } + + try { + val request = UploadPartRequest { + uploadId = response.uploadId + bucket = response.bucket + key = response.key + body = ByteStream.fromBytes(part) + this.partNumber = index + } + val uploadResponse = client.uploadPart(request) + uploadedParts.add( + CompletedPart { + this.partNumber = index + this.eTag = uploadResponse.eTag + } + ) + } catch (e: Exception) { + log.error(e) { + "Failed to upload part $index to ${response.key} (uploadId=${response.uploadId}" } - ) + throw e + } } override suspend fun complete(): S3Object { - log.info { "Completing multipart upload to ${response.key} (uploadId=${response.uploadId}" } + try { + if (isComplete.setOnce()) { + log.info { + "Completing multipart upload to ${response.key} (uploadId=${response.uploadId}" + } + val partsSorted = uploadedParts.toList().sortedBy { it.partNumber } + if (partsSorted.isEmpty()) { + log.warn { + "Skipping empty upload to ${response.key} (uploadId=${response.uploadId}" + } + return S3Object(response.key!!, bucketConfig) + } - val request = CompleteMultipartUploadRequest { - uploadId = response.uploadId - bucket = response.bucket - key = response.key - this.multipartUpload = CompletedMultipartUpload { parts = uploadedParts.toList() } + val request = CompleteMultipartUploadRequest { + uploadId = response.uploadId + bucket = response.bucket + key = response.key + this.multipartUpload = CompletedMultipartUpload { parts = partsSorted } + } + // S3 will handle enforcing no gaps in the part numbers + client.completeMultipartUpload(request) + } else { + log.warn { + "Complete called multiple times for ${response.key} (uploadId=${response.uploadId}" + } + } + } catch (e: Exception) { + log.error(e) { + "Failed to complete upload to ${response.key} (uploadId=${response.uploadId}; parts=${uploadedParts.map {it.partNumber}.sortedBy { it }}" + } + throw e } - client.completeMultipartUpload(request) + return S3Object(response.key!!, bucketConfig) } } diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3DestinationAcceptanceTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3DestinationAcceptanceTest.kt index cfc19c2fb5b2..04bec228cc53 100644 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3DestinationAcceptanceTest.kt +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/testFixtures/kotlin/io/airbyte/cdk/integrations/destination/s3/S3DestinationAcceptanceTest.kt @@ -491,7 +491,7 @@ protected constructor( * both syncs are preserved. */ @Test - fun testOverwriteSyncFailedResumedGeneration() { + open fun testOverwriteSyncFailedResumedGeneration() { assumeTrue( implementsOverwrite(), "Destination's spec.json does not support overwrite sync mode." @@ -525,7 +525,7 @@ protected constructor( /** Test runs 2 failed syncs and verifies the previous sync objects are not cleaned up. */ @Test - fun testOverwriteSyncMultipleFailedGenerationsFilesPreserved() { + open fun testOverwriteSyncMultipleFailedGenerationsFilesPreserved() { assumeTrue( implementsOverwrite(), "Destination's spec.json does not support overwrite sync mode." diff --git a/airbyte-ci/connectors/base_images/README.md b/airbyte-ci/connectors/base_images/README.md index fbe05942d497..8b6bf9b40237 100644 --- a/airbyte-ci/connectors/base_images/README.md +++ b/airbyte-ci/connectors/base_images/README.md @@ -6,7 +6,7 @@ Our connector build pipeline ([`airbyte-ci`](https://github.com/airbytehq/airbyt Our base images are declared in code, using the [Dagger Python SDK](https://dagger-io.readthedocs.io/en/sdk-python-v0.6.4/). - [Python base image code declaration](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/base_images/base_images/python/bases.py) -- ~Java base image code declaration~ *TODO* +- [Java base image code declaration](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/base_images/base_images/java/bases.py) ## Where are the Dockerfiles? @@ -39,6 +39,20 @@ RUN mkdir -p 755 /usr/share/nltk_data +### Example for `airbyte/java-connector-base`: +```dockerfile +FROM docker.io/amazoncorretto:21-al2023@sha256:5454cb606e803fce56861fdbc9eab365eaa2ab4f357ceb8c1d56f4f8c8a7bc33 +RUN sh -c set -o xtrace && yum update -y --security && yum install -y tar openssl findutils && yum clean all +ENV AIRBYTE_SPEC_CMD=/airbyte/javabase.sh --spec +ENV AIRBYTE_CHECK_CMD=/airbyte/javabase.sh --check +ENV AIRBYTE_DISCOVER_CMD=/airbyte/javabase.sh --discover +ENV AIRBYTE_READ_CMD=/airbyte/javabase.sh --read +ENV AIRBYTE_WRITE_CMD=/airbyte/javabase.sh --write +ENV AIRBYTE_ENTRYPOINT=/airbyte/base.sh +``` + + + ## Base images @@ -59,6 +73,17 @@ RUN mkdir -p 755 /usr/share/nltk_data | 1.0.0 | ✅| docker.io/airbyte/python-connector-base:1.0.0@sha256:dd17e347fbda94f7c3abff539be298a65af2d7fc27a307d89297df1081a45c27 | Initial release: based on Python 3.9.18, on slim-bookworm system, with pip==23.2.1 and poetry==1.6.1 | +### `airbyte/java-connector-base` + +| Version | Published | Docker Image Address | Changelog | +|---------|-----------|--------------|-----------| +| 1.0.0 | ✅| docker.io/airbyte/java-connector-base:1.0.0@sha256:be86e5684e1e6d9280512d3d8071b47153698fe08ad990949c8eeff02803201a | Create a base image for our java connectors based on Amazon Corretto. | +| 1.0.0-rc.4 | ✅| docker.io/airbyte/java-connector-base:1.0.0-rc.4@sha256:be86e5684e1e6d9280512d3d8071b47153698fe08ad990949c8eeff02803201a | Bundle yum calls in a single RUN | +| 1.0.0-rc.3 | ✅| docker.io/airbyte/java-connector-base:1.0.0-rc.3@sha256:be86e5684e1e6d9280512d3d8071b47153698fe08ad990949c8eeff02803201a | | +| 1.0.0-rc.2 | ✅| docker.io/airbyte/java-connector-base:1.0.0-rc.2@sha256:fca66e81b4d2e4869a03b57b1b34beb048e74f5d08deb2046c3bb9919e7e2273 | Set entrypoint to base.sh | +| 1.0.0-rc.1 | ✅| docker.io/airbyte/java-connector-base:1.0.0-rc.1@sha256:886a7ce7eccfe3c8fb303511d0e46b83b7edb4f28e3705818c090185ba511fe7 | Create a base image for our java connectors. | + + ## How to release a new base image version (example for Python) ### Requirements @@ -102,6 +127,9 @@ poetry run mypy base_images --check-untyped-defs ## CHANGELOG +### 1.4.0 +- Declare a base image for our java connectors. + ### 1.3.1 - Update the crane image address. The previous address was deleted by the maintainer. @@ -120,4 +148,4 @@ poetry run mypy base_images --check-untyped-defs ### 1.0.1 -- Bumped dependencies ([#42581](https://github.com/airbytehq/airbyte/pull/42581)) +- Bumped dependencies ([#42581](https://github.com/airbytehq/airbyte/pull/42581)) \ No newline at end of file diff --git a/airbyte-ci/connectors/base_images/base_images/java/__init__.py b/airbyte-ci/connectors/base_images/base_images/java/__init__.py new file mode 100644 index 000000000000..c941b3045795 --- /dev/null +++ b/airbyte-ci/connectors/base_images/base_images/java/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-ci/connectors/base_images/base_images/java/bases.py b/airbyte-ci/connectors/base_images/base_images/java/bases.py new file mode 100644 index 000000000000..ed820e5b9863 --- /dev/null +++ b/airbyte-ci/connectors/base_images/base_images/java/bases.py @@ -0,0 +1,102 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# +from __future__ import annotations + +from typing import Callable, Final + +import dagger +from base_images import bases, published_image +from base_images import sanity_checks as base_sanity_checks +from base_images.python import sanity_checks as python_sanity_checks +from base_images.root_images import AMAZON_CORRETTO_21_AL_2023 +from base_images.utils.dagger import sh_dash_c + + +class AirbyteJavaConnectorBaseImage(bases.AirbyteConnectorBaseImage): + # TODO: remove this once we want to build the base image with the airbyte user. + USER: Final[str] = "root" + + root_image: Final[published_image.PublishedImage] = AMAZON_CORRETTO_21_AL_2023 + repository: Final[str] = "airbyte/java-connector-base" + + DD_AGENT_JAR_URL: Final[str] = "https://dtdg.co/latest-java-tracer" + BASE_SCRIPT_URL = "https://raw.githubusercontent.com/airbytehq/airbyte/6d8a3a2bc4f4ca79f10164447a90fdce5c9ad6f9/airbyte-integrations/bases/base/base.sh" + JAVA_BASE_SCRIPT_URL: Final[ + str + ] = "https://raw.githubusercontent.com/airbytehq/airbyte/6d8a3a2bc4f4ca79f10164447a90fdce5c9ad6f9/airbyte-integrations/bases/base-java/javabase.sh" + + def get_container(self, platform: dagger.Platform) -> dagger.Container: + """Returns the container used to build the base image for java connectors + We currently use the Amazon coretto image as a base. + We install some packages required to build java connectors. + We also download the datadog java agent jar and the javabase.sh script. + We set some env variables used by the javabase.sh script. + + Args: + platform (dagger.Platform): The platform this container should be built for. + + Returns: + dagger.Container: The container used to build the base image. + """ + + return ( + # TODO: Call this when we want to build the base image with the airbyte user + # self.get_base_container(platform) + self.dagger_client.container(platform=platform) + .from_(self.root_image.address) + # Bundle RUN commands together to reduce the number of layers. + .with_exec( + sh_dash_c( + [ + # Update first, but in the same .with_exec step as the package installation. + # Otherwise, we risk caching stale package URLs. + "yum update -y --security", + # tar is equired to untar java connector binary distributions. + # openssl is required because we need to ssh and scp sometimes. + # findutils is required for xargs, which is shipped as part of findutils. + f"yum install -y tar openssl findutils", + # Remove any dangly bits. + "yum clean all", + ] + ) + ) + .with_workdir("/airbyte") + # Copy the datadog java agent jar from the internet. + .with_file("dd-java-agent.jar", self.dagger_client.http(self.DD_AGENT_JAR_URL)) + # Copy base.sh from the git repo. + .with_file("base.sh", self.dagger_client.http(self.BASE_SCRIPT_URL)) + # Copy javabase.sh from the git repo. + .with_file("javabase.sh", self.dagger_client.http(self.JAVA_BASE_SCRIPT_URL)) + # Set a bunch of env variables used by base.sh. + .with_env_variable("AIRBYTE_SPEC_CMD", "/airbyte/javabase.sh --spec") + .with_env_variable("AIRBYTE_CHECK_CMD", "/airbyte/javabase.sh --check") + .with_env_variable("AIRBYTE_DISCOVER_CMD", "/airbyte/javabase.sh --discover") + .with_env_variable("AIRBYTE_READ_CMD", "/airbyte/javabase.sh --read") + .with_env_variable("AIRBYTE_WRITE_CMD", "/airbyte/javabase.sh --write") + .with_env_variable("AIRBYTE_ENTRYPOINT", "/airbyte/base.sh") + .with_entrypoint(["/airbyte/base.sh"]) + ) + + async def run_sanity_checks(self, platform: dagger.Platform): + """Runs sanity checks on the base image container. + This method is called before image publication. + Consider it like a pre-flight check before take-off to the remote registry. + + Args: + platform (dagger.Platform): The platform on which the sanity checks should run. + """ + container = self.get_container(platform) + await base_sanity_checks.check_user_can_read_dir(container, self.USER, self.AIRBYTE_DIR_PATH) + await base_sanity_checks.check_user_can_write_dir(container, self.USER, self.AIRBYTE_DIR_PATH) + await base_sanity_checks.check_file_exists(container, "/airbyte/dd-java-agent.jar") + await base_sanity_checks.check_file_exists(container, "/airbyte/base.sh") + await base_sanity_checks.check_file_exists(container, "/airbyte/javabase.sh") + await base_sanity_checks.check_env_var_with_printenv(container, "AIRBYTE_SPEC_CMD", "/airbyte/javabase.sh --spec") + await base_sanity_checks.check_env_var_with_printenv(container, "AIRBYTE_CHECK_CMD", "/airbyte/javabase.sh --check") + await base_sanity_checks.check_env_var_with_printenv(container, "AIRBYTE_DISCOVER_CMD", "/airbyte/javabase.sh --discover") + await base_sanity_checks.check_env_var_with_printenv(container, "AIRBYTE_READ_CMD", "/airbyte/javabase.sh --read") + await base_sanity_checks.check_env_var_with_printenv(container, "AIRBYTE_WRITE_CMD", "/airbyte/javabase.sh --write") + await base_sanity_checks.check_env_var_with_printenv(container, "AIRBYTE_ENTRYPOINT", "/airbyte/base.sh") + await base_sanity_checks.check_a_command_is_available_using_version_option(container, "tar") + await base_sanity_checks.check_a_command_is_available_using_version_option(container, "openssl", "version") diff --git a/airbyte-ci/connectors/base_images/base_images/root_images.py b/airbyte-ci/connectors/base_images/base_images/root_images.py index 8cb7036d22ef..dcd0892a8f6c 100644 --- a/airbyte-ci/connectors/base_images/base_images/root_images.py +++ b/airbyte-ci/connectors/base_images/base_images/root_images.py @@ -24,3 +24,10 @@ tag="3.10.14-slim-bookworm", sha="2407c61b1a18067393fecd8a22cf6fceede893b6aaca817bf9fbfe65e33614a3", ) + +AMAZON_CORRETTO_21_AL_2023 = PublishedImage( + registry="docker.io", + repository="amazoncorretto", + tag="21-al2023", + sha="5454cb606e803fce56861fdbc9eab365eaa2ab4f357ceb8c1d56f4f8c8a7bc33", +) diff --git a/airbyte-ci/connectors/base_images/base_images/sanity_checks.py b/airbyte-ci/connectors/base_images/base_images/sanity_checks.py index a88b137a028d..287636cef73c 100644 --- a/airbyte-ci/connectors/base_images/base_images/sanity_checks.py +++ b/airbyte-ci/connectors/base_images/base_images/sanity_checks.py @@ -178,3 +178,19 @@ async def check_user_can_write_dir(container: dagger.Container, user: str, dir_p await container.with_user(user).with_exec(["touch", f"{dir_path}/foo.txt"]) except dagger.ExecError: raise errors.SanityCheckError(f"{dir_path} is not writable by the {user}.") + + +async def check_file_exists(container: dagger.Container, file_path: str): + """Check that a file exists in the container. + + Args: + container (dagger.Container): The container on which the sanity checks should run. + file_path (str): The file path to check. + + Raises: + errors.SanityCheckError: Raised if the file does not exist. + """ + try: + await container.with_exec(["test", "-f", file_path]) + except dagger.ExecError: + raise errors.SanityCheckError(f"{file_path} does not exist.") diff --git a/airbyte-ci/connectors/base_images/base_images/templates/README.md.j2 b/airbyte-ci/connectors/base_images/base_images/templates/README.md.j2 index 89314b1491d5..c5484077a291 100644 --- a/airbyte-ci/connectors/base_images/base_images/templates/README.md.j2 +++ b/airbyte-ci/connectors/base_images/base_images/templates/README.md.j2 @@ -6,7 +6,7 @@ Our connector build pipeline ([`airbyte-ci`](https://github.com/airbytehq/airbyt Our base images are declared in code, using the [Dagger Python SDK](https://dagger-io.readthedocs.io/en/sdk-python-v0.6.4/). - [Python base image code declaration](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/base_images/base_images/python/bases.py) -- ~Java base image code declaration~ *TODO* +- [Java base image code declaration](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/base_images/base_images/java/bases.py) ## Where are the Dockerfiles? @@ -79,6 +79,12 @@ poetry run mypy base_images --check-untyped-defs ## CHANGELOG +### 1.4.0 +- Declare a base image for our java connectors. + +### 1.3.1 +- Update the crane image address. The previous address was deleted by the maintainer. + ### 1.2.0 - Improve new version prompt to pick bump type with optional pre-release version. diff --git a/airbyte-ci/connectors/base_images/base_images/utils/dagger.py b/airbyte-ci/connectors/base_images/base_images/utils/dagger.py new file mode 100644 index 000000000000..0a71d9e80416 --- /dev/null +++ b/airbyte-ci/connectors/base_images/base_images/utils/dagger.py @@ -0,0 +1,6 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + + +def sh_dash_c(lines: list[str]) -> list[str]: + """Wrap sequence of commands in shell for safe usage of dagger Container's with_exec method.""" + return ["sh", "-c", " && ".join(["set -o xtrace"] + lines)] diff --git a/airbyte-ci/connectors/base_images/base_images/version_registry.py b/airbyte-ci/connectors/base_images/base_images/version_registry.py index 41337c8006a8..1495c77c327c 100644 --- a/airbyte-ci/connectors/base_images/base_images/version_registry.py +++ b/airbyte-ci/connectors/base_images/base_images/version_registry.py @@ -13,11 +13,12 @@ import semver from base_images import consts, published_image from base_images.bases import AirbyteConnectorBaseImage +from base_images.java.bases import AirbyteJavaConnectorBaseImage from base_images.python.bases import AirbyteManifestOnlyConnectorBaseImage, AirbytePythonConnectorBaseImage from base_images.utils import docker from connector_ops.utils import ConnectorLanguage # type: ignore -MANAGED_BASE_IMAGES = [AirbytePythonConnectorBaseImage] +MANAGED_BASE_IMAGES = [AirbytePythonConnectorBaseImage, AirbyteJavaConnectorBaseImage] @dataclass @@ -270,6 +271,12 @@ async def get_manifest_only_registry( ) +async def get_java_registry( + dagger_client: dagger.Client, docker_credentials: Tuple[str, str], cache_ttl_seconds: int = 0 +) -> VersionRegistry: + return await VersionRegistry.load(AirbyteJavaConnectorBaseImage, dagger_client, docker_credentials, cache_ttl_seconds=cache_ttl_seconds) + + async def get_registry_for_language( dagger_client: dagger.Client, language: ConnectorLanguage, docker_credentials: Tuple[str, str], cache_ttl_seconds: int = 0 ) -> VersionRegistry: @@ -291,6 +298,8 @@ async def get_registry_for_language( return await get_python_registry(dagger_client, docker_credentials, cache_ttl_seconds=cache_ttl_seconds) elif language is ConnectorLanguage.MANIFEST_ONLY: return await get_manifest_only_registry(dagger_client, docker_credentials, cache_ttl_seconds=cache_ttl_seconds) + elif language is ConnectorLanguage.JAVA: + return await get_java_registry(dagger_client, docker_credentials, cache_ttl_seconds=cache_ttl_seconds) else: raise NotImplementedError(f"Registry for language {language} is not implemented yet.") @@ -298,5 +307,6 @@ async def get_registry_for_language( async def get_all_registries(dagger_client: dagger.Client, docker_credentials: Tuple[str, str]) -> List[VersionRegistry]: return [ await get_python_registry(dagger_client, docker_credentials), - # await get_java_registry(dagger_client), + await get_java_registry(dagger_client, docker_credentials), + # await get_manifest_only_registry(dagger_client, docker_credentials), ] diff --git a/airbyte-ci/connectors/base_images/generated/changelogs/airbyte_java_connector_base.json b/airbyte-ci/connectors/base_images/generated/changelogs/airbyte_java_connector_base.json new file mode 100644 index 000000000000..ca9ab3d5008a --- /dev/null +++ b/airbyte-ci/connectors/base_images/generated/changelogs/airbyte_java_connector_base.json @@ -0,0 +1,22 @@ +[ + { + "version": "1.0.0", + "changelog_entry": "Create a base image for our java connectors based on Amazon Corretto.", + "dockerfile_example": "FROM docker.io/amazoncorretto:21-al2023@sha256:5454cb606e803fce56861fdbc9eab365eaa2ab4f357ceb8c1d56f4f8c8a7bc33\nRUN sh -c set -o xtrace && yum update -y --security && yum install -y tar openssl findutils && yum clean all\nENV AIRBYTE_SPEC_CMD=/airbyte/javabase.sh --spec\nENV AIRBYTE_CHECK_CMD=/airbyte/javabase.sh --check\nENV AIRBYTE_DISCOVER_CMD=/airbyte/javabase.sh --discover\nENV AIRBYTE_READ_CMD=/airbyte/javabase.sh --read\nENV AIRBYTE_WRITE_CMD=/airbyte/javabase.sh --write\nENV AIRBYTE_ENTRYPOINT=/airbyte/base.sh" + }, + { + "version": "1.0.0-rc.4", + "changelog_entry": "Bundle yum calls in a single RUN", + "dockerfile_example": "FROM docker.io/amazoncorretto:21-al2023@sha256:5454cb606e803fce56861fdbc9eab365eaa2ab4f357ceb8c1d56f4f8c8a7bc33\nRUN sh -c set -o xtrace && yum update -y --security && yum install -y tar openssl findutils && yum clean all\nENV AIRBYTE_SPEC_CMD=/airbyte/javabase.sh --spec\nENV AIRBYTE_CHECK_CMD=/airbyte/javabase.sh --check\nENV AIRBYTE_DISCOVER_CMD=/airbyte/javabase.sh --discover\nENV AIRBYTE_READ_CMD=/airbyte/javabase.sh --read\nENV AIRBYTE_WRITE_CMD=/airbyte/javabase.sh --write\nENV AIRBYTE_ENTRYPOINT=/airbyte/base.sh" + }, + { + "version": "1.0.0-rc.2", + "changelog_entry": "Set entrypoint to base.sh", + "dockerfile_example": "FROM docker.io/amazoncorretto:21-al2023@sha256:5454cb606e803fce56861fdbc9eab365eaa2ab4f357ceb8c1d56f4f8c8a7bc33\nRUN yum update -y --security\nRUN yum install -y tar openssl findutils\nENV AIRBYTE_SPEC_CMD=/airbyte/javabase.sh --spec\nENV AIRBYTE_CHECK_CMD=/airbyte/javabase.sh --check\nENV AIRBYTE_DISCOVER_CMD=/airbyte/javabase.sh --discover\nENV AIRBYTE_READ_CMD=/airbyte/javabase.sh --read\nENV AIRBYTE_WRITE_CMD=/airbyte/javabase.sh --write\nENV AIRBYTE_ENTRYPOINT=/airbyte/base.sh" + }, + { + "version": "1.0.0-rc.1", + "changelog_entry": "Create a base image for our java connectors.", + "dockerfile_example": "FROM docker.io/amazoncorretto:21-al2023@sha256:5454cb606e803fce56861fdbc9eab365eaa2ab4f357ceb8c1d56f4f8c8a7bc33\nRUN yum update -y --security\nRUN yum install -y tar openssl findutils\nENV AIRBYTE_SPEC_CMD=/airbyte/javabase.sh --spec\nENV AIRBYTE_CHECK_CMD=/airbyte/javabase.sh --check\nENV AIRBYTE_DISCOVER_CMD=/airbyte/javabase.sh --discover\nENV AIRBYTE_READ_CMD=/airbyte/javabase.sh --read\nENV AIRBYTE_WRITE_CMD=/airbyte/javabase.sh --write\nENV AIRBYTE_ENTRYPOINT=/airbyte/base.sh" + } +] diff --git a/airbyte-ci/connectors/base_images/pyproject.toml b/airbyte-ci/connectors/base_images/pyproject.toml index f6afeb14e5cb..6c4e41f34fea 100644 --- a/airbyte-ci/connectors/base_images/pyproject.toml +++ b/airbyte-ci/connectors/base_images/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "airbyte-connectors-base-images" -version = "1.3.1" +version = "1.4.0" description = "This package is used to generate and publish the base images for Airbyte Connectors." authors = ["Augustin Lafanechere "] readme = "README.md" diff --git a/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/metadata.py b/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/metadata.py index d06a7945ef5c..da2b73b25c72 100644 --- a/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/metadata.py +++ b/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/metadata.py @@ -48,7 +48,9 @@ def get_expected_language_tag(self, connector: Connector) -> str: connector.code_directory / consts.PYPROJECT_FILE_NAME ).exists(): return self.PYTHON_LANGUAGE_TAG - elif (connector.code_directory / consts.GRADLE_FILE_NAME).exists(): + elif (connector.code_directory / consts.GRADLE_FILE_NAME).exists() or ( + connector.code_directory / consts.GRADLE_KOTLIN_FILE_NAME + ).exists(): return self.JAVA_LANGUAGE_TAG else: raise ValueError("Could not infer the language tag from the connector directory") diff --git a/airbyte-ci/connectors/connectors_qa/src/connectors_qa/consts.py b/airbyte-ci/connectors/connectors_qa/src/connectors_qa/consts.py index ba17a607cc18..2d4d863aab65 100644 --- a/airbyte-ci/connectors/connectors_qa/src/connectors_qa/consts.py +++ b/airbyte-ci/connectors/connectors_qa/src/connectors_qa/consts.py @@ -9,6 +9,7 @@ DOCKERFILE_NAME = "Dockerfile" DOCUMENTATION_STANDARDS_URL = "https://hackmd.io/Bz75cgATSbm7DjrAqgl4rw" GRADLE_FILE_NAME = "build.gradle" +GRADLE_KOTLIN_FILE_NAME = "build.gradle.kts" LICENSE_FAQ_URL = "https://docs.airbyte.com/developer-guides/licenses/license-faq" LOW_CODE_MANIFEST_FILE_NAME = "manifest.yaml" METADATA_DOCUMENTATION_URL = "https://docs.airbyte.com/connector-development/connector-metadata-file" diff --git a/airbyte-ci/connectors/pipelines/README.md b/airbyte-ci/connectors/pipelines/README.md index ec4f0875090a..297fc7145936 100644 --- a/airbyte-ci/connectors/pipelines/README.md +++ b/airbyte-ci/connectors/pipelines/README.md @@ -853,8 +853,10 @@ airbyte-ci connectors --language=low-code migrate-to-manifest-only ## Changelog | Version | PR | Description | -|---------|------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------| -| 4.46.4 | [#49462](https://github.com/airbytehq/airbyte/pull/49462) | Support Kotlin Gradle build scripts in connectors. | +| ------- | ---------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- | +| 4.47.0 | [#49832](https://github.com/airbytehq/airbyte/pull/49462) | Build java connectors from the base image declared in `metadata.yaml`. | +| 4.46.5 | [#49835](https://github.com/airbytehq/airbyte/pull/49835) | Fix connector language discovery for projects with Kotlin Gradle build scripts. | +| 4.46.4 | [#49462](https://github.com/airbytehq/airbyte/pull/49462) | Support Kotlin Gradle build scripts in connectors. | | 4.46.3 | [#49465](https://github.com/airbytehq/airbyte/pull/49465) | Fix `--use-local-cdk` on rootless connectors. | | 4.46.2 | [#49136](https://github.com/airbytehq/airbyte/pull/49136) | Fix failed install of python components due to non-root permissions. | | 4.46.1 | [#49146](https://github.com/airbytehq/airbyte/pull/49146) | Update `crane` image address as the one we were using has been deleted by the maintainer. | diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py index ad7fb7e4bcdf..47bbe7822b21 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py @@ -9,9 +9,10 @@ from pipelines.consts import AMAZONCORRETTO_IMAGE from pipelines.dagger.actions.connector.hooks import finalize_build from pipelines.dagger.actions.connector.normalization import DESTINATION_NORMALIZATION_BUILD_CONFIGURATION, with_normalization -from pipelines.helpers.utils import sh_dash_c +from pipelines.helpers.utils import deprecated, sh_dash_c +@deprecated("This function is deprecated. Please declare an explicit base image to use in the java connector metadata.") def with_integration_base(context: PipelineContext, build_platform: Platform) -> Container: return ( context.dagger_client.container(platform=build_platform) @@ -24,6 +25,7 @@ def with_integration_base(context: PipelineContext, build_platform: Platform) -> ) +@deprecated("This function is deprecated. Please declare an explicit base image to use in the java connector metadata.") def with_integration_base_java(context: PipelineContext, build_platform: Platform) -> Container: integration_base = with_integration_base(context, build_platform) yum_packages_to_install = [ @@ -72,6 +74,7 @@ def with_integration_base_java(context: PipelineContext, build_platform: Platfor ) +@deprecated("This function is deprecated. Please declare an explicit base image to use in the java connector metadata.") def with_integration_base_java_and_normalization(context: ConnectorContext, build_platform: Platform) -> Container: yum_packages_to_install = [ "python3", @@ -158,22 +161,31 @@ async def with_airbyte_java_connector(context: ConnectorContext, connector_java_ ) ) ) - - if ( + # TODO: remove the condition below once all connectors have a base image declared in their metadata. + if "connectorBuildOptions" in context.connector.metadata and "baseImage" in context.connector.metadata["connectorBuildOptions"]: + base_image_address = context.connector.metadata["connectorBuildOptions"]["baseImage"] + context.logger.info(f"Using base image {base_image_address} from connector metadata to build connector.") + base = context.dagger_client.container(platform=build_platform).from_(base_image_address) + elif ( context.connector.supports_normalization and DESTINATION_NORMALIZATION_BUILD_CONFIGURATION[context.connector.technical_name]["supports_in_connector_normalization"] ): - base = with_integration_base_java_and_normalization(context, build_platform) - entrypoint = ["/airbyte/run_with_normalization.sh"] + context.logger.warn( + f"Connector {context.connector.technical_name} has in-connector normalization enabled. This is supposed to be deprecated. " + f"Please declare a base image address in the connector metadata.yaml file (connectorBuildOptions.baseImage)." + ) + base = with_integration_base_java_and_normalization(context, build_platform).with_entrypoint(["/airbyte/run_with_normalization.sh"]) else: - base = with_integration_base_java(context, build_platform) - entrypoint = ["/airbyte/base.sh"] + context.logger.warn( + f"Connector {context.connector.technical_name} does not declare a base image in its connector metadata. " + f"Please declare a base image address in the connector metadata.yaml file (connectorBuildOptions.baseImage)." + ) + base = with_integration_base_java(context, build_platform).with_entrypoint(["/airbyte/base.sh"]) connector_container = ( base.with_workdir("/airbyte") .with_env_variable("APPLICATION", application) .with_mounted_directory("built_artifacts", build_stage.directory("/airbyte")) .with_exec(sh_dash_c(["mv built_artifacts/* ."])) - .with_entrypoint(entrypoint) ) return await finalize_build(context, connector_container) diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py index 8ab32e8754e3..ca397153becd 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py @@ -7,10 +7,12 @@ import contextlib import datetime +import functools import os import re import sys import unicodedata +import warnings import xml.sax.saxutils from io import TextIOWrapper from pathlib import Path @@ -388,3 +390,15 @@ async def raise_if_not_user(container: Container, expected_user: str) -> None: assert ( actual_user == expected_user ), f"Container is not running as the expected user '{expected_user}', it is running as '{actual_user}'." + + +def deprecated(reason: str) -> Callable: + def decorator(func: Callable) -> Callable: + @functools.wraps(func) + def wrapper(*args: Any, **kwargs: Any) -> Any: + warnings.warn(f"{func.__name__} is deprecated: {reason}", DeprecationWarning, stacklevel=2) + return func(*args, **kwargs) + + return wrapper + + return decorator diff --git a/airbyte-ci/connectors/pipelines/pyproject.toml b/airbyte-ci/connectors/pipelines/pyproject.toml index 92a0aee6df37..468b9b0e925c 100644 --- a/airbyte-ci/connectors/pipelines/pyproject.toml +++ b/airbyte-ci/connectors/pipelines/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pipelines" -version = "4.46.4" +version = "4.47.0" description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines" authors = ["Airbyte "] diff --git a/airbyte-integrations/connectors/destination-dev-null/metadata.yaml b/airbyte-integrations/connectors/destination-dev-null/metadata.yaml index 925f8f0b2890..001c187341a0 100644 --- a/airbyte-integrations/connectors/destination-dev-null/metadata.yaml +++ b/airbyte-integrations/connectors/destination-dev-null/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: file connectorType: destination definitionId: a7bcc9d8-13b3-4e49-b80d-d020b90045e3 - dockerImageTag: 0.7.12 + dockerImageTag: 0.7.13 dockerRepository: airbyte/destination-dev-null githubIssueLabel: destination-dev-null icon: airbyte.svg diff --git a/airbyte-integrations/connectors/destination-dev-null/src/main/kotlin/io/airbyte/integrations/destination/dev_null/DevNullConfiguration.kt b/airbyte-integrations/connectors/destination-dev-null/src/main/kotlin/io/airbyte/integrations/destination/dev_null/DevNullConfiguration.kt index a117151500ab..58a5249c4562 100644 --- a/airbyte-integrations/connectors/destination-dev-null/src/main/kotlin/io/airbyte/integrations/destination/dev_null/DevNullConfiguration.kt +++ b/airbyte-integrations/connectors/destination-dev-null/src/main/kotlin/io/airbyte/integrations/destination/dev_null/DevNullConfiguration.kt @@ -41,12 +41,13 @@ data class DevNullConfiguration( */ @Singleton class DevNullConfigurationFactory( - @Value("\${airbyte.destination.record-batch-size}") private val recordBatchSizeBytes: Long + @Value("\${airbyte.destination.record-batch-size-override}") + private val recordBatchSizeBytesOverride: Long? ) : DestinationConfigurationFactory { private val log = KotlinLogging.logger {} override fun makeWithoutExceptionHandling(pojo: DevNullSpecification): DevNullConfiguration { - log.info { "Record batch size from environment: $recordBatchSizeBytes" } + log.info { "Record batch size from environment: $recordBatchSizeBytesOverride" } return when (pojo) { is DevNullSpecificationOss -> { when (pojo.testDestination) { @@ -107,7 +108,10 @@ class DevNullConfigurationFactory( } } } - }.copy(recordBatchSizeBytes = recordBatchSizeBytes) + }.copy( + recordBatchSizeBytes = recordBatchSizeBytesOverride + ?: DestinationConfiguration.DEFAULT_RECORD_BATCH_SIZE_BYTES + ) } } diff --git a/airbyte-integrations/connectors/destination-dev-null/src/main/kotlin/io/airbyte/integrations/destination/dev_null/DevNullWriter.kt b/airbyte-integrations/connectors/destination-dev-null/src/main/kotlin/io/airbyte/integrations/destination/dev_null/DevNullWriter.kt index 06ca8d09e5cb..1bf7d57284b7 100644 --- a/airbyte-integrations/connectors/destination-dev-null/src/main/kotlin/io/airbyte/integrations/destination/dev_null/DevNullWriter.kt +++ b/airbyte-integrations/connectors/destination-dev-null/src/main/kotlin/io/airbyte/integrations/destination/dev_null/DevNullWriter.kt @@ -69,7 +69,8 @@ class LoggingStreamLoader(override val stream: DestinationStream, loggingConfig: override suspend fun processRecords( records: Iterator, - totalSizeBytes: Long + totalSizeBytes: Long, + endOfStream: Boolean, ): Batch { log.info { "Processing record batch with logging" } @@ -100,7 +101,8 @@ class LoggingStreamLoader(override val stream: DestinationStream, loggingConfig: class SilentStreamLoader(override val stream: DestinationStream) : StreamLoader { override suspend fun processRecords( records: Iterator, - totalSizeBytes: Long + totalSizeBytes: Long, + endOfStream: Boolean ): Batch { return SimpleBatch(state = Batch.State.COMPLETE) } @@ -122,7 +124,8 @@ class ThrottledStreamLoader( override suspend fun processRecords( records: Iterator, - totalSizeBytes: Long + totalSizeBytes: Long, + endOfStream: Boolean ): Batch { log.info { "Processing record batch with delay of $millisPerRecord per record" } @@ -151,7 +154,8 @@ class FailingStreamLoader(override val stream: DestinationStream, private val nu override suspend fun processRecords( records: Iterator, - totalSizeBytes: Long + totalSizeBytes: Long, + endOfStream: Boolean ): Batch { log.info { "Processing record batch with failure after $numMessages messages" } diff --git a/airbyte-integrations/connectors/destination-dev-null/src/main/resources/application.yaml b/airbyte-integrations/connectors/destination-dev-null/src/main/resources/application.yaml index 7f616e6ca770..3d7d25d71e0e 100644 --- a/airbyte-integrations/connectors/destination-dev-null/src/main/resources/application.yaml +++ b/airbyte-integrations/connectors/destination-dev-null/src/main/resources/application.yaml @@ -12,4 +12,4 @@ airbyte: rate-ms: 900000 # 15 minutes window-ms: 900000 # 15 minutes destination: - record-batch-size: ${AIRBYTE_DESTINATION_RECORD_BATCH_SIZE:209715200} + record-batch-size-override: ${AIRBYTE_DESTINATION_RECORD_BATCH_SIZE_OVERRIDE:null} diff --git a/airbyte-integrations/connectors/destination-dev-null/src/test-integration/resources/application.yaml b/airbyte-integrations/connectors/destination-dev-null/src/test-integration/resources/application.yaml index bd4b640c9949..92039a45ab59 100644 --- a/airbyte-integrations/connectors/destination-dev-null/src/test-integration/resources/application.yaml +++ b/airbyte-integrations/connectors/destination-dev-null/src/test-integration/resources/application.yaml @@ -11,4 +11,4 @@ airbyte: rate-ms: 900000 # 15 minutes window-ms: 900000 # 15 minutes destination: - record-batch-size: 1 # Microbatch for testing + record-batch-size-override: 1 # Microbatch for testing diff --git a/airbyte-integrations/connectors/destination-iceberg-v2/metadata.yaml b/airbyte-integrations/connectors/destination-iceberg-v2/metadata.yaml index 7f67f917ea30..f03a5e3e0fb2 100644 --- a/airbyte-integrations/connectors/destination-iceberg-v2/metadata.yaml +++ b/airbyte-integrations/connectors/destination-iceberg-v2/metadata.yaml @@ -2,10 +2,10 @@ data: connectorSubtype: file connectorType: destination definitionId: 37a928c1-2d5c-431a-a97d-ae236bd1ea0c - dockerImageTag: 0.1.13 + dockerImageTag: 0.1.15 dockerRepository: airbyte/destination-iceberg-v2 githubIssueLabel: destination-iceberg-v2 - icon: s3.svg + icon: icon.svg license: ELv2 name: Iceberg V2 Destination registryOverrides: diff --git a/airbyte-integrations/connectors/destination-iceberg-v2/src/main/kotlin/io/airbyte/integrations/destination/iceberg/v2/IcebergStreamLoader.kt b/airbyte-integrations/connectors/destination-iceberg-v2/src/main/kotlin/io/airbyte/integrations/destination/iceberg/v2/IcebergStreamLoader.kt index 28695edcd9a8..18546300050b 100644 --- a/airbyte-integrations/connectors/destination-iceberg-v2/src/main/kotlin/io/airbyte/integrations/destination/iceberg/v2/IcebergStreamLoader.kt +++ b/airbyte-integrations/connectors/destination-iceberg-v2/src/main/kotlin/io/airbyte/integrations/destination/iceberg/v2/IcebergStreamLoader.kt @@ -33,7 +33,8 @@ class IcebergStreamLoader( override suspend fun processRecords( records: Iterator, - totalSizeBytes: Long + totalSizeBytes: Long, + endOfStream: Boolean ): Batch { icebergTableWriterFactory .create( diff --git a/airbyte-integrations/connectors/destination-iceberg-v2/src/main/kotlin/io/airbyte/integrations/destination/iceberg/v2/IcebergV2Writer.kt b/airbyte-integrations/connectors/destination-iceberg-v2/src/main/kotlin/io/airbyte/integrations/destination/iceberg/v2/IcebergV2Writer.kt index b9b20d5f69d0..84619536cbeb 100644 --- a/airbyte-integrations/connectors/destination-iceberg-v2/src/main/kotlin/io/airbyte/integrations/destination/iceberg/v2/IcebergV2Writer.kt +++ b/airbyte-integrations/connectors/destination-iceberg-v2/src/main/kotlin/io/airbyte/integrations/destination/iceberg/v2/IcebergV2Writer.kt @@ -5,7 +5,7 @@ package io.airbyte.integrations.destination.iceberg.v2 import io.airbyte.cdk.load.command.DestinationStream -import io.airbyte.cdk.load.data.parquet.ParquetMapperPipelineFactory +import io.airbyte.cdk.load.data.iceberg.parquet.IcebergParquetPipelineFactory import io.airbyte.cdk.load.write.DestinationWriter import io.airbyte.cdk.load.write.StreamLoader import io.airbyte.integrations.destination.iceberg.v2.io.IcebergTableWriterFactory @@ -23,7 +23,7 @@ class IcebergV2Writer( override fun createStreamLoader(stream: DestinationStream): StreamLoader { val properties = icebergUtil.toCatalogProperties(config = icebergConfiguration) val catalog = icebergUtil.createCatalog(DEFAULT_CATALOG_NAME, properties) - val pipeline = ParquetMapperPipelineFactory().create(stream) + val pipeline = IcebergParquetPipelineFactory().create(stream) val schema = icebergUtil.toIcebergSchema(stream = stream, pipeline = pipeline) val table = icebergUtil.createTable( diff --git a/airbyte-integrations/connectors/destination-mssql-v2/README.md b/airbyte-integrations/connectors/destination-mssql-v2/README.md new file mode 100644 index 000000000000..4f2fe1572dfa --- /dev/null +++ b/airbyte-integrations/connectors/destination-mssql-v2/README.md @@ -0,0 +1,91 @@ +# Microsoft SQL Server V2 (Bulk CDK) Destination + +## Build + +### airbyte-ci + +To build the connector via the [Airbyte CI CLI tool](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md), navigate to the root of the [Airbyte repository](https://github.com/airbytehq/airbyte) and execute the following command: + +```shell +> airbyte-ci connectors --name=destination-mssql-v2 build +``` + +### Gradle + +To build the connector via [Gradle](https://gradle.org/), navigate to the root of the [Airbyte repository](https://github.com/airbytehq/airbyte) and execute the following command: + +```shell +> ./gradlew :airbyte-integrations:connectors:destination-mssql-v2:build +``` +## Execute + +### Local Execution via Docker + +In order to run the connector image locally, first either build the connector's [Docker](https://www.docker.com/) image using the commands found +in this section of this document OR build the image using the following command: + +```shell +> ./gradlew :airbyte-integrations:connectors:destination-mssql-v2:buildConnectorImage +``` + +The built image will automatically be tagged with the `dev` label. To run the connector image, use the following commands: + +```shell +docker run --rm airbyte/destination-mssql-v2:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-mssql-v2:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-mssql-v2:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-mssql-v2:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` + +## Test + +The connector contains both unit and acceptance tests which can each be executed from the local environment. + +### Unit Tests + +The connector uses a combination of [Kotlin](https://kotlinlang.org/), [JUnit 5](https://junit.org/junit5/) and [MockK](https://mockk.io/) +to implement unit tests. Existing tests can be found within the destination-mssql-v2 module in the conventional `src/test/kotlin` source folder. New tests should also be added to this location. + +The unit tests can be executed either via the [Airbyte CI CLI tool](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) or [Gradle](https://gradle.org/): + +###### Airbyte CI CLI +```shell +> airbyte-ci connectors --name=destination-mssql-v2 test +``` + +###### Gradle +```shell +> ./gradlew :airbyte-integrations:connectors:destination-mssql-v2:test +``` + +### Acceptance Tests + +The [Airbyte project](https://github.com/airbytehq/airbyte) a standard test suite that all destination connectors must pass. The tests require specific implementations of a few components in order to connect the acceptance test suite with the connector's specific logic. The existing acceptance test scaffolding can be found in the conventional `src/test-integration/kotlin` source folder. + +The acceptance tests can be executed either via the [Airbyte CI CLI tool](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) or [Gradle](https://gradle.org/): + +###### Airbyte CI CLI +```shell +> airbyte-ci connectors --name=destination-mssql-v2 test +``` + +###### Gradle +```shell +> ./gradlew :airbyte-integrations:connectors:destination-mssql-v2:integrationTest +``` + +## Release + +### Publishing a new version of the connector + +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? + +1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-mssql-v2 test` +2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +3. Make sure the `metadata.yaml` content is up to date. +4. Make the connector documentation and its changelog is up to date (`docs/integrations/destinations/mssql-v2.md`). +5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). +6. Pat yourself on the back for being an awesome contributor. +7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. + + diff --git a/airbyte-integrations/connectors/destination-mssql-v2/build.gradle.kts b/airbyte-integrations/connectors/destination-mssql-v2/build.gradle.kts new file mode 100644 index 000000000000..4cc0467ff7bd --- /dev/null +++ b/airbyte-integrations/connectors/destination-mssql-v2/build.gradle.kts @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +plugins { + id("application") + id("airbyte-bulk-connector") +} + +airbyteBulkConnector { + core = "load" + toolkits = listOf() + cdk = "local" +} + +application { + mainClass = "io.airbyte.integrations.destination.mssql.v2.MSSQLDestination" + + applicationDefaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") + + // Uncomment and replace to run locally + //applicationDefaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0", "--add-opens", "java.base/sun.nio.ch=ALL-UNNAMED", "--add-opens", "java.base/sun.security.action=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED") +} + +val junitVersion = "5.11.3" + +configurations.configureEach { + // Exclude additional SLF4J providers from all classpaths + exclude(mapOf("group" to "org.slf4j", "module" to "slf4j-reload4j")) +} + +// Uncomment to run locally +//tasks.run.configure { +// standardInput = System.`in` +//} + +dependencies { + implementation("com.microsoft.sqlserver:mssql-jdbc:12.8.1.jre11") + implementation("io.github.oshai:kotlin-logging-jvm:7.0.0") + implementation("jakarta.inject:jakarta.inject-api:2.0.1") + implementation("com.github.spotbugs:spotbugs-annotations:4.8.6") + implementation("io.micronaut:micronaut-inject:4.6.1") + + testImplementation("io.mockk:mockk:1.13.13") + testImplementation("org.junit.jupiter:junit-jupiter-api:$junitVersion") + testImplementation("org.junit.jupiter:junit-jupiter-params:$junitVersion") + testRuntimeOnly("org.junit.jupiter:junit-jupiter-engine:$junitVersion") +} + +tasks.named("test") { + systemProperties(mapOf("mockk.junit.extension.keepmocks" to "true", "mockk.junit.extension.requireParallelTesting" to "true")) +} diff --git a/airbyte-integrations/connectors/destination-mssql-v2/gradle.properties b/airbyte-integrations/connectors/destination-mssql-v2/gradle.properties new file mode 100644 index 000000000000..4dbe8b8729df --- /dev/null +++ b/airbyte-integrations/connectors/destination-mssql-v2/gradle.properties @@ -0,0 +1 @@ +testExecutionConcurrency=-1 diff --git a/airbyte-integrations/connectors/destination-mssql-v2/icon.svg b/airbyte-integrations/connectors/destination-mssql-v2/icon.svg new file mode 100644 index 000000000000..edcaeb77c8f2 --- /dev/null +++ b/airbyte-integrations/connectors/destination-mssql-v2/icon.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-mssql-v2/metadata.yaml b/airbyte-integrations/connectors/destination-mssql-v2/metadata.yaml new file mode 100644 index 000000000000..1b09be9a1ad8 --- /dev/null +++ b/airbyte-integrations/connectors/destination-mssql-v2/metadata.yaml @@ -0,0 +1,34 @@ +data: + connectorSubtype: database + connectorType: destination + definitionId: 37a928c1-2d5c-431a-a97d-ae236bd1ea0c + dockerImageTag: 0.1.0 + dockerRepository: airbyte/destination-mssql-v2 + githubIssueLabel: destination-mssql-v2 + icon: icon.svg + license: ELv2 + name: MSSQL V2 Destination + registryOverrides: + cloud: + enabled: false + oss: + enabled: false + releaseStage: alpha + documentationUrl: https://docs.airbyte.com/integrations/destinations/mssql-v2 + tags: + - language:java + ab_internal: + sl: 100 + ql: 100 + supportLevel: community + supportsRefreshes: true + connectorTestSuitesOptions: + - suite: unitTests + - suite: integrationTests + testSecrets: + - name: SECRET_DESTINATION-S3-V2-MINIMAL-REQUIRED-CONFIG + fileName: s3_dest_v2_minimal_required_config.json + secretStore: + type: GSM + alias: airbyte-connector-testing-secret-store +metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-mssql-v2/src/main/kotlin/io/airbyte/integrations/destination/mssql/v2/MSSQLDestination.kt b/airbyte-integrations/connectors/destination-mssql-v2/src/main/kotlin/io/airbyte/integrations/destination/mssql/v2/MSSQLDestination.kt new file mode 100644 index 000000000000..cb1d288390aa --- /dev/null +++ b/airbyte-integrations/connectors/destination-mssql-v2/src/main/kotlin/io/airbyte/integrations/destination/mssql/v2/MSSQLDestination.kt @@ -0,0 +1,14 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.mssql.v2 + +import io.airbyte.cdk.AirbyteDestinationRunner + +object MSSQLDestination { + @JvmStatic + fun main(args: Array) { + AirbyteDestinationRunner.run(*args) + } +} diff --git a/airbyte-integrations/connectors/destination-mssql-v2/src/main/kotlin/io/airbyte/integrations/destination/mssql/v2/config/MSSQLConfiguration.kt b/airbyte-integrations/connectors/destination-mssql-v2/src/main/kotlin/io/airbyte/integrations/destination/mssql/v2/config/MSSQLConfiguration.kt new file mode 100644 index 000000000000..ca468c08caa7 --- /dev/null +++ b/airbyte-integrations/connectors/destination-mssql-v2/src/main/kotlin/io/airbyte/integrations/destination/mssql/v2/config/MSSQLConfiguration.kt @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.mssql.v2.config + +import dagger.Component.Factory +import io.airbyte.cdk.load.command.DestinationConfiguration +import io.airbyte.cdk.load.command.DestinationConfigurationFactory +import jakarta.inject.Singleton + +data class MSSQLConfiguration(val placeholder: String) : DestinationConfiguration() + +@Singleton +class MSSQLConfigurationFactory : + DestinationConfigurationFactory { + override fun makeWithoutExceptionHandling(pojo: MSSQLSpecification): MSSQLConfiguration { + TODO("Not yet implemented") + } +} + +@Factory +class MSSQLConfigurationProvider(private val config: DestinationConfiguration) { + @Singleton + fun get(): MSSQLConfiguration { + return config as MSSQLConfiguration + } +} diff --git a/airbyte-integrations/connectors/destination-mssql-v2/src/main/kotlin/io/airbyte/integrations/destination/mssql/v2/config/MSSQLSpecification.kt b/airbyte-integrations/connectors/destination-mssql-v2/src/main/kotlin/io/airbyte/integrations/destination/mssql/v2/config/MSSQLSpecification.kt new file mode 100644 index 000000000000..30a7f9769c63 --- /dev/null +++ b/airbyte-integrations/connectors/destination-mssql-v2/src/main/kotlin/io/airbyte/integrations/destination/mssql/v2/config/MSSQLSpecification.kt @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.mssql.v2.config + +import com.kjetland.jackson.jsonSchema.annotations.JsonSchemaTitle +import io.airbyte.cdk.command.ConfigurationSpecification +import io.airbyte.cdk.load.spec.DestinationSpecificationExtension +import io.airbyte.protocol.models.v0.DestinationSyncMode +import jakarta.inject.Singleton + +@Singleton +@JsonSchemaTitle("MSSQL V2 Destination Specification") +class MSSQLSpecification : ConfigurationSpecification() {} + +@Singleton +class MSSQLSpecificationExtension : DestinationSpecificationExtension { + override val supportedSyncModes = + listOf( + DestinationSyncMode.OVERWRITE, + DestinationSyncMode.APPEND, + DestinationSyncMode.APPEND_DEDUP + ) + override val supportsIncremental = true +} diff --git a/airbyte-integrations/connectors/destination-s3-v2/build.gradle b/airbyte-integrations/connectors/destination-s3-v2/build.gradle index 49b2a22aaf01..d370a2d14c0c 100644 --- a/airbyte-integrations/connectors/destination-s3-v2/build.gradle +++ b/airbyte-integrations/connectors/destination-s3-v2/build.gradle @@ -12,10 +12,20 @@ airbyteBulkConnector { application { mainClass = 'io.airbyte.integrations.destination.s3_v2.S3V2Destination' - applicationDefaultJvmArgs = ['-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0'] - - // Uncomment and replace to run locally - //applicationDefaultJvmArgs = ['-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0', '--add-opens', 'java.base/sun.nio.ch=ALL-UNNAMED', '--add-opens', 'java.base/sun.security.action=ALL-UNNAMED', '--add-opens', 'java.base/java.lang=ALL-UNNAMED'] + applicationDefaultJvmArgs = [ + '-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0', +// Uncomment to run locally: +// '--add-opens', 'java.base/java.lang=ALL-UNNAMED' +// Uncomment to enable remote profiling: +// '-XX:NativeMemoryTracking=detail', +// '-Djava.rmi.server.hostname=localhost', +// '-Dcom.sun.management.jmxremote=true', +// '-Dcom.sun.management.jmxremote.port=6000', +// '-Dcom.sun.management.jmxremote.rmi.port=6000', +// '-Dcom.sun.management.jmxremote.local.only=false', +// '-Dcom.sun.management.jmxremote.authenticate=false', +// '-Dcom.sun.management.jmxremote.ssl=false' + ] } // Uncomment to run locally diff --git a/airbyte-integrations/connectors/destination-s3-v2/metadata.yaml b/airbyte-integrations/connectors/destination-s3-v2/metadata.yaml index 947599e42402..2dfbda00c691 100644 --- a/airbyte-integrations/connectors/destination-s3-v2/metadata.yaml +++ b/airbyte-integrations/connectors/destination-s3-v2/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: file connectorType: destination definitionId: d6116991-e809-4c7c-ae09-c64712df5b66 - dockerImageTag: 0.3.2 + dockerImageTag: 0.3.4 dockerRepository: airbyte/destination-s3-v2 githubIssueLabel: destination-s3-v2 icon: s3.svg diff --git a/airbyte-integrations/connectors/destination-s3-v2/src/main/kotlin/S3V2Checker.kt b/airbyte-integrations/connectors/destination-s3-v2/src/main/kotlin/S3V2Checker.kt index dda91c0f7ff5..c445eb199086 100644 --- a/airbyte-integrations/connectors/destination-s3-v2/src/main/kotlin/S3V2Checker.kt +++ b/airbyte-integrations/connectors/destination-s3-v2/src/main/kotlin/S3V2Checker.kt @@ -12,6 +12,7 @@ import io.airbyte.cdk.load.file.s3.S3Object import io.airbyte.cdk.load.util.write import io.github.oshai.kotlinlogging.KotlinLogging import jakarta.inject.Singleton +import java.io.ByteArrayOutputStream import java.io.OutputStream import java.nio.file.Paths import kotlinx.coroutines.flow.toList @@ -37,18 +38,19 @@ class S3V2Checker(private val timeProvider: TimeProvider) : var s3Object: S3Object? = null val compressor = config.objectStorageCompressionConfiguration.compressor try { - s3Object = - s3Client.streamingUpload(key, streamProcessor = compressor) { - it.write("""{"data": 1}""") - } - val results = s3Client.list(path.toString()).toList() + val upload = s3Client.startStreamingUpload(key) + val byteStream = ByteArrayOutputStream() + compressor.wrapper(byteStream).use { it.write("""{"data": 1}""") } + upload.uploadPart(byteStream.toByteArray(), 1) + s3Object = upload.complete() + val results = s3Client.list(path).toList() if (results.isEmpty() || results.find { it.key == key } == null) { throw IllegalStateException("Failed to write to S3 bucket") } log.info { "Successfully wrote test file: $results" } } finally { s3Object?.also { s3Client.delete(it) } - val results = s3Client.list(path.toString()).toList() + val results = s3Client.list(path).toList() log.info { "Successfully removed test tile: $results" } } } diff --git a/airbyte-integrations/connectors/destination-s3-v2/src/main/kotlin/S3V2Configuration.kt b/airbyte-integrations/connectors/destination-s3-v2/src/main/kotlin/S3V2Configuration.kt index d0aee92a431d..e0c36924d450 100644 --- a/airbyte-integrations/connectors/destination-s3-v2/src/main/kotlin/S3V2Configuration.kt +++ b/airbyte-integrations/connectors/destination-s3-v2/src/main/kotlin/S3V2Configuration.kt @@ -20,7 +20,6 @@ import io.airbyte.cdk.load.command.object_storage.ObjectStorageUploadConfigurati import io.airbyte.cdk.load.command.object_storage.ObjectStorageUploadConfigurationProvider import io.airbyte.cdk.load.command.s3.S3BucketConfiguration import io.airbyte.cdk.load.command.s3.S3BucketConfigurationProvider -import io.github.oshai.kotlinlogging.KotlinLogging import io.micronaut.context.annotation.Factory import io.micronaut.context.annotation.Value import jakarta.inject.Singleton @@ -38,8 +37,10 @@ data class S3V2Configuration( // Internal configuration override val objectStorageUploadConfiguration: ObjectStorageUploadConfiguration = ObjectStorageUploadConfiguration(), + override val numProcessRecordsWorkers: Int = 2, + override val estimatedRecordMemoryOverheadRatio: Double = 5.0, override val recordBatchSizeBytes: Long, - override val numProcessRecordsWorkers: Int = 2 + override val processEmptyFiles: Boolean = true, ) : DestinationConfiguration(), AWSAccessKeyConfigurationProvider, @@ -52,12 +53,10 @@ data class S3V2Configuration( @Singleton class S3V2ConfigurationFactory( - @Value("\${airbyte.destination.record-batch-size}") private val recordBatchSizeBytes: Long + @Value("\${airbyte.destination.record-batch-size-override}") + val recordBatchSizeOverride: Long? = null ) : DestinationConfigurationFactory> { - private val log = KotlinLogging.logger {} - override fun makeWithoutExceptionHandling(pojo: S3V2Specification): S3V2Configuration<*> { - log.info { "Record batch size override: $recordBatchSizeBytes" } return S3V2Configuration( awsAccessKeyConfiguration = pojo.toAWSAccessKeyConfiguration(), awsArnRoleConfiguration = pojo.toAWSArnRoleConfiguration(), @@ -65,7 +64,13 @@ class S3V2ConfigurationFactory( objectStoragePathConfiguration = pojo.toObjectStoragePathConfiguration(), objectStorageFormatConfiguration = pojo.toObjectStorageFormatConfiguration(), objectStorageCompressionConfiguration = pojo.toCompressionConfiguration(), - recordBatchSizeBytes = recordBatchSizeBytes + recordBatchSizeBytes = recordBatchSizeOverride + ?: ObjectStorageUploadConfiguration.DEFAULT_PART_SIZE_BYTES, + objectStorageUploadConfiguration = + ObjectStorageUploadConfiguration( + fileSizeBytes = recordBatchSizeOverride + ?: ObjectStorageUploadConfiguration.DEFAULT_FILE_SIZE_BYTES, + ) ) } } diff --git a/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2AvroDestinationAcceptanceTest.kt b/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2AvroDestinationAcceptanceTest.kt index 13165999d5be..1ca6a2aaf11f 100644 --- a/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2AvroDestinationAcceptanceTest.kt +++ b/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2AvroDestinationAcceptanceTest.kt @@ -21,4 +21,8 @@ class S3V2AvroDestinationAcceptanceTest : S3BaseAvroDestinationAcceptanceTest() override val baseConfigJson: JsonNode get() = S3V2DestinationTestUtils.baseConfigJsonFilePath + + // Disable these tests until we fix the incomplete stream handling behavior. + override fun testOverwriteSyncMultipleFailedGenerationsFilesPreserved() {} + override fun testOverwriteSyncFailedResumedGeneration() {} } diff --git a/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2CsvAssumeRoleDestinationAcceptanceTest.kt b/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2CsvAssumeRoleDestinationAcceptanceTest.kt index b7e8700c2aed..f46ff5513fce 100644 --- a/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2CsvAssumeRoleDestinationAcceptanceTest.kt +++ b/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2CsvAssumeRoleDestinationAcceptanceTest.kt @@ -22,4 +22,8 @@ class S3V2CsvAssumeRoleDestinationAcceptanceTest : S3BaseCsvDestinationAcceptanc override fun testFakeFileTransfer() { super.testFakeFileTransfer() } + + // Disable these tests until we fix the incomplete stream handling behavior. + override fun testOverwriteSyncMultipleFailedGenerationsFilesPreserved() {} + override fun testOverwriteSyncFailedResumedGeneration() {} } diff --git a/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2CsvDestinationAcceptanceTest.kt b/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2CsvDestinationAcceptanceTest.kt index 9c106d38588c..b695bf4c7a20 100644 --- a/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2CsvDestinationAcceptanceTest.kt +++ b/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2CsvDestinationAcceptanceTest.kt @@ -15,4 +15,8 @@ class S3V2CsvDestinationAcceptanceTest : S3BaseCsvDestinationAcceptanceTest() { override val baseConfigJson: JsonNode get() = S3V2DestinationTestUtils.baseConfigJsonFilePath + + // Disable these tests until we fix the incomplete stream handling behavior. + override fun testOverwriteSyncMultipleFailedGenerationsFilesPreserved() {} + override fun testOverwriteSyncFailedResumedGeneration() {} } diff --git a/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2CsvGzipDestinationAcceptanceTest.kt b/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2CsvGzipDestinationAcceptanceTest.kt index 880315a616ef..922312d10e62 100644 --- a/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2CsvGzipDestinationAcceptanceTest.kt +++ b/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2CsvGzipDestinationAcceptanceTest.kt @@ -15,4 +15,8 @@ class S3V2CsvGzipDestinationAcceptanceTest : S3BaseCsvGzipDestinationAcceptanceT override val baseConfigJson: JsonNode get() = S3V2DestinationTestUtils.baseConfigJsonFilePath + + // Disable these tests until we fix the incomplete stream handling behavior. + override fun testOverwriteSyncMultipleFailedGenerationsFilesPreserved() {} + override fun testOverwriteSyncFailedResumedGeneration() {} } diff --git a/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2JsonlDestinationAcceptanceTest.kt b/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2JsonlDestinationAcceptanceTest.kt index b6c68c8c1009..1090fdc4e595 100644 --- a/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2JsonlDestinationAcceptanceTest.kt +++ b/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2JsonlDestinationAcceptanceTest.kt @@ -15,4 +15,8 @@ class S3V2JsonlDestinationAcceptanceTest : S3BaseJsonlDestinationAcceptanceTest( override val baseConfigJson: JsonNode get() = S3V2DestinationTestUtils.baseConfigJsonFilePath + + // Disable these tests until we fix the incomplete stream handling behavior. + override fun testOverwriteSyncMultipleFailedGenerationsFilesPreserved() {} + override fun testOverwriteSyncFailedResumedGeneration() {} } diff --git a/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2JsonlGzipDestinationAcceptanceTest.kt b/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2JsonlGzipDestinationAcceptanceTest.kt index 7798966caf3e..e6ffe789bdf1 100644 --- a/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2JsonlGzipDestinationAcceptanceTest.kt +++ b/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2JsonlGzipDestinationAcceptanceTest.kt @@ -15,4 +15,8 @@ class S3V2JsonlGzipDestinationAcceptanceTest : S3BaseJsonlGzipDestinationAccepta override val baseConfigJson: JsonNode get() = S3V2DestinationTestUtils.baseConfigJsonFilePath + + // Disable these tests until we fix the incomplete stream handling behavior. + override fun testOverwriteSyncMultipleFailedGenerationsFilesPreserved() {} + override fun testOverwriteSyncFailedResumedGeneration() {} } diff --git a/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2ParquetDestinationAcceptanceTest.kt b/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2ParquetDestinationAcceptanceTest.kt index c5c02597e7a2..5c19502dd729 100644 --- a/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2ParquetDestinationAcceptanceTest.kt +++ b/airbyte-integrations/connectors/destination-s3-v2/src/test-integration-legacy/kotlin/io/airbyte/integrations/destination/s3/S3V2ParquetDestinationAcceptanceTest.kt @@ -73,4 +73,8 @@ class S3V2ParquetDestinationAcceptanceTest : S3BaseParquetDestinationAcceptanceT runSyncAndVerifyStateOutput(config, messages, configuredCatalog, false) } + + // Disable these tests until we fix the incomplete stream handling behavior. + override fun testOverwriteSyncMultipleFailedGenerationsFilesPreserved() {} + override fun testOverwriteSyncFailedResumedGeneration() {} } diff --git a/airbyte-integrations/connectors/destination-s3-v2/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_v2/S3V2DataDumper.kt b/airbyte-integrations/connectors/destination-s3-v2/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_v2/S3V2DataDumper.kt index 1faf3871cb00..c397fb614025 100644 --- a/airbyte-integrations/connectors/destination-s3-v2/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_v2/S3V2DataDumper.kt +++ b/airbyte-integrations/connectors/destination-s3-v2/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_v2/S3V2DataDumper.kt @@ -32,7 +32,7 @@ object S3V2DataDumper : DestinationDataDumper { stream: DestinationStream ): ObjectStorageDataDumper { val config = - S3V2ConfigurationFactory(0L).makeWithoutExceptionHandling(spec as S3V2Specification) + S3V2ConfigurationFactory().makeWithoutExceptionHandling(spec as S3V2Specification) val s3Client = S3ClientFactory.make(config) val pathFactory = ObjectStoragePathFactory.from(config) return ObjectStorageDataDumper( diff --git a/airbyte-integrations/connectors/destination-s3-v2/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_v2/S3V2WriteTest.kt b/airbyte-integrations/connectors/destination-s3-v2/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_v2/S3V2WriteTest.kt index 02bedbd70e4c..a4b3d56ab8b1 100644 --- a/airbyte-integrations/connectors/destination-s3-v2/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_v2/S3V2WriteTest.kt +++ b/airbyte-integrations/connectors/destination-s3-v2/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_v2/S3V2WriteTest.kt @@ -67,6 +67,11 @@ class S3V2WriteTestJsonUncompressed : override fun testInterruptedTruncateWithPriorData() { super.testInterruptedTruncateWithPriorData() } + + @Test + override fun testBasicTypes() { + super.testBasicTypes() + } } class S3V2WriteTestJsonRootLevelFlattening : diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/metadata.yaml b/airbyte-integrations/connectors/source-google-analytics-data-api/metadata.yaml index 5adca8bb5352..08956880d91b 100644 --- a/airbyte-integrations/connectors/source-google-analytics-data-api/metadata.yaml +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/metadata.yaml @@ -8,11 +8,11 @@ data: - www.googleapis.com - analyticsdata.googleapis.com connectorBuildOptions: - baseImage: docker.io/airbyte/python-connector-base:2.0.0@sha256:c44839ba84406116e8ba68722a0f30e8f6e7056c726f447681bb9e9ece8bd916 + baseImage: docker.io/airbyte/python-connector-base:3.0.0@sha256:1a0845ff2b30eafa793c6eee4e8f4283c2e52e1bbd44eed6cb9e9abd5d34d844 connectorSubtype: api connectorType: source definitionId: 3cc2eafd-84aa-4dca-93af-322d9dfeec1a - dockerImageTag: 2.6.1 + dockerImageTag: 2.6.2 dockerRepository: airbyte/source-google-analytics-data-api documentationUrl: https://docs.airbyte.com/integrations/sources/google-analytics-data-api githubIssueLabel: source-google-analytics-data-api diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/poetry.lock b/airbyte-integrations/connectors/source-google-analytics-data-api/poetry.lock index 517184e314a7..e567557509d2 100644 --- a/airbyte-integrations/connectors/source-google-analytics-data-api/poetry.lock +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/poetry.lock @@ -48,13 +48,13 @@ vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings [[package]] name = "airbyte-protocol-models-dataclasses" -version = "0.13.0" +version = "0.13.1" description = "Declares the Airbyte Protocol using Python Dataclasses. Dataclasses in Python have less performance overhead compared to Pydantic models, making them a more efficient choice for scenarios where speed and memory usage are critical" optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models_dataclasses-0.13.0-py3-none-any.whl", hash = "sha256:0aedb99ffc4f9aab0ce91bba2c292fa17cd8fd4b42eeba196d6a16c20bbbd7a5"}, - {file = "airbyte_protocol_models_dataclasses-0.13.0.tar.gz", hash = "sha256:72e67850d661e2808406aec5839b3158ebb94d3553b798dbdae1b4a278548d2f"}, + {file = "airbyte_protocol_models_dataclasses-0.13.1-py3-none-any.whl", hash = "sha256:20a734b7b1c3479a643777830db6a2e0a34428f33d16abcfd320552576fabe5a"}, + {file = "airbyte_protocol_models_dataclasses-0.13.1.tar.gz", hash = "sha256:ec6a0fb6b16267bde910f52279445d06c8e1a3e4ed82ac2937b405ab280449d5"}, ] [[package]] @@ -70,24 +70,24 @@ files = [ [[package]] name = "anyio" -version = "4.6.2.post1" +version = "4.7.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" files = [ - {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, - {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, + {file = "anyio-4.7.0-py3-none-any.whl", hash = "sha256:ea60c3723ab42ba6fff7e8ccb0488c898ec538ff4df1f1d5e642c3601d07e352"}, + {file = "anyio-4.7.0.tar.gz", hash = "sha256:2f834749c602966b7d456a7567cafcb309f96482b5081d14ac93ccd457f9dd48"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] trio = ["trio (>=0.26.1)"] [[package]] @@ -474,20 +474,20 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "deprecated" -version = "1.2.14" +version = "1.2.15" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, + {file = "Deprecated-1.2.15-py2.py3-none-any.whl", hash = "sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320"}, + {file = "deprecated-1.2.15.tar.gz", hash = "sha256:683e561a90de76239796e6b6feac66b99030d2dd3fcf61ef996330f14bbb9b0d"}, ] [package.dependencies] wrapt = ">=1.10,<2" [package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "jinja2 (>=3.0.3,<3.1.0)", "setuptools", "sphinx (<2)", "tox"] [[package]] name = "dpath" @@ -551,13 +551,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.6" +version = "1.0.7" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, - {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, + {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, + {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, ] [package.dependencies] @@ -572,13 +572,13 @@ trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httpx" -version = "0.27.2" +version = "0.28.1" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, - {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, ] [package.dependencies] @@ -586,7 +586,6 @@ anyio = "*" certifi = "*" httpcore = "==1.*" idna = "*" -sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] @@ -743,22 +742,25 @@ extended-testing = ["jinja2 (>=3,<4)"] [[package]] name = "langsmith" -version = "0.1.137" +version = "0.1.147" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.137-py3-none-any.whl", hash = "sha256:4256d5c61133749890f7b5c88321dbb133ce0f440c621ea28e76513285859b81"}, - {file = "langsmith-0.1.137.tar.gz", hash = "sha256:56cdfcc6c74cb20a3f437d5bd144feb5bf93f54c5a2918d1e568cbd084a372d4"}, + {file = "langsmith-0.1.147-py3-none-any.whl", hash = "sha256:7166fc23b965ccf839d64945a78e9f1157757add228b086141eb03a60d699a15"}, + {file = "langsmith-0.1.147.tar.gz", hash = "sha256:2e933220318a4e73034657103b3b1a3a6109cc5db3566a7e8e03be8d6d7def7a"}, ] [package.dependencies] httpx = ">=0.23.0,<1" -orjson = ">=3.9.14,<4.0.0" +orjson = {version = ">=3.9.14,<4.0.0", markers = "platform_python_implementation != \"PyPy\""} pydantic = {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""} requests = ">=2,<3" requests-toolbelt = ">=1.0.0,<2.0.0" +[package.extras] +langsmith-pyo3 = ["langsmith-pyo3 (>=0.1.0rc2,<0.2.0)"] + [[package]] name = "markupsafe" version = "3.0.2" @@ -856,131 +858,150 @@ twitter = ["twython"] [[package]] name = "numpy" -version = "2.1.2" +version = "2.2.0" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.10" files = [ - {file = "numpy-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee"}, - {file = "numpy-2.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884"}, - {file = "numpy-2.1.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648"}, - {file = "numpy-2.1.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d"}, - {file = "numpy-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86"}, - {file = "numpy-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7"}, - {file = "numpy-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03"}, - {file = "numpy-2.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466"}, - {file = "numpy-2.1.2-cp310-cp310-win32.whl", hash = "sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb"}, - {file = "numpy-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2"}, - {file = "numpy-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe"}, - {file = "numpy-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1"}, - {file = "numpy-2.1.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f"}, - {file = "numpy-2.1.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4"}, - {file = "numpy-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a"}, - {file = "numpy-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1"}, - {file = "numpy-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2"}, - {file = "numpy-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146"}, - {file = "numpy-2.1.2-cp311-cp311-win32.whl", hash = "sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c"}, - {file = "numpy-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9"}, - {file = "numpy-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b"}, - {file = "numpy-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db"}, - {file = "numpy-2.1.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1"}, - {file = "numpy-2.1.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426"}, - {file = "numpy-2.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0"}, - {file = "numpy-2.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df"}, - {file = "numpy-2.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366"}, - {file = "numpy-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142"}, - {file = "numpy-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550"}, - {file = "numpy-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e"}, - {file = "numpy-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d"}, - {file = "numpy-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf"}, - {file = "numpy-2.1.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e"}, - {file = "numpy-2.1.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3"}, - {file = "numpy-2.1.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8"}, - {file = "numpy-2.1.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a"}, - {file = "numpy-2.1.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98"}, - {file = "numpy-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe"}, - {file = "numpy-2.1.2-cp313-cp313-win32.whl", hash = "sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a"}, - {file = "numpy-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445"}, - {file = "numpy-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5"}, - {file = "numpy-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0"}, - {file = "numpy-2.1.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17"}, - {file = "numpy-2.1.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6"}, - {file = "numpy-2.1.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8"}, - {file = "numpy-2.1.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35"}, - {file = "numpy-2.1.2-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62"}, - {file = "numpy-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a"}, - {file = "numpy-2.1.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952"}, - {file = "numpy-2.1.2-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5"}, - {file = "numpy-2.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7"}, - {file = "numpy-2.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e"}, - {file = "numpy-2.1.2.tar.gz", hash = "sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1e25507d85da11ff5066269d0bd25d06e0a0f2e908415534f3e603d2a78e4ffa"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a62eb442011776e4036af5c8b1a00b706c5bc02dc15eb5344b0c750428c94219"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:b606b1aaf802e6468c2608c65ff7ece53eae1a6874b3765f69b8ceb20c5fa78e"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:36b2b43146f646642b425dd2027730f99bac962618ec2052932157e213a040e9"}, + {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fe8f3583e0607ad4e43a954e35c1748b553bfe9fdac8635c02058023277d1b3"}, + {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:122fd2fcfafdefc889c64ad99c228d5a1f9692c3a83f56c292618a59aa60ae83"}, + {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3f2f5cddeaa4424a0a118924b988746db6ffa8565e5829b1841a8a3bd73eb59a"}, + {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7fe4bb0695fe986a9e4deec3b6857003b4cfe5c5e4aac0b95f6a658c14635e31"}, + {file = "numpy-2.2.0-cp310-cp310-win32.whl", hash = "sha256:b30042fe92dbd79f1ba7f6898fada10bdaad1847c44f2dff9a16147e00a93661"}, + {file = "numpy-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dc1d6d66f8d37843ed281773c7174f03bf7ad826523f73435deb88ba60d2d4"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9874bc2ff574c40ab7a5cbb7464bf9b045d617e36754a7bc93f933d52bd9ffc6"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0da8495970f6b101ddd0c38ace92edea30e7e12b9a926b57f5fabb1ecc25bb90"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0557eebc699c1c34cccdd8c3778c9294e8196df27d713706895edc6f57d29608"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:3579eaeb5e07f3ded59298ce22b65f877a86ba8e9fe701f5576c99bb17c283da"}, + {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40deb10198bbaa531509aad0cd2f9fadb26c8b94070831e2208e7df543562b74"}, + {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2aed8fcf8abc3020d6a9ccb31dbc9e7d7819c56a348cc88fd44be269b37427e"}, + {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a222d764352c773aa5ebde02dd84dba3279c81c6db2e482d62a3fa54e5ece69b"}, + {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4e58666988605e251d42c2818c7d3d8991555381be26399303053b58a5bbf30d"}, + {file = "numpy-2.2.0-cp311-cp311-win32.whl", hash = "sha256:4723a50e1523e1de4fccd1b9a6dcea750c2102461e9a02b2ac55ffeae09a4410"}, + {file = "numpy-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:16757cf28621e43e252c560d25b15f18a2f11da94fea344bf26c599b9cf54b73"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cff210198bb4cae3f3c100444c5eaa573a823f05c253e7188e1362a5555235b3"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58b92a5828bd4d9aa0952492b7de803135038de47343b2aa3cc23f3b71a3dc4e"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:ebe5e59545401fbb1b24da76f006ab19734ae71e703cdb4a8b347e84a0cece67"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e2b8cd48a9942ed3f85b95ca4105c45758438c7ed28fff1e4ce3e57c3b589d8e"}, + {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57fcc997ffc0bef234b8875a54d4058afa92b0b0c4223fc1f62f24b3b5e86038"}, + {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ad7d11b309bd132d74397fcf2920933c9d1dc865487128f5c03d580f2c3d03"}, + {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cb24cca1968b21355cc6f3da1a20cd1cebd8a023e3c5b09b432444617949085a"}, + {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0798b138c291d792f8ea40fe3768610f3c7dd2574389e37c3f26573757c8f7ef"}, + {file = "numpy-2.2.0-cp312-cp312-win32.whl", hash = "sha256:afe8fb968743d40435c3827632fd36c5fbde633b0423da7692e426529b1759b1"}, + {file = "numpy-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:3a4199f519e57d517ebd48cb76b36c82da0360781c6a0353e64c0cac30ecaad3"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f8c8b141ef9699ae777c6278b52c706b653bf15d135d302754f6b2e90eb30367"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f0986e917aca18f7a567b812ef7ca9391288e2acb7a4308aa9d265bd724bdae"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:1c92113619f7b272838b8d6702a7f8ebe5edea0df48166c47929611d0b4dea69"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5a145e956b374e72ad1dff82779177d4a3c62bc8248f41b80cb5122e68f22d13"}, + {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18142b497d70a34b01642b9feabb70156311b326fdddd875a9981f34a369b671"}, + {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7d41d1612c1a82b64697e894b75db6758d4f21c3ec069d841e60ebe54b5b571"}, + {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a98f6f20465e7618c83252c02041517bd2f7ea29be5378f09667a8f654a5918d"}, + {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e09d40edfdb4e260cb1567d8ae770ccf3b8b7e9f0d9b5c2a9992696b30ce2742"}, + {file = "numpy-2.2.0-cp313-cp313-win32.whl", hash = "sha256:3905a5fffcc23e597ee4d9fb3fcd209bd658c352657548db7316e810ca80458e"}, + {file = "numpy-2.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:a184288538e6ad699cbe6b24859206e38ce5fba28f3bcfa51c90d0502c1582b2"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7832f9e8eb00be32f15fdfb9a981d6955ea9adc8574c521d48710171b6c55e95"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0dd071b95bbca244f4cb7f70b77d2ff3aaaba7fa16dc41f58d14854a6204e6c"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:b0b227dcff8cdc3efbce66d4e50891f04d0a387cce282fe1e66199146a6a8fca"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ab153263a7c5ccaf6dfe7e53447b74f77789f28ecb278c3b5d49db7ece10d6d"}, + {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e500aba968a48e9019e42c0c199b7ec0696a97fa69037bea163b55398e390529"}, + {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:440cfb3db4c5029775803794f8638fbdbf71ec702caf32735f53b008e1eaece3"}, + {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a55dc7a7f0b6198b07ec0cd445fbb98b05234e8b00c5ac4874a63372ba98d4ab"}, + {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4bddbaa30d78c86329b26bd6aaaea06b1e47444da99eddac7bf1e2fab717bd72"}, + {file = "numpy-2.2.0-cp313-cp313t-win32.whl", hash = "sha256:30bf971c12e4365153afb31fc73f441d4da157153f3400b82db32d04de1e4066"}, + {file = "numpy-2.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d35717333b39d1b6bb8433fa758a55f1081543de527171543a2b710551d40881"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e12c6c1ce84628c52d6367863773f7c8c8241be554e8b79686e91a43f1733773"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:b6207dc8fb3c8cb5668e885cef9ec7f70189bec4e276f0ff70d5aa078d32c88e"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a50aeff71d0f97b6450d33940c7181b08be1441c6c193e678211bff11aa725e7"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:df12a1f99b99f569a7c2ae59aa2d31724e8d835fc7f33e14f4792e3071d11221"}, + {file = "numpy-2.2.0.tar.gz", hash = "sha256:140dd80ff8981a583a60980be1a655068f8adebf7a45a06a6858c873fcdcd4a0"}, ] [[package]] name = "orjson" -version = "3.10.10" +version = "3.10.12" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" files = [ - {file = "orjson-3.10.10-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b788a579b113acf1c57e0a68e558be71d5d09aa67f62ca1f68e01117e550a998"}, - {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:804b18e2b88022c8905bb79bd2cbe59c0cd014b9328f43da8d3b28441995cda4"}, - {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9972572a1d042ec9ee421b6da69f7cc823da5962237563fa548ab17f152f0b9b"}, - {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc6993ab1c2ae7dd0711161e303f1db69062955ac2668181bfdf2dd410e65258"}, - {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d78e4cacced5781b01d9bc0f0cd8b70b906a0e109825cb41c1b03f9c41e4ce86"}, - {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6eb2598df518281ba0cbc30d24c5b06124ccf7e19169e883c14e0831217a0bc"}, - {file = "orjson-3.10.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23776265c5215ec532de6238a52707048401a568f0fa0d938008e92a147fe2c7"}, - {file = "orjson-3.10.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8cc2a654c08755cef90b468ff17c102e2def0edd62898b2486767204a7f5cc9c"}, - {file = "orjson-3.10.10-cp310-none-win32.whl", hash = "sha256:081b3fc6a86d72efeb67c13d0ea7c030017bd95f9868b1e329a376edc456153b"}, - {file = "orjson-3.10.10-cp310-none-win_amd64.whl", hash = "sha256:ff38c5fb749347768a603be1fb8a31856458af839f31f064c5aa74aca5be9efe"}, - {file = "orjson-3.10.10-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:879e99486c0fbb256266c7c6a67ff84f46035e4f8749ac6317cc83dacd7f993a"}, - {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:019481fa9ea5ff13b5d5d95e6fd5ab25ded0810c80b150c2c7b1cc8660b662a7"}, - {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0dd57eff09894938b4c86d4b871a479260f9e156fa7f12f8cad4b39ea8028bb5"}, - {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dbde6d70cd95ab4d11ea8ac5e738e30764e510fc54d777336eec09bb93b8576c"}, - {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2625cb37b8fb42e2147404e5ff7ef08712099197a9cd38895006d7053e69d6"}, - {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbf3c20c6a7db69df58672a0d5815647ecf78c8e62a4d9bd284e8621c1fe5ccb"}, - {file = "orjson-3.10.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:75c38f5647e02d423807d252ce4528bf6a95bd776af999cb1fb48867ed01d1f6"}, - {file = "orjson-3.10.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:23458d31fa50ec18e0ec4b0b4343730928296b11111df5f547c75913714116b2"}, - {file = "orjson-3.10.10-cp311-none-win32.whl", hash = "sha256:2787cd9dedc591c989f3facd7e3e86508eafdc9536a26ec277699c0aa63c685b"}, - {file = "orjson-3.10.10-cp311-none-win_amd64.whl", hash = "sha256:6514449d2c202a75183f807bc755167713297c69f1db57a89a1ef4a0170ee269"}, - {file = "orjson-3.10.10-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8564f48f3620861f5ef1e080ce7cd122ee89d7d6dacf25fcae675ff63b4d6e05"}, - {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5bf161a32b479034098c5b81f2608f09167ad2fa1c06abd4e527ea6bf4837a9"}, - {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68b65c93617bcafa7f04b74ae8bc2cc214bd5cb45168a953256ff83015c6747d"}, - {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8e28406f97fc2ea0c6150f4c1b6e8261453318930b334abc419214c82314f85"}, - {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4d0d9fe174cc7a5bdce2e6c378bcdb4c49b2bf522a8f996aa586020e1b96cee"}, - {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3be81c42f1242cbed03cbb3973501fcaa2675a0af638f8be494eaf37143d999"}, - {file = "orjson-3.10.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:65f9886d3bae65be026219c0a5f32dbbe91a9e6272f56d092ab22561ad0ea33b"}, - {file = "orjson-3.10.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:730ed5350147db7beb23ddaf072f490329e90a1d059711d364b49fe352ec987b"}, - {file = "orjson-3.10.10-cp312-none-win32.whl", hash = "sha256:a8f4bf5f1c85bea2170800020d53a8877812892697f9c2de73d576c9307a8a5f"}, - {file = "orjson-3.10.10-cp312-none-win_amd64.whl", hash = "sha256:384cd13579a1b4cd689d218e329f459eb9ddc504fa48c5a83ef4889db7fd7a4f"}, - {file = "orjson-3.10.10-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44bffae68c291f94ff5a9b4149fe9d1bdd4cd0ff0fb575bcea8351d48db629a1"}, - {file = "orjson-3.10.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e27b4c6437315df3024f0835887127dac2a0a3ff643500ec27088d2588fa5ae1"}, - {file = "orjson-3.10.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca84df16d6b49325a4084fd8b2fe2229cb415e15c46c529f868c3387bb1339d"}, - {file = "orjson-3.10.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c14ce70e8f39bd71f9f80423801b5d10bf93d1dceffdecd04df0f64d2c69bc01"}, - {file = "orjson-3.10.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:24ac62336da9bda1bd93c0491eff0613003b48d3cb5d01470842e7b52a40d5b4"}, - {file = "orjson-3.10.10-cp313-none-win32.whl", hash = "sha256:eb0a42831372ec2b05acc9ee45af77bcaccbd91257345f93780a8e654efc75db"}, - {file = "orjson-3.10.10-cp313-none-win_amd64.whl", hash = "sha256:f0c4f37f8bf3f1075c6cc8dd8a9f843689a4b618628f8812d0a71e6968b95ffd"}, - {file = "orjson-3.10.10-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:829700cc18503efc0cf502d630f612884258020d98a317679cd2054af0259568"}, - {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0ceb5e0e8c4f010ac787d29ae6299846935044686509e2f0f06ed441c1ca949"}, - {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0c25908eb86968613216f3db4d3003f1c45d78eb9046b71056ca327ff92bdbd4"}, - {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:218cb0bc03340144b6328a9ff78f0932e642199ac184dd74b01ad691f42f93ff"}, - {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2277ec2cea3775640dc81ab5195bb5b2ada2fe0ea6eee4677474edc75ea6785"}, - {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:848ea3b55ab5ccc9d7bbd420d69432628b691fba3ca8ae3148c35156cbd282aa"}, - {file = "orjson-3.10.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e3e67b537ac0c835b25b5f7d40d83816abd2d3f4c0b0866ee981a045287a54f3"}, - {file = "orjson-3.10.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:7948cfb909353fce2135dcdbe4521a5e7e1159484e0bb024c1722f272488f2b8"}, - {file = "orjson-3.10.10-cp38-none-win32.whl", hash = "sha256:78bee66a988f1a333dc0b6257503d63553b1957889c17b2c4ed72385cd1b96ae"}, - {file = "orjson-3.10.10-cp38-none-win_amd64.whl", hash = "sha256:f1d647ca8d62afeb774340a343c7fc023efacfd3a39f70c798991063f0c681dd"}, - {file = "orjson-3.10.10-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5a059afddbaa6dd733b5a2d76a90dbc8af790b993b1b5cb97a1176ca713b5df8"}, - {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f9b5c59f7e2a1a410f971c5ebc68f1995822837cd10905ee255f96074537ee6"}, - {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d5ef198bafdef4aa9d49a4165ba53ffdc0a9e1c7b6f76178572ab33118afea25"}, - {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf29ce0bb5d3320824ec3d1508652421000ba466abd63bdd52c64bcce9eb1fa"}, - {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dddd5516bcc93e723d029c1633ae79c4417477b4f57dad9bfeeb6bc0315e654a"}, - {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12f2003695b10817f0fa8b8fca982ed7f5761dcb0d93cff4f2f9f6709903fd7"}, - {file = "orjson-3.10.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:672f9874a8a8fb9bb1b771331d31ba27f57702c8106cdbadad8bda5d10bc1019"}, - {file = "orjson-3.10.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1dcbb0ca5fafb2b378b2c74419480ab2486326974826bbf6588f4dc62137570a"}, - {file = "orjson-3.10.10-cp39-none-win32.whl", hash = "sha256:d9bbd3a4b92256875cb058c3381b782649b9a3c68a4aa9a2fff020c2f9cfc1be"}, - {file = "orjson-3.10.10-cp39-none-win_amd64.whl", hash = "sha256:766f21487a53aee8524b97ca9582d5c6541b03ab6210fbaf10142ae2f3ced2aa"}, - {file = "orjson-3.10.10.tar.gz", hash = "sha256:37949383c4df7b4337ce82ee35b6d7471e55195efa7dcb45ab8226ceadb0fe3b"}, + {file = "orjson-3.10.12-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ece01a7ec71d9940cc654c482907a6b65df27251255097629d0dea781f255c6d"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c34ec9aebc04f11f4b978dd6caf697a2df2dd9b47d35aa4cc606cabcb9df69d7"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd6ec8658da3480939c79b9e9e27e0db31dffcd4ba69c334e98c9976ac29140e"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17e6baf4cf01534c9de8a16c0c611f3d94925d1701bf5f4aff17003677d8ced"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6402ebb74a14ef96f94a868569f5dccf70d791de49feb73180eb3c6fda2ade56"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0000758ae7c7853e0a4a6063f534c61656ebff644391e1f81698c1b2d2fc8cd2"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:888442dcee99fd1e5bd37a4abb94930915ca6af4db50e23e746cdf4d1e63db13"}, + {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c1f7a3ce79246aa0e92f5458d86c54f257fb5dfdc14a192651ba7ec2c00f8a05"}, + {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:802a3935f45605c66fb4a586488a38af63cb37aaad1c1d94c982c40dcc452e85"}, + {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1da1ef0113a2be19bb6c557fb0ec2d79c92ebd2fed4cfb1b26bab93f021fb885"}, + {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a3273e99f367f137d5b3fecb5e9f45bcdbfac2a8b2f32fbc72129bbd48789c2"}, + {file = "orjson-3.10.12-cp310-none-win32.whl", hash = "sha256:475661bf249fd7907d9b0a2a2421b4e684355a77ceef85b8352439a9163418c3"}, + {file = "orjson-3.10.12-cp310-none-win_amd64.whl", hash = "sha256:87251dc1fb2b9e5ab91ce65d8f4caf21910d99ba8fb24b49fd0c118b2362d509"}, + {file = "orjson-3.10.12-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a734c62efa42e7df94926d70fe7d37621c783dea9f707a98cdea796964d4cf74"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:750f8b27259d3409eda8350c2919a58b0cfcd2054ddc1bd317a643afc646ef23"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb52c22bfffe2857e7aa13b4622afd0dd9d16ea7cc65fd2bf318d3223b1b6252"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:440d9a337ac8c199ff8251e100c62e9488924c92852362cd27af0e67308c16ef"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9e15c06491c69997dfa067369baab3bf094ecb74be9912bdc4339972323f252"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:362d204ad4b0b8724cf370d0cd917bb2dc913c394030da748a3bb632445ce7c4"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b57cbb4031153db37b41622eac67329c7810e5f480fda4cfd30542186f006ae"}, + {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:165c89b53ef03ce0d7c59ca5c82fa65fe13ddf52eeb22e859e58c237d4e33b9b"}, + {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:5dee91b8dfd54557c1a1596eb90bcd47dbcd26b0baaed919e6861f076583e9da"}, + {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:77a4e1cfb72de6f905bdff061172adfb3caf7a4578ebf481d8f0530879476c07"}, + {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:038d42c7bc0606443459b8fe2d1f121db474c49067d8d14c6a075bbea8bf14dd"}, + {file = "orjson-3.10.12-cp311-none-win32.whl", hash = "sha256:03b553c02ab39bed249bedd4abe37b2118324d1674e639b33fab3d1dafdf4d79"}, + {file = "orjson-3.10.12-cp311-none-win_amd64.whl", hash = "sha256:8b8713b9e46a45b2af6b96f559bfb13b1e02006f4242c156cbadef27800a55a8"}, + {file = "orjson-3.10.12-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:53206d72eb656ca5ac7d3a7141e83c5bbd3ac30d5eccfe019409177a57634b0d"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac8010afc2150d417ebda810e8df08dd3f544e0dd2acab5370cfa6bcc0662f8f"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed459b46012ae950dd2e17150e838ab08215421487371fa79d0eced8d1461d70"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dcb9673f108a93c1b52bfc51b0af422c2d08d4fc710ce9c839faad25020bb69"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:22a51ae77680c5c4652ebc63a83d5255ac7d65582891d9424b566fb3b5375ee9"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910fdf2ac0637b9a77d1aad65f803bac414f0b06f720073438a7bd8906298192"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:24ce85f7100160936bc2116c09d1a8492639418633119a2224114f67f63a4559"}, + {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8a76ba5fc8dd9c913640292df27bff80a685bed3a3c990d59aa6ce24c352f8fc"}, + {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ff70ef093895fd53f4055ca75f93f047e088d1430888ca1229393a7c0521100f"}, + {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f4244b7018b5753ecd10a6d324ec1f347da130c953a9c88432c7fbc8875d13be"}, + {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:16135ccca03445f37921fa4b585cff9a58aa8d81ebcb27622e69bfadd220b32c"}, + {file = "orjson-3.10.12-cp312-none-win32.whl", hash = "sha256:2d879c81172d583e34153d524fcba5d4adafbab8349a7b9f16ae511c2cee8708"}, + {file = "orjson-3.10.12-cp312-none-win_amd64.whl", hash = "sha256:fc23f691fa0f5c140576b8c365bc942d577d861a9ee1142e4db468e4e17094fb"}, + {file = "orjson-3.10.12-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:47962841b2a8aa9a258b377f5188db31ba49af47d4003a32f55d6f8b19006543"}, + {file = "orjson-3.10.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6334730e2532e77b6054e87ca84f3072bee308a45a452ea0bffbbbc40a67e296"}, + {file = "orjson-3.10.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:accfe93f42713c899fdac2747e8d0d5c659592df2792888c6c5f829472e4f85e"}, + {file = "orjson-3.10.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a7974c490c014c48810d1dede6c754c3cc46598da758c25ca3b4001ac45b703f"}, + {file = "orjson-3.10.12-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:3f250ce7727b0b2682f834a3facff88e310f52f07a5dcfd852d99637d386e79e"}, + {file = "orjson-3.10.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f31422ff9486ae484f10ffc51b5ab2a60359e92d0716fcce1b3593d7bb8a9af6"}, + {file = "orjson-3.10.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5f29c5d282bb2d577c2a6bbde88d8fdcc4919c593f806aac50133f01b733846e"}, + {file = "orjson-3.10.12-cp313-none-win32.whl", hash = "sha256:f45653775f38f63dc0e6cd4f14323984c3149c05d6007b58cb154dd080ddc0dc"}, + {file = "orjson-3.10.12-cp313-none-win_amd64.whl", hash = "sha256:229994d0c376d5bdc91d92b3c9e6be2f1fbabd4cc1b59daae1443a46ee5e9825"}, + {file = "orjson-3.10.12-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7d69af5b54617a5fac5c8e5ed0859eb798e2ce8913262eb522590239db6c6763"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ed119ea7d2953365724a7059231a44830eb6bbb0cfead33fcbc562f5fd8f935"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c5fc1238ef197e7cad5c91415f524aaa51e004be5a9b35a1b8a84ade196f73f"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43509843990439b05f848539d6f6198d4ac86ff01dd024b2f9a795c0daeeab60"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f72e27a62041cfb37a3de512247ece9f240a561e6c8662276beaf4d53d406db4"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a904f9572092bb6742ab7c16c623f0cdccbad9eeb2d14d4aa06284867bddd31"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:855c0833999ed5dc62f64552db26f9be767434917d8348d77bacaab84f787d7b"}, + {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:897830244e2320f6184699f598df7fb9db9f5087d6f3f03666ae89d607e4f8ed"}, + {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:0b32652eaa4a7539f6f04abc6243619c56f8530c53bf9b023e1269df5f7816dd"}, + {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:36b4aa31e0f6a1aeeb6f8377769ca5d125db000f05c20e54163aef1d3fe8e833"}, + {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5535163054d6cbf2796f93e4f0dbc800f61914c0e3c4ed8499cf6ece22b4a3da"}, + {file = "orjson-3.10.12-cp38-none-win32.whl", hash = "sha256:90a5551f6f5a5fa07010bf3d0b4ca2de21adafbbc0af6cb700b63cd767266cb9"}, + {file = "orjson-3.10.12-cp38-none-win_amd64.whl", hash = "sha256:703a2fb35a06cdd45adf5d733cf613cbc0cb3ae57643472b16bc22d325b5fb6c"}, + {file = "orjson-3.10.12-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f29de3ef71a42a5822765def1febfb36e0859d33abf5c2ad240acad5c6a1b78d"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de365a42acc65d74953f05e4772c974dad6c51cfc13c3240899f534d611be967"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:91a5a0158648a67ff0004cb0df5df7dcc55bfc9ca154d9c01597a23ad54c8d0c"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c47ce6b8d90fe9646a25b6fb52284a14ff215c9595914af63a5933a49972ce36"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0eee4c2c5bfb5c1b47a5db80d2ac7aaa7e938956ae88089f098aff2c0f35d5d8"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35d3081bbe8b86587eb5c98a73b97f13d8f9fea685cf91a579beddacc0d10566"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:73c23a6e90383884068bc2dba83d5222c9fcc3b99a0ed2411d38150734236755"}, + {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5472be7dc3269b4b52acba1433dac239215366f89dc1d8d0e64029abac4e714e"}, + {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:7319cda750fca96ae5973efb31b17d97a5c5225ae0bc79bf5bf84df9e1ec2ab6"}, + {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:74d5ca5a255bf20b8def6a2b96b1e18ad37b4a122d59b154c458ee9494377f80"}, + {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ff31d22ecc5fb85ef62c7d4afe8301d10c558d00dd24274d4bbe464380d3cd69"}, + {file = "orjson-3.10.12-cp39-none-win32.whl", hash = "sha256:c22c3ea6fba91d84fcb4cda30e64aff548fcf0c44c876e681f47d61d24b12e6b"}, + {file = "orjson-3.10.12-cp39-none-win_amd64.whl", hash = "sha256:be604f60d45ace6b0b33dd990a66b4526f1a7a186ac411c942674625456ca548"}, + {file = "orjson-3.10.12.tar.gz", hash = "sha256:0a78bbda3aea0f9f079057ee1ee8a1ecf790d4f1af88dd67493c6b8ee52506ff"}, ] [[package]] @@ -1155,19 +1176,19 @@ files = [ [[package]] name = "pydantic" -version = "2.9.2" +version = "2.10.3" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, - {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, + {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"}, + {file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.23.4" -typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""} +pydantic-core = "2.27.1" +typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] @@ -1175,100 +1196,111 @@ timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.23.4" +version = "2.27.1" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, - {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, - {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, - {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, - {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, - {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, - {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, - {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, - {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, - {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, - {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, - {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, - {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, - {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, - {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, - {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, - {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, - {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, - {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, - {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, - {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, - {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, - {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, - {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, - {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, - {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, - {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, - {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, - {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, - {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, - {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, - {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, - {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, - {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, - {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, - {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, - {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, + {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, + {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, + {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, + {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, + {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, + {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, + {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, + {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, + {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, + {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, + {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, + {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, + {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, + {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, + {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, + {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, ] [package.dependencies] @@ -1488,105 +1520,105 @@ files = [ [[package]] name = "regex" -version = "2024.9.11" +version = "2024.11.6" description = "Alternative regular expression module, to replace re." optional = false python-versions = ">=3.8" files = [ - {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1494fa8725c285a81d01dc8c06b55287a1ee5e0e382d8413adc0a9197aac6408"}, - {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0e12c481ad92d129c78f13a2a3662317e46ee7ef96c94fd332e1c29131875b7d"}, - {file = "regex-2024.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16e13a7929791ac1216afde26f712802e3df7bf0360b32e4914dca3ab8baeea5"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46989629904bad940bbec2106528140a218b4a36bb3042d8406980be1941429c"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a906ed5e47a0ce5f04b2c981af1c9acf9e8696066900bf03b9d7879a6f679fc8"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a091b0550b3b0207784a7d6d0f1a00d1d1c8a11699c1a4d93db3fbefc3ad35"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ddcd9a179c0a6fa8add279a4444015acddcd7f232a49071ae57fa6e278f1f71"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b41e1adc61fa347662b09398e31ad446afadff932a24807d3ceb955ed865cc8"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ced479f601cd2f8ca1fd7b23925a7e0ad512a56d6e9476f79b8f381d9d37090a"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:635a1d96665f84b292e401c3d62775851aedc31d4f8784117b3c68c4fcd4118d"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c0256beda696edcf7d97ef16b2a33a8e5a875affd6fa6567b54f7c577b30a137"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ce4f1185db3fbde8ed8aa223fc9620f276c58de8b0d4f8cc86fd1360829edb6"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:09d77559e80dcc9d24570da3745ab859a9cf91953062e4ab126ba9d5993688ca"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a22ccefd4db3f12b526eccb129390942fe874a3a9fdbdd24cf55773a1faab1a"}, - {file = "regex-2024.9.11-cp310-cp310-win32.whl", hash = "sha256:f745ec09bc1b0bd15cfc73df6fa4f726dcc26bb16c23a03f9e3367d357eeedd0"}, - {file = "regex-2024.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:01c2acb51f8a7d6494c8c5eafe3d8e06d76563d8a8a4643b37e9b2dd8a2ff623"}, - {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2cce2449e5927a0bf084d346da6cd5eb016b2beca10d0013ab50e3c226ffc0df"}, - {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b37fa423beefa44919e009745ccbf353d8c981516e807995b2bd11c2c77d268"}, - {file = "regex-2024.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64ce2799bd75039b480cc0360907c4fb2f50022f030bf9e7a8705b636e408fad"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4cc92bb6db56ab0c1cbd17294e14f5e9224f0cc6521167ef388332604e92679"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d05ac6fa06959c4172eccd99a222e1fbf17b5670c4d596cb1e5cde99600674c4"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:040562757795eeea356394a7fb13076ad4f99d3c62ab0f8bdfb21f99a1f85664"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6113c008a7780792efc80f9dfe10ba0cd043cbf8dc9a76ef757850f51b4edc50"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e5fb5f77c8745a60105403a774fe2c1759b71d3e7b4ca237a5e67ad066c7199"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:54d9ff35d4515debf14bc27f1e3b38bfc453eff3220f5bce159642fa762fe5d4"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:df5cbb1fbc74a8305b6065d4ade43b993be03dbe0f8b30032cced0d7740994bd"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7fb89ee5d106e4a7a51bce305ac4efb981536301895f7bdcf93ec92ae0d91c7f"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a738b937d512b30bf75995c0159c0ddf9eec0775c9d72ac0202076c72f24aa96"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e28f9faeb14b6f23ac55bfbbfd3643f5c7c18ede093977f1df249f73fd22c7b1"}, - {file = "regex-2024.9.11-cp311-cp311-win32.whl", hash = "sha256:18e707ce6c92d7282dfce370cd205098384b8ee21544e7cb29b8aab955b66fa9"}, - {file = "regex-2024.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:313ea15e5ff2a8cbbad96ccef6be638393041b0a7863183c2d31e0c6116688cf"}, - {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b0d0a6c64fcc4ef9c69bd5b3b3626cc3776520a1637d8abaa62b9edc147a58f7"}, - {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:49b0e06786ea663f933f3710a51e9385ce0cba0ea56b67107fd841a55d56a231"}, - {file = "regex-2024.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5b513b6997a0b2f10e4fd3a1313568e373926e8c252bd76c960f96fd039cd28d"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee439691d8c23e76f9802c42a95cfeebf9d47cf4ffd06f18489122dbb0a7ad64"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8f877c89719d759e52783f7fe6e1c67121076b87b40542966c02de5503ace42"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23b30c62d0f16827f2ae9f2bb87619bc4fba2044911e2e6c2eb1af0161cdb766"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ab7824093d8f10d44330fe1e6493f756f252d145323dd17ab6b48733ff6c0a"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8dee5b4810a89447151999428fe096977346cf2f29f4d5e29609d2e19e0199c9"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98eeee2f2e63edae2181c886d7911ce502e1292794f4c5ee71e60e23e8d26b5d"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:57fdd2e0b2694ce6fc2e5ccf189789c3e2962916fb38779d3e3521ff8fe7a822"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d552c78411f60b1fdaafd117a1fca2f02e562e309223b9d44b7de8be451ec5e0"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a0b2b80321c2ed3fcf0385ec9e51a12253c50f146fddb2abbb10f033fe3d049a"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:18406efb2f5a0e57e3a5881cd9354c1512d3bb4f5c45d96d110a66114d84d23a"}, - {file = "regex-2024.9.11-cp312-cp312-win32.whl", hash = "sha256:e464b467f1588e2c42d26814231edecbcfe77f5ac414d92cbf4e7b55b2c2a776"}, - {file = "regex-2024.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:9e8719792ca63c6b8340380352c24dcb8cd7ec49dae36e963742a275dfae6009"}, - {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c157bb447303070f256e084668b702073db99bbb61d44f85d811025fcf38f784"}, - {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4db21ece84dfeefc5d8a3863f101995de646c6cb0536952c321a2650aa202c36"}, - {file = "regex-2024.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:220e92a30b426daf23bb67a7962900ed4613589bab80382be09b48896d211e92"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1ae19e64c14c7ec1995f40bd932448713d3c73509e82d8cd7744dc00e29e86"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f47cd43a5bfa48f86925fe26fbdd0a488ff15b62468abb5d2a1e092a4fb10e85"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d4a76b96f398697fe01117093613166e6aa8195d63f1b4ec3f21ab637632963"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ea51dcc0835eea2ea31d66456210a4e01a076d820e9039b04ae8d17ac11dee6"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7aaa315101c6567a9a45d2839322c51c8d6e81f67683d529512f5bcfb99c802"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c57d08ad67aba97af57a7263c2d9006d5c404d721c5f7542f077f109ec2a4a29"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8404bf61298bb6f8224bb9176c1424548ee1181130818fcd2cbffddc768bed8"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dd4490a33eb909ef5078ab20f5f000087afa2a4daa27b4c072ccb3cb3050ad84"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:eee9130eaad130649fd73e5cd92f60e55708952260ede70da64de420cdcad554"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a2644a93da36c784e546de579ec1806bfd2763ef47babc1b03d765fe560c9f8"}, - {file = "regex-2024.9.11-cp313-cp313-win32.whl", hash = "sha256:e997fd30430c57138adc06bba4c7c2968fb13d101e57dd5bb9355bf8ce3fa7e8"}, - {file = "regex-2024.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:042c55879cfeb21a8adacc84ea347721d3d83a159da6acdf1116859e2427c43f"}, - {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:35f4a6f96aa6cb3f2f7247027b07b15a374f0d5b912c0001418d1d55024d5cb4"}, - {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:55b96e7ce3a69a8449a66984c268062fbaa0d8ae437b285428e12797baefce7e"}, - {file = "regex-2024.9.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb130fccd1a37ed894824b8c046321540263013da72745d755f2d35114b81a60"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:323c1f04be6b2968944d730e5c2091c8c89767903ecaa135203eec4565ed2b2b"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be1c8ed48c4c4065ecb19d882a0ce1afe0745dfad8ce48c49586b90a55f02366"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b5b029322e6e7b94fff16cd120ab35a253236a5f99a79fb04fda7ae71ca20ae8"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6fff13ef6b5f29221d6904aa816c34701462956aa72a77f1f151a8ec4f56aeb"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:587d4af3979376652010e400accc30404e6c16b7df574048ab1f581af82065e4"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:079400a8269544b955ffa9e31f186f01d96829110a3bf79dc338e9910f794fca"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f9268774428ec173654985ce55fc6caf4c6d11ade0f6f914d48ef4719eb05ebb"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:23f9985c8784e544d53fc2930fc1ac1a7319f5d5332d228437acc9f418f2f168"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2941333154baff9838e88aa71c1d84f4438189ecc6021a12c7573728b5838e"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e93f1c331ca8e86fe877a48ad64e77882c0c4da0097f2212873a69bbfea95d0c"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:846bc79ee753acf93aef4184c040d709940c9d001029ceb7b7a52747b80ed2dd"}, - {file = "regex-2024.9.11-cp38-cp38-win32.whl", hash = "sha256:c94bb0a9f1db10a1d16c00880bdebd5f9faf267273b8f5bd1878126e0fbde771"}, - {file = "regex-2024.9.11-cp38-cp38-win_amd64.whl", hash = "sha256:2b08fce89fbd45664d3df6ad93e554b6c16933ffa9d55cb7e01182baaf971508"}, - {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:07f45f287469039ffc2c53caf6803cd506eb5f5f637f1d4acb37a738f71dd066"}, - {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4838e24ee015101d9f901988001038f7f0d90dc0c3b115541a1365fb439add62"}, - {file = "regex-2024.9.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6edd623bae6a737f10ce853ea076f56f507fd7726bee96a41ee3d68d347e4d16"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c69ada171c2d0e97a4b5aa78fbb835e0ffbb6b13fc5da968c09811346564f0d3"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02087ea0a03b4af1ed6ebab2c54d7118127fee8d71b26398e8e4b05b78963199"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:69dee6a020693d12a3cf892aba4808fe168d2a4cef368eb9bf74f5398bfd4ee8"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297f54910247508e6e5cae669f2bc308985c60540a4edd1c77203ef19bfa63ca"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ecea58b43a67b1b79805f1a0255730edaf5191ecef84dbc4cc85eb30bc8b63b9"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eab4bb380f15e189d1313195b062a6aa908f5bd687a0ceccd47c8211e9cf0d4a"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0cbff728659ce4bbf4c30b2a1be040faafaa9eca6ecde40aaff86f7889f4ab39"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:54c4a097b8bc5bb0dfc83ae498061d53ad7b5762e00f4adaa23bee22b012e6ba"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:73d6d2f64f4d894c96626a75578b0bf7d9e56dcda8c3d037a2118fdfe9b1c664"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:e53b5fbab5d675aec9f0c501274c467c0f9a5d23696cfc94247e1fb56501ed89"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ffbcf9221e04502fc35e54d1ce9567541979c3fdfb93d2c554f0ca583a19b35"}, - {file = "regex-2024.9.11-cp39-cp39-win32.whl", hash = "sha256:e4c22e1ac1f1ec1e09f72e6c44d8f2244173db7eb9629cc3a346a8d7ccc31142"}, - {file = "regex-2024.9.11-cp39-cp39-win_amd64.whl", hash = "sha256:faa3c142464efec496967359ca99696c896c591c56c53506bac1ad465f66e919"}, - {file = "regex-2024.9.11.tar.gz", hash = "sha256:6c188c307e8433bcb63dc1915022deb553b4203a70722fc542c363bf120a01fd"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"}, + {file = "regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"}, + {file = "regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"}, + {file = "regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"}, + {file = "regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"}, + {file = "regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"}, + {file = "regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d"}, + {file = "regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff"}, + {file = "regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3a51ccc315653ba012774efca4f23d1d2a8a8f278a6072e29c7147eee7da446b"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ad182d02e40de7459b73155deb8996bbd8e96852267879396fb274e8700190e3"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba9b72e5643641b7d41fa1f6d5abda2c9a263ae835b917348fc3c928182ad467"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40291b1b89ca6ad8d3f2b82782cc33807f1406cf68c8d440861da6304d8ffbbd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf58d0e516ee426a48f7b2c03a332a4114420716d55769ff7108c37a09951bf"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a36fdf2af13c2b14738f6e973aba563623cb77d753bbbd8d414d18bfaa3105dd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cee317bfc014c2419a76bcc87f071405e3966da434e03e13beb45f8aced1a6"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50153825ee016b91549962f970d6a4442fa106832e14c918acd1c8e479916c4f"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea1bfda2f7162605f6e8178223576856b3d791109f15ea99a9f95c16a7636fb5"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:df951c5f4a1b1910f1a99ff42c473ff60f8225baa1cdd3539fe2819d9543e9df"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:072623554418a9911446278f16ecb398fb3b540147a7828c06e2011fa531e773"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f654882311409afb1d780b940234208a252322c24a93b442ca714d119e68086c"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:89d75e7293d2b3e674db7d4d9b1bee7f8f3d1609428e293771d1a962617150cc"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f65557897fc977a44ab205ea871b690adaef6b9da6afda4790a2484b04293a5f"}, + {file = "regex-2024.11.6-cp38-cp38-win32.whl", hash = "sha256:6f44ec28b1f858c98d3036ad5d7d0bfc568bdd7a74f9c24e25f41ef1ebfd81a4"}, + {file = "regex-2024.11.6-cp38-cp38-win_amd64.whl", hash = "sha256:bb8f74f2f10dbf13a0be8de623ba4f9491faf58c24064f32b65679b021ed0001"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b"}, + {file = "regex-2024.11.6-cp39-cp39-win32.whl", hash = "sha256:41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57"}, + {file = "regex-2024.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983"}, + {file = "regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519"}, ] [[package]] @@ -1727,33 +1759,33 @@ typing-extensions = "*" [[package]] name = "setuptools" -version = "75.3.0" +version = "75.6.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "setuptools-75.3.0-py3-none-any.whl", hash = "sha256:f2504966861356aa38616760c0f66568e535562374995367b4e69c7143cf6bcd"}, - {file = "setuptools-75.3.0.tar.gz", hash = "sha256:fba5dd4d766e97be1b1681d98712680ae8f2f26d7881245f2ce9e40714f1a686"}, + {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"}, + {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] +core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.12.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] @@ -1795,20 +1827,21 @@ files = [ [[package]] name = "tqdm" -version = "4.66.6" +version = "4.67.1" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.6-py3-none-any.whl", hash = "sha256:223e8b5359c2efc4b30555531f09e9f2f3589bcd7fdd389271191031b49b7a63"}, - {file = "tqdm-4.66.6.tar.gz", hash = "sha256:4bdd694238bef1485ce839d67967ab50af8f9272aab687c0d7702a01da0be090"}, + {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, + {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [package.extras] -dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"] +discord = ["requests"] notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] @@ -1882,81 +1915,76 @@ bracex = ">=2.1.1" [[package]] name = "wrapt" -version = "1.16.0" +version = "1.17.0" description = "Module for decorators, wrappers and monkey patching." optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, - {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, - {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, - {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, - {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, - {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, - {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, - {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, - {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, - {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, - {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, - {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, - {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, - {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, - {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, - {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, - {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, - {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, + {file = "wrapt-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a0c23b8319848426f305f9cb0c98a6e32ee68a36264f45948ccf8e7d2b941f8"}, + {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ca5f060e205f72bec57faae5bd817a1560fcfc4af03f414b08fa29106b7e2d"}, + {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e185ec6060e301a7e5f8461c86fb3640a7beb1a0f0208ffde7a65ec4074931df"}, + {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb90765dd91aed05b53cd7a87bd7f5c188fcd95960914bae0d32c5e7f899719d"}, + {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:879591c2b5ab0a7184258274c42a126b74a2c3d5a329df16d69f9cee07bba6ea"}, + {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fce6fee67c318fdfb7f285c29a82d84782ae2579c0e1b385b7f36c6e8074fffb"}, + {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0698d3a86f68abc894d537887b9bbf84d29bcfbc759e23f4644be27acf6da301"}, + {file = "wrapt-1.17.0-cp310-cp310-win32.whl", hash = "sha256:69d093792dc34a9c4c8a70e4973a3361c7a7578e9cd86961b2bbf38ca71e4e22"}, + {file = "wrapt-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:f28b29dc158ca5d6ac396c8e0a2ef45c4e97bb7e65522bfc04c989e6fe814575"}, + {file = "wrapt-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:74bf625b1b4caaa7bad51d9003f8b07a468a704e0644a700e936c357c17dd45a"}, + {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f2a28eb35cf99d5f5bd12f5dd44a0f41d206db226535b37b0c60e9da162c3ed"}, + {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81b1289e99cf4bad07c23393ab447e5e96db0ab50974a280f7954b071d41b489"}, + {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2939cd4a2a52ca32bc0b359015718472d7f6de870760342e7ba295be9ebaf9"}, + {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a9653131bda68a1f029c52157fd81e11f07d485df55410401f745007bd6d339"}, + {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4e4b4385363de9052dac1a67bfb535c376f3d19c238b5f36bddc95efae15e12d"}, + {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bdf62d25234290db1837875d4dceb2151e4ea7f9fff2ed41c0fde23ed542eb5b"}, + {file = "wrapt-1.17.0-cp311-cp311-win32.whl", hash = "sha256:5d8fd17635b262448ab8f99230fe4dac991af1dabdbb92f7a70a6afac8a7e346"}, + {file = "wrapt-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:92a3d214d5e53cb1db8b015f30d544bc9d3f7179a05feb8f16df713cecc2620a"}, + {file = "wrapt-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:89fc28495896097622c3fc238915c79365dd0ede02f9a82ce436b13bd0ab7569"}, + {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875d240fdbdbe9e11f9831901fb8719da0bd4e6131f83aa9f69b96d18fae7504"}, + {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ed16d95fd142e9c72b6c10b06514ad30e846a0d0917ab406186541fe68b451"}, + {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18b956061b8db634120b58f668592a772e87e2e78bc1f6a906cfcaa0cc7991c1"}, + {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:daba396199399ccabafbfc509037ac635a6bc18510ad1add8fd16d4739cdd106"}, + {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4d63f4d446e10ad19ed01188d6c1e1bb134cde8c18b0aa2acfd973d41fcc5ada"}, + {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8a5e7cc39a45fc430af1aefc4d77ee6bad72c5bcdb1322cfde852c15192b8bd4"}, + {file = "wrapt-1.17.0-cp312-cp312-win32.whl", hash = "sha256:0a0a1a1ec28b641f2a3a2c35cbe86c00051c04fffcfcc577ffcdd707df3f8635"}, + {file = "wrapt-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:3c34f6896a01b84bab196f7119770fd8466c8ae3dfa73c59c0bb281e7b588ce7"}, + {file = "wrapt-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:714c12485aa52efbc0fc0ade1e9ab3a70343db82627f90f2ecbc898fdf0bb181"}, + {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da427d311782324a376cacb47c1a4adc43f99fd9d996ffc1b3e8529c4074d393"}, + {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba1739fb38441a27a676f4de4123d3e858e494fac05868b7a281c0a383c098f4"}, + {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e711fc1acc7468463bc084d1b68561e40d1eaa135d8c509a65dd534403d83d7b"}, + {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:140ea00c87fafc42739bd74a94a5a9003f8e72c27c47cd4f61d8e05e6dec8721"}, + {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:73a96fd11d2b2e77d623a7f26e004cc31f131a365add1ce1ce9a19e55a1eef90"}, + {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0b48554952f0f387984da81ccfa73b62e52817a4386d070c75e4db7d43a28c4a"}, + {file = "wrapt-1.17.0-cp313-cp313-win32.whl", hash = "sha256:498fec8da10e3e62edd1e7368f4b24aa362ac0ad931e678332d1b209aec93045"}, + {file = "wrapt-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd136bb85f4568fffca995bd3c8d52080b1e5b225dbf1c2b17b66b4c5fa02838"}, + {file = "wrapt-1.17.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:17fcf043d0b4724858f25b8826c36e08f9fb2e475410bece0ec44a22d533da9b"}, + {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4a557d97f12813dc5e18dad9fa765ae44ddd56a672bb5de4825527c847d6379"}, + {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0229b247b0fc7dee0d36176cbb79dbaf2a9eb7ecc50ec3121f40ef443155fb1d"}, + {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8425cfce27b8b20c9b89d77fb50e368d8306a90bf2b6eef2cdf5cd5083adf83f"}, + {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c900108df470060174108012de06d45f514aa4ec21a191e7ab42988ff42a86c"}, + {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:4e547b447073fc0dbfcbff15154c1be8823d10dab4ad401bdb1575e3fdedff1b"}, + {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:914f66f3b6fc7b915d46c1cc424bc2441841083de01b90f9e81109c9759e43ab"}, + {file = "wrapt-1.17.0-cp313-cp313t-win32.whl", hash = "sha256:a4192b45dff127c7d69b3bdfb4d3e47b64179a0b9900b6351859f3001397dabf"}, + {file = "wrapt-1.17.0-cp313-cp313t-win_amd64.whl", hash = "sha256:4f643df3d4419ea3f856c5c3f40fec1d65ea2e89ec812c83f7767c8730f9827a"}, + {file = "wrapt-1.17.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:69c40d4655e078ede067a7095544bcec5a963566e17503e75a3a3e0fe2803b13"}, + {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f495b6754358979379f84534f8dd7a43ff8cff2558dcdea4a148a6e713a758f"}, + {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:baa7ef4e0886a6f482e00d1d5bcd37c201b383f1d314643dfb0367169f94f04c"}, + {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fc931382e56627ec4acb01e09ce66e5c03c384ca52606111cee50d931a342d"}, + {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8f8909cdb9f1b237786c09a810e24ee5e15ef17019f7cecb207ce205b9b5fcce"}, + {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ad47b095f0bdc5585bced35bd088cbfe4177236c7df9984b3cc46b391cc60627"}, + {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:948a9bd0fb2c5120457b07e59c8d7210cbc8703243225dbd78f4dfc13c8d2d1f"}, + {file = "wrapt-1.17.0-cp38-cp38-win32.whl", hash = "sha256:5ae271862b2142f4bc687bdbfcc942e2473a89999a54231aa1c2c676e28f29ea"}, + {file = "wrapt-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:f335579a1b485c834849e9075191c9898e0731af45705c2ebf70e0cd5d58beed"}, + {file = "wrapt-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d751300b94e35b6016d4b1e7d0e7bbc3b5e1751e2405ef908316c2a9024008a1"}, + {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7264cbb4a18dc4acfd73b63e4bcfec9c9802614572025bdd44d0721983fc1d9c"}, + {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33539c6f5b96cf0b1105a0ff4cf5db9332e773bb521cc804a90e58dc49b10578"}, + {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c30970bdee1cad6a8da2044febd824ef6dc4cc0b19e39af3085c763fdec7de33"}, + {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bc7f729a72b16ee21795a943f85c6244971724819819a41ddbaeb691b2dd85ad"}, + {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6ff02a91c4fc9b6a94e1c9c20f62ea06a7e375f42fe57587f004d1078ac86ca9"}, + {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dfb7cff84e72e7bf975b06b4989477873dcf160b2fd89959c629535df53d4e0"}, + {file = "wrapt-1.17.0-cp39-cp39-win32.whl", hash = "sha256:2399408ac33ffd5b200480ee858baa58d77dd30e0dd0cab6a8a9547135f30a88"}, + {file = "wrapt-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:4f763a29ee6a20c529496a20a7bcb16a73de27f5da6a843249c7047daf135977"}, + {file = "wrapt-1.17.0-py3-none-any.whl", hash = "sha256:d2c63b93548eda58abf5188e505ffed0229bf675f7c3090f8e36ad55b8cbc371"}, + {file = "wrapt-1.17.0.tar.gz", hash = "sha256:16187aa2317c731170a88ef35e8937ae0f533c402872c1ee5e6d079fcf320801"}, ] [[package]] diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/pyproject.toml b/airbyte-integrations/connectors/source-google-analytics-data-api/pyproject.toml index 3f5bedb75b6e..d658b8879b45 100644 --- a/airbyte-integrations/connectors/source-google-analytics-data-api/pyproject.toml +++ b/airbyte-integrations/connectors/source-google-analytics-data-api/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "2.6.1" +version = "2.6.2" name = "source-google-analytics-data-api" description = "Source implementation for Google Analytics Data Api." authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-hoorayhr/README.md b/airbyte-integrations/connectors/source-hoorayhr/README.md new file mode 100644 index 000000000000..95b2dcb571fe --- /dev/null +++ b/airbyte-integrations/connectors/source-hoorayhr/README.md @@ -0,0 +1,33 @@ +# HoorayHR +This directory contains the manifest-only connector for `source-hoorayhr`. + +Source connector for HoorayHR (https://hoorayhr.io). The connector uses https://api.hoorayhr.io + +## Usage +There are multiple ways to use this connector: +- You can use this connector as any other connector in Airbyte Marketplace. +- You can load this connector in `pyairbyte` using `get_source`! +- You can open this connector in Connector Builder, edit it, and publish to your workspaces. + +Please refer to the manifest-only connector documentation for more details. + +## Local Development +We recommend you use the Connector Builder to edit this connector. + +But, if you want to develop this connector locally, you can use the following steps. + +### Environment Setup +You will need `airbyte-ci` installed. You can find the documentation [here](airbyte-ci). + +### Build +This will create a dev image (`source-hoorayhr:dev`) that you can use to test the connector locally. +```bash +airbyte-ci connectors --name=source-hoorayhr build +``` + +### Test +This will run the acceptance tests for the connector. +```bash +airbyte-ci connectors --name=source-hoorayhr test +``` + diff --git a/airbyte-integrations/connectors/source-hoorayhr/acceptance-test-config.yml b/airbyte-integrations/connectors/source-hoorayhr/acceptance-test-config.yml new file mode 100644 index 000000000000..68a236e2cd9f --- /dev/null +++ b/airbyte-integrations/connectors/source-hoorayhr/acceptance-test-config.yml @@ -0,0 +1,17 @@ +# See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-hoorayhr:dev +acceptance_tests: + spec: + tests: + - spec_path: "manifest.yaml" + connection: + bypass_reason: "This is a builder contribution, and we do not have secrets at this time" + discovery: + bypass_reason: "This is a builder contribution, and we do not have secrets at this time" + basic_read: + bypass_reason: "This is a builder contribution, and we do not have secrets at this time" + incremental: + bypass_reason: "This is a builder contribution, and we do not have secrets at this time" + full_refresh: + bypass_reason: "This is a builder contribution, and we do not have secrets at this time" diff --git a/airbyte-integrations/connectors/source-hoorayhr/icon.svg b/airbyte-integrations/connectors/source-hoorayhr/icon.svg new file mode 100644 index 000000000000..3904611c98a1 --- /dev/null +++ b/airbyte-integrations/connectors/source-hoorayhr/icon.svg @@ -0,0 +1,72 @@ + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-hoorayhr/manifest.yaml b/airbyte-integrations/connectors/source-hoorayhr/manifest.yaml new file mode 100644 index 000000000000..c3b932d1a4df --- /dev/null +++ b/airbyte-integrations/connectors/source-hoorayhr/manifest.yaml @@ -0,0 +1,803 @@ +version: 6.1.0 + +type: DeclarativeSource + +description: >- + Source connector for HoorayHR (https://hoorayhr.io). The connector uses + https://api.hoorayhr.io + +check: + type: CheckStream + stream_names: + - sick-leaves + +definitions: + streams: + users: + type: DeclarativeStream + name: users + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: /users + http_method: GET + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] + primary_key: + - id + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/users" + time-off: + type: DeclarativeStream + name: time-off + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: /time-off + http_method: GET + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] + primary_key: + - id + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/time-off" + leave-types: + type: DeclarativeStream + name: leave-types + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: /leave-types + http_method: GET + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] + primary_key: + - id + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/leave-types" + sick-leaves: + type: DeclarativeStream + name: sick-leaves + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: /sick-leave + http_method: GET + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] + primary_key: + - id + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/sick-leaves" + base_requester: + type: HttpRequester + url_base: https://api.hooray.nl + authenticator: + type: SessionTokenAuthenticator + login_requester: + type: HttpRequester + path: authentication + url_base: https://api.hooray.nl + http_method: POST + authenticator: + type: NoAuth + request_headers: {} + request_body_json: + email: "{{ config[\"hoorayhrusername\"] }}" + password: "{{ config[\"hoorayhrpassword\"] }}" + strategy: local + request_parameters: {} + session_token_path: + - accessToken + request_authentication: + type: ApiKey + inject_into: + type: RequestOption + field_name: Authorization + inject_into: header + +streams: + - $ref: "#/definitions/streams/sick-leaves" + - $ref: "#/definitions/streams/time-off" + - $ref: "#/definitions/streams/leave-types" + - $ref: "#/definitions/streams/users" + +spec: + type: Spec + connection_specification: + type: object + $schema: http://json-schema.org/draft-07/schema# + required: + - hoorayhrusername + - hoorayhrpassword + properties: + hoorayhrpassword: + type: string + order: 1 + title: HoorayHR Password + airbyte_secret: true + hoorayhrusername: + type: string + order: 0 + title: HoorayHR Username + additionalProperties: true + +metadata: + assist: {} + testedStreams: + users: + hasRecords: true + streamHash: 33982ec24949870d1dd15b7191b46c03296fd15d + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + time-off: + hasRecords: true + streamHash: 768ac0388c513fc36cf2af868244f3e0e13997a2 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + leave-types: + hasRecords: true + streamHash: aca371fa790b98baa7a814d47ca7e66d0b17c399 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + sick-leaves: + hasRecords: true + streamHash: 2bb47034ff795f366f0bafa8258077020af607c0 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + autoImportSchema: + users: true + time-off: true + leave-types: true + sick-leaves: true + +schemas: + users: + type: object + $schema: http://json-schema.org/schema# + required: + - id + properties: + id: + type: number + city: + type: + - string + - "null" + email: + type: + - string + - "null" + phone: + type: + - string + - "null" + teams: + type: + - string + - "null" + avatar: + type: + - string + - "null" + gender: + type: + - string + - "null" + locale: + type: + - string + - "null" + status: + type: + - number + - "null" + country: + type: + - string + - "null" + isAdmin: + type: + - number + - "null" + zipcode: + type: + - string + - "null" + initials: + type: + - string + - "null" + jobTitle: + type: + - string + - "null" + lastName: + type: + - string + - "null" + nickName: + type: + - string + - "null" + timezone: + type: + - string + - "null" + abilities: + type: + - array + - "null" + biography: + type: + - string + - "null" + birthdate: + type: + - string + - "null" + companyId: + type: + - number + - "null" + createdAt: + type: + - string + - "null" + firstName: + type: + - string + - "null" + insertion: + type: + - string + - "null" + invitedAt: + type: + - string + - "null" + updatedAt: + type: + - string + - "null" + birthPlace: + type: + - string + - "null" + civilStatus: + type: + - string + - "null" + costCenters: + type: + - array + - "null" + nationality: + type: + - string + - "null" + onBoardedAt: + type: + - string + - "null" + birthCountry: + type: + - string + - "null" + emailPrivate: + type: + - string + - "null" + integrations: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + type: + type: + - string + - "null" + enabled: + type: + - string + - "null" + addressNumber: + type: + - string + - "null" + addressStreet: + type: + - string + - "null" + emergencyName: + type: + - string + - "null" + holidayPolicy: + type: + - object + - "null" + properties: + id: + type: + - number + - "null" + name: + type: + - string + - "null" + lastNameUsage: + type: + - string + - "null" + companyEndDate: + type: + - string + - "null" + employeeNumber: + type: + - string + - "null" + addressAddition: + type: + - string + - "null" + holidayPolicyId: + type: + - number + - "null" + invitedByUserId: + type: + - number + - "null" + travelAllowance: + type: + - number + - "null" + companyEndReason: + type: + - string + - "null" + companyStartDate: + type: + - string + - "null" + inviteAcceptedAt: + type: + - string + - "null" + inviteRemindedAt: + type: + - string + - "null" + bankAccountNumber: + type: + - string + - "null" + emergencyRelation: + type: + - string + - "null" + emergencyWorkPhone: + type: + - string + - "null" + citizenServiceNumber: + type: + - string + - "null" + emergencyPersonalPhone: + type: + - string + - "null" + twoFactorAuthentication: + type: + - number + - "null" + bankAccountNumberOnBehalfOf: + type: + - string + - "null" + additionalProperties: true + time-off: + type: object + $schema: http://json-schema.org/schema# + required: + - id + properties: + id: + type: number + end: + type: + - string + - "null" + notes: + type: + - string + - "null" + pause: + type: + - number + - "null" + reply: + type: + - string + - "null" + start: + type: + - string + - "null" + labels: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + type: + type: + - string + - "null" + id: + type: + - number + - "null" + name: + type: + - object + - "null" + properties: + de: + type: + - string + - "null" + en: + type: + - string + - "null" + nl: + type: + - string + - "null" + createdAt: + type: + - string + - "null" + updatedAt: + type: + - string + - "null" + archivedAt: + type: + - string + - "null" + status: + type: + - number + - "null" + userId: + type: + - number + - "null" + timezone: + type: + - string + - "null" + createdAt: + type: + - string + - "null" + holidayId: + type: + - number + - "null" + isPrivate: + type: + - number + - "null" + leaveUnit: + type: + - string + - "null" + updatedAt: + type: + - string + - "null" + budgetTotal: + type: + - number + - "null" + leaveTypeId: + type: + - number + - "null" + timeOffType: + type: + - string + - "null" + userIdApproved: + type: + - number + - "null" + baseTimeOffType: + type: + - string + - "null" + isNotCalculated: + type: + - number + - "null" + leaveTypeRuleId: + type: + - number + - "null" + budgetAdjustment: + type: + - number + - "null" + budgetCalculated: + type: + - number + - "null" + additionalProperties: true + leave-types: + type: object + $schema: http://json-schema.org/schema# + required: + - id + properties: + id: + type: number + icon: + type: + - string + - "null" + name: + type: + - string + - "null" + color: + type: + - string + - "null" + budget: + type: + - number + - "null" + default: + type: + - number + - "null" + isLegacy: + type: + - number + - "null" + createdAt: + type: + - string + - "null" + updatedAt: + type: + - string + - "null" + updatedBy: + type: + - number + - "null" + leaveInDays: + type: + - number + - "null" + unpaidLeave: + type: + - number + - "null" + periodOffset: + type: + - number + - "null" + leaveTypeRules: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + id: + type: + - number + - "null" + name: + type: + - string + - "null" + order: + type: + - number + - "null" + budget: + type: + - number + - "null" + createdAt: + type: + - string + - "null" + updatedAt: + type: + - string + - "null" + leaveTypeId: + type: + - number + - "null" + transferTerm: + type: + - number + - "null" + transferPeriod: + type: + - string + - "null" + expirationMoment: + type: + - string + - "null" + ruleSystemCategory: + type: + - string + - "null" + autoApproveLimit: + type: + - number + - "null" + subtractHolidays: + type: + - number + - "null" + calculationMethod: + type: + - string + - "null" + budgetReleaseTiming: + type: + - string + - "null" + invisibleInCalendar: + type: + - number + - "null" + budgetReleaseRecurrence: + type: + - string + - "null" + leaveTypeSystemCategory: + type: + - string + - "null" + accumulateBudgetWhenAbsent: + type: + - number + - "null" + additionalProperties: true + sick-leaves: + type: object + $schema: http://json-schema.org/schema# + required: + - id + properties: + id: + type: number + notes: + type: + - string + - "null" + status: + type: + - number + - "null" + userId: + type: + - number + - "null" + timezone: + type: + - string + - "null" + createdAt: + type: + - string + - "null" + updatedAt: + type: + - string + - "null" + percentage: + type: + - number + - "null" + actualStart: + type: + - string + - "null" + actualTotal: + type: + - number + - "null" + actualReturn: + type: + - string + - "null" + expectedTotal: + type: + - number + - "null" + reportedStart: + type: + - string + - "null" + actualStartEnd: + type: + - string + - "null" + expectedReturn: + type: + - string + - "null" + reportedReturn: + type: + - string + - "null" + userIdReported: + type: + - number + - "null" + actualReturnEnd: + type: + - string + - "null" + userIdConfirmed: + type: + - number + - "null" + additionalProperties: true diff --git a/airbyte-integrations/connectors/source-hoorayhr/metadata.yaml b/airbyte-integrations/connectors/source-hoorayhr/metadata.yaml new file mode 100644 index 000000000000..3852a4a56609 --- /dev/null +++ b/airbyte-integrations/connectors/source-hoorayhr/metadata.yaml @@ -0,0 +1,35 @@ +metadataSpecVersion: "1.0" +data: + allowedHosts: + hosts: + - "api.hooray.nl" + registryOverrides: + oss: + enabled: true + cloud: + enabled: true + remoteRegistries: + pypi: + enabled: false + packageName: airbyte-source-hoorayhr + connectorBuildOptions: + baseImage: docker.io/airbyte/source-declarative-manifest:6.11.1@sha256:0d0f562a70c0ed19ab605f0c83802a2e052712587692e2f3a1cc794fe7cd7007 + connectorSubtype: api + connectorType: source + definitionId: a2e34f7c-7de1-422c-b909-ce12f3e051af + dockerImageTag: 0.1.0 + dockerRepository: airbyte/source-hoorayhr + githubIssueLabel: source-hoorayhr + icon: icon.svg + license: MIT + name: HoorayHR + releaseDate: 2024-12-17 + releaseStage: alpha + supportLevel: community + documentationUrl: https://docs.airbyte.com/integrations/sources/hoorayhr + tags: + - language:manifest-only + - cdk:low-code + ab_internal: + ql: 100 + sl: 100 diff --git a/airbyte-integrations/connectors/source-intercom/README.md b/airbyte-integrations/connectors/source-intercom/README.md index 258f2dd41d78..da0194c9aab1 100644 --- a/airbyte-integrations/connectors/source-intercom/README.md +++ b/airbyte-integrations/connectors/source-intercom/README.md @@ -1,49 +1,22 @@ # Intercom source connector -This is the repository for the Intercom source connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/intercom). +This directory contains the manifest-only connector for `source-intercom`. +This _manifest-only_ connector is not a Python package on its own, as it runs inside of the base `source-declarative-manifest` image. -## Local development - -### Prerequisites - -- Python (~=3.9) -- Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) - -### Installing the connector - -From this connector directory, run: - -```bash -poetry install --with dev -``` - -### Create credentials +For information about how to configure and use this connector within Airbyte, see [the connector's full documentation](https://docs.airbyte.com/integrations/sources/intercom). -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/intercom) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_intercom/spec.yaml` file. -Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `sample_files/sample_config.json` for a sample config file. - -### Locally running the connector - -``` -poetry run source-intercom spec -poetry run source-intercom check --config secrets/config.json -poetry run source-intercom discover --config secrets/config.json -poetry run source-intercom read --config secrets/config.json --catalog integration_tests/configured_catalog.json -``` +## Local development -### Running unit tests +We recommend using the Connector Builder to edit this connector. +Using either Airbyte Cloud or your local Airbyte OSS instance, navigate to the **Builder** tab and select **Import a YAML**. +Then select the connector's `manifest.yaml` file to load the connector into the Builder. You're now ready to make changes to the connector! -To run unit tests locally, from the connector directory run: - -``` -poetry run pytest unit_tests -``` +If you prefer to develop locally, you can follow the instructions below. ### Building the docker image +You can build any manifest-only connector with `airbyte-ci`: + 1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) 2. Run the following command to build the docker image: @@ -53,18 +26,24 @@ airbyte-ci connectors --name=source-intercom build An image will be available on your host with the tag `airbyte/source-intercom:dev`. +### Creating credentials + +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/intercom) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `spec` object in the connector's `manifest.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. + ### Running as a docker container -Then run any of the connector commands as follows: +Then run any of the standard source connector commands: -``` +```bash docker run --rm airbyte/source-intercom:dev spec docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-intercom:dev check --config /secrets/config.json docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-intercom:dev discover --config /secrets/config.json docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-intercom:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json ``` -### Running our CI test suite +### Running the CI test suite You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): @@ -72,33 +51,15 @@ You can run our full test suite locally using [`airbyte-ci`](https://github.com/ airbyte-ci connectors --name=source-intercom test ``` -### Customizing acceptance Tests - -Customize `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. - -### Dependency Management - -All of your dependencies should be managed via Poetry. -To add a new dependency, run: - -```bash -poetry add -``` - -Please commit the changes to `pyproject.toml` and `poetry.lock` files. - ## Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? - -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-intercom test` -2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): - - bump the `dockerImageTag` value in in `metadata.yaml` - - bump the `version` value in `pyproject.toml` -3. Make sure the `metadata.yaml` content is up to date. +If you want to contribute changes to `source-intercom`, here's how you can do that: +1. Make your changes locally, or load the connector's manifest into Connector Builder and make changes there. +2. Make sure your changes are passing our test suite with `airbyte-ci connectors --name=source-intercom test` +3. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): + - bump the `dockerImageTag` value in in `metadata.yaml` 4. Make sure the connector documentation and its changelog is up to date (`docs/integrations/sources/intercom.md`). 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). 6. Pat yourself on the back for being an awesome contributor. 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. -8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. +8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-intercom/acceptance-test-config.yml b/airbyte-integrations/connectors/source-intercom/acceptance-test-config.yml index 37dd822467c7..bbcd393ac644 100644 --- a/airbyte-integrations/connectors/source-intercom/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-intercom/acceptance-test-config.yml @@ -5,7 +5,7 @@ test_strictness_level: high acceptance_tests: spec: tests: - - spec_path: "source_intercom/spec.json" + - spec_path: "manifest.yaml" connection: tests: - config_path: "secrets/config.json" @@ -20,6 +20,9 @@ acceptance_tests: - config_path: "secrets/config.json" expect_records: path: "integration_tests/expected_records.jsonl" + empty_streams: + - name: conversation_parts + bypass_reason: Deeply nested response which could not be seeded with sandbox incremental: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-intercom/source_intercom/components.py b/airbyte-integrations/connectors/source-intercom/components.py similarity index 100% rename from airbyte-integrations/connectors/source-intercom/source_intercom/components.py rename to airbyte-integrations/connectors/source-intercom/components.py diff --git a/airbyte-integrations/connectors/source-intercom/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-intercom/integration_tests/abnormal_state.json index e874bc451c67..477d722433d6 100755 --- a/airbyte-integrations/connectors/source-intercom/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-intercom/integration_tests/abnormal_state.json @@ -28,10 +28,7 @@ "name": "company_segments" }, "stream_state": { - "updated_at": 7626086649, - "companies": { - "updated_at": 7626086649 - } + "updated_at": 7626086649 } } }, @@ -42,10 +39,7 @@ "name": "conversations" }, "stream_state": { - "updated_at": 7626086649, - "conversations": { - "updated_at": 7626086649 - } + "updated_at": 7626086649 } } }, @@ -56,10 +50,7 @@ "name": "conversation_parts" }, "stream_state": { - "updated_at": 7626086649, - "conversations": { - "updated_at": 7626086649 - } + "updated_at": 7626086649 } } }, diff --git a/airbyte-integrations/connectors/source-intercom/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-intercom/integration_tests/expected_records.jsonl index d06ce90f22b7..26538cf32231 100644 --- a/airbyte-integrations/connectors/source-intercom/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-intercom/integration_tests/expected_records.jsonl @@ -28,9 +28,6 @@ {"stream": "conversations", "data": {"type": "conversation", "id": "1", "created_at": 1607553243, "updated_at": 1626346673, "waiting_since": null, "snoozed_until": null, "source": {"type": "conversation", "id": "701718739", "delivered_as": "customer_initiated", "subject": "", "body": "

hey there

", "author": {"type": "lead", "id": "5fd150d50697b6d0bbc4a2c2", "name": null, "email": ""}, "attachments": [], "url": "http://localhost:63342/airbyte-python/airbyte-integrations/bases/base-java/build/tmp/expandedArchives/org.jacoco.agent-0.8.5.jar_6a2df60c47de373ea127d14406367999/about.html?_ijt=uosck1k6vmp2dnl4oqib2g3u9d"}, "contacts": {"type": "contact.list", "contacts": [{"type": "contact", "id": "5fd150d50697b6d0bbc4a2c2"}]}, "first_contact_reply": {"created_at": 1607553243, "type": "conversation", "url": "http://localhost:63342/airbyte-python/airbyte-integrations/bases/base-java/build/tmp/expandedArchives/org.jacoco.agent-0.8.5.jar_6a2df60c47de373ea127d14406367999/about.html?_ijt=uosck1k6vmp2dnl4oqib2g3u9d"}, "open": true, "state": "open", "read": false, "tags": {"type": "tag.list", "tags": []}, "priority": "not_priority", "sla_applied": null, "statistics": {"type": "conversation_statistics", "time_to_assignment": null, "time_to_admin_reply": 4317957, "time_to_first_close": null, "time_to_last_close": null, "median_time_to_reply": 4317954, "first_contact_reply_at": 1607553243, "first_assignment_at": null, "first_admin_reply_at": 1625654131, "first_close_at": null, "last_assignment_at": null, "last_assignment_admin_reply_at": null, "last_contact_reply_at": 1607553246, "last_admin_reply_at": 1625656000, "last_close_at": null, "last_closed_by_id": null, "count_reopens": 0, "count_assignments": 0, "count_conversation_parts": 7}, "conversation_rating": null, "teammates": {"type": "admin.list", "admins": [{"type": "admin", "id": "4423433"}]}, "assignee": null}, "emitted_at": 1707747714058} {"stream": "conversations", "data": {"type": "conversation", "id": "60", "created_at": 1676461133, "updated_at": 1676461134, "waiting_since": null, "snoozed_until": null, "source": {"type": "conversation", "id": "51952871", "delivered_as": "automated", "subject": "", "body": "

Test 3

", "author": {"type": "admin", "id": "4423433", "name": "Airbyte Team", "email": "integration-test@airbyte.io"}, "attachments": [], "url": null}, "contacts": {"type": "contact.list", "contacts": [{"type": "contact", "id": "63ea41a0eddb9b625fb712c9"}]}, "first_contact_reply": null, "open": true, "state": "open", "read": false, "tags": {"type": "tag.list", "tags": []}, "priority": "not_priority", "sla_applied": null, "statistics": {"type": "conversation_statistics", "time_to_assignment": null, "time_to_admin_reply": null, "time_to_first_close": null, "time_to_last_close": null, "median_time_to_reply": null, "first_contact_reply_at": null, "first_assignment_at": null, "first_admin_reply_at": null, "first_close_at": null, "last_assignment_at": null, "last_assignment_admin_reply_at": null, "last_contact_reply_at": null, "last_admin_reply_at": null, "last_close_at": null, "last_closed_by_id": null, "count_reopens": 0, "count_assignments": 0, "count_conversation_parts": 2}, "conversation_rating": null, "teammates": {"type": "admin.list", "admins": [{"type": "admin", "id": "4423433"}]}, "assignee": {"type": "admin", "id": "4423433"}}, "emitted_at": 1707747714064} {"stream": "conversations", "data": {"type": "conversation", "id": "61", "created_at": 1676461196, "updated_at": 1676461197, "waiting_since": null, "snoozed_until": null, "source": {"type": "conversation", "id": "51952963", "delivered_as": "automated", "subject": "", "body": "

Test 4

", "author": {"type": "admin", "id": "4423433", "name": "Airbyte Team", "email": "integration-test@airbyte.io"}, "attachments": [], "url": null}, "contacts": {"type": "contact.list", "contacts": [{"type": "contact", "id": "63ea41a1b0e17c53248c7956"}]}, "first_contact_reply": null, "open": true, "state": "open", "read": false, "tags": {"type": "tag.list", "tags": []}, "priority": "not_priority", "sla_applied": null, "statistics": {"type": "conversation_statistics", "time_to_assignment": null, "time_to_admin_reply": null, "time_to_first_close": null, "time_to_last_close": null, "median_time_to_reply": null, "first_contact_reply_at": null, "first_assignment_at": null, "first_admin_reply_at": null, "first_close_at": null, "last_assignment_at": null, "last_assignment_admin_reply_at": null, "last_contact_reply_at": null, "last_admin_reply_at": null, "last_close_at": null, "last_closed_by_id": null, "count_reopens": 0, "count_assignments": 0, "count_conversation_parts": 2}, "conversation_rating": null, "teammates": {"type": "admin.list", "admins": [{"type": "admin", "id": "4423433"}]}, "assignee": {"type": "admin", "id": "4423433"}}, "emitted_at": 1707747714069} -{"stream": "conversation_parts", "data": {"type": "conversation_part", "id": "7288120839", "part_type": "comment", "body": "

is this showing up

", "created_at": 1607553246, "updated_at": 1607553246, "notified_at": 1607553246, "assigned_to": null, "author": {"id": "5fd150d50697b6d0bbc4a2c2", "type": "user", "name": null, "email": ""}, "attachments": [], "external_id": null, "conversation_id": "1"}, "emitted_at": 1707747716219} -{"stream": "conversation_parts", "data": {"type": "conversation_part", "id": "7288121348", "part_type": "comment", "body": "

Airbyte [DEV] will reply as soon as they can.

", "created_at": 1607553249, "updated_at": 1607553249, "notified_at": 1607553249, "assigned_to": null, "author": {"id": "4423434", "type": "bot", "name": "Operator", "email": "operator+wjw5eps7@intercom.io"}, "attachments": [], "external_id": null, "conversation_id": "1"}, "emitted_at": 1707747716222} -{"stream": "conversation_parts", "data": {"type": "conversation_part", "id": "7288121392", "part_type": "comment", "body": "

Give the team a way to reach you:

", "created_at": 1607553250, "updated_at": 1607553250, "notified_at": 1607553250, "assigned_to": null, "author": {"id": "4423434", "type": "bot", "name": "Operator", "email": "operator+wjw5eps7@intercom.io"}, "attachments": [], "external_id": null, "conversation_id": "1"}, "emitted_at": 1707747716225} {"stream": "company_segments", "data": {"type": "segment", "id": "63ea1a19d248071b8d297b39", "name": "Companies less then 100 people", "created_at": 1676286489, "updated_at": 1676461957, "person_type": "user"}, "emitted_at": 1707747722461} {"stream": "company_segments", "data": {"type": "segment", "id": "63eb62f228758099dbc7fabe", "name": "Companies not IT", "created_at": 1676370674, "updated_at": 1676461960, "person_type": "user"}, "emitted_at": 1707747722463} {"stream": "company_segments", "data": {"type": "segment", "id": "63eb63c3046264426ef4bfd6", "name": "Companies tag not 3", "created_at": 1676370883, "updated_at": 1676461915, "person_type": "user"}, "emitted_at": 1707747722465} diff --git a/airbyte-integrations/connectors/source-intercom/main.py b/airbyte-integrations/connectors/source-intercom/main.py deleted file mode 100644 index 410860c90fd8..000000000000 --- a/airbyte-integrations/connectors/source-intercom/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from source_intercom.run import run - -if __name__ == "__main__": - run() diff --git a/airbyte-integrations/connectors/source-intercom/manifest.yaml b/airbyte-integrations/connectors/source-intercom/manifest.yaml new file mode 100644 index 000000000000..87b11e536025 --- /dev/null +++ b/airbyte-integrations/connectors/source-intercom/manifest.yaml @@ -0,0 +1,2966 @@ +version: 5.7.0 + +type: DeclarativeSource + +check: + type: CheckStream + stream_names: + - tags + +definitions: + streams: + activity_logs: + type: DeclarativeStream + name: activity_logs + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: admins/activity_logs + http_method: GET + request_headers: + Accept: application/json + Intercom-Version: "2.11" + error_handler: + type: CustomErrorHandler + class_name: source_declarative_manifest.components.ErrorHandlerWithRateLimiter + response_filters: + - type: HttpResponseFilter + action: FAIL + failure_type: config_error + error_message: >- + Failed to perform request. Error: Active subscription needed. + Please, validate your current Intercom plan to continue using + API. + error_message_contains: Active subscription needed + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - activity_logs + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get('pages', {}).get('next') }}" + stop_condition: "{{ 'next' not in response.get('pages', {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: created_at + lookback_window: P{{ config.get('lookback_window', 0) }}D + cursor_datetime_formats: + - "%s" + datetime_format: "%s" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config[\"start_date\"] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: created_at_after + inject_into: request_parameter + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/activity_logs" + admins: + type: DeclarativeStream + description: >- + https://developers.intercom.com/intercom-api-reference/reference#list-admins + name: admins + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: admins + http_method: GET + request_headers: + Accept: application/json + Intercom-Version: "2.11" + error_handler: + type: CustomErrorHandler + class_name: source_declarative_manifest.components.ErrorHandlerWithRateLimiter + response_filters: + - type: HttpResponseFilter + action: FAIL + failure_type: config_error + error_message: >- + Failed to perform request. Error: Active subscription needed. + Please, validate your current Intercom plan to continue using + API. + error_message_contains: Active subscription needed + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - admins + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + page_size_option: + type: RequestOption + field_name: per_page + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + page_size: 150 + cursor_value: "{{ response.get('pages', {}).get('next') }}" + stop_condition: "{{ 'next' not in response.get('pages', {}) }}" + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/admins" + tags: + type: DeclarativeStream + description: >- + https://developers.intercom.com/intercom-api-reference/reference#list-tags-for-an-app + name: tags + primary_key: + - name + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: tags + http_method: GET + request_headers: + Accept: application/json + Intercom-Version: "2.11" + error_handler: + type: CustomErrorHandler + class_name: source_declarative_manifest.components.ErrorHandlerWithRateLimiter + response_filters: + - type: HttpResponseFilter + action: FAIL + failure_type: config_error + error_message: >- + Failed to perform request. Error: Active subscription needed. + Please, validate your current Intercom plan to continue using + API. + error_message_contains: Active subscription needed + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - data + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + page_size_option: + type: RequestOption + field_name: per_page + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + page_size: 150 + cursor_value: "{{ response.get('pages', {}).get('next') }}" + stop_condition: "{{ 'next' not in response.get('pages', {}) }}" + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/tags" + teams: + type: DeclarativeStream + description: >- + https://developers.intercom.com/intercom-api-reference/reference#list-teams + name: teams + primary_key: + - name + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: teams + http_method: GET + request_headers: + Accept: application/json + Intercom-Version: "2.11" + error_handler: + type: CustomErrorHandler + class_name: source_declarative_manifest.components.ErrorHandlerWithRateLimiter + response_filters: + - type: HttpResponseFilter + action: FAIL + failure_type: config_error + error_message: >- + Failed to perform request. Error: Active subscription needed. + Please, validate your current Intercom plan to continue using + API. + error_message_contains: Active subscription needed + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - teams + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + page_size_option: + type: RequestOption + field_name: per_page + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + page_size: 150 + cursor_value: "{{ response.get('pages', {}).get('next') }}" + stop_condition: "{{ 'next' not in response.get('pages', {}) }}" + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/teams" + segments: + type: DeclarativeStream + description: >- + https://developers.intercom.com/intercom-api-reference/reference#list-segments + name: segments + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: segments + http_method: GET + request_headers: + Accept: application/json + Intercom-Version: "2.11" + error_handler: + type: CustomErrorHandler + class_name: source_declarative_manifest.components.ErrorHandlerWithRateLimiter + response_filters: + - type: HttpResponseFilter + action: FAIL + failure_type: config_error + error_message: >- + Failed to perform request. Error: Active subscription needed. + Please, validate your current Intercom plan to continue using + API. + error_message_contains: Active subscription needed + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - segments + record_filter: + type: RecordFilter + condition: >- + {{ record['updated_at'] >= ( stream_state.get('prior_state', + {}).get('updated_at', 0) - (config.get('lookback_window', 0) * + 86400) if stream_state else stream_slice.get('prior_state', + {}).get('updated_at', 0) ) - (config.get('lookback_window', 0) * + 86400) }} + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + page_size_option: + type: RequestOption + field_name: per_page + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + page_size: 150 + cursor_value: "{{ response.get('pages', {}).get('next') }}" + stop_condition: "{{ 'next' not in response.get('pages', {}) }}" + incremental_sync: + type: CustomIncrementalSync + class_name: source_declarative_manifest.components.IncrementalSingleSliceCursor + cursor_field: updated_at + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/segments" + companies: + type: DeclarativeStream + description: >- + https://developers.intercom.com/intercom-api-reference/reference/scroll-over-all-companies + name: companies + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: companies/scroll + http_method: GET + request_headers: + Accept: application/json + Intercom-Version: "2.11" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: RETRY + http_codes: + - 400 + error_message: >- + A scroll (export) job is already in progress for this + Intercom account, causing the request to fail. Only one + active scroll per Intercom account is allowed; + ensure no overlap by limiting active connections or + scheduling jobs appropriately. + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + http_codes: + - 404 + - type: DefaultErrorHandler + backoff_strategies: + - type: ConstantBackoffStrategy + backoff_time_in_seconds: 60 + response_filters: + - type: HttpResponseFilter + action: RETRY + http_codes: + - 500 + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - data + record_filter: + type: RecordFilter + condition: >- + {{ record['updated_at'] >= ( stream_state.get('prior_state', + {}).get('updated_at', 0) - (config.get('lookback_window', 0) * + 86400) if stream_state else stream_slice.get('prior_state', + {}).get('updated_at', 0) ) - (config.get('lookback_window', 0) * + 86400) }} + paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: request_parameter + field_name: scroll_param + page_size_option: + type: RequestOption + field_name: per_page + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + page_size: 150 + cursor_value: "{{ response.get('scroll_param') }}" + stop_condition: "{{ not response.get('data') }}" + incremental_sync: + type: CustomIncrementalSync + class_name: source_declarative_manifest.components.IncrementalSingleSliceCursor + cursor_field: updated_at + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/companies" + company_attributes: + type: DeclarativeStream + description: >- + https://developers.intercom.com/intercom-api-reference/reference#list-data-attributes + name: company_attributes + primary_key: + - name + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: data_attributes + http_method: GET + request_parameters: + model: company + request_headers: + Accept: application/json + Intercom-Version: "2.11" + error_handler: + type: CustomErrorHandler + class_name: source_declarative_manifest.components.ErrorHandlerWithRateLimiter + response_filters: + - type: HttpResponseFilter + action: FAIL + failure_type: config_error + error_message: >- + Failed to perform request. Error: Active subscription needed. + Please, validate your current Intercom plan to continue using + API. + error_message_contains: Active subscription needed + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - data + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + page_size_option: + type: RequestOption + field_name: per_page + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + page_size: 150 + cursor_value: "{{ response.get('pages', {}).get('next') }}" + stop_condition: "{{ 'next' not in response.get('pages', {}) }}" + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/company_attributes" + contact_attributes: + type: DeclarativeStream + description: >- + https://developers.intercom.com/intercom-api-reference/reference#list-data-attributes + name: contact_attributes + primary_key: + - name + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: data_attributes + http_method: GET + request_parameters: + model: contact + request_headers: + Accept: application/json + Intercom-Version: "2.11" + error_handler: + type: CustomErrorHandler + class_name: source_declarative_manifest.components.ErrorHandlerWithRateLimiter + response_filters: + - type: HttpResponseFilter + action: FAIL + failure_type: config_error + error_message: >- + Failed to perform request. Error: Active subscription needed. + Please, validate your current Intercom plan to continue using + API. + error_message_contains: Active subscription needed + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - data + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + page_size_option: + type: RequestOption + field_name: per_page + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + page_size: 150 + cursor_value: "{{ response.get('pages', {}).get('next') }}" + stop_condition: "{{ 'next' not in response.get('pages', {}) }}" + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/contact_attributes" + contacts: + type: DeclarativeStream + description: >- + https://developers.intercom.com/intercom-api-reference/reference/pagination-sorting-search + name: contacts + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: contacts/search + http_method: POST + request_headers: + Accept: application/json + Intercom-Version: "2.11" + request_body_json: + sort: "{'field': 'updated_at', 'order': 'ascending'}" + query: >- + { 'operator': 'OR', 'value': [ { 'field': 'updated_at', + 'operator': '>', 'value': {{ stream_slice.get('prior_state', + stream_state.get('prior_state', {})).get('updated_at') or + format_datetime(config['start_date'], '%s') }} }, { 'field': + 'updated_at', 'operator': '=', 'value': {{ + stream_slice.get('prior_state', stream_state.get('prior_state', + {})).get('updated_at') or format_datetime(config['start_date'], + '%s') }} }, ], } + pagination: >- + { 'per_page': {{ parameters.get('page_size') }}, 'page': {{ + next_page_token.get('next_page_token').get('page') }}, + 'starting_after': '{{ + next_page_token.get('next_page_token').get('starting_after') }}' } + error_handler: + type: CustomErrorHandler + class_name: source_declarative_manifest.components.ErrorHandlerWithRateLimiter + response_filters: + - type: HttpResponseFilter + action: FAIL + failure_type: config_error + error_message: >- + Failed to perform request. Error: Active subscription needed. + Please, validate your current Intercom plan to continue using + API. + error_message_contains: Active subscription needed + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - data + record_filter: + type: RecordFilter + condition: >- + {{ record['updated_at'] >= ( stream_state.get('prior_state', + {}).get('updated_at', 0) - (config.get('lookback_window', 0) * + 86400) if stream_state else stream_slice.get('prior_state', + {}).get('updated_at', 0) ) - (config.get('lookback_window', 0) * + 86400)}} + paginator: + type: DefaultPaginator + pagination_strategy: + type: CursorPagination + page_size: 150 + cursor_value: "{{ response.get('pages', {}).get('next') }}" + stop_condition: "{{ 'next' not in response.get('pages', {}) }}" + incremental_sync: + type: CustomIncrementalSync + class_name: source_declarative_manifest.components.IncrementalSingleSliceCursor + cursor_field: updated_at + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/contacts" + conversations: + type: DeclarativeStream + description: >- + https://developers.intercom.com/intercom-api-reference/reference/pagination-sorting-search + name: conversations + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: conversations/search + http_method: POST + request_headers: + Accept: application/json + Intercom-Version: "2.11" + request_body_json: + sort: "{'field': 'updated_at', 'order': 'ascending'}" + query: >- + { 'operator': 'OR', 'value': [ { 'field': 'updated_at', + 'operator': '>', 'value': {{ stream_slice.get('prior_state', + stream_state.get('prior_state', {})).get('updated_at') or + format_datetime(config['start_date'], '%s') }} }, { 'field': + 'updated_at', 'operator': '=', 'value': {{ + stream_slice.get('prior_state', stream_state.get('prior_state', + {})).get('updated_at') or format_datetime(config['start_date'], + '%s') }} }, ], } + pagination: >- + { 'per_page': {{ parameters.get('page_size') }}, 'page': {{ + next_page_token.get('next_page_token').get('page') }}, + 'starting_after': '{{ + next_page_token.get('next_page_token').get('starting_after') }}' } + error_handler: + type: CustomErrorHandler + class_name: source_declarative_manifest.components.ErrorHandlerWithRateLimiter + response_filters: + - type: HttpResponseFilter + action: FAIL + failure_type: config_error + error_message: >- + Failed to perform request. Error: Active subscription needed. + Please, validate your current Intercom plan to continue using + API. + error_message_contains: Active subscription needed + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - conversations + record_filter: + type: RecordFilter + condition: >- + {{ record['updated_at'] >= ( stream_state.get('prior_state', + {}).get('updated_at', 0) - (config.get('lookback_window', 0) * + 86400) if stream_state else stream_slice.get('prior_state', + {}).get('updated_at', 0) ) - (config.get('lookback_window', 0) * + 86400)}} + paginator: + type: DefaultPaginator + pagination_strategy: + type: CursorPagination + page_size: 150 + cursor_value: "{{ response.get('pages', {}).get('next') }}" + stop_condition: "{{ 'next' not in response.get('pages', {}) }}" + incremental_sync: + type: CustomIncrementalSync + class_name: source_declarative_manifest.components.IncrementalSingleSliceCursor + cursor_field: updated_at + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/conversations" + conversation_parts: + type: DeclarativeStream + name: conversation_parts + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: /conversations/{{ stream_slice.id }} + http_method: GET + request_headers: + Accept: application/json + Intercom-Version: "2.11" + error_handler: + type: CompositeErrorHandler + error_handlers: + - type: DefaultErrorHandler + response_filters: + - type: HttpResponseFilter + action: IGNORE + http_codes: + - 404 + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - conversation_parts + - conversation_parts + record_filter: + type: RecordFilter + condition: >- + {{ record['updated_at'] >= stream_state.get('prior_state', + {}).get('updated_at', 0) - (config.get('lookback_window', 0) * + 86400)}} + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: id + stream: + $ref: "#/definitions/streams/conversations" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%s" + - "%ms" + datetime_format: "%s" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config[\"start_date\"] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + transformations: + - type: AddFields + fields: + - path: + - conversation_id + value: "{{ stream_slice.id }}" + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/conversation_parts" + company_segments: + type: DeclarativeStream + name: company_segments + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: /companies/{{ stream_slice.id }}/segments + http_method: GET + request_headers: + Accept: application/json + Intercom-Version: "2.11" + error_handler: + type: CustomErrorHandler + class_name: source_declarative_manifest.components.ErrorHandlerWithRateLimiter + response_filters: + - type: HttpResponseFilter + action: FAIL + failure_type: config_error + error_message: >- + Failed to perform request. Error: Active subscription needed. + Please, validate your current Intercom plan to continue using + API. + error_message_contains: Active subscription needed + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - data + record_filter: + type: RecordFilter + condition: >- + {{ record['updated_at'] >= stream_state.get('prior_state', + {}).get('updated_at', 0) - (config.get('lookback_window', 0) * + 86400)}} + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: id + stream: + $ref: "#/definitions/streams/companies" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updated_at + cursor_datetime_formats: + - "%s" + - "%ms" + datetime_format: "%s" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config[\"start_date\"] }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/company_segments" + base_requester: + type: HttpRequester + url_base: https://api.intercom.io/ + authenticator: + type: BearerAuthenticator + api_token: "{{ config[\"access_token\"] }}" + +streams: + - $ref: "#/definitions/streams/activity_logs" + - $ref: "#/definitions/streams/admins" + - $ref: "#/definitions/streams/tags" + - $ref: "#/definitions/streams/teams" + - $ref: "#/definitions/streams/segments" + - $ref: "#/definitions/streams/companies" + - $ref: "#/definitions/streams/company_attributes" + - $ref: "#/definitions/streams/contact_attributes" + - $ref: "#/definitions/streams/contacts" + - $ref: "#/definitions/streams/conversations" + - $ref: "#/definitions/streams/conversation_parts" + - $ref: "#/definitions/streams/company_segments" + +spec: + type: Spec + connection_specification: + type: object + $schema: http://json-schema.org/draft-07/schema# + required: + - access_token + - start_date + properties: + access_token: + type: string + description: >- + Access token for making authenticated requests. See the Intercom + docs for more information. + order: 0 + title: Access token + airbyte_secret: true + client_id: + type: string + description: Client Id for your Intercom application. + order: 1 + title: Client Id + airbyte_secret: true + client_secret: + type: string + description: Client Secret for your Intercom application. + order: 2 + title: Client Secret + airbyte_secret: true + activity_logs_time_step: + type: integer + description: >- + Set lower value in case of failing long running sync of Activity Logs + stream. + order: 3 + title: Activity logs stream slice step size (in days) + default: 30 + maximum: 91 + minimum: 1 + examples: + - 30 + - 10 + - 5 + lookback_window: + type: integer + description: The number of days to shift the state value backward for record sync + order: 4 + title: Lookback window + default: 0 + minimum: 0 + examples: + - 60 + start_date: + type: string + description: >- + UTC date and time in the format 2017-01-25T00:00:00Z. Any data before + this date will not be replicated. + order: 5 + title: Start date + format: date-time + pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$ + examples: + - "2020-11-16T00:00:00Z" + additionalProperties: true + +metadata: + autoImportSchema: + activity_logs: false + admins: false + tags: false + teams: false + segments: false + companies: false + company_attributes: false + contact_attributes: false + contacts: false + conversations: false + conversation_parts: false + company_segments: false + yamlComponents: + streams: + activity_logs: + - errorHandler + admins: + - errorHandler + tags: + - errorHandler + teams: + - errorHandler + segments: + - errorHandler + - incrementalSync + companies: + - incrementalSync + company_attributes: + - errorHandler + contact_attributes: + - errorHandler + contacts: + - errorHandler + - incrementalSync + conversations: + - errorHandler + - incrementalSync + company_segments: + - errorHandler + testedStreams: + activity_logs: + hasRecords: true + streamHash: dae54ab01efc8a7c580121de6902d67a82a87775 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + admins: + hasRecords: true + streamHash: 62fdd12c98bec28f09edd3a34cd16bc5c1afedb6 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + tags: + hasRecords: true + streamHash: 53f9b051b3d5c8747598b848c006c80f3ffc5957 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + teams: + hasRecords: true + streamHash: d27cc586e29ddf6347d6356ea4970af99b7980c1 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + segments: + hasRecords: true + streamHash: 96ba554609834b986e20813cedf03a96183196fd + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + companies: + hasRecords: true + streamHash: 801c62e3a0a4028608efc4bf5f1ad5119b986973 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + company_attributes: + hasRecords: true + streamHash: e59558fb65b2c762d2b3f6b67e9071db2773cf13 + hasResponse: true + primaryKeysAreUnique: false + primaryKeysArePresent: true + responsesAreSuccessful: true + contact_attributes: + hasRecords: true + streamHash: 70d4afa7c3ad63877ab1fd9c185bd58c27ce06bc + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + contacts: + hasRecords: false + streamHash: e796494e6a861bce39d40f66e78e19b9ec633e29 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + conversations: + hasRecords: false + streamHash: 5838b59eba7d58cae3fe42eca72062bc9d29e1c4 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + conversation_parts: + streamHash: 67469544cde547846d7636a110f775313d6e04fc + company_segments: + hasRecords: true + streamHash: 41220115640c54deccc0e5a421991a60aa808d3c + hasResponse: true + primaryKeysAreUnique: false + primaryKeysArePresent: true + responsesAreSuccessful: true + assist: {} + +schemas: + activity_logs: + type: object + additionalProperties: true + properties: + metadata: + type: + - "null" + - object + description: Additional data or information related to the activity + activity_description: + type: + - "null" + - string + description: A description of the activity that took place + activity_type: + type: + - "null" + - string + description: The type or category of the activity + created_at: + type: + - "null" + - integer + description: The timestamp when the activity occurred + id: + type: + - "null" + - string + description: Unique identifier for the activity log entry + performed_by: + type: + - "null" + - object + description: The user who performed the activity + properties: + type: + type: + - "null" + - string + description: Type of the user who performed the activity (e.g., admin, user) + email: + type: + - "null" + - string + description: Email of the user who performed the activity + id: + type: + - "null" + - string + description: Unique identifier of the user who performed the activity + ip: + type: + - "null" + - string + description: IP address from where the activity was performed + admins: + type: object + additionalProperties: true + properties: + type: + type: + - "null" + - string + description: Type of the admin (e.g., full-time, part-time) + admin_ids: + description: Array of unique identifiers for admins + anyOf: + - type: array + items: + type: integer + - type: "null" + avatar: + type: + - "null" + - object + description: Admin avatar details + properties: + image_url: + type: + - "null" + - string + description: URL of the admin's avatar image + away_mode_enabled: + type: + - "null" + - boolean + description: Flag indicating if away mode is enabled for the admin + away_mode_reassign: + type: + - "null" + - boolean + description: Flag indicating if away mode reassignment is enabled for the admin + email: + type: + - "null" + - string + description: Email address of the admin + has_inbox_seat: + type: + - "null" + - boolean + description: Flag indicating if the admin has a seat in the inbox + id: + type: + - "null" + - string + description: Unique identifier for the admin + job_title: + type: + - "null" + - string + description: Job title of the admin + name: + type: + - "null" + - string + description: Name of the admin + team_ids: + description: Array of team identifiers the admin belongs to + anyOf: + - type: array + items: + type: integer + - type: "null" + team_priority_level: + type: + - "null" + - object + description: Detailed team priority level information for the admin + properties: + primary_team_ids: + type: + - "null" + - array + description: Array of primary team identifiers for the admin + items: + type: + - "null" + - integer + secondary_team_ids: + type: + - "null" + - array + description: Array of secondary team identifiers for the admin + items: + type: + - "null" + - integer + tags: + type: object + additionalProperties: true + properties: + type: + type: + - "null" + - string + description: Type of the tag indicating its purpose or category. + id: + type: + - "null" + - string + description: Unique identifier for the tag. + name: + type: + - "null" + - string + description: Name of the tag used for identification. + teams: + type: object + additionalProperties: true + properties: + type: + type: + - "null" + - string + description: Type of team (e.g., 'internal', 'external'). + admin_ids: + description: Array of user IDs representing the admins of the team. + anyOf: + - type: array + items: + type: integer + - type: "null" + id: + type: + - "null" + - string + description: Unique identifier for the team. + name: + type: + - "null" + - string + description: Name of the team. + segments: + type: object + additionalProperties: true + properties: + type: + type: + - "null" + - string + description: The type or category of the segment + count: + type: + - "null" + - integer + description: The number of items in the segment + created_at: + type: + - "null" + - integer + description: The date and time when the segment was created + id: + type: + - "null" + - string + description: Unique identifier for the segment + name: + type: + - "null" + - string + description: The name or title of the segment + person_type: + type: + - "null" + - string + description: Type of persons included in the segment + updated_at: + type: + - "null" + - integer + description: The date and time when the segment was last updated + companies: + type: object + additionalProperties: true + properties: + type: + type: + - "null" + - string + description: The type of the company + app_id: + type: + - "null" + - string + description: The ID of the application associated with the company + company_id: + type: + - "null" + - string + description: The unique identifier of the company + created_at: + type: + - "null" + - integer + description: The date and time when the company was created + custom_attributes: + type: + - "null" + - object + description: Custom attributes specific to the company + additionalProperties: true + id: + type: + - "null" + - string + description: The ID of the company + industry: + type: + - "null" + - string + description: The industry in which the company operates + monthly_spend: + type: + - "null" + - number + description: The monthly spend of the company + multipleOf: 1.e-8 + name: + type: + - "null" + - string + description: The name of the company + plan: + type: + - "null" + - object + description: Details of the company's subscription plan + properties: + type: + type: + - "null" + - string + description: The type of the subscription plan + id: + type: + - "null" + - string + description: The ID of the subscription plan + name: + type: + - "null" + - string + description: The name of the subscription plan + remote_created_at: + type: + - "null" + - integer + description: The remote date and time when the company was created + segments: + type: object + description: Segments associated with the company + properties: + type: + type: string + description: The type of segments associated with the company + segments: + type: array + description: List of segments + items: + type: + - "null" + - object + properties: + type: + type: string + description: The type of the segment + id: + type: string + description: The ID of the segment + session_count: + type: + - "null" + - integer + description: The number of sessions related to the company + size: + type: + - "null" + - integer + description: The size of the company + tags: + type: object + description: Tags associated with the company + properties: + type: + type: string + description: The type of tags associated with the company + tags: + type: array + description: List of tags + items: + type: + - "null" + - object + properties: + type: + type: string + description: The type of the tag + id: + description: The ID of the tag + oneOf: + - type: + - "null" + - string + - type: + - "null" + - integer + name: + type: string + description: The name of the tag + updated_at: + type: + - "null" + - integer + description: The date and time when the company was last updated + user_count: + type: + - "null" + - integer + description: The number of users associated with the company + website: + type: + - "null" + - string + description: The website of the company + company_attributes: + type: object + additionalProperties: true + properties: + type: + type: + - "null" + - string + description: Type of data structure for the company attribute. + description: + type: + - "null" + - string + description: Description or details about the company attribute. + admin_id: + type: + - "null" + - string + description: The ID of the admin user associated with the company. + api_writable: + type: + - "null" + - boolean + description: Indicates whether the field is writable through the API. + archived: + type: + - "null" + - boolean + description: Flag to indicate if the company data is archived. + created_at: + type: + - "null" + - integer + description: Timestamp when the company data was created. + custom: + type: + - "null" + - boolean + description: Custom attribute specific to the company. + data_type: + type: + - "null" + - string + description: Type of data stored in the attribute field. + full_name: + type: + - "null" + - string + description: Full name associated with the company. + id: + type: + - "null" + - integer + description: Unique ID assigned to the company attribute. + label: + type: + - "null" + - string + description: Label or display name for the company attribute. + messenger_writable: + type: + - "null" + - boolean + description: Indicates whether the field is writable through the messenger. + model: + type: + - "null" + - string + description: Model or schema used for storing the company attribute. + name: + type: + - "null" + - string + description: Name of the company attribute. + options: + description: Available options or values for the company attribute. + anyOf: + - type: array + items: + type: string + - type: "null" + ui_writable: + type: + - "null" + - boolean + description: Indicates whether the field is writable through the UI. + updated_at: + type: + - "null" + - integer + description: Timestamp when the company data was last updated. + contact_attributes: + type: object + additionalProperties: true + properties: + type: + type: + - "null" + - string + description: The type of contact attribute (e.g., text, number, boolean). + description: + type: + - "null" + - string + description: Description of the contact attribute for better understanding. + admin_id: + type: + - "null" + - string + description: Unique identifier for the admin associated with the contact attribute. + api_writable: + type: + - "null" + - boolean + description: Indicates whether the attribute is writable via API. + archived: + type: + - "null" + - boolean + description: Flag to signify if the contact attribute is archived. + created_at: + type: + - "null" + - integer + description: Timestamp of when the contact attribute was created. + custom: + type: + - "null" + - boolean + description: Indicates if the attribute is a custom user-defined field. + data_type: + type: + - "null" + - string + description: The data type of the contact attribute value. + full_name: + type: + - "null" + - string + description: The full name associated with the contact attribute. + id: + type: + - "null" + - integer + description: Unique identifier for the contact attribute. + label: + type: + - "null" + - string + description: Label representing the attribute in user interfaces. + messenger_writable: + type: + - "null" + - boolean + description: Indicates whether the attribute is writable via messenger. + model: + type: + - "null" + - string + description: Model to which the contact attribute is associated. + name: + type: + - "null" + - string + description: The name of the contact attribute. + options: + type: + - "null" + - array + description: List of available options for the attribute. + items: + type: + - "null" + - string + ui_writable: + type: + - "null" + - boolean + description: Indicates whether the attribute is writable via user interface. + updated_at: + type: + - "null" + - integer + description: Timestamp of when the contact attribute was last updated. + contacts: + type: object + additionalProperties: true + properties: + type: + type: + - "null" + - string + description: Type of contact. + android_app_name: + type: + - "null" + - string + description: The name of the Android app associated with the contact. + android_app_version: + type: + - "null" + - string + description: The version of the Android app associated with the contact. + android_device: + type: + - "null" + - string + description: The device used by the contact for Android. + android_last_seen_at: + type: + - "null" + - string + description: The date and time when the contact was last seen on Android. + format: date-time + android_os_version: + type: + - "null" + - string + description: The operating system version of the Android device. + android_sdk_version: + type: + - "null" + - string + description: The SDK version of the Android device. + avatar: + type: + - "null" + - string + description: URL pointing to the contact's avatar image. + browser: + type: + - "null" + - string + description: The browser used by the contact. + browser_language: + type: + - "null" + - string + description: The language preference set in the contact's browser. + browser_version: + type: + - "null" + - string + description: The version of the browser used by the contact. + companies: + type: + - "null" + - object + description: Companies associated with the contact. + properties: + type: + type: + - "null" + - string + description: Type of connection with the companies. + data: + type: + - "null" + - array + description: Array of company data associated with the contact. + items: + type: + - "null" + - object + properties: + type: + type: + - "null" + - string + description: Type of company. + id: + type: + - "null" + - string + description: The unique identifier of the company. + url: + type: + - "null" + - string + description: URL of the company. + has_more: + type: + - "null" + - boolean + description: Flag indicating if there are more companies to load. + total_count: + type: + - "null" + - integer + description: Total count of companies associated with the contact. + url: + type: + - "null" + - string + description: URL to access more company information. + created_at: + type: + - "null" + - integer + description: The date and time when the contact was created. + custom_attributes: + type: + - "null" + - object + description: Custom attributes defined for the contact. + additionalProperties: true + properties: {} + email: + type: + - "null" + - string + description: The email address of the contact. + external_id: + type: + - "null" + - string + description: External identifier for the contact. + has_hard_bounced: + type: + - "null" + - boolean + description: Flag indicating if the contact has hard bounced. + id: + type: + - "null" + - string + description: The unique identifier of the contact. + ios_app_name: + type: + - "null" + - string + description: The name of the iOS app associated with the contact. + ios_app_version: + type: + - "null" + - string + description: The version of the iOS app associated with the contact. + ios_device: + type: + - "null" + - string + description: The device used by the contact for iOS. + ios_last_seen_at: + type: + - "null" + - integer + description: The date and time when the contact was last seen on iOS. + ios_os_version: + type: + - "null" + - string + description: The operating system version of the iOS device. + ios_sdk_version: + type: + - "null" + - string + description: The SDK version of the iOS device. + language_override: + type: + - "null" + - string + description: Language override set for the contact. + last_contacted_at: + type: + - "null" + - integer + description: The date and time when the contact was last contacted. + last_email_clicked_at: + type: + - "null" + - integer + description: The date and time when the contact last clicked an email. + last_email_opened_at: + type: + - "null" + - integer + description: The date and time when the contact last opened an email. + last_replied_at: + type: + - "null" + - integer + description: The date and time when the contact last replied. + last_seen_at: + type: + - "null" + - integer + description: The date and time when the contact was last seen overall. + location: + type: + - "null" + - object + description: Location details of the contact. + properties: + type: + type: + - "null" + - string + description: Type of location. + city: + type: + - "null" + - string + description: City of the contact's location. + continent_code: + type: + - "null" + - string + description: Continent code of the contact's location. + country: + type: + - "null" + - string + description: Country of the contact's location. + country_code: + type: + - "null" + - string + description: Country code of the contact's location. + region: + type: + - "null" + - string + description: Region of the contact's location. + marked_email_as_spam: + type: + - "null" + - boolean + description: Flag indicating if the contact's email was marked as spam. + name: + type: + - "null" + - string + description: The name of the contact. + notes: + type: + - "null" + - object + description: Notes associated with the contact. + properties: + type: + type: + - "null" + - string + description: Type of connection with the notes. + data: + type: + - "null" + - array + description: Array of note data associated with the contact. + items: + type: + - "null" + - object + properties: + type: + type: + - "null" + - string + description: Type of note. + id: + type: + - "null" + - string + description: The unique identifier of the note. + url: + type: + - "null" + - string + description: URL of the note. + has_more: + type: + - "null" + - boolean + description: Flag indicating if there are more notes to load. + total_count: + type: + - "null" + - integer + description: Total count of notes associated with the contact. + url: + type: + - "null" + - string + description: URL to access more note information. + opted_in_subscription_types: + type: + - "null" + - object + description: Subscription types the contact opted into. + properties: + type: + type: + - "null" + - string + description: Type of connection with the subscription types. + data: + type: + - "null" + - array + description: Array of subscription type data opted into by the contact. + items: + type: + - "null" + - object + properties: + type: + type: + - "null" + - string + description: Type of subscription. + id: + type: + - "null" + - string + description: The unique identifier of the subscription type. + url: + type: + - "null" + - string + description: URL of the subscription type. + has_more: + type: + - "null" + - boolean + description: Flag indicating if there are more subscription types to load. + total_count: + type: + - "null" + - integer + description: Total count of subscription types the contact opted into. + url: + type: + - "null" + - string + description: URL to access more subscription type information. + opted_out_subscription_types: + type: + - "null" + - object + description: Subscription types the contact opted out from. + properties: + type: + type: + - "null" + - string + description: Type of connection with the subscription types. + data: + type: + - "null" + - array + description: Array of subscription type data opted out from by the contact. + items: + type: + - "null" + - object + properties: + type: + type: + - "null" + - string + description: Type of subscription. + id: + type: + - "null" + - string + description: The unique identifier of the subscription type. + url: + type: + - "null" + - string + description: URL of the subscription type. + has_more: + type: + - "null" + - boolean + description: Flag indicating if there are more subscription types to load. + total_count: + type: + - "null" + - integer + description: Total count of subscription types the contact opted out from. + url: + type: + - "null" + - string + description: URL to access more subscription type information. + os: + type: + - "null" + - string + description: Operating system of the contact's device. + owner_id: + type: + - "null" + - integer + description: The unique identifier of the contact's owner. + phone: + type: + - "null" + - string + description: The phone number of the contact. + referrer: + type: + - "null" + - string + description: Referrer information related to the contact. + role: + type: + - "null" + - string + description: Role or position of the contact. + signed_up_at: + type: + - "null" + - integer + description: The date and time when the contact signed up. + sms_consent: + type: + - "null" + - boolean + description: Consent status for SMS communication. + social_profiles: + type: + - "null" + - object + description: Social profiles associated with the contact. + properties: + type: + type: + - "null" + - string + description: Type of social profile connection. + data: + type: + - "null" + - array + description: Array of social profile data associated with the contact. + items: + type: + - "null" + - object + properties: + type: + type: + - "null" + - string + description: Type of social profile. + name: + type: + - "null" + - string + description: Name of the social profile. + url: + type: + - "null" + - string + description: URL of the social profile. + tags: + type: + - "null" + - object + description: Tags associated with the contact. + properties: + type: + type: + - "null" + - string + description: Type of connection with the tags. + data: + type: + - "null" + - array + description: Array of tag data associated with the contact. + items: + type: + - "null" + - object + properties: + type: + type: + - "null" + - string + description: Type of tag. + id: + type: + - "null" + - string + description: The unique identifier of the tag. + url: + type: + - "null" + - string + description: URL of the tag. + has_more: + type: + - "null" + - boolean + description: Flag indicating if there are more tags to load. + total_count: + type: + - "null" + - integer + description: Total count of tags associated with the contact. + url: + type: + - "null" + - string + description: URL to access more tag information. + unsubscribed_from_emails: + type: + - "null" + - boolean + description: Flag indicating if the contact unsubscribed from emails. + unsubscribed_from_sms: + type: + - "null" + - boolean + description: Flag indicating if the contact unsubscribed from SMS. + updated_at: + type: + - "null" + - integer + description: The date and time when the contact was last updated. + utm_campaign: + type: + - "null" + - string + description: Campaign data from UTM parameters. + utm_content: + type: + - "null" + - string + description: Content data from UTM parameters. + utm_medium: + type: + - "null" + - string + description: Medium data from UTM parameters. + utm_source: + type: + - "null" + - string + description: Source data from UTM parameters. + utm_term: + type: + - "null" + - string + description: Term data from UTM parameters. + workspace_id: + type: + - "null" + - string + description: The unique identifier of the workspace associated with the contact. + conversations: + type: object + additionalProperties: true + properties: + type: + type: + - "null" + - string + description: The type of the conversation + admin_assignee_id: + type: + - "null" + - integer + description: The ID of the administrator assigned to the conversation + assignee: + type: + - "null" + - object + description: The assigned user responsible for the conversation. + properties: + type: + type: + - "null" + - string + description: The type of the assignee (e.g., admin, agent) + email: + type: + - "null" + - string + description: The email of the assignee + id: + type: + - "null" + - string + description: The ID of the assignee + name: + type: + - "null" + - string + description: The name of the assignee + contacts: + type: + - "null" + - object + description: List of contacts involved in the conversation. + items: + type: + - "null" + - object + properties: + type: + type: + - "null" + - string + description: The type of the contact + id: + type: + - "null" + - string + description: The ID of the contact + conversation_message: + type: + - "null" + - object + description: The main message content of the conversation. + properties: + type: + type: + - "null" + - string + description: The type of the conversation message + attachments: + description: Attachments in the conversation message + anyOf: + - type: array + items: + type: object + properties: + type: + type: + - "null" + - string + content_type: + type: + - "null" + - string + filesize: + type: + - "null" + - integer + height: + type: + - "null" + - integer + name: + type: + - "null" + - string + url: + type: + - "null" + - string + width: + type: + - "null" + - integer + - type: "null" + author: + type: + - "null" + - object + description: The author of the conversation message. + properties: + type: + type: + - "null" + - string + description: The type of the author (e.g., admin, customer) + email: + type: + - "null" + - string + description: The email of the author of the message + id: + type: + - "null" + - string + description: The ID of the author of the message + name: + type: + - "null" + - string + description: The name of the author of the message + body: + type: + - "null" + - string + description: The body/content of the conversation message + delivered_as: + type: + - "null" + - string + description: The delivery status of the message + id: + type: + - "null" + - string + description: The ID of the conversation message + subject: + type: + - "null" + - string + description: The subject of the conversation message + url: + type: + - "null" + - string + description: The URL of the conversation message + conversation_rating: + type: + - "null" + - object + description: Ratings given to the conversation by the customer and teammate. + properties: + created_at: + type: + - "null" + - integer + description: The timestamp when the rating was created + customer: + type: + - "null" + - object + description: Rating given by the customer. + properties: + type: + type: + - "null" + - string + description: The type of the customer providing the rating + id: + type: + - "null" + - string + description: The ID of the customer who provided the rating + rating: + type: + - "null" + - integer + description: The rating given to the conversation + remark: + type: + - "null" + - string + description: Any remarks provided with the rating + teammate: + type: + - "null" + - object + description: Rating given by the teammate. + properties: + type: + type: + - "null" + - string + description: The type of the teammate being rated + id: + type: + - "null" + - integer + description: The ID of the teammate being rated + created_at: + type: + - "null" + - integer + description: The timestamp when the conversation was created + custom_attributes: + type: + - "null" + - object + description: Custom attributes associated with the conversation + customer_first_reply: + type: + - "null" + - object + description: Timestamp indicating when the customer first replied. + properties: + type: + type: + - "null" + - string + description: The type of the first customer reply + created_at: + type: + - "null" + - integer + description: The timestamp of the customer's first reply + url: + type: + - "null" + - string + description: The URL of the first customer reply + customers: + description: List of customers involved in the conversation + anyOf: + - type: array + items: + type: + - "null" + - object + properties: + type: + type: + - "null" + - string + id: + type: + - "null" + - string + - type: "null" + first_contact_reply: + type: + - "null" + - object + description: Timestamp indicating when the first contact replied. + properties: + type: + type: + - "null" + - string + description: The type of the first contact reply + created_at: + type: + - "null" + - integer + description: The timestamp of the first contact's reply + url: + type: + - "null" + - string + description: The URL of the first contact reply + id: + type: + - "null" + - string + description: The unique ID of the conversation + open: + type: + - "null" + - boolean + description: Indicates if the conversation is open or closed + priority: + type: + - "null" + - string + description: The priority level of the conversation + read: + type: + - "null" + - boolean + description: Indicates if the conversation has been read + redacted: + type: + - "null" + - boolean + description: Indicates if the conversation is redacted + sent_at: + type: + - "null" + - integer + description: The timestamp when the conversation was sent + sla_applied: + type: + - "null" + - object + description: Service Level Agreement details applied to the conversation. + properties: + sla_name: + type: + - "null" + - string + description: The name of the SLA applied + sla_status: + type: + - "null" + - string + description: The status of the SLA applied + snoozed_until: + type: + - "null" + - integer + description: Timestamp until the conversation is snoozed + source: + type: + - "null" + - object + description: Source details of the conversation. + properties: + type: + type: + - "null" + - string + description: The type of the source + attachments: + type: + - "null" + - array + description: Attachments related to the conversation. + items: + type: + - "null" + - object + additionalProperties: true + properties: {} + author: + type: + - "null" + - object + description: Author of the source. + properties: + type: + type: + - "null" + - string + description: The type of the source author (e.g., admin, customer) + email: + type: + - "null" + - string + description: The email of the source author + id: + type: + - "null" + - string + description: The ID of the source author + name: + type: + - "null" + - string + description: The name of the source author + body: + type: + - "null" + - string + description: The body/content of the source + delivered_as: + type: + - "null" + - string + description: The delivery status of the source + id: + type: + - "null" + - string + description: The ID of the source + redacted: + type: + - "null" + - boolean + description: Indicates if the source is redacted + subject: + type: + - "null" + - string + description: The subject of the source + url: + type: + - "null" + - string + description: The URL of the source + state: + type: + - "null" + - string + description: The state of the conversation (e.g., new, in progress) + statistics: + type: + - "null" + - object + description: Statistics related to the conversation. + properties: + type: + type: + - "null" + - string + description: The type of conversation statistics + count_assignments: + type: + - "null" + - integer + description: The total count of assignments for the conversation + count_conversation_parts: + type: + - "null" + - integer + description: The total count of conversation parts + count_reopens: + type: + - "null" + - integer + description: The total count of conversation reopens + first_admin_reply_at: + type: + - "null" + - integer + description: Timestamp of the first admin reply + first_assignment_at: + type: + - "null" + - integer + description: Timestamp of the first assignment + first_close_at: + type: + - "null" + - integer + description: Timestamp of the first conversation close + first_contact_reply_at: + type: + - "null" + - integer + description: Timestamp of the first contact reply + last_admin_reply_at: + type: + - "null" + - integer + description: Timestamp of the last admin reply + last_assignment_admin_reply_at: + type: + - "null" + - integer + description: Timestamp of the last assignment admin reply + last_assignment_at: + type: + - "null" + - integer + description: Timestamp of the last assignment + last_close_at: + type: + - "null" + - integer + description: Timestamp of the last conversation close + last_closed_by_id: + type: + - "null" + - integer + description: The ID of the last user who closed the conversation + last_contact_reply_at: + type: + - "null" + - integer + description: Timestamp of the last contact reply + median_time_to_reply: + type: + - "null" + - integer + description: The median time taken to reply to the conversation + time_to_admin_reply: + type: + - "null" + - integer + description: Time taken to reply by admin + time_to_assignment: + type: + - "null" + - integer + description: Time taken for assignment + time_to_first_close: + type: + - "null" + - integer + description: Time taken to first close the conversation + time_to_last_close: + type: + - "null" + - integer + description: Time taken to last close the conversation + tags: + type: + - "null" + - object + description: Tags applied to the conversation. + items: + type: + - "null" + - object + properties: + type: + type: + - "null" + - string + description: The type of the tag + applied_at: + type: + - "null" + - integer + description: Timestamp when the tag was applied + applied_by: + type: + - "null" + - object + description: User who applied the tag. + properties: + type: + type: + - "null" + - string + description: The type of the user who applied the tag + id: + type: + - "null" + - string + description: The ID of the user who applied the tag + id: + type: + - "null" + - string + description: The ID of the tag + name: + type: + - "null" + - string + description: The name of the tag + team_assignee_id: + type: + - "null" + - integer + description: The ID of the team assigned to the conversation + teammates: + type: + - "null" + - object + description: List of teammates involved in the conversation. + properties: + type: + type: + - "null" + - string + description: The type of teammates + admins: + type: + - "null" + - array + description: Admin teammates involved in the conversation. + items: + type: + - "null" + - object + properties: + type: + type: + - "null" + - string + description: The type of the teammate (admin) + id: + type: + - "null" + - string + description: The ID of the teammate admin + title: + type: + - "null" + - string + description: The title of the conversation + topics: + type: + - "null" + - object + description: Topics associated with the conversation. + properties: + type: + type: + - "null" + - string + description: The type of topics + topics: + type: + - "null" + - array + description: List of topics related to the conversation. + items: + type: + - "null" + - object + properties: + type: + type: + - "null" + - string + description: The type of the topic + id: + type: + - "null" + - integer + description: The ID of the topic + name: + type: + - "null" + - string + description: The name of the topic + total_count: + type: + - "null" + - integer + description: The total count of topics + updated_at: + type: + - "null" + - integer + description: The timestamp when the conversation was last updated + user: + type: + - "null" + - object + description: The user related to the conversation. + properties: + type: + type: + - "null" + - string + description: The type of the user + id: + type: + - "null" + - string + description: The ID of the user associated with the conversation + waiting_since: + type: + - "null" + - integer + description: Timestamp since waiting for a response + conversation_parts: + type: object + additionalProperties: true + properties: + type: + type: + - "null" + - string + description: The type of conversation part, such as message or note. + assigned_to: + description: >- + The user or team member who is assigned to handle this conversation + part. + oneOf: + - type: object + properties: + type: + type: + - "null" + - string + id: + type: + - "null" + - string + - type: string + - type: "null" + attachments: + type: + - "null" + - array + description: Represents the attachments associated with the conversation part. + items: + type: + - "null" + - object + properties: + type: + type: + - "null" + - string + description: The type or category of the attachment. + content_type: + type: + - "null" + - string + description: The MIME type of the attachment content. + filesize: + type: + - "null" + - integer + description: The size of the attachment file in bytes. + height: + type: + - "null" + - integer + description: The height dimension of the attachment in pixels. + name: + type: + - "null" + - string + description: The filename or name of the attachment. + url: + type: + - "null" + - string + description: The URL or location where the attachment can be accessed. + width: + type: + - "null" + - integer + description: The width dimension of the attachment in pixels. + author: + type: + - "null" + - object + description: Represents the author of the conversation part. + properties: + type: + type: + - "null" + - string + description: The type of author, such as customer or agent. + email: + type: + - "null" + - string + description: The email address of the conversation author. + id: + type: + - "null" + - string + description: The unique identifier of the conversation author. + name: + type: + - "null" + - string + description: The name of the conversation author. + body: + type: + - "null" + - string + description: The main content or message body of the conversation part. + conversation_created_at: + type: + - "null" + - integer + description: The date and time when the conversation was created. + conversation_id: + type: + - "null" + - string + description: The unique identifier of the conversation. + conversation_total_parts: + type: + - "null" + - integer + description: The total number of parts in the conversation. + conversation_updated_at: + type: + - "null" + - integer + description: The date and time when the conversation was last updated. + created_at: + type: + - "null" + - integer + description: The date and time when the conversation part was created. + external_id: + type: + - "null" + - string + description: An external identifier associated with the conversation part. + id: + type: + - "null" + - string + description: The unique identifier of the conversation part. + notified_at: + type: + - "null" + - integer + description: The date and time when the conversation part was last notified. + part_type: + type: + - "null" + - string + description: The type or category of the conversation part. + redacted: + type: + - "null" + - boolean + description: Indicates if the conversation part has been redacted or censored. + updated_at: + type: + - "null" + - integer + description: The date and time when the conversation part was last updated. + company_segments: + type: object + additionalProperties: true + properties: + type: + type: + - "null" + - string + description: The category or type of the company segment. + count: + type: + - "null" + - integer + description: The count of company segments returned in the response. + created_at: + type: + - "null" + - integer + description: The timestamp when the company segment was created. + id: + type: + - "null" + - string + description: The unique identifier associated with the company segment. + name: + type: + - "null" + - string + description: The name of the company segment. + person_type: + type: + - "null" + - string + description: The type of person associated with the company segment. + updated_at: + type: + - "null" + - integer + description: The timestamp when the company segment was last updated. diff --git a/airbyte-integrations/connectors/source-intercom/metadata.yaml b/airbyte-integrations/connectors/source-intercom/metadata.yaml index 2c41b0e6f0d8..46d6ae0c8d19 100644 --- a/airbyte-integrations/connectors/source-intercom/metadata.yaml +++ b/airbyte-integrations/connectors/source-intercom/metadata.yaml @@ -6,11 +6,11 @@ data: hosts: - api.intercom.io connectorBuildOptions: - baseImage: docker.io/airbyte/python-connector-base:2.0.0@sha256:c44839ba84406116e8ba68722a0f30e8f6e7056c726f447681bb9e9ece8bd916 + baseImage: docker.io/airbyte/source-declarative-manifest:5.14.0@sha256:accdf6c1bbcabd45b40f836692e4f3b1a1e1f0b28267973802ee212cd9c2c16a connectorSubtype: api connectorType: source definitionId: d8313939-3782-41b0-be29-b3ca20d8dd3a - dockerImageTag: 0.8.3 + dockerImageTag: 0.9.0-rc.1 dockerRepository: airbyte/source-intercom documentationUrl: https://docs.airbyte.com/integrations/sources/intercom githubIssueLabel: source-intercom @@ -18,9 +18,12 @@ data: license: MIT maxSecondsBetweenMessages: 60 name: Intercom + releases: + rolloutConfiguration: + enableProgressiveRollout: true remoteRegistries: pypi: - enabled: true + enabled: false packageName: airbyte-source-intercom registryOverrides: cloud: @@ -37,14 +40,16 @@ data: - companies supportLevel: certified tags: - - language:python - cdk:low-code + - language:manifest-only connectorTestSuitesOptions: - suite: liveTests testConnections: - name: intercom_config_dev_null id: 09ebd6bb-2756-4cd3-8ad5-7120088cc553 - - suite: unitTests + # We should enable unit tests once the connector has been updated to CDK version >= 6.10.0 + # Until then, the test suites won't be able to run successfully in CI as the fixtures are not present in earlier versions + # - suite: unitTests - suite: integrationTests testSecrets: - name: SECRET_SOURCE-INTERCOM_APIKEY__CREDS diff --git a/airbyte-integrations/connectors/source-intercom/pyproject.toml b/airbyte-integrations/connectors/source-intercom/pyproject.toml deleted file mode 100644 index 3d1c62bdfe63..000000000000 --- a/airbyte-integrations/connectors/source-intercom/pyproject.toml +++ /dev/null @@ -1,28 +0,0 @@ -[build-system] -requires = [ "poetry-core>=1.0.0",] -build-backend = "poetry.core.masonry.api" - -[tool.poetry] -version = "0.8.3" -name = "source-intercom" -description = "Source implementation for Intercom Yaml." -authors = [ "Airbyte ",] -license = "MIT" -readme = "README.md" -documentation = "https://docs.airbyte.com/integrations/sources/intercom" -homepage = "https://airbyte.com" -repository = "https://github.com/airbytehq/airbyte" -[[tool.poetry.packages]] -include = "source_intercom" - -[tool.poetry.dependencies] -python = "^3.10,<3.12" -airbyte-cdk = "^4.5.4" - -[tool.poetry.scripts] -source-intercom = "source_intercom.run:run" - -[tool.poetry.group.dev.dependencies] -requests-mock = "^1.9.3" -pytest-mock = "^3.12.0" -pytest = "^8.0.0" diff --git a/airbyte-integrations/connectors/source-intercom/source_intercom/__init__.py b/airbyte-integrations/connectors/source-intercom/source_intercom/__init__.py deleted file mode 100644 index de88c85697a0..000000000000 --- a/airbyte-integrations/connectors/source-intercom/source_intercom/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - - -from .source import SourceIntercom - -__all__ = ["SourceIntercom"] diff --git a/airbyte-integrations/connectors/source-intercom/source_intercom/manifest.yaml b/airbyte-integrations/connectors/source-intercom/source_intercom/manifest.yaml deleted file mode 100644 index feb1122b22e1..000000000000 --- a/airbyte-integrations/connectors/source-intercom/source_intercom/manifest.yaml +++ /dev/null @@ -1,2408 +0,0 @@ -version: 0.72.1 - -definitions: - ## bases - selector: - description: "Base records selector for Full Refresh streams" - extractor: - type: DpathExtractor - field_path: ["{{ parameters.get('data_field', 'data')}}"] - requester: - description: "Base Requester for Full Refresh streams" - type: HttpRequester - url_base: "https://api.intercom.io/" - http_method: "GET" - authenticator: - type: BearerAuthenticator - api_token: "{{ config['access_token'] }}" - request_headers: - # There is a bug in interpolation, causing the `2.10` string to be evaluated to `2.1`, cutting off the `0`. - # the workaround is to put the `string` inside the `string`, then it's evaluated properly to `2.10` - Intercom-Version: "'2.10'" - Accept: "application/json" - error_handler: - type: CustomErrorHandler - class_name: source_intercom.components.ErrorHandlerWithRateLimiter - response_filters: - - type: HttpResponseFilter - error_message_contains: "Active subscription needed" - action: FAIL - failure_type: config_error - error_message: "Failed to perform request. Error: Active subscription needed. Please, validate your current Intercom plan to continue using API." - retriever: - description: "Base Retriever for Full Refresh streams" - record_selector: - $ref: "#/definitions/selector" - requester: - $ref: "#/definitions/requester" - paginator: - type: "DefaultPaginator" - url_base: "#/definitions/requester/url_base" - pagination_strategy: - type: "CursorPagination" - cursor_value: "{{ response.get('pages', {}).get('next') }}" - stop_condition: "{{ 'next' not in response.get('pages', {}) }}" - page_size: 150 - page_size_option: - inject_into: request_parameter - field_name: per_page - page_token_option: - type: RequestPath - requester_incremental_search: - $ref: "#/definitions/requester" - http_method: "POST" - request_body_json: - query: - "{ 'operator': 'OR', 'value': [ { 'field': 'updated_at', 'operator': - '>', 'value': {{ stream_slice.get('prior_state', stream_state.get('prior_state', - {})).get('updated_at') or format_datetime(config['start_date'], '%s') }} }, - { 'field': 'updated_at', 'operator': '=', 'value': {{ stream_slice.get('prior_state', - stream_state.get('prior_state', {})).get('updated_at') or format_datetime(config['start_date'], - '%s') }} }, ], }" - sort: "{'field': 'updated_at', 'order': 'ascending'}" - pagination: - "{ 'per_page': {{ parameters.get('page_size') }}, 'page': {{ next_page_token.get('next_page_token').get('page') - }}, 'starting_after': '{{ next_page_token.get('next_page_token').get('starting_after') - }}' }" - - ## streams - # full-refresh - stream_full_refresh: - retriever: - $ref: "#/definitions/retriever" - admins: - description: "https://developers.intercom.com/intercom-api-reference/reference#list-admins" - $ref: "#/definitions/stream_full_refresh" - $parameters: - name: "admins" - primary_key: "id" - path: "admins" - data_field: "admins" - schema_loader: - type: InlineSchemaLoader - schema: - type: object - properties: - admin_ids: - description: Array of unique identifiers for admins - anyOf: - - type: array - items: - type: integer - - type: "null" - avatar: - description: Admin avatar details - type: - - "null" - - object - properties: - image_url: - description: URL of the admin's avatar image - type: - - "null" - - string - away_mode_enabled: - description: Flag indicating if away mode is enabled for the admin - type: - - "null" - - boolean - away_mode_reassign: - description: - Flag indicating if away mode reassignment is enabled for - the admin - type: - - "null" - - boolean - email: - description: Email address of the admin - type: - - "null" - - string - has_inbox_seat: - description: Flag indicating if the admin has a seat in the inbox - type: - - "null" - - boolean - id: - description: Unique identifier for the admin - type: - - "null" - - string - job_title: - description: Job title of the admin - type: - - "null" - - string - name: - description: Name of the admin - type: - - "null" - - string - team_ids: - description: Array of team identifiers the admin belongs to - anyOf: - - type: array - items: - type: integer - - type: "null" - type: - description: Type of the admin (e.g., full-time, part-time) - type: - - "null" - - string - team_priority_level: - description: Detailed team priority level information for the admin - type: - - "null" - - object - properties: - primary_team_ids: - description: Array of primary team identifiers for the admin - type: - - "null" - - array - items: - type: - - "null" - - integer - secondary_team_ids: - description: Array of secondary team identifiers for the admin - type: - - "null" - - array - items: - type: - - "null" - - integer - tags: - description: "https://developers.intercom.com/intercom-api-reference/reference#list-tags-for-an-app" - $ref: "#/definitions/stream_full_refresh" - $parameters: - name: "tags" - primary_key: "name" - path: "tags" - schema_loader: - type: InlineSchemaLoader - schema: - type: object - properties: - id: - description: Unique identifier for the tag. - type: - - "null" - - string - name: - description: Name of the tag used for identification. - type: - - "null" - - string - type: - description: Type of the tag indicating its purpose or category. - type: - - "null" - - string - teams: - description: "https://developers.intercom.com/intercom-api-reference/reference#list-teams" - $ref: "#/definitions/stream_full_refresh" - $parameters: - name: "teams" - primary_key: "name" - path: "teams" - data_field: "teams" - - schema_loader: - type: InlineSchemaLoader - schema: - type: object - properties: - admin_ids: - description: Array of user IDs representing the admins of the team. - anyOf: - - type: array - items: - type: integer - - type: "null" - id: - description: Unique identifier for the team. - type: - - "null" - - string - name: - description: Name of the team. - type: - - "null" - - string - type: - description: Type of team (e.g., 'internal', 'external'). - type: - - "null" - - string - stream_data_attributes: - description: "https://developers.intercom.com/intercom-api-reference/reference#list-data-attributes" - $ref: "#/definitions/stream_full_refresh" - retriever: - $ref: "#/definitions/retriever" - requester: - $ref: "#/definitions/requester" - request_parameters: - model: "{{ parameters.get('model') }}" - company_attributes: - description: "https://developers.intercom.com/intercom-api-reference/reference#list-data-attributes" - $ref: "#/definitions/stream_data_attributes" - $parameters: - name: "company_attributes" - primary_key: "name" - path: "data_attributes" - model: "company" - schema_loader: - type: InlineSchemaLoader - schema: - type: object - properties: - id: - description: Unique ID assigned to the company attribute. - type: - - "null" - - integer - admin_id: - description: The ID of the admin user associated with the company. - type: - - "null" - - string - api_writable: - description: Indicates whether the field is writable through the API. - type: - - "null" - - boolean - archived: - description: Flag to indicate if the company data is archived. - type: - - "null" - - boolean - created_at: - description: Timestamp when the company data was created. - type: - - "null" - - integer - custom: - description: Custom attribute specific to the company. - type: - - "null" - - boolean - data_type: - description: Type of data stored in the attribute field. - type: - - "null" - - string - description: - description: Description or details about the company attribute. - type: - - "null" - - string - full_name: - description: Full name associated with the company. - type: - - "null" - - string - label: - description: Label or display name for the company attribute. - type: - - "null" - - string - model: - description: Model or schema used for storing the company attribute. - type: - - "null" - - string - name: - description: Name of the company attribute. - type: - - "null" - - string - options: - description: Available options or values for the company attribute. - anyOf: - - type: array - items: - type: string - - type: "null" - type: - description: Type of data structure for the company attribute. - type: - - "null" - - string - ui_writable: - description: Indicates whether the field is writable through the UI. - type: - - "null" - - boolean - updated_at: - description: Timestamp when the company data was last updated. - type: - - "null" - - integer - messenger_writable: - description: Indicates whether the field is writable through the messenger. - type: - - "null" - - boolean - contact_attributes: - description: "https://developers.intercom.com/intercom-api-reference/reference#list-data-attributes" - $ref: "#/definitions/stream_data_attributes" - $parameters: - name: "contact_attributes" - primary_key: "name" - path: "data_attributes" - model: "contact" - - # semi-incremental - # (full-refresh and emit records >= *prior state) - # (prior state - frozen state from previous sync, it automatically updates with next sync) - schema_loader: - type: InlineSchemaLoader - schema: - type: object - properties: - id: - description: Unique identifier for the contact attribute. - type: - - "null" - - integer - type: - description: The type of contact attribute (e.g., text, number, boolean). - type: - - "null" - - string - model: - description: Model to which the contact attribute is associated. - type: - - "null" - - string - name: - description: The name of the contact attribute. - type: - - "null" - - string - full_name: - description: The full name associated with the contact attribute. - type: - - "null" - - string - label: - description: Label representing the attribute in user interfaces. - type: - - "null" - - string - description: - description: Description of the contact attribute for better understanding. - type: - - "null" - - string - data_type: - description: The data type of the contact attribute value. - type: - - "null" - - string - options: - description: List of available options for the attribute. - type: - - "null" - - array - items: - type: - - "null" - - string - api_writable: - description: Indicates whether the attribute is writable via API. - type: - - "null" - - boolean - ui_writable: - description: Indicates whether the attribute is writable via user interface. - type: - - "null" - - boolean - custom: - description: Indicates if the attribute is a custom user-defined field. - type: - - "null" - - boolean - archived: - description: Flag to signify if the contact attribute is archived. - type: - - "null" - - boolean - admin_id: - description: - Unique identifier for the admin associated with the contact - attribute. - type: - - "null" - - string - created_at: - description: Timestamp of when the contact attribute was created. - type: - - "null" - - integer - updated_at: - description: Timestamp of when the contact attribute was last updated. - type: - - "null" - - integer - messenger_writable: - description: Indicates whether the attribute is writable via messenger. - type: - - "null" - - boolean - stream_semi_incremental: - $ref: "#/definitions/stream_full_refresh" - incremental_sync: - type: CustomIncrementalSync - class_name: source_intercom.components.IncrementalSingleSliceCursor - cursor_field: "updated_at" - retriever: - $ref: "#/definitions/stream_full_refresh/retriever" - record_selector: - $ref: "#/definitions/selector" - record_filter: - condition: - "{{ record['updated_at'] >= ( stream_state.get('prior_state', - {}).get('updated_at', 0) - (config.get('lookback_window', 0) * 86400) if stream_state else stream_slice.get('prior_state', - {}).get('updated_at', 0) ) - (config.get('lookback_window', 0) * 86400) }}" - segments: - description: "https://developers.intercom.com/intercom-api-reference/reference#list-segments" - $ref: "#/definitions/stream_semi_incremental" - $parameters: - name: "segments" - primary_key: "id" - path: "segments" - data_field: "segments" - schema_loader: - type: InlineSchemaLoader - schema: - type: object - properties: - created_at: - description: The date and time when the segment was created - type: - - "null" - - integer - count: - description: The number of items in the segment - type: - - "null" - - integer - id: - description: Unique identifier for the segment - type: - - "null" - - string - name: - description: The name or title of the segment - type: - - "null" - - string - type: - description: The type or category of the segment - type: - - "null" - - string - person_type: - description: Type of persons included in the segment - type: - - "null" - - string - updated_at: - description: The date and time when the segment was last updated - type: - - "null" - - integer - companies: - description: "https://developers.intercom.com/intercom-api-reference/reference/scroll-over-all-companies" - $ref: "#/definitions/stream_semi_incremental" - $parameters: - name: "companies" - primary_key: "id" - path: "companies/scroll" - retriever: - $ref: "#/definitions/stream_semi_incremental/retriever" - paginator: - type: "DefaultPaginator" - url_base: "#/definitions/requester/url_base" - pagination_strategy: - type: "CursorPagination" - cursor_value: "{{ response.get('scroll_param') }}" - stop_condition: "{{ not response.get('data') }}" - page_size: 150 - page_size_option: - inject_into: request_parameter - field_name: per_page - page_token_option: - type: RequestOption - field_name: scroll_param - inject_into: request_parameter - requester: - $ref: "#/definitions/requester" - error_handler: - type: CompositeErrorHandler - error_handlers: - - type: DefaultErrorHandler - description: - " 400 - existing scroll_param, need to wait at least 60 sec - to continue and retry 500 - server-side error, should retry after 60 - sec. " - response_filters: - - http_codes: [400] - action: RETRY - failure_type: transient_error - error_message: "A scroll (export) job is already in progress for this Intercom account, causing the request to fail. Only one active scroll per Intercom account is allowed; ensure no overlap by limiting active connections or scheduling jobs appropriately." - - http_codes: [500] - action: RETRY - failure_type: transient_error - backoff_strategies: - - type: ConstantBackoffStrategy - backoff_time_in_seconds: 60 - - type: DefaultErrorHandler - description: - "404 - scroll_param is expired or not found while requesting, - ignore" - response_filters: - - http_codes: [404] - action: IGNORE - - # semi-incremental substreams - schema_loader: - type: InlineSchemaLoader - schema: - type: object - properties: - type: - description: The type of the company - type: - - "null" - - string - company_id: - description: The unique identifier of the company - type: - - "null" - - string - id: - description: The ID of the company - type: - - "null" - - string - app_id: - description: The ID of the application associated with the company - type: - - "null" - - string - name: - description: The name of the company - type: - - "null" - - string - created_at: - description: The date and time when the company was created - type: - - "null" - - integer - updated_at: - description: The date and time when the company was last updated - type: - - "null" - - integer - monthly_spend: - description: The monthly spend of the company - type: - - "null" - - number - multipleOf: 0.00000001 - session_count: - description: The number of sessions related to the company - type: - - "null" - - integer - user_count: - description: The number of users associated with the company - type: - - "null" - - integer - size: - description: The size of the company - type: - - "null" - - integer - tags: - description: Tags associated with the company - type: object - properties: - type: - description: The type of tags associated with the company - type: string - tags: - description: List of tags - type: array - items: - type: - - "null" - - object - properties: - type: - description: The type of the tag - type: string - name: - description: The name of the tag - type: string - id: - description: The ID of the tag - oneOf: - - type: - - "null" - - string - - type: - - "null" - - integer - segments: - description: Segments associated with the company - type: object - properties: - type: - description: The type of segments associated with the company - type: string - segments: - description: List of segments - type: array - items: - type: - - "null" - - object - properties: - type: - description: The type of the segment - type: string - id: - description: The ID of the segment - type: string - plan: - description: Details of the company's subscription plan - type: - - "null" - - object - properties: - id: - description: The ID of the subscription plan - type: - - "null" - - string - name: - description: The name of the subscription plan - type: - - "null" - - string - type: - description: The type of the subscription plan - type: - - "null" - - string - custom_attributes: - description: Custom attributes specific to the company - type: - - "null" - - object - additionalProperties: true - industry: - description: The industry in which the company operates - type: - - "null" - - string - remote_created_at: - description: The remote date and time when the company was created - type: - - "null" - - integer - website: - description: The website of the company - type: - - "null" - - string - substream_semi_incremental: - $ref: "#/definitions/stream_full_refresh" - incremental_sync: - type: CustomIncrementalSync - class_name: source_intercom.components.IncrementalSubstreamSlicerCursor - cursor_field: "updated_at" - retriever: - $ref: "#/definitions/stream_full_refresh/retriever" - paginator: - type: "NoPagination" - record_selector: - $ref: "#/definitions/selector" - record_filter: - condition: - "{{ record['updated_at'] >= stream_state.get('prior_state', {}).get('updated_at', - 0) - (config.get('lookback_window', 0) * 86400)}}" - conversation_parts: - $ref: "#/definitions/substream_semi_incremental" - incremental_sync: - $ref: "#/definitions/substream_semi_incremental/incremental_sync" - parent_stream_configs: - - type: ParentStreamConfig - stream: "#/definitions/conversations" - parent_key: "id" - partition_field: "id" - $parameters: - name: "conversation_parts" - primary_key: "id" - path: "/conversations/{{ stream_slice.id }}" - transformations: - - type: AddFields - fields: - - path: ["conversation_id"] - value: "'{{ stream_slice.id }}'" - retriever: - $ref: "#/definitions/substream_semi_incremental/retriever" - record_selector: - $ref: "#/definitions/substream_semi_incremental/retriever/record_selector" - extractor: - field_path: ["conversation_parts", "conversation_parts"] - requester: - $ref: "#/definitions/requester" - error_handler: - type: DefaultErrorHandler - description: "404 - conversation is not found while requesting, ignore" - response_filters: - - http_codes: [404] - action: IGNORE - schema_loader: - type: InlineSchemaLoader - schema: - type: object - properties: - assigned_to: - description: - The user or team member who is assigned to handle this conversation - part. - oneOf: - - type: object - properties: - type: - type: - - "null" - - string - id: - type: - - "null" - - string - - type: string - - type: "null" - attachments: - description: - Represents the attachments associated with the conversation - part. - type: - - "null" - - array - items: - type: - - "null" - - object - properties: - type: - description: The type or category of the attachment. - type: - - "null" - - string - name: - description: The filename or name of the attachment. - type: - - "null" - - string - url: - description: The URL or location where the attachment can be accessed. - type: - - "null" - - string - content_type: - description: The MIME type of the attachment content. - type: - - "null" - - string - filesize: - description: The size of the attachment file in bytes. - type: - - "null" - - integer - height: - description: The height dimension of the attachment in pixels. - type: - - "null" - - integer - width: - description: The width dimension of the attachment in pixels. - type: - - "null" - - integer - author: - description: Represents the author of the conversation part. - type: - - "null" - - object - properties: - id: - description: The unique identifier of the conversation author. - type: - - "null" - - string - type: - description: The type of author, such as customer or agent. - type: - - "null" - - string - name: - description: The name of the conversation author. - type: - - "null" - - string - email: - description: The email address of the conversation author. - type: - - "null" - - string - body: - description: The main content or message body of the conversation part. - type: - - "null" - - string - conversation_id: - description: The unique identifier of the conversation. - type: - - "null" - - string - conversation_created_at: - description: The date and time when the conversation was created. - type: - - "null" - - integer - conversation_updated_at: - description: The date and time when the conversation was last updated. - type: - - "null" - - integer - conversation_total_parts: - description: The total number of parts in the conversation. - type: - - "null" - - integer - created_at: - description: The date and time when the conversation part was created. - type: - - "null" - - integer - external_id: - description: An external identifier associated with the conversation part. - type: - - "null" - - string - id: - description: The unique identifier of the conversation part. - type: - - "null" - - string - notified_at: - description: The date and time when the conversation part was last notified. - type: - - "null" - - integer - part_type: - description: The type or category of the conversation part. - type: - - "null" - - string - type: - description: The type of conversation part, such as message or note. - type: - - "null" - - string - updated_at: - description: The date and time when the conversation part was last updated. - type: - - "null" - - integer - redacted: - description: Indicates if the conversation part has been redacted or censored. - type: - - "null" - - boolean - company_segments: - $ref: "#/definitions/substream_semi_incremental" - $parameters: - name: "company_segments" - primary_key: "id" - path: "/companies/{{ stream_slice.id }}/segments" - incremental_sync: - $ref: "#/definitions/substream_semi_incremental/incremental_sync" - parent_complete_fetch: true - parent_stream_configs: - - type: ParentStreamConfig - stream: "#/definitions/companies" - parent_key: "id" - partition_field: "id" - retriever: - $ref: "#/definitions/substream_semi_incremental/retriever" - - # incremental search - schema_loader: - type: InlineSchemaLoader - schema: - type: object - properties: - created_at: - description: The timestamp when the company segment was created. - type: - - "null" - - integer - count: - description: The count of company segments returned in the response. - type: - - "null" - - integer - id: - description: The unique identifier associated with the company segment. - type: - - "null" - - string - name: - description: The name of the company segment. - type: - - "null" - - string - type: - description: The category or type of the company segment. - type: - - "null" - - string - person_type: - description: The type of person associated with the company segment. - type: - - "null" - - string - updated_at: - description: The timestamp when the company segment was last updated. - type: - - "null" - - integer - stream_incremental_search: - description: "https://developers.intercom.com/intercom-api-reference/reference/pagination-sorting-search" - $ref: "#/definitions/stream_full_refresh" - incremental_sync: - type: CustomIncrementalSync - class_name: source_intercom.components.IncrementalSingleSliceCursor - cursor_field: "updated_at" - retriever: - $ref: "#/definitions/stream_full_refresh/retriever" - requester: - $ref: "#/definitions/requester_incremental_search" - record_selector: - $ref: "#/definitions/selector" - record_filter: - description: "https://developers.intercom.com/intercom-api-reference/reference/pagination-sorting-search#pagination" - condition: - "{{ record['updated_at'] >= ( stream_state.get('prior_state', - {}).get('updated_at', 0) - (config.get('lookback_window', 0) * 86400) if stream_state else stream_slice.get('prior_state', - {}).get('updated_at', 0) ) - (config.get('lookback_window', 0) * 86400)}}" - paginator: - type: "DefaultPaginator" - url_base: "#/definitions/requester/url_base" - pagination_strategy: - type: "CursorPagination" - cursor_value: "{{ response.get('pages', {}).get('next') }}" - stop_condition: "{{ 'next' not in response.get('pages', {}) }}" - contacts: - $ref: "#/definitions/stream_incremental_search" - $parameters: - name: "contacts" - path: "contacts/search" - page_size: 150 - schema_loader: - type: InlineSchemaLoader - schema: - type: object - properties: - type: - description: Type of contact. - type: - - "null" - - string - id: - description: The unique identifier of the contact. - type: - - "null" - - string - workspace_id: - description: - The unique identifier of the workspace associated with the - contact. - type: - - "null" - - string - external_id: - description: External identifier for the contact. - type: - - "null" - - string - role: - description: Role or position of the contact. - type: - - "null" - - string - email: - description: The email address of the contact. - type: - - "null" - - string - phone: - description: The phone number of the contact. - type: - - "null" - - string - name: - description: The name of the contact. - type: - - "null" - - string - avatar: - description: URL pointing to the contact's avatar image. - type: - - "null" - - string - owner_id: - description: The unique identifier of the contact's owner. - type: - - "null" - - integer - social_profiles: - description: Social profiles associated with the contact. - type: - - "null" - - object - properties: - type: - description: Type of social profile connection. - type: - - "null" - - string - data: - description: Array of social profile data associated with the contact. - type: - - "null" - - array - items: - type: - - "null" - - object - properties: - type: - description: Type of social profile. - type: - - "null" - - string - name: - description: Name of the social profile. - type: - - "null" - - string - url: - description: URL of the social profile. - type: - - "null" - - string - has_hard_bounced: - description: Flag indicating if the contact has hard bounced. - type: - - "null" - - boolean - marked_email_as_spam: - description: Flag indicating if the contact's email was marked as spam. - type: - - "null" - - boolean - unsubscribed_from_emails: - description: Flag indicating if the contact unsubscribed from emails. - type: - - "null" - - boolean - unsubscribed_from_sms: - description: Flag indicating if the contact unsubscribed from SMS. - type: - - "null" - - boolean - created_at: - description: The date and time when the contact was created. - type: - - "null" - - integer - updated_at: - description: The date and time when the contact was last updated. - type: - - "null" - - integer - signed_up_at: - description: The date and time when the contact signed up. - type: - - "null" - - integer - sms_consent: - description: Consent status for SMS communication. - type: - - "null" - - boolean - last_seen_at: - description: The date and time when the contact was last seen overall. - type: - - "null" - - integer - last_replied_at: - description: The date and time when the contact last replied. - type: - - "null" - - integer - last_contacted_at: - description: The date and time when the contact was last contacted. - type: - - "null" - - integer - last_email_opened_at: - description: The date and time when the contact last opened an email. - type: - - "null" - - integer - last_email_clicked_at: - description: The date and time when the contact last clicked an email. - type: - - "null" - - integer - language_override: - description: Language override set for the contact. - type: - - "null" - - string - browser: - description: The browser used by the contact. - type: - - "null" - - string - browser_version: - description: The version of the browser used by the contact. - type: - - "null" - - string - browser_language: - description: The language preference set in the contact's browser. - type: - - "null" - - string - os: - description: Operating system of the contact's device. - type: - - "null" - - string - location: - description: Location details of the contact. - type: - - "null" - - object - properties: - type: - description: Type of location. - type: - - "null" - - string - country: - description: Country of the contact's location. - type: - - "null" - - string - region: - description: Region of the contact's location. - type: - - "null" - - string - city: - description: City of the contact's location. - type: - - "null" - - string - continent_code: - description: Continent code of the contact's location. - type: - - "null" - - string - country_code: - description: Country code of the contact's location. - type: - - "null" - - string - android_app_name: - description: The name of the Android app associated with the contact. - type: - - "null" - - string - android_app_version: - description: The version of the Android app associated with the contact. - type: - - "null" - - string - android_device: - description: The device used by the contact for Android. - type: - - "null" - - string - android_os_version: - description: The operating system version of the Android device. - type: - - "null" - - string - android_sdk_version: - description: The SDK version of the Android device. - type: - - "null" - - string - android_last_seen_at: - description: The date and time when the contact was last seen on Android. - type: - - "null" - - string - format: date-time - ios_app_name: - description: The name of the iOS app associated with the contact. - type: - - "null" - - string - ios_app_version: - description: The version of the iOS app associated with the contact. - type: - - "null" - - string - ios_device: - description: The device used by the contact for iOS. - type: - - "null" - - string - ios_os_version: - description: The operating system version of the iOS device. - type: - - "null" - - string - ios_sdk_version: - description: The SDK version of the iOS device. - type: - - "null" - - string - ios_last_seen_at: - description: The date and time when the contact was last seen on iOS. - type: - - "null" - - integer - custom_attributes: - description: Custom attributes defined for the contact. - type: - - "null" - - object - additionalProperties: true - properties: {} - tags: - description: Tags associated with the contact. - type: - - "null" - - object - properties: - type: - description: Type of connection with the tags. - type: - - "null" - - string - data: - description: Array of tag data associated with the contact. - type: - - "null" - - array - items: - type: - - "null" - - object - properties: - type: - description: Type of tag. - type: - - "null" - - string - id: - description: The unique identifier of the tag. - type: - - "null" - - string - url: - description: URL of the tag. - type: - - "null" - - string - url: - description: URL to access more tag information. - type: - - "null" - - string - total_count: - description: Total count of tags associated with the contact. - type: - - "null" - - integer - has_more: - description: Flag indicating if there are more tags to load. - type: - - "null" - - boolean - notes: - description: Notes associated with the contact. - type: - - "null" - - object - properties: - type: - description: Type of connection with the notes. - type: - - "null" - - string - data: - description: Array of note data associated with the contact. - type: - - "null" - - array - items: - type: - - "null" - - object - properties: - type: - description: Type of note. - type: - - "null" - - string - id: - description: The unique identifier of the note. - type: - - "null" - - string - url: - description: URL of the note. - type: - - "null" - - string - url: - description: URL to access more note information. - type: - - "null" - - string - total_count: - description: Total count of notes associated with the contact. - type: - - "null" - - integer - has_more: - description: Flag indicating if there are more notes to load. - type: - - "null" - - boolean - companies: - description: Companies associated with the contact. - type: - - "null" - - object - properties: - type: - description: Type of connection with the companies. - type: - - "null" - - string - data: - description: Array of company data associated with the contact. - type: - - "null" - - array - items: - type: - - "null" - - object - properties: - type: - description: Type of company. - type: - - "null" - - string - id: - description: The unique identifier of the company. - type: - - "null" - - string - url: - description: URL of the company. - type: - - "null" - - string - url: - description: URL to access more company information. - type: - - "null" - - string - total_count: - description: Total count of companies associated with the contact. - type: - - "null" - - integer - has_more: - description: Flag indicating if there are more companies to load. - type: - - "null" - - boolean - opted_out_subscription_types: - description: Subscription types the contact opted out from. - type: - - "null" - - object - properties: - type: - description: Type of connection with the subscription types. - type: - - "null" - - string - data: - description: - Array of subscription type data opted out from by the - contact. - type: - - "null" - - array - items: - type: - - "null" - - object - properties: - type: - description: Type of subscription. - type: - - "null" - - string - id: - description: The unique identifier of the subscription type. - type: - - "null" - - string - url: - description: URL of the subscription type. - type: - - "null" - - string - url: - description: URL to access more subscription type information. - type: - - "null" - - string - total_count: - description: - Total count of subscription types the contact opted out - from. - type: - - "null" - - integer - has_more: - description: - Flag indicating if there are more subscription types - to load. - type: - - "null" - - boolean - opted_in_subscription_types: - description: Subscription types the contact opted into. - type: - - "null" - - object - properties: - type: - description: Type of connection with the subscription types. - type: - - "null" - - string - data: - description: Array of subscription type data opted into by the contact. - type: - - "null" - - array - items: - type: - - "null" - - object - properties: - type: - description: Type of subscription. - type: - - "null" - - string - id: - description: The unique identifier of the subscription type. - type: - - "null" - - string - url: - description: URL of the subscription type. - type: - - "null" - - string - url: - description: URL to access more subscription type information. - type: - - "null" - - string - total_count: - description: Total count of subscription types the contact opted into. - type: - - "null" - - integer - has_more: - description: - Flag indicating if there are more subscription types - to load. - type: - - "null" - - boolean - utm_content: - description: Content data from UTM parameters. - type: - - "null" - - string - utm_campaign: - description: Campaign data from UTM parameters. - type: - - "null" - - string - utm_source: - description: Source data from UTM parameters. - type: - - "null" - - string - referrer: - description: Referrer information related to the contact. - type: - - "null" - - string - utm_term: - description: Term data from UTM parameters. - type: - - "null" - - string - utm_medium: - description: Medium data from UTM parameters. - type: - - "null" - - string - conversations: - $ref: "#/definitions/stream_incremental_search" - retriever: - $ref: "#/definitions/stream_incremental_search/retriever" - requester: - $ref: "#/definitions/requester_incremental_search" - request_headers: - # API version header - # There are 404 - User Not Found issue, when `2.10` is used, for certain users: - # https://github.com/airbytehq/oncall/issues/4514 - Intercom-Version: "2.9" - Accept: "application/json" - $parameters: - name: "conversations" - path: "conversations/search" - data_field: "conversations" - page_size: 150 - - # activity logs stream is incremental based on created_at field - schema_loader: - type: InlineSchemaLoader - schema: - type: object - properties: - assignee: - description: The assigned user responsible for the conversation. - type: - - "null" - - object - properties: - id: - description: The ID of the assignee - type: - - "null" - - string - type: - description: The type of the assignee (e.g., admin, agent) - type: - - "null" - - string - name: - description: The name of the assignee - type: - - "null" - - string - email: - description: The email of the assignee - type: - - "null" - - string - source: - description: Source details of the conversation. - type: - - "null" - - object - properties: - type: - description: The type of the source - type: - - "null" - - string - id: - description: The ID of the source - type: - - "null" - - string - redacted: - description: Indicates if the source is redacted - type: - - "null" - - boolean - delivered_as: - description: The delivery status of the source - type: - - "null" - - string - subject: - description: The subject of the source - type: - - "null" - - string - body: - description: The body/content of the source - type: - - "null" - - string - author: - description: Author of the source. - type: - - "null" - - object - properties: - id: - description: The ID of the source author - type: - - "null" - - string - type: - description: The type of the source author (e.g., admin, customer) - type: - - "null" - - string - name: - description: The name of the source author - type: - - "null" - - string - email: - description: The email of the source author - type: - - "null" - - string - attachments: - description: Attachments related to the conversation. - type: - - "null" - - array - items: - type: - - "null" - - object - additionalProperties: true - properties: {} - url: - description: The URL of the source - type: - - "null" - - string - contacts: - description: List of contacts involved in the conversation. - type: - - "null" - - object - items: - type: - - "null" - - object - properties: - type: - description: The type of the contact - type: - - "null" - - string - id: - description: The ID of the contact - type: - - "null" - - string - teammates: - description: List of teammates involved in the conversation. - type: - - "null" - - object - properties: - admins: - description: Admin teammates involved in the conversation. - type: - - "null" - - array - items: - type: - - "null" - - object - properties: - id: - description: The ID of the teammate admin - type: - - "null" - - string - type: - description: The type of the teammate (admin) - type: - - "null" - - string - type: - description: The type of teammates - type: - - "null" - - string - first_contact_reply: - description: Timestamp indicating when the first contact replied. - type: - - "null" - - object - properties: - type: - description: The type of the first contact reply - type: - - "null" - - string - url: - description: The URL of the first contact reply - type: - - "null" - - string - created_at: - description: The timestamp of the first contact's reply - type: - - "null" - - integer - custom_attributes: - description: Custom attributes associated with the conversation - type: - - "null" - - object - priority: - description: The priority level of the conversation - type: - - "null" - - string - conversation_message: - description: The main message content of the conversation. - type: - - "null" - - object - properties: - attachments: - description: Attachments in the conversation message - anyOf: - - type: array - items: - type: object - properties: - type: - type: - - "null" - - string - name: - type: - - "null" - - string - url: - type: - - "null" - - string - content_type: - type: - - "null" - - string - filesize: - type: - - "null" - - integer - height: - type: - - "null" - - integer - width: - type: - - "null" - - integer - - type: "null" - author: - description: The author of the conversation message. - type: - - "null" - - object - properties: - id: - description: The ID of the author of the message - type: - - "null" - - string - type: - description: The type of the author (e.g., admin, customer) - type: - - "null" - - string - name: - description: The name of the author of the message - type: - - "null" - - string - email: - description: The email of the author of the message - type: - - "null" - - string - body: - description: The body/content of the conversation message - type: - - "null" - - string - delivered_as: - description: The delivery status of the message - type: - - "null" - - string - id: - description: The ID of the conversation message - type: - - "null" - - string - subject: - description: The subject of the conversation message - type: - - "null" - - string - type: - description: The type of the conversation message - type: - - "null" - - string - url: - description: The URL of the conversation message - type: - - "null" - - string - conversation_rating: - description: Ratings given to the conversation by the customer and teammate. - type: - - "null" - - object - properties: - created_at: - description: The timestamp when the rating was created - type: - - "null" - - integer - customer: - description: Rating given by the customer. - type: - - "null" - - object - properties: - id: - description: The ID of the customer who provided the rating - type: - - "null" - - string - type: - description: The type of the customer providing the rating - type: - - "null" - - string - rating: - description: The rating given to the conversation - type: - - "null" - - integer - remark: - description: Any remarks provided with the rating - type: - - "null" - - string - teammate: - description: Rating given by the teammate. - type: - - "null" - - object - properties: - id: - description: The ID of the teammate being rated - type: - - "null" - - integer - type: - description: The type of the teammate being rated - type: - - "null" - - string - created_at: - description: The timestamp when the conversation was created - type: - - "null" - - integer - customer_first_reply: - description: Timestamp indicating when the customer first replied. - type: - - "null" - - object - properties: - created_at: - description: The timestamp of the customer's first reply - type: - - "null" - - integer - type: - description: The type of the first customer reply - type: - - "null" - - string - url: - description: The URL of the first customer reply - type: - - "null" - - string - customers: - description: List of customers involved in the conversation - anyOf: - - type: array - items: - type: - - "null" - - object - properties: - id: - type: - - "null" - - string - type: - type: - - "null" - - string - - type: "null" - id: - description: The unique ID of the conversation - type: - - "null" - - string - open: - description: Indicates if the conversation is open or closed - type: - - "null" - - boolean - read: - description: Indicates if the conversation has been read - type: - - "null" - - boolean - sent_at: - description: The timestamp when the conversation was sent - type: - - "null" - - integer - snoozed_until: - description: Timestamp until the conversation is snoozed - type: - - "null" - - integer - sla_applied: - description: Service Level Agreement details applied to the conversation. - type: - - "null" - - object - properties: - sla_name: - description: The name of the SLA applied - type: - - "null" - - string - sla_status: - description: The status of the SLA applied - type: - - "null" - - string - state: - description: The state of the conversation (e.g., new, in progress) - type: - - "null" - - string - statistics: - description: Statistics related to the conversation. - type: - - "null" - - object - properties: - type: - description: The type of conversation statistics - type: - - "null" - - string - time_to_assignment: - description: Time taken for assignment - type: - - "null" - - integer - time_to_admin_reply: - description: Time taken to reply by admin - type: - - "null" - - integer - time_to_first_close: - description: Time taken to first close the conversation - type: - - "null" - - integer - time_to_last_close: - description: Time taken to last close the conversation - type: - - "null" - - integer - median_time_to_reply: - description: The median time taken to reply to the conversation - type: - - "null" - - integer - first_contact_reply_at: - description: Timestamp of the first contact reply - type: - - "null" - - integer - first_assignment_at: - description: Timestamp of the first assignment - type: - - "null" - - integer - first_admin_reply_at: - description: Timestamp of the first admin reply - type: - - "null" - - integer - first_close_at: - description: Timestamp of the first conversation close - type: - - "null" - - integer - last_assignment_at: - description: Timestamp of the last assignment - type: - - "null" - - integer - last_assignment_admin_reply_at: - description: Timestamp of the last assignment admin reply - type: - - "null" - - integer - last_contact_reply_at: - description: Timestamp of the last contact reply - type: - - "null" - - integer - last_admin_reply_at: - description: Timestamp of the last admin reply - type: - - "null" - - integer - last_close_at: - description: Timestamp of the last conversation close - type: - - "null" - - integer - last_closed_by_id: - description: The ID of the last user who closed the conversation - type: - - "null" - - integer - count_reopens: - description: The total count of conversation reopens - type: - - "null" - - integer - count_assignments: - description: The total count of assignments for the conversation - type: - - "null" - - integer - count_conversation_parts: - description: The total count of conversation parts - type: - - "null" - - integer - tags: - description: Tags applied to the conversation. - type: - - "null" - - object - items: - type: - - "null" - - object - properties: - applied_at: - description: Timestamp when the tag was applied - type: - - "null" - - integer - applied_by: - description: User who applied the tag. - type: - - "null" - - object - properties: - id: - description: The ID of the user who applied the tag - type: - - "null" - - string - type: - description: The type of the user who applied the tag - type: - - "null" - - string - id: - description: The ID of the tag - type: - - "null" - - string - name: - description: The name of the tag - type: - - "null" - - string - type: - description: The type of the tag - type: - - "null" - - string - type: - description: The type of the conversation - type: - - "null" - - string - updated_at: - description: The timestamp when the conversation was last updated - type: - - "null" - - integer - user: - description: The user related to the conversation. - type: - - "null" - - object - properties: - id: - description: The ID of the user associated with the conversation - type: - - "null" - - string - type: - description: The type of the user - type: - - "null" - - string - waiting_since: - description: Timestamp since waiting for a response - type: - - "null" - - integer - admin_assignee_id: - description: The ID of the administrator assigned to the conversation - type: - - "null" - - integer - title: - description: The title of the conversation - type: - - "null" - - string - team_assignee_id: - description: The ID of the team assigned to the conversation - type: - - "null" - - integer - redacted: - description: Indicates if the conversation is redacted - type: - - "null" - - boolean - topics: - description: Topics associated with the conversation. - type: - - "null" - - object - properties: - type: - description: The type of topics - type: - - "null" - - string - topics: - description: List of topics related to the conversation. - type: - - "null" - - array - items: - type: - - "null" - - object - properties: - type: - description: The type of the topic - type: - - "null" - - string - id: - description: The ID of the topic - type: - - "null" - - integer - name: - description: The name of the topic - type: - - "null" - - string - total_count: - description: The total count of topics - type: - - "null" - - integer - activity_logs: - $ref: "#/definitions/stream_full_refresh" - primary_key: id - $parameters: - name: "activity_logs" - path: "admins/activity_logs" - data_field: "activity_logs" - retriever: - $ref: "#/definitions/retriever" - description: "The Retriever without passing page size option" - paginator: - type: "DefaultPaginator" - url_base: "#/definitions/requester/url_base" - pagination_strategy: - type: "CursorPagination" - cursor_value: "{{ response.get('pages', {}).get('next') }}" - stop_condition: "{{ 'next' not in response.get('pages', {}) }}" - page_token_option: - type: RequestPath - incremental_sync: - type: DatetimeBasedCursor - cursor_field: created_at - cursor_datetime_formats: - - "%s" - datetime_format: "%s" - lookback_window: "P{{ config.get('lookback_window', 0) }}D" - cursor_granularity: "PT1S" - step: "P{{ config.get('activity_logs_time_step', 30) }}D" - start_datetime: - datetime: "{{ config['start_date'] }}" - datetime_format: "%Y-%m-%dT%H:%M:%SZ" - end_time_option: - field_name: "created_at_before" - inject_into: "request_parameter" - start_time_option: - field_name: "created_at_after" - inject_into: "request_parameter" - - schema_loader: - type: InlineSchemaLoader - schema: - type: object - properties: - performed_by: - description: The user who performed the activity - type: - - "null" - - object - properties: - id: - description: Unique identifier of the user who performed the activity - type: - - "null" - - string - type: - description: - Type of the user who performed the activity (e.g., admin, - user) - type: - - "null" - - string - ip: - description: IP address from where the activity was performed - type: - - "null" - - string - email: - description: Email of the user who performed the activity - type: - - "null" - - string - id: - description: Unique identifier for the activity log entry - type: - - "null" - - string - metadata: - description: Additional data or information related to the activity - type: - - "null" - - object - activity_type: - description: The type or category of the activity - type: - - "null" - - string - activity_description: - description: A description of the activity that took place - type: - - "null" - - string - created_at: - description: The timestamp when the activity occurred - type: - - "null" - - integer -streams: - - "#/definitions/activity_logs" - - "#/definitions/admins" - - "#/definitions/tags" - - "#/definitions/teams" - - "#/definitions/segments" - - "#/definitions/companies" - - "#/definitions/company_attributes" - - "#/definitions/contact_attributes" - - "#/definitions/contacts" - - "#/definitions/conversations" - - "#/definitions/conversation_parts" - - "#/definitions/company_segments" - -check: - stream_names: - - "tags" diff --git a/airbyte-integrations/connectors/source-intercom/source_intercom/run.py b/airbyte-integrations/connectors/source-intercom/source_intercom/run.py deleted file mode 100644 index 434766998b6e..000000000000 --- a/airbyte-integrations/connectors/source-intercom/source_intercom/run.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import sys - -from airbyte_cdk.entrypoint import launch -from source_intercom import SourceIntercom - - -def run(): - source = SourceIntercom() - launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-intercom/source_intercom/source.py b/airbyte-integrations/connectors/source-intercom/source_intercom/source.py deleted file mode 100644 index bd527bbced67..000000000000 --- a/airbyte-integrations/connectors/source-intercom/source_intercom/source.py +++ /dev/null @@ -1,18 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource - -""" -This file provides the necessary constructs to interpret a provided declarative YAML configuration file into -source connector. - -WARNING: Do not modify this file. -""" - - -# Declarative Source -class SourceIntercom(YamlDeclarativeSource): - def __init__(self): - super().__init__(**{"path_to_yaml": "manifest.yaml"}) diff --git a/airbyte-integrations/connectors/source-intercom/source_intercom/spec.json b/airbyte-integrations/connectors/source-intercom/source_intercom/spec.json deleted file mode 100644 index b544fd8b43bd..000000000000 --- a/airbyte-integrations/connectors/source-intercom/source_intercom/spec.json +++ /dev/null @@ -1,98 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/sources/intercom", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Source Intercom Spec", - "type": "object", - "required": ["start_date", "access_token"], - "additionalProperties": true, - "properties": { - "start_date": { - "type": "string", - "title": "Start date", - "description": "UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.", - "examples": ["2020-11-16T00:00:00Z"], - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", - "format": "date-time" - }, - "access_token": { - "title": "Access token", - "type": "string", - "description": "Access token for making authenticated requests. See the Intercom docs for more information.", - "airbyte_secret": true, - "order": 0 - }, - "client_id": { - "title": "Client Id", - "type": "string", - "description": "Client Id for your Intercom application.", - "airbyte_secret": true, - "order": 1 - }, - "client_secret": { - "title": "Client Secret", - "type": "string", - "description": "Client Secret for your Intercom application.", - "airbyte_secret": true, - "order": 2 - }, - "activity_logs_time_step": { - "type": "integer", - "default": 30, - "minimum": 1, - "maximum": 91, - "title": "Activity logs stream slice step size (in days)", - "description": "Set lower value in case of failing long running sync of Activity Logs stream.", - "examples": [30, 10, 5], - "order": 3 - }, - "lookback_window": { - "title": "Lookback window", - "description": "The number of days to shift the state value backward for record sync", - "examples": [60], - "default": 0, - "minimum": 0, - "type": "integer", - "order": 4 - } - } - }, - "advanced_auth": { - "auth_flow_type": "oauth2.0", - "oauth_config_specification": { - "complete_oauth_output_specification": { - "type": "object", - "properties": { - "access_token": { - "type": "string", - "path_in_connector_config": ["access_token"] - } - } - }, - "complete_oauth_server_input_specification": { - "type": "object", - "properties": { - "client_id": { - "type": "string" - }, - "client_secret": { - "type": "string" - } - } - }, - "complete_oauth_server_output_specification": { - "type": "object", - "properties": { - "client_id": { - "type": "string", - "path_in_connector_config": ["client_id"] - }, - "client_secret": { - "type": "string", - "path_in_connector_config": ["client_secret"] - } - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-intercom/unit_tests/conftest.py b/airbyte-integrations/connectors/source-intercom/unit_tests/conftest.py new file mode 100644 index 000000000000..d3826f66680c --- /dev/null +++ b/airbyte-integrations/connectors/source-intercom/unit_tests/conftest.py @@ -0,0 +1,3 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +pytest_plugins = ["airbyte_cdk.test.utils.manifest_only_fixtures"] diff --git a/airbyte-integrations/connectors/source-intercom/poetry.lock b/airbyte-integrations/connectors/source-intercom/unit_tests/poetry.lock similarity index 73% rename from airbyte-integrations/connectors/source-intercom/poetry.lock rename to airbyte-integrations/connectors/source-intercom/unit_tests/poetry.lock index d8fc8ce7ae9e..847fa693b143 100644 --- a/airbyte-integrations/connectors/source-intercom/poetry.lock +++ b/airbyte-integrations/connectors/source-intercom/unit_tests/poetry.lock @@ -1,61 +1,65 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "4.6.2" +version = "6.10.0" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = "<4.0,>=3.10" +python-versions = "<3.13,>=3.10" files = [ - {file = "airbyte_cdk-4.6.2-py3-none-any.whl", hash = "sha256:3a37bd96c4b4f874b15fc18839b1e163eb30d1e4ef80d7dde2854e6a48efe934"}, - {file = "airbyte_cdk-4.6.2.tar.gz", hash = "sha256:c034f11ba6abe73dd7346ce2bc7017ff71ef0db1fd1ae86fb86beaeae35d8baf"}, + {file = "airbyte_cdk-6.10.0-py3-none-any.whl", hash = "sha256:0f953711332dae67f294751044bc4abfcd988c40a176a32e2d02d2a679377acf"}, + {file = "airbyte_cdk-6.10.0.tar.gz", hash = "sha256:90aeb0a87e89e9fc43f27ebccabb64ac96966ce2f398805c93a52cd4580a4641"}, ] [package.dependencies] -airbyte-protocol-models-pdv2 = ">=0.12.2,<0.13.0" +airbyte-protocol-models-dataclasses = ">=0.14,<0.15" backoff = "*" cachetools = "*" -cryptography = ">=42.0.5,<43.0.0" -Deprecated = ">=1.2,<1.3" +cryptography = ">=42.0.5,<44.0.0" dpath = ">=2.1.6,<3.0.0" -genson = "1.2.2" +dunamai = ">=1.22.0,<2.0.0" +genson = "1.3.0" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" jsonref = ">=0.2,<0.3" -jsonschema = ">=3.2.0,<3.3.0" +jsonschema = ">=4.17.3,<4.18.0" langchain_core = "0.1.42" -nltk = "3.8.1" +nltk = "3.9.1" +numpy = "<2" orjson = ">=3.10.7,<4.0.0" +pandas = "2.2.2" pendulum = "<3.0.0" +psutil = "6.1.0" pydantic = ">=2.7,<3.0" pyjwt = ">=2.8.0,<3.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" +python-ulid = ">=3.0.0,<4.0.0" pytz = "2024.1" PyYAML = ">=6.0.1,<7.0.0" +rapidfuzz = ">=3.10.1,<4.0.0" requests = "*" requests_cache = "*" -wcmatch = "8.4" +serpyco-rs = ">=1.10.2,<2.0.0" +wcmatch = "10.0" +xmltodict = ">=0.13.0,<0.14.0" [package.extras] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pandas (==2.2.0)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "python-calamine (==0.2.3)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] -vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "python-calamine (==0.2.3)", "python-snappy (==0.7.3)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sql = ["sqlalchemy (>=2.0,!=2.0.36,<3.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.8.0)"] [[package]] -name = "airbyte-protocol-models-pdv2" -version = "0.12.2" -description = "Declares the Airbyte Protocol." +name = "airbyte-protocol-models-dataclasses" +version = "0.14.1" +description = "Declares the Airbyte Protocol using Python Dataclasses. Dataclasses in Python have less performance overhead compared to Pydantic models, making them a more efficient choice for scenarios where speed and memory usage are critical" optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models_pdv2-0.12.2-py3-none-any.whl", hash = "sha256:8b3f9d0388928547cdf2e9134c0d589e4bcaa6f63bf71a21299f6824bfb7ad0e"}, - {file = "airbyte_protocol_models_pdv2-0.12.2.tar.gz", hash = "sha256:130c9ab289f3f53749ce63ff1abbfb67a44b7e5bd2794865315a2976138b672b"}, + {file = "airbyte_protocol_models_dataclasses-0.14.1-py3-none-any.whl", hash = "sha256:dfe10b32ee09e6ba9b4f17bd309e841b61cbd61ec8f80b1937ff104efd6209a9"}, + {file = "airbyte_protocol_models_dataclasses-0.14.1.tar.gz", hash = "sha256:f62a46556b82ea0d55de144983141639e8049d836dd4e0a9d7234c5b2e103c08"}, ] -[package.dependencies] -pydantic = ">=2.7.2,<3.0.0" - [[package]] name = "annotated-types" version = "0.7.0" @@ -89,21 +93,32 @@ doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] trio = ["trio (>=0.26.1)"] +[[package]] +name = "attributes-doc" +version = "0.4.0" +description = "PEP 224 implementation" +optional = false +python-versions = ">=3.8" +files = [ + {file = "attributes-doc-0.4.0.tar.gz", hash = "sha256:b1576c94a714e9fc2c65c47cf10d0c8e1a5f7c4f5ae7f69006be108d95cbfbfb"}, + {file = "attributes_doc-0.4.0-py2.py3-none-any.whl", hash = "sha256:4c3007d9e58f3a6cb4b9c614c4d4ce2d92161581f28e594ddd8241cc3a113bdd"}, +] + [[package]] name = "attrs" -version = "24.2.0" +version = "24.3.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, + {file = "attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308"}, + {file = "attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff"}, ] [package.extras] benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] @@ -169,13 +184,13 @@ ujson = ["ujson (>=5.7.0)"] [[package]] name = "certifi" -version = "2024.8.30" +version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, ] [[package]] @@ -398,43 +413,38 @@ files = [ [[package]] name = "cryptography" -version = "42.0.8" +version = "43.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, - {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, - {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, - {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, - {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, - {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, - {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, + {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, + {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, + {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, + {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, + {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, + {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, + {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, ] [package.dependencies] @@ -447,26 +457,9 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] -[[package]] -name = "deprecated" -version = "1.2.15" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -files = [ - {file = "Deprecated-1.2.15-py2.py3-none-any.whl", hash = "sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320"}, - {file = "deprecated-1.2.15.tar.gz", hash = "sha256:683e561a90de76239796e6b6feac66b99030d2dd3fcf61ef996330f14bbb9b0d"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "jinja2 (>=3.0.3,<3.1.0)", "setuptools", "sphinx (<2)", "tox"] - [[package]] name = "dpath" version = "2.2.0" @@ -478,6 +471,20 @@ files = [ {file = "dpath-2.2.0.tar.gz", hash = "sha256:34f7e630dc55ea3f219e555726f5da4b4b25f2200319c8e6902c394258dd6a3e"}, ] +[[package]] +name = "dunamai" +version = "1.23.0" +description = "Dynamic version generation" +optional = false +python-versions = ">=3.5" +files = [ + {file = "dunamai-1.23.0-py3-none-any.whl", hash = "sha256:a0906d876e92441793c6a423e16a4802752e723e9c9a5aabdc5535df02dbe041"}, + {file = "dunamai-1.23.0.tar.gz", hash = "sha256:a163746de7ea5acb6dacdab3a6ad621ebc612ed1e528aaa8beedb8887fccd2c4"}, +] + +[package.dependencies] +packaging = ">=20.9" + [[package]] name = "exceptiongroup" version = "1.2.2" @@ -494,12 +501,13 @@ test = ["pytest (>=6)"] [[package]] name = "genson" -version = "1.2.2" +version = "1.3.0" description = "GenSON is a powerful, user-friendly JSON Schema generator." optional = false python-versions = "*" files = [ - {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, + {file = "genson-1.3.0-py3-none-any.whl", hash = "sha256:468feccd00274cc7e4c09e84b08704270ba8d95232aa280f65b986139cec67f7"}, + {file = "genson-1.3.0.tar.gz", hash = "sha256:e02db9ac2e3fd29e65b5286f7135762e2cd8a986537c075b06fc5f1517308e37"}, ] [[package]] @@ -663,24 +671,22 @@ files = [ [[package]] name = "jsonschema" -version = "3.2.0" +version = "4.17.3" description = "An implementation of JSON Schema validation for Python" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, - {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, + {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, + {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, ] [package.dependencies] attrs = ">=17.4.0" -pyrsistent = ">=0.14.0" -setuptools = "*" -six = ">=1.11.0" +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" [package.extras] -format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] -format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] [[package]] name = "langchain-core" @@ -718,7 +724,10 @@ files = [ [package.dependencies] httpx = ">=0.23.0,<1" orjson = {version = ">=3.9.14,<4.0.0", markers = "platform_python_implementation != \"PyPy\""} -pydantic = {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""} +pydantic = [ + {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, + {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, +] requests = ">=2,<3" requests-toolbelt = ">=1.0.0,<2.0.0" @@ -797,13 +806,13 @@ files = [ [[package]] name = "nltk" -version = "3.8.1" +version = "3.9.1" description = "Natural Language Toolkit" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"}, - {file = "nltk-3.8.1.zip", hash = "sha256:1834da3d0682cba4f2cede2f9aad6b0fafb6461ba451db0efb6f9c39798d64d3"}, + {file = "nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1"}, + {file = "nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868"}, ] [package.dependencies] @@ -820,6 +829,51 @@ plot = ["matplotlib"] tgrep = ["pyparsing"] twitter = ["twython"] +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + [[package]] name = "orjson" version = "3.10.12" @@ -915,6 +969,79 @@ files = [ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] +[[package]] +name = "pandas" +version = "2.2.2" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + [[package]] name = "pendulum" version = "2.1.2" @@ -980,6 +1107,36 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "psutil" +version = "6.1.0" +description = "Cross-platform lib for process and system monitoring in Python." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "psutil-6.1.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ff34df86226c0227c52f38b919213157588a678d049688eded74c76c8ba4a5d0"}, + {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c0e0c00aa18ca2d3b2b991643b799a15fc8f0563d2ebb6040f64ce8dc027b942"}, + {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:000d1d1ebd634b4efb383f4034437384e44a6d455260aaee2eca1e9c1b55f047"}, + {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5cd2bcdc75b452ba2e10f0e8ecc0b57b827dd5d7aaffbc6821b2a9a242823a76"}, + {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:045f00a43c737f960d273a83973b2511430d61f283a44c96bf13a6e829ba8fdc"}, + {file = "psutil-6.1.0-cp27-none-win32.whl", hash = "sha256:9118f27452b70bb1d9ab3198c1f626c2499384935aaf55388211ad982611407e"}, + {file = "psutil-6.1.0-cp27-none-win_amd64.whl", hash = "sha256:a8506f6119cff7015678e2bce904a4da21025cc70ad283a53b099e7620061d85"}, + {file = "psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688"}, + {file = "psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a"}, + {file = "psutil-6.1.0-cp36-cp36m-win32.whl", hash = "sha256:6d3fbbc8d23fcdcb500d2c9f94e07b1342df8ed71b948a2649b5cb060a7c94ca"}, + {file = "psutil-6.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1209036fbd0421afde505a4879dee3b2fd7b1e14fee81c0069807adcbbcca747"}, + {file = "psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e"}, + {file = "psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be"}, + {file = "psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a"}, +] + +[package.extras] +dev = ["black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "wheel"] +test = ["pytest", "pytest-xdist", "setuptools"] + [[package]] name = "pycparser" version = "2.22" @@ -1218,23 +1375,6 @@ tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] -[[package]] -name = "pytest-mock" -version = "3.14.0" -description = "Thin-wrapper around the mock package for easier use with pytest" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, - {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, -] - -[package.dependencies] -pytest = ">=6.2.5" - -[package.extras] -dev = ["pre-commit", "pytest-asyncio", "tox"] - [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -1249,6 +1389,20 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "python-ulid" +version = "3.0.0" +description = "Universally unique lexicographically sortable identifier" +optional = false +python-versions = ">=3.9" +files = [ + {file = "python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"}, + {file = "python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"}, +] + +[package.extras] +pydantic = ["pydantic (>=2.0)"] + [[package]] name = "pytz" version = "2024.1" @@ -1333,6 +1487,106 @@ files = [ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] +[[package]] +name = "rapidfuzz" +version = "3.10.1" +description = "rapid fuzzy string matching" +optional = false +python-versions = ">=3.9" +files = [ + {file = "rapidfuzz-3.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f17d9f21bf2f2f785d74f7b0d407805468b4c173fa3e52c86ec94436b338e74a"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b31f358a70efc143909fb3d75ac6cd3c139cd41339aa8f2a3a0ead8315731f2b"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f4f43f2204b56a61448ec2dd061e26fd344c404da99fb19f3458200c5874ba2"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d81bf186a453a2757472133b24915768abc7c3964194406ed93e170e16c21cb"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3611c8f45379a12063d70075c75134f2a8bd2e4e9b8a7995112ddae95ca1c982"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c3b537b97ac30da4b73930fa8a4fe2f79c6d1c10ad535c5c09726612cd6bed9"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:231ef1ec9cf7b59809ce3301006500b9d564ddb324635f4ea8f16b3e2a1780da"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ed4f3adc1294834955b7e74edd3c6bd1aad5831c007f2d91ea839e76461a5879"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:7b6015da2e707bf632a71772a2dbf0703cff6525732c005ad24987fe86e8ec32"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1b35a118d61d6f008e8e3fb3a77674d10806a8972c7b8be433d6598df4d60b01"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:bc308d79a7e877226f36bdf4e149e3ed398d8277c140be5c1fd892ec41739e6d"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f017dbfecc172e2d0c37cf9e3d519179d71a7f16094b57430dffc496a098aa17"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-win32.whl", hash = "sha256:36c0e1483e21f918d0f2f26799fe5ac91c7b0c34220b73007301c4f831a9c4c7"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:10746c1d4c8cd8881c28a87fd7ba0c9c102346dfe7ff1b0d021cdf093e9adbff"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-win_arm64.whl", hash = "sha256:dfa64b89dcb906835e275187569e51aa9d546a444489e97aaf2cc84011565fbe"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:92958ae075c87fef393f835ed02d4fe8d5ee2059a0934c6c447ea3417dfbf0e8"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ba7521e072c53e33c384e78615d0718e645cab3c366ecd3cc8cb732befd94967"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d02cbd75d283c287471b5b3738b3e05c9096150f93f2d2dfa10b3d700f2db9"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:efa1582a397da038e2f2576c9cd49b842f56fde37d84a6b0200ffebc08d82350"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f12912acee1f506f974f58de9fdc2e62eea5667377a7e9156de53241c05fdba8"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666d5d8b17becc3f53447bcb2b6b33ce6c2df78792495d1fa82b2924cd48701a"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26f71582c0d62445067ee338ddad99b655a8f4e4ed517a90dcbfbb7d19310474"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8a2ef08b27167bcff230ffbfeedd4c4fa6353563d6aaa015d725dd3632fc3de7"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:365e4fc1a2b95082c890f5e98489b894e6bf8c338c6ac89bb6523c2ca6e9f086"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1996feb7a61609fa842e6b5e0c549983222ffdedaf29644cc67e479902846dfe"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:cf654702f144beaa093103841a2ea6910d617d0bb3fccb1d1fd63c54dde2cd49"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ec108bf25de674781d0a9a935030ba090c78d49def3d60f8724f3fc1e8e75024"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-win32.whl", hash = "sha256:031f8b367e5d92f7a1e27f7322012f3c321c3110137b43cc3bf678505583ef48"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:f98f36c6a1bb9a6c8bbec99ad87c8c0e364f34761739b5ea9adf7b48129ae8cf"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-win_arm64.whl", hash = "sha256:f1da2028cb4e41be55ee797a82d6c1cf589442504244249dfeb32efc608edee7"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1340b56340896bede246f612b6ecf685f661a56aabef3d2512481bfe23ac5835"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2316515169b7b5a453f0ce3adbc46c42aa332cae9f2edb668e24d1fc92b2f2bb"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e06fe6a12241ec1b72c0566c6b28cda714d61965d86569595ad24793d1ab259"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d99c1cd9443b19164ec185a7d752f4b4db19c066c136f028991a480720472e23"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1d9aa156ed52d3446388ba4c2f335e312191d1ca9d1f5762ee983cf23e4ecf6"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:54bcf4efaaee8e015822be0c2c28214815f4f6b4f70d8362cfecbd58a71188ac"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0c955e32afdbfdf6e9ee663d24afb25210152d98c26d22d399712d29a9b976b"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:191633722203f5b7717efcb73a14f76f3b124877d0608c070b827c5226d0b972"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:195baad28057ec9609e40385991004e470af9ef87401e24ebe72c064431524ab"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0fff4a6b87c07366662b62ae994ffbeadc472e72f725923f94b72a3db49f4671"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4ffed25f9fdc0b287f30a98467493d1e1ce5b583f6317f70ec0263b3c97dbba6"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d02cf8e5af89a9ac8f53c438ddff6d773f62c25c6619b29db96f4aae248177c0"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-win32.whl", hash = "sha256:f3bb81d4fe6a5d20650f8c0afcc8f6e1941f6fecdb434f11b874c42467baded0"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:aaf83e9170cb1338922ae42d320699dccbbdca8ffed07faeb0b9257822c26e24"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-win_arm64.whl", hash = "sha256:c5da802a0d085ad81b0f62828fb55557996c497b2d0b551bbdfeafd6d447892f"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fc22d69a1c9cccd560a5c434c0371b2df0f47c309c635a01a913e03bbf183710"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38b0dac2c8e057562b8f0d8ae5b663d2d6a28c5ab624de5b73cef9abb6129a24"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fde3bbb14e92ce8fcb5c2edfff72e474d0080cadda1c97785bf4822f037a309"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9141fb0592e55f98fe9ac0f3ce883199b9c13e262e0bf40c5b18cdf926109d16"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:237bec5dd1bfc9b40bbd786cd27949ef0c0eb5fab5eb491904c6b5df59d39d3c"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18123168cba156ab5794ea6de66db50f21bb3c66ae748d03316e71b27d907b95"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b75fe506c8e02769cc47f5ab21ce3e09b6211d3edaa8f8f27331cb6988779be"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9da82aa4b46973aaf9e03bb4c3d6977004648c8638febfc0f9d237e865761270"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c34c022d5ad564f1a5a57a4a89793bd70d7bad428150fb8ff2760b223407cdcf"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1e96c84d6c2a0ca94e15acb5399118fff669f4306beb98a6d8ec6f5dccab4412"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e8e154b84a311263e1aca86818c962e1fa9eefdd643d1d5d197fcd2738f88cb9"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:335fee93188f8cd585552bb8057228ce0111bd227fa81bfd40b7df6b75def8ab"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-win32.whl", hash = "sha256:6729b856166a9e95c278410f73683957ea6100c8a9d0a8dbe434c49663689255"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-win_amd64.whl", hash = "sha256:0e06d99ad1ad97cb2ef7f51ec6b1fedd74a3a700e4949353871cf331d07b382a"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-win_arm64.whl", hash = "sha256:8d1b7082104d596a3eb012e0549b2634ed15015b569f48879701e9d8db959dbb"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:779027d3307e1a2b1dc0c03c34df87a470a368a1a0840a9d2908baf2d4067956"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:440b5608ab12650d0390128d6858bc839ae77ffe5edf0b33a1551f2fa9860651"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82cac41a411e07a6f3dc80dfbd33f6be70ea0abd72e99c59310819d09f07d945"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:958473c9f0bca250590200fd520b75be0dbdbc4a7327dc87a55b6d7dc8d68552"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ef60dfa73749ef91cb6073be1a3e135f4846ec809cc115f3cbfc6fe283a5584"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7fbac18f2c19fc983838a60611e67e3262e36859994c26f2ee85bb268de2355"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a0d519ff39db887cd73f4e297922786d548f5c05d6b51f4e6754f452a7f4296"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bebb7bc6aeb91cc57e4881b222484c26759ca865794187217c9dcea6c33adae6"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fe07f8b9c3bb5c5ad1d2c66884253e03800f4189a60eb6acd6119ebaf3eb9894"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:bfa48a4a2d45a41457f0840c48e579db157a927f4e97acf6e20df8fc521c79de"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2cf44d01bfe8ee605b7eaeecbc2b9ca64fc55765f17b304b40ed8995f69d7716"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e6bbca9246d9eedaa1c84e04a7f555493ba324d52ae4d9f3d9ddd1b740dcd87"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-win32.whl", hash = "sha256:567f88180f2c1423b4fe3f3ad6e6310fc97b85bdba574801548597287fc07028"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6b2cd7c29d6ecdf0b780deb587198f13213ac01c430ada6913452fd0c40190fc"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-win_arm64.whl", hash = "sha256:9f912d459e46607ce276128f52bea21ebc3e9a5ccf4cccfef30dd5bddcf47be8"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ac4452f182243cfab30ba4668ef2de101effaedc30f9faabb06a095a8c90fd16"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:565c2bd4f7d23c32834652b27b51dd711814ab614b4e12add8476be4e20d1cf5"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:187d9747149321607be4ccd6f9f366730078bed806178ec3eeb31d05545e9e8f"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:616290fb9a8fa87e48cb0326d26f98d4e29f17c3b762c2d586f2b35c1fd2034b"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:073a5b107e17ebd264198b78614c0206fa438cce749692af5bc5f8f484883f50"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:39c4983e2e2ccb9732f3ac7d81617088822f4a12291d416b09b8a1eadebb3e29"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ac7adee6bcf0c6fee495d877edad1540a7e0f5fc208da03ccb64734b43522d7a"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:425f4ac80b22153d391ee3f94bc854668a0c6c129f05cf2eaf5ee74474ddb69e"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65a2fa13e8a219f9b5dcb9e74abe3ced5838a7327e629f426d333dfc8c5a6e66"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75561f3df9a906aaa23787e9992b228b1ab69007932dc42070f747103e177ba8"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:edd062490537e97ca125bc6c7f2b7331c2b73d21dc304615afe61ad1691e15d5"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfcc8feccf63245a22dfdd16e222f1a39771a44b870beb748117a0e09cbb4a62"}, + {file = "rapidfuzz-3.10.1.tar.gz", hash = "sha256:5a15546d847a915b3f42dc79ef9b0c78b998b4e2c53b252e7166284066585979"}, +] + +[package.extras] +all = ["numpy"] + [[package]] name = "regex" version = "2024.11.6" @@ -1487,23 +1741,6 @@ redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] yaml = ["pyyaml (>=6.0.1)"] -[[package]] -name = "requests-mock" -version = "1.12.1" -description = "Mock out responses from the requests package" -optional = false -python-versions = ">=3.5" -files = [ - {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, - {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, -] - -[package.dependencies] -requests = ">=2.22,<3" - -[package.extras] -fixture = ["fixtures"] - [[package]] name = "requests-toolbelt" version = "1.0.0" @@ -1519,24 +1756,58 @@ files = [ requests = ">=2.0.1,<3.0.0" [[package]] -name = "setuptools" -version = "75.6.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" +name = "serpyco-rs" +version = "1.11.0" +description = "" optional = false python-versions = ">=3.9" files = [ - {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"}, - {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:4b2bd933539bd8c84315e2fb5ae52ef7a58ace5a6dfe3f8b73f74dc71216779e"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:627f957889ff73c4d2269fc7b6bba93212381befe03633e7cb5495de66ba9a33"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b0933620abc01434023e0e3e22255b7e4ab9b427b5a9a5ee00834656d792377a"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9ce46683d92e34abb20304817fc5ac6cb141a06fc7468dedb1d8865a8a9682f6"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bda437d86e8859bf91c189c1f4650899822f6d6d7b02b48f5729da904eb7bb7d"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a72bfbd282af17ebe76d122639013e802c09902543fdbbd828fb2159ec9755e"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d4808df5384e3e8581e31a90ba7a1fa501c0837b1f174284bb8a4555b6864ea"}, + {file = "serpyco_rs-1.11.0-cp310-none-win_amd64.whl", hash = "sha256:c7b60aef4c16d68efb0d6241f05d0a434d873d98449cbb4366b0d385f0a7172b"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:8d47ee577cf4d69b53917615cb031ad8708eb2f59fe78194b1968c13130fc2f7"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6090d9a1487237cdd4e9362a823eede23249602019b917e7bd57846179286e79"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7192eb3df576386fefd595ea31ae25c62522841ffec7e7aeb37a80b55bdc3213"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b52ef8affb7e71b9b98a7d5216d6a7ad03b04e990acb147cd9211c8b931c5487"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3480e09e473560c60e74aaa789e6b4d079637371aae0a98235440111464bbba7"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c92e36b0ab6fe866601c2331f7e99c809a126d21963c03d8a5c29331526deed"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84f497361952d4566bc1f77e9e15a84a2614f593cc671fbf0a0fa80046f9c3d7"}, + {file = "serpyco_rs-1.11.0-cp311-none-win_amd64.whl", hash = "sha256:37fc1cf192bef9784fbf1f4e03cec21750b9e704bef55cc0442f71a715eee920"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:3ea93d485f03dc8b0cfb0d477f0ad2e86e78f0461b53010656ab5b4db1b41fb0"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7772410d15694b03f9c5500a2c47d62eed76e191bea4087ad042250346b1a38e"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42118463c1679846cffd2f06f47744c9b9eb33c5d0448afd88ea19e1a81a8ddd"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:79481a455b76cc56021dc55bb6d5bdda1b2b32bcb6a1ee711b597140d112e9b1"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8fd79051f9af9591fc03cf7d3033ff180416301f6a4fd3d1e3d92ebd2d68697"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d29c8f9aeed734a3b51f7349d04ec9063516ffa4e10b632d75e9b1309e4930e4"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15609158b0d9591ffa118302cd9d0039970cb3faf91dce32975f7d276e7411d5"}, + {file = "serpyco_rs-1.11.0-cp312-none-win_amd64.whl", hash = "sha256:00081eae77fbf4c5d88371c5586317ab02ccb293a330b460869a283edf2b7b69"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:3028893366a1985adcedb13fa8f6f98c087c185efc427f94c2ccdafa40f45832"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c18bf511316f3abf648a68ee62ef88617bec57d3fcde69466b4361102715ae5"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7dde9ef09cdfaf7c62378186b9e29f54ec76114be4c347be6a06dd559c5681e"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:18500ebc5e75285841e35585a238629a990b709e14f68933233640d15ca17d5f"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f47c23132d4e03982703a7630aa09877b41e499722142f76b6153f6619b612f3"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5f8e6ba499f6a0825bee0d8f8764569d367af871b563fc6512c171474e8e5383"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15438a076047c34cff6601a977df54948e8d39d1a86f89d05c48bc60f4c12a61"}, + {file = "serpyco_rs-1.11.0-cp313-none-win_amd64.whl", hash = "sha256:84ee2c109415bd81904fc9abb9aec86a5dd13166808c21142cf23ec639f683bd"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5c97c16c865261577fac4effeccc7ef5e0a1e8e35e7a3ee6c90c77c3a4cd7ff9"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47825e70f86fd6ef7c4a835dea3d6e8eef4fee354ed7b39ced99f31aba74a86e"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24d220220365110edba2f778f41ab3cf396883da0f26e1361a3ada9bd0227f73"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3a46f334af5a9d77acc6e1e58f355ae497900a2798929371f0545e274f6e6166"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d72b748acce4b4e3c7c9724e1eb33d033a1c26b08a698b393e0288060e0901"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2b8b6f205e8cc038d4d30dd0e70eece7bbecc816eb2f3787c330dc2218e232d"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:038d748bfff31f150f0c3edab2766b8843edb952cb1bd3bf547886beb0912dae"}, + {file = "serpyco_rs-1.11.0-cp39-none-win_amd64.whl", hash = "sha256:0fee1c89ec2cb013dc232e4ebef88e2844357ce8631063b56639dbfb83762f20"}, + {file = "serpyco_rs-1.11.0.tar.gz", hash = "sha256:70a844615ffb229e6e89c204b3ab7404aacaf2838911814c7d847969b8da2e3a"}, ] -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] -core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] +[package.dependencies] +attributes-doc = "*" +typing-extensions = "*" [[package]] name = "six" @@ -1648,6 +1919,17 @@ files = [ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] +[[package]] +name = "tzdata" +version = "2024.2" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, +] + [[package]] name = "url-normalize" version = "1.4.3" @@ -1681,93 +1963,30 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "wcmatch" -version = "8.4" +version = "10.0" description = "Wildcard/glob file name matcher." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, - {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, + {file = "wcmatch-10.0-py3-none-any.whl", hash = "sha256:0dd927072d03c0a6527a20d2e6ad5ba8d0380e60870c383bc533b71744df7b7a"}, + {file = "wcmatch-10.0.tar.gz", hash = "sha256:e72f0de09bba6a04e0de70937b0cf06e55f36f37b3deb422dfaf854b867b840a"}, ] [package.dependencies] bracex = ">=2.1.1" [[package]] -name = "wrapt" -version = "1.17.0" -description = "Module for decorators, wrappers and monkey patching." +name = "xmltodict" +version = "0.13.0" +description = "Makes working with XML feel like you are working with JSON" optional = false -python-versions = ">=3.8" +python-versions = ">=3.4" files = [ - {file = "wrapt-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a0c23b8319848426f305f9cb0c98a6e32ee68a36264f45948ccf8e7d2b941f8"}, - {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ca5f060e205f72bec57faae5bd817a1560fcfc4af03f414b08fa29106b7e2d"}, - {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e185ec6060e301a7e5f8461c86fb3640a7beb1a0f0208ffde7a65ec4074931df"}, - {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb90765dd91aed05b53cd7a87bd7f5c188fcd95960914bae0d32c5e7f899719d"}, - {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:879591c2b5ab0a7184258274c42a126b74a2c3d5a329df16d69f9cee07bba6ea"}, - {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fce6fee67c318fdfb7f285c29a82d84782ae2579c0e1b385b7f36c6e8074fffb"}, - {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0698d3a86f68abc894d537887b9bbf84d29bcfbc759e23f4644be27acf6da301"}, - {file = "wrapt-1.17.0-cp310-cp310-win32.whl", hash = "sha256:69d093792dc34a9c4c8a70e4973a3361c7a7578e9cd86961b2bbf38ca71e4e22"}, - {file = "wrapt-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:f28b29dc158ca5d6ac396c8e0a2ef45c4e97bb7e65522bfc04c989e6fe814575"}, - {file = "wrapt-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:74bf625b1b4caaa7bad51d9003f8b07a468a704e0644a700e936c357c17dd45a"}, - {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f2a28eb35cf99d5f5bd12f5dd44a0f41d206db226535b37b0c60e9da162c3ed"}, - {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81b1289e99cf4bad07c23393ab447e5e96db0ab50974a280f7954b071d41b489"}, - {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2939cd4a2a52ca32bc0b359015718472d7f6de870760342e7ba295be9ebaf9"}, - {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a9653131bda68a1f029c52157fd81e11f07d485df55410401f745007bd6d339"}, - {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4e4b4385363de9052dac1a67bfb535c376f3d19c238b5f36bddc95efae15e12d"}, - {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bdf62d25234290db1837875d4dceb2151e4ea7f9fff2ed41c0fde23ed542eb5b"}, - {file = "wrapt-1.17.0-cp311-cp311-win32.whl", hash = "sha256:5d8fd17635b262448ab8f99230fe4dac991af1dabdbb92f7a70a6afac8a7e346"}, - {file = "wrapt-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:92a3d214d5e53cb1db8b015f30d544bc9d3f7179a05feb8f16df713cecc2620a"}, - {file = "wrapt-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:89fc28495896097622c3fc238915c79365dd0ede02f9a82ce436b13bd0ab7569"}, - {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875d240fdbdbe9e11f9831901fb8719da0bd4e6131f83aa9f69b96d18fae7504"}, - {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ed16d95fd142e9c72b6c10b06514ad30e846a0d0917ab406186541fe68b451"}, - {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18b956061b8db634120b58f668592a772e87e2e78bc1f6a906cfcaa0cc7991c1"}, - {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:daba396199399ccabafbfc509037ac635a6bc18510ad1add8fd16d4739cdd106"}, - {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4d63f4d446e10ad19ed01188d6c1e1bb134cde8c18b0aa2acfd973d41fcc5ada"}, - {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8a5e7cc39a45fc430af1aefc4d77ee6bad72c5bcdb1322cfde852c15192b8bd4"}, - {file = "wrapt-1.17.0-cp312-cp312-win32.whl", hash = "sha256:0a0a1a1ec28b641f2a3a2c35cbe86c00051c04fffcfcc577ffcdd707df3f8635"}, - {file = "wrapt-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:3c34f6896a01b84bab196f7119770fd8466c8ae3dfa73c59c0bb281e7b588ce7"}, - {file = "wrapt-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:714c12485aa52efbc0fc0ade1e9ab3a70343db82627f90f2ecbc898fdf0bb181"}, - {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da427d311782324a376cacb47c1a4adc43f99fd9d996ffc1b3e8529c4074d393"}, - {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba1739fb38441a27a676f4de4123d3e858e494fac05868b7a281c0a383c098f4"}, - {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e711fc1acc7468463bc084d1b68561e40d1eaa135d8c509a65dd534403d83d7b"}, - {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:140ea00c87fafc42739bd74a94a5a9003f8e72c27c47cd4f61d8e05e6dec8721"}, - {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:73a96fd11d2b2e77d623a7f26e004cc31f131a365add1ce1ce9a19e55a1eef90"}, - {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0b48554952f0f387984da81ccfa73b62e52817a4386d070c75e4db7d43a28c4a"}, - {file = "wrapt-1.17.0-cp313-cp313-win32.whl", hash = "sha256:498fec8da10e3e62edd1e7368f4b24aa362ac0ad931e678332d1b209aec93045"}, - {file = "wrapt-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd136bb85f4568fffca995bd3c8d52080b1e5b225dbf1c2b17b66b4c5fa02838"}, - {file = "wrapt-1.17.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:17fcf043d0b4724858f25b8826c36e08f9fb2e475410bece0ec44a22d533da9b"}, - {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4a557d97f12813dc5e18dad9fa765ae44ddd56a672bb5de4825527c847d6379"}, - {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0229b247b0fc7dee0d36176cbb79dbaf2a9eb7ecc50ec3121f40ef443155fb1d"}, - {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8425cfce27b8b20c9b89d77fb50e368d8306a90bf2b6eef2cdf5cd5083adf83f"}, - {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c900108df470060174108012de06d45f514aa4ec21a191e7ab42988ff42a86c"}, - {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:4e547b447073fc0dbfcbff15154c1be8823d10dab4ad401bdb1575e3fdedff1b"}, - {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:914f66f3b6fc7b915d46c1cc424bc2441841083de01b90f9e81109c9759e43ab"}, - {file = "wrapt-1.17.0-cp313-cp313t-win32.whl", hash = "sha256:a4192b45dff127c7d69b3bdfb4d3e47b64179a0b9900b6351859f3001397dabf"}, - {file = "wrapt-1.17.0-cp313-cp313t-win_amd64.whl", hash = "sha256:4f643df3d4419ea3f856c5c3f40fec1d65ea2e89ec812c83f7767c8730f9827a"}, - {file = "wrapt-1.17.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:69c40d4655e078ede067a7095544bcec5a963566e17503e75a3a3e0fe2803b13"}, - {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f495b6754358979379f84534f8dd7a43ff8cff2558dcdea4a148a6e713a758f"}, - {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:baa7ef4e0886a6f482e00d1d5bcd37c201b383f1d314643dfb0367169f94f04c"}, - {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fc931382e56627ec4acb01e09ce66e5c03c384ca52606111cee50d931a342d"}, - {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8f8909cdb9f1b237786c09a810e24ee5e15ef17019f7cecb207ce205b9b5fcce"}, - {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ad47b095f0bdc5585bced35bd088cbfe4177236c7df9984b3cc46b391cc60627"}, - {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:948a9bd0fb2c5120457b07e59c8d7210cbc8703243225dbd78f4dfc13c8d2d1f"}, - {file = "wrapt-1.17.0-cp38-cp38-win32.whl", hash = "sha256:5ae271862b2142f4bc687bdbfcc942e2473a89999a54231aa1c2c676e28f29ea"}, - {file = "wrapt-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:f335579a1b485c834849e9075191c9898e0731af45705c2ebf70e0cd5d58beed"}, - {file = "wrapt-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d751300b94e35b6016d4b1e7d0e7bbc3b5e1751e2405ef908316c2a9024008a1"}, - {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7264cbb4a18dc4acfd73b63e4bcfec9c9802614572025bdd44d0721983fc1d9c"}, - {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33539c6f5b96cf0b1105a0ff4cf5db9332e773bb521cc804a90e58dc49b10578"}, - {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c30970bdee1cad6a8da2044febd824ef6dc4cc0b19e39af3085c763fdec7de33"}, - {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bc7f729a72b16ee21795a943f85c6244971724819819a41ddbaeb691b2dd85ad"}, - {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6ff02a91c4fc9b6a94e1c9c20f62ea06a7e375f42fe57587f004d1078ac86ca9"}, - {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dfb7cff84e72e7bf975b06b4989477873dcf160b2fd89959c629535df53d4e0"}, - {file = "wrapt-1.17.0-cp39-cp39-win32.whl", hash = "sha256:2399408ac33ffd5b200480ee858baa58d77dd30e0dd0cab6a8a9547135f30a88"}, - {file = "wrapt-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:4f763a29ee6a20c529496a20a7bcb16a73de27f5da6a843249c7047daf135977"}, - {file = "wrapt-1.17.0-py3-none-any.whl", hash = "sha256:d2c63b93548eda58abf5188e505ffed0229bf675f7c3090f8e36ad55b8cbc371"}, - {file = "wrapt-1.17.0.tar.gz", hash = "sha256:16187aa2317c731170a88ef35e8937ae0f533c402872c1ee5e6d079fcf320801"}, + {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, + {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, ] [metadata] lock-version = "2.0" -python-versions = "^3.10,<3.12" -content-hash = "e75fecfda21bb185122d6218ee0e5cc1318b240bef1db615ed5052597e23ba11" +python-versions = "^3.10,<3.13" +content-hash = "5e8366b535518df8f014fbbecff6bfaf17a0fdf43bd99b1405d4896f6a9cfd00" diff --git a/airbyte-integrations/connectors/source-intercom/unit_tests/pyproject.toml b/airbyte-integrations/connectors/source-intercom/unit_tests/pyproject.toml new file mode 100644 index 000000000000..8363acb21e57 --- /dev/null +++ b/airbyte-integrations/connectors/source-intercom/unit_tests/pyproject.toml @@ -0,0 +1,16 @@ +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" +[tool.poetry] +name = "source-intercom-tests" +version = "0.0.0" +description = "Unit tests for source-intercom" +authors = ["Airbyte "] +[tool.poetry.dependencies] +python = "^3.10,<3.13" +airbyte-cdk = "6.10.0" +pytest = "^8" +[tool.pytest.ini_options] +filterwarnings = [ + "ignore:This class is experimental*" +] diff --git a/airbyte-integrations/connectors/source-intercom/unit_tests/test_components.py b/airbyte-integrations/connectors/source-intercom/unit_tests/test_components.py index cb3ca58063dd..a2a8db7650e5 100644 --- a/airbyte-integrations/connectors/source-intercom/unit_tests/test_components.py +++ b/airbyte-integrations/connectors/source-intercom/unit_tests/test_components.py @@ -8,11 +8,11 @@ import requests from airbyte_cdk.sources.declarative.partition_routers.substream_partition_router import ParentStreamConfig from airbyte_cdk.sources.streams import Stream -from source_intercom.components import IncrementalSingleSliceCursor, IncrementalSubstreamSlicerCursor, IntercomRateLimiter -def test_slicer(): +def test_slicer(components_module): date_time_dict = {"updated_at": 1662459010} + IncrementalSingleSliceCursor = components_module.IncrementalSingleSliceCursor slicer = IncrementalSingleSliceCursor(config={}, parameters={}, cursor_field="updated_at") slicer.observe(date_time_dict, date_time_dict) slicer.close_slice(date_time_dict) @@ -36,7 +36,7 @@ def test_slicer(): ) ], ) -def test_sub_slicer(last_record, expected, records): +def test_sub_slicer(components_module, last_record, expected, records): parent_stream = Mock(spec=Stream) parent_stream.name = "parent_stream_name" parent_stream.cursor_field = "parent_cursor_field" @@ -52,6 +52,8 @@ def test_sub_slicer(last_record, expected, records): config={}, ) + IncrementalSubstreamSlicerCursor = components_module.IncrementalSubstreamSlicerCursor + slicer = IncrementalSubstreamSlicerCursor( config={}, parameters={}, cursor_field="first_stream_cursor", parent_stream_configs=[parent_config], parent_complete_fetch=True ) @@ -72,7 +74,9 @@ def test_sub_slicer(last_record, expected, records): ({}, 1.0), ], ) -def test_rate_limiter(rate_limit_header, backoff_time): +def test_rate_limiter(components_module, rate_limit_header, backoff_time): + + IntercomRateLimiter = components_module.IntercomRateLimiter def check_backoff_time(t): """A replacer for original `IntercomRateLimiter.backoff_time`""" assert backoff_time == t, f"Expected {backoff_time}, got {t}" diff --git a/airbyte-integrations/connectors/source-intercom/unit_tests/test_source.py b/airbyte-integrations/connectors/source-intercom/unit_tests/test_source.py deleted file mode 100644 index fe7a765a2ee0..000000000000 --- a/airbyte-integrations/connectors/source-intercom/unit_tests/test_source.py +++ /dev/null @@ -1,9 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from source_intercom import SourceIntercom - - -def test_source(): - assert SourceIntercom() diff --git a/airbyte-integrations/connectors/source-mixmax/metadata.yaml b/airbyte-integrations/connectors/source-mixmax/metadata.yaml index 665f0b8ebad8..43d4afbe8b41 100644 --- a/airbyte-integrations/connectors/source-mixmax/metadata.yaml +++ b/airbyte-integrations/connectors/source-mixmax/metadata.yaml @@ -13,11 +13,11 @@ data: enabled: false packageName: airbyte-source-mixmax connectorBuildOptions: - baseImage: docker.io/airbyte/source-declarative-manifest:6.10.0@sha256:58722e84dbd06bb2af9250e37d24d1c448e247fc3a84d75ee4407d52771b6f03 + baseImage: docker.io/airbyte/source-declarative-manifest:6.11.1@sha256:0d0f562a70c0ed19ab605f0c83802a2e052712587692e2f3a1cc794fe7cd7007 connectorSubtype: api connectorType: source definitionId: 63df2e59-d086-4980-af83-01948325eacd - dockerImageTag: 0.0.6 + dockerImageTag: 0.0.7 dockerRepository: airbyte/source-mixmax githubIssueLabel: source-mixmax icon: icon.svg diff --git a/airbyte-integrations/connectors/source-mysql/metadata.yaml b/airbyte-integrations/connectors/source-mysql/metadata.yaml index ecbe84225d03..18297abf395a 100644 --- a/airbyte-integrations/connectors/source-mysql/metadata.yaml +++ b/airbyte-integrations/connectors/source-mysql/metadata.yaml @@ -9,7 +9,7 @@ data: connectorSubtype: database connectorType: source definitionId: 435bb9a5-7887-4809-aa58-28c27df0d7ad - dockerImageTag: 3.9.1 + dockerImageTag: 3.9.2 dockerRepository: airbyte/source-mysql documentationUrl: https://docs.airbyte.com/integrations/sources/mysql githubIssueLabel: source-mysql diff --git a/airbyte-integrations/connectors/source-mysql/src/main/kotlin/io/airbyte/integrations/source/mysql/cdc/converters/MySQLBooleanConverter.kt b/airbyte-integrations/connectors/source-mysql/src/main/kotlin/io/airbyte/integrations/source/mysql/cdc/converters/MySQLBooleanConverter.kt index cceaab4151da..dabeb49ee611 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/kotlin/io/airbyte/integrations/source/mysql/cdc/converters/MySQLBooleanConverter.kt +++ b/airbyte-integrations/connectors/source-mysql/src/main/kotlin/io/airbyte/integrations/source/mysql/cdc/converters/MySQLBooleanConverter.kt @@ -40,7 +40,7 @@ class MySQLBooleanConverter : CustomConverter { } else if (field.hasDefaultValue()) { field.defaultValue() } else { - null + false } } when (x) { diff --git a/airbyte-integrations/connectors/source-mysql/src/test/kotlin/io/airbyte/integrations/source/mysql/MySqlDatatypeIntegrationTest.kt b/airbyte-integrations/connectors/source-mysql/src/test/kotlin/io/airbyte/integrations/source/mysql/MySqlDatatypeIntegrationTest.kt new file mode 100644 index 000000000000..a9401eedb7a2 --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql/src/test/kotlin/io/airbyte/integrations/source/mysql/MySqlDatatypeIntegrationTest.kt @@ -0,0 +1,396 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.mysql + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.data.AirbyteSchemaType +import io.airbyte.cdk.data.LeafAirbyteSchemaType +import io.airbyte.cdk.discover.MetaField +import io.airbyte.cdk.jdbc.JdbcConnectionFactory +import io.airbyte.cdk.read.DatatypeTestCase +import io.airbyte.cdk.read.DatatypeTestOperations +import io.airbyte.cdk.read.DynamicDatatypeTestFactory +import io.airbyte.cdk.util.Jsons +import io.github.oshai.kotlinlogging.KotlinLogging +import java.sql.Connection +import org.junit.jupiter.api.BeforeAll +import org.junit.jupiter.api.DynamicNode +import org.junit.jupiter.api.TestFactory +import org.junit.jupiter.api.Timeout +import org.testcontainers.containers.MySQLContainer + +class MySqlDatatypeIntegrationTest { + + @TestFactory + @Timeout(300) + fun syncTests(): Iterable = + DynamicDatatypeTestFactory(MySqlDatatypeTestOperations).build(dbContainer) + + companion object { + + lateinit var dbContainer: MySQLContainer<*> + + @JvmStatic + @BeforeAll + @Timeout(value = 300) + fun startAndProvisionTestContainer() { + dbContainer = MysqlContainerFactory.shared("mysql:8.0", MysqlContainerFactory.WithCdc) + } + } +} + +object MySqlDatatypeTestOperations : + DatatypeTestOperations< + MySQLContainer<*>, + MysqlSourceConfigurationSpecification, + MysqlSourceConfiguration, + MysqlSourceConfigurationFactory, + MySqlDatatypeTestCase + > { + + private val log = KotlinLogging.logger {} + + override val withGlobal: Boolean = true + override val globalCursorMetaField: MetaField = MysqlCdcMetaFields.CDC_CURSOR + + override fun streamConfigSpec( + container: MySQLContainer<*> + ): MysqlSourceConfigurationSpecification = + MysqlContainerFactory.config(container).also { it.setMethodValue(UserDefinedCursor) } + + override fun globalConfigSpec( + container: MySQLContainer<*> + ): MysqlSourceConfigurationSpecification = + MysqlContainerFactory.config(container).also { it.setMethodValue(CdcCursor()) } + + override val configFactory: MysqlSourceConfigurationFactory = MysqlSourceConfigurationFactory() + + override fun createStreams(config: MysqlSourceConfiguration) { + JdbcConnectionFactory(config).get().use { connection: Connection -> + connection.isReadOnly = false + connection.createStatement().use { it.execute("CREATE DATABASE IF NOT EXISTS test") } + connection.createStatement().use { it.execute("USE test") } + for ((_, case) in testCases) { + for (ddl in case.ddl) { + log.info { "test case ${case.id}: executing $ddl" } + connection.createStatement().use { stmt -> stmt.execute(ddl) } + } + } + } + } + + override fun populateStreams(config: MysqlSourceConfiguration) { + JdbcConnectionFactory(config).get().use { connection: Connection -> + connection.isReadOnly = false + connection.createStatement().use { it.execute("USE test") } + for ((_, case) in testCases) { + for (dml in case.dml) { + log.info { "test case ${case.id}: executing $dml" } + connection.createStatement().use { stmt -> stmt.execute(dml) } + } + } + } + } + + val bitValues = + mapOf( + "b'1'" to "true", + "b'0'" to "false", + ) + + val longBitValues = + mapOf( + "b'10101010'" to """-86""", + ) + + val longBitCdcValues = + mapOf( + "b'10101010'" to """"qg=="""", + ) + + val stringValues = + mapOf( + "'abcdef'" to """"abcdef"""", + "'ABCD'" to """"ABCD"""", + "'OXBEEF'" to """"OXBEEF"""", + ) + + val jsonValues = mapOf("""'{"col1": "v1"}'""" to """"{\"col1\": \"v1\"}"""") + + val jsonCdcValues = mapOf("""'{"col1": "v1"}'""" to """"{\"col1\":\"v1\"}"""") + + val yearValues = + mapOf( + "1992" to """1992""", + "2002" to """2002""", + "70" to """1970""", + ) + + val decimalValues = + mapOf( + "0.2" to """0.2""", + ) + + val floatValues = + mapOf( + "123.4567" to """123.4567""", + ) + + val zeroPrecisionDecimalValues = + mapOf( + "2" to """2""", + ) + + val zeroPrecisionDecimalCdcValues = + mapOf( + "2" to """2.0""", + ) + + val tinyintValues = + mapOf( + "10" to "10", + "4" to "4", + "2" to "2", + ) + + val intValues = + mapOf( + "10" to "10", + "100000000" to "100000000", + "200000000" to "200000000", + ) + + val dateValues = + mapOf( + "'2022-01-01'" to """"2022-01-01"""", + ) + + val timeValues = + mapOf( + "'14:30:00'" to """"14:30:00.000000"""", + ) + + val dateTimeValues = + mapOf( + "'2024-09-13 14:30:00'" to """"2024-09-13T14:30:00.000000"""", + "'2024-09-13T14:40:00+00:00'" to """"2024-09-13T14:40:00.000000"""" + ) + + val timestampValues = + mapOf( + "'2024-09-12 14:30:00'" to """"2024-09-12T14:30:00.000000Z"""", + "CONVERT_TZ('2024-09-12 14:30:00', 'America/Los_Angeles', 'UTC')" to + """"2024-09-12T21:30:00.000000Z"""", + ) + + val booleanValues = + mapOf( + "TRUE" to "true", + "FALSE" to "false", + ) + + val enumValues = + mapOf( + "'a'" to """"a"""", + "'b'" to """"b"""", + "'c'" to """"c"""", + ) + + // Encoded into base64 + val binaryValues = + mapOf( + "X'89504E470D0A1A0A0000000D49484452'" to """"iVBORw0KGgoAAAANSUhEUg=="""", + ) + + override val testCases: Map = + listOf( + MySqlDatatypeTestCase( + "BOOLEAN", + booleanValues, + LeafAirbyteSchemaType.BOOLEAN, + ), + MySqlDatatypeTestCase( + "VARCHAR(10)", + stringValues, + LeafAirbyteSchemaType.STRING, + ), + MySqlDatatypeTestCase( + "DECIMAL(10,2)", + decimalValues, + LeafAirbyteSchemaType.NUMBER, + ), + MySqlDatatypeTestCase( + "DECIMAL(10,2) UNSIGNED", + decimalValues, + LeafAirbyteSchemaType.NUMBER, + ), + MySqlDatatypeTestCase( + "DECIMAL UNSIGNED", + zeroPrecisionDecimalValues, + LeafAirbyteSchemaType.INTEGER, + isGlobal = false, + ), + MySqlDatatypeTestCase( + "DECIMAL UNSIGNED", + zeroPrecisionDecimalCdcValues, + LeafAirbyteSchemaType.INTEGER, + isStream = false, + ), + MySqlDatatypeTestCase("FLOAT", floatValues, LeafAirbyteSchemaType.NUMBER), + MySqlDatatypeTestCase( + "FLOAT(7,4)", + floatValues, + LeafAirbyteSchemaType.NUMBER, + ), + MySqlDatatypeTestCase( + "FLOAT(53,8)", + floatValues, + LeafAirbyteSchemaType.NUMBER, + ), + MySqlDatatypeTestCase("DOUBLE", decimalValues, LeafAirbyteSchemaType.NUMBER), + MySqlDatatypeTestCase( + "DOUBLE UNSIGNED", + decimalValues, + LeafAirbyteSchemaType.NUMBER, + ), + MySqlDatatypeTestCase( + "TINYINT", + tinyintValues, + LeafAirbyteSchemaType.INTEGER, + ), + MySqlDatatypeTestCase( + "TINYINT UNSIGNED", + tinyintValues, + LeafAirbyteSchemaType.INTEGER, + ), + MySqlDatatypeTestCase( + "SMALLINT", + tinyintValues, + LeafAirbyteSchemaType.INTEGER, + ), + MySqlDatatypeTestCase( + "MEDIUMINT", + tinyintValues, + LeafAirbyteSchemaType.INTEGER, + ), + MySqlDatatypeTestCase("BIGINT", intValues, LeafAirbyteSchemaType.INTEGER), + MySqlDatatypeTestCase( + "SMALLINT UNSIGNED", + tinyintValues, + LeafAirbyteSchemaType.INTEGER, + ), + MySqlDatatypeTestCase( + "MEDIUMINT UNSIGNED", + tinyintValues, + LeafAirbyteSchemaType.INTEGER, + ), + MySqlDatatypeTestCase( + "BIGINT UNSIGNED", + intValues, + LeafAirbyteSchemaType.INTEGER, + ), + MySqlDatatypeTestCase("INT", intValues, LeafAirbyteSchemaType.INTEGER), + MySqlDatatypeTestCase( + "INT UNSIGNED", + intValues, + LeafAirbyteSchemaType.INTEGER, + ), + MySqlDatatypeTestCase("DATE", dateValues, LeafAirbyteSchemaType.DATE), + MySqlDatatypeTestCase( + "TIMESTAMP", + timestampValues, + LeafAirbyteSchemaType.TIMESTAMP_WITH_TIMEZONE, + ), + MySqlDatatypeTestCase( + "DATETIME", + dateTimeValues, + LeafAirbyteSchemaType.TIMESTAMP_WITHOUT_TIMEZONE, + ), + MySqlDatatypeTestCase( + "TIME", + timeValues, + LeafAirbyteSchemaType.TIME_WITHOUT_TIMEZONE, + ), + MySqlDatatypeTestCase("YEAR", yearValues, LeafAirbyteSchemaType.INTEGER), + MySqlDatatypeTestCase( + "VARBINARY(255)", + binaryValues, + LeafAirbyteSchemaType.BINARY, + ), + MySqlDatatypeTestCase( + "BIT", + bitValues, + LeafAirbyteSchemaType.BOOLEAN, + ), + MySqlDatatypeTestCase( + "BIT(8)", + longBitValues, + LeafAirbyteSchemaType.INTEGER, + isGlobal = false, + ), + MySqlDatatypeTestCase( + "BIT(8)", + longBitCdcValues, + LeafAirbyteSchemaType.INTEGER, + isStream = false, + ), + MySqlDatatypeTestCase( + "JSON", + jsonValues, + LeafAirbyteSchemaType.STRING, + isGlobal = false, + ), + MySqlDatatypeTestCase( + "JSON", + jsonCdcValues, + LeafAirbyteSchemaType.STRING, + isStream = false, + ), + MySqlDatatypeTestCase( + "ENUM('a', 'b', 'c')", + enumValues, + LeafAirbyteSchemaType.STRING, + ), + ) + .associateBy { it.id } +} + +data class MySqlDatatypeTestCase( + val sqlType: String, + val sqlToAirbyte: Map, + override val expectedAirbyteSchemaType: AirbyteSchemaType, + override val isGlobal: Boolean = true, + override val isStream: Boolean = true, +) : DatatypeTestCase { + + private val typeName: String + get() = + sqlType + .replace("[^a-zA-Z0-9]".toRegex(), " ") + .trim() + .replace(" +".toRegex(), "_") + .lowercase() + + override val id: String + get() = "tbl_$typeName" + + override val fieldName: String + get() = "col_$typeName" + + override val expectedData: List + get() = + sqlToAirbyte.values.map { Jsons.readTree("""{"${fieldName}":$it}""").get(fieldName) } + + val ddl: List + get() = + listOf( + "CREATE TABLE IF NOT EXISTS $id " + + "(pk INT AUTO_INCREMENT, $fieldName $sqlType, PRIMARY KEY (pk))", + "TRUNCATE TABLE $id", + ) + + val dml: List + get() = sqlToAirbyte.keys.map { "INSERT INTO $id ($fieldName) VALUES ($it)" } +} diff --git a/airbyte-integrations/connectors/source-mysql/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlCdcDatatypeIntegrationTest.kt b/airbyte-integrations/connectors/source-mysql/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlCdcDatatypeIntegrationTest.kt deleted file mode 100644 index de98f82776f1..000000000000 --- a/airbyte-integrations/connectors/source-mysql/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlCdcDatatypeIntegrationTest.kt +++ /dev/null @@ -1,464 +0,0 @@ -/* - * Copyright (c) 2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.source.mysql - -import com.fasterxml.jackson.databind.JsonNode -import com.fasterxml.jackson.databind.node.ObjectNode -import io.airbyte.cdk.ClockFactory -import io.airbyte.cdk.command.CliRunner -import io.airbyte.cdk.data.AirbyteSchemaType -import io.airbyte.cdk.data.LeafAirbyteSchemaType -import io.airbyte.cdk.jdbc.JdbcConnectionFactory -import io.airbyte.cdk.output.BufferingOutputConsumer -import io.airbyte.cdk.util.Jsons -import io.airbyte.integrations.source.mysql.MysqlContainerFactory.execAsRoot -import io.airbyte.protocol.models.v0.AirbyteMessage -import io.airbyte.protocol.models.v0.AirbyteRecordMessage -import io.airbyte.protocol.models.v0.AirbyteStream -import io.airbyte.protocol.models.v0.CatalogHelpers -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream -import io.airbyte.protocol.models.v0.SyncMode -import io.github.oshai.kotlinlogging.KotlinLogging -import java.sql.Connection -import org.junit.jupiter.api.Assertions -import org.junit.jupiter.api.BeforeAll -import org.junit.jupiter.api.DynamicContainer -import org.junit.jupiter.api.DynamicNode -import org.junit.jupiter.api.DynamicTest -import org.junit.jupiter.api.TestFactory -import org.junit.jupiter.api.Timeout -import org.testcontainers.containers.MySQLContainer - -private val log = KotlinLogging.logger {} - -class MysqlCdcDatatypeIntegrationTest { - @TestFactory - @Timeout(300) - fun syncTests(): Iterable { - val read: DynamicNode = - DynamicTest.dynamicTest("read") { - Assertions.assertFalse(LazyValues.actualReads.isEmpty()) - } - val cases: List = - allStreamNamesAndRecordData.keys.map { streamName: String -> - DynamicContainer.dynamicContainer( - streamName, - listOf( - DynamicTest.dynamicTest("records") { records(streamName) }, - ), - ) - } - return listOf(read) + cases - } - - object LazyValues { - val actualStreams: Map by lazy { - val output: BufferingOutputConsumer = CliRunner.source("discover", config()).run() - output.catalogs().firstOrNull()?.streams?.filterNotNull()?.associateBy { it.name } - ?: mapOf() - } - - val configuredCatalog: ConfiguredAirbyteCatalog by lazy { - val configuredStreams: List = - allStreamNamesAndRecordData.keys - .mapNotNull { actualStreams[it] } - .map { - CatalogHelpers.toDefaultConfiguredStream(it) - .withCursorField( - listOf(MysqlCdcMetaFields.CDC_CURSOR.id), - ) - } - - for (configuredStream in configuredStreams) { - if (configuredStream.stream.supportedSyncModes.contains(SyncMode.INCREMENTAL)) { - configuredStream.syncMode = SyncMode.INCREMENTAL - } - } - ConfiguredAirbyteCatalog().withStreams(configuredStreams) - } - - val allReadMessages: List by lazy { - // only get messsages from the 2nd run - val lastStateMessageFromFirstRun = - CliRunner.source("read", config(), configuredCatalog).run().states().last() - - // insert - connectionFactory - .get() - .also { it.isReadOnly = false } - .use { connection: Connection -> - for (case in testCases) { - for (sql in case.sqlInsertStatements) { - log.info { "test case ${case.id}: executing $sql" } - connection.createStatement().use { stmt -> stmt.execute(sql) } - } - } - } - - // Run it in dbz mode on 2nd time: - CliRunner.source( - "read", - config(), - configuredCatalog, - listOf(lastStateMessageFromFirstRun) - ) - .run() - .messages() - } - - val actualReads: Map by lazy { - val result: Map = - allStreamNamesAndRecordData.keys.associateWith { - BufferingOutputConsumer(ClockFactory().fixed()) - } - for (msg in allReadMessages) { - result[streamName(msg) ?: continue]?.accept(msg) - } - result - } - - fun streamName(msg: AirbyteMessage): String? = - when (msg.type) { - AirbyteMessage.Type.RECORD -> msg.record?.stream - else -> null - } - } - - private fun records(streamName: String) { - val actualRead: BufferingOutputConsumer? = LazyValues.actualReads[streamName] - Assertions.assertNotNull(actualRead) - - fun sortedRecordData(data: List): JsonNode = - Jsons.createArrayNode().apply { addAll(data.sortedBy { it.toString() }) } - - val actualRecords: List = actualRead?.records() ?: listOf() - - val records = actualRecords.mapNotNull { it.data } - - records.forEach { jsonNode -> - if (jsonNode is ObjectNode) { - // Remove unwanted fields - jsonNode.remove("_ab_cdc_updated_at") - jsonNode.remove("_ab_cdc_deleted_at") - jsonNode.remove("_ab_cdc_cursor") - jsonNode.remove("_ab_cdc_log_file") - jsonNode.remove("_ab_cdc_log_pos") - } - } - val actual: JsonNode = sortedRecordData(records) - - log.info { "test case $streamName: emitted records $actual" } - val expected: JsonNode = sortedRecordData(allStreamNamesAndRecordData[streamName]!!) - - Assertions.assertEquals(expected, actual) - } - - companion object { - lateinit var dbContainer: MySQLContainer<*> - - fun config(): MysqlSourceConfigurationSpecification = - MysqlContainerFactory.cdcConfig(dbContainer) - - val connectionFactory: JdbcConnectionFactory by lazy { - JdbcConnectionFactory(MysqlSourceConfigurationFactory().make(config())) - } - - val bitValues = - mapOf( - "b'1'" to "true", - "b'0'" to "false", - ) - - val longBitValues = - mapOf( - "b'10101010'" to """"qg=="""", - ) - - val stringValues = - mapOf( - "'abcdef'" to """"abcdef"""", - "'ABCD'" to """"ABCD"""", - "'OXBEEF'" to """"OXBEEF"""", - ) - - val yearValues = - mapOf( - "1992" to """1992""", - "2002" to """2002""", - "70" to """1970""", - ) - - val precisionTwoDecimalValues = - mapOf( - "0.2" to """0.2""", - ) - - val floatValues = - mapOf( - "123.4567" to """123.4567""", - ) - - val zeroPrecisionDecimalValues = - mapOf( - "2" to """2.0""", - ) - - val tinyintValues = - mapOf( - "10" to "10", - "4" to "4", - "2" to "2", - ) - - val intValues = - mapOf( - "10" to "10", - "100000000" to "100000000", - "200000000" to "200000000", - ) - - val dateValues = - mapOf( - "'2022-01-01'" to """"2022-01-01"""", - ) - - val timeValues = - mapOf( - "'14:30:00'" to """"14:30:00.000000"""", - ) - - val dateTimeValues = - mapOf( - "'2024-09-13 14:30:00'" to """"2024-09-13T14:30:00.000000"""", - "'2024-09-13T14:40:00+00:00'" to """"2024-09-13T14:40:00.000000"""", - ) - - val timestampValues = - mapOf( - "'2024-09-12 14:30:00'" to """"2024-09-12T14:30:00.000000Z"""", - "CONVERT_TZ('2024-09-12 14:30:00', 'America/Los_Angeles', 'UTC')" to - """"2024-09-12T21:30:00.000000Z"""", - ) - - val booleanValues = - mapOf( - "TRUE" to "true", - "FALSE" to "false", - ) - - val testCases: List = - listOf( - TestCase( - "BOOLEAN", - booleanValues, - airbyteSchemaType = LeafAirbyteSchemaType.BOOLEAN, - cursor = false, - ), - TestCase( - "VARCHAR(10)", - stringValues, - airbyteSchemaType = LeafAirbyteSchemaType.STRING, - ), - TestCase( - "DECIMAL(10,2)", - precisionTwoDecimalValues, - airbyteSchemaType = LeafAirbyteSchemaType.NUMBER, - ), - TestCase( - "DECIMAL(10,2) UNSIGNED", - precisionTwoDecimalValues, - airbyteSchemaType = LeafAirbyteSchemaType.NUMBER, - ), - TestCase( - "DECIMAL UNSIGNED", - zeroPrecisionDecimalValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER, - ), - TestCase( - "FLOAT", - precisionTwoDecimalValues, - airbyteSchemaType = LeafAirbyteSchemaType.NUMBER - ), - TestCase( - "FLOAT(7,4)", - floatValues, - airbyteSchemaType = LeafAirbyteSchemaType.NUMBER, - ), - TestCase( - "FLOAT(53,8)", - floatValues, - airbyteSchemaType = LeafAirbyteSchemaType.NUMBER, - ), - TestCase( - "DOUBLE", - precisionTwoDecimalValues, - airbyteSchemaType = LeafAirbyteSchemaType.NUMBER - ), - TestCase( - "DOUBLE UNSIGNED", - precisionTwoDecimalValues, - airbyteSchemaType = LeafAirbyteSchemaType.NUMBER, - ), - TestCase( - "TINYINT", - tinyintValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER, - ), - TestCase( - "TINYINT UNSIGNED", - tinyintValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER, - ), - TestCase( - "SMALLINT", - tinyintValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER, - ), - TestCase( - "MEDIUMINT", - tinyintValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER, - ), - TestCase("BIGINT", intValues, airbyteSchemaType = LeafAirbyteSchemaType.INTEGER), - TestCase( - "SMALLINT UNSIGNED", - tinyintValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER, - ), - TestCase( - "MEDIUMINT UNSIGNED", - tinyintValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER, - ), - TestCase( - "BIGINT UNSIGNED", - intValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER, - ), - TestCase("INT", intValues, airbyteSchemaType = LeafAirbyteSchemaType.INTEGER), - TestCase( - "INT UNSIGNED", - intValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER, - ), - TestCase("DATE", dateValues, airbyteSchemaType = LeafAirbyteSchemaType.DATE), - TestCase( - "TIMESTAMP", - timestampValues, - airbyteSchemaType = LeafAirbyteSchemaType.TIMESTAMP_WITH_TIMEZONE, - ), - TestCase( - "DATETIME", - dateTimeValues, - airbyteSchemaType = LeafAirbyteSchemaType.TIMESTAMP_WITHOUT_TIMEZONE, - ), - TestCase( - "TIME", - timeValues, - airbyteSchemaType = LeafAirbyteSchemaType.TIME_WITHOUT_TIMEZONE, - ), - TestCase("YEAR", yearValues, airbyteSchemaType = LeafAirbyteSchemaType.INTEGER), - TestCase( - "BIT", - bitValues, - airbyteSchemaType = LeafAirbyteSchemaType.BOOLEAN, - cursor = false, - ), - TestCase( - "BIT(8)", - longBitValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER, - ), - ) - - val allStreamNamesAndRecordData: Map> = - testCases.flatMap { it.streamNamesToRecordData.toList() }.toMap() - - @JvmStatic - @BeforeAll - @Timeout(value = 300) - fun startAndProvisionTestContainer() { - dbContainer = - MysqlContainerFactory.exclusive( - "mysql:8.0", - MysqlContainerFactory.WithNetwork, - ) - - val gtidOn = - "SET @@GLOBAL.ENFORCE_GTID_CONSISTENCY = 'ON';" + - "SET @@GLOBAL.GTID_MODE = 'OFF_PERMISSIVE';" + - "SET @@GLOBAL.GTID_MODE = 'ON_PERMISSIVE';" + - "SET @@GLOBAL.GTID_MODE = 'ON';" - val grant = - "GRANT SELECT, RELOAD, SHOW DATABASES, REPLICATION SLAVE, REPLICATION CLIENT " + - "ON *.* TO '${dbContainer.username}'@'%';" - - dbContainer.execAsRoot(gtidOn) - dbContainer.execAsRoot(grant) - dbContainer.execAsRoot("FLUSH PRIVILEGES;") - connectionFactory - .get() - .also { it.isReadOnly = false } - .use { connection: Connection -> - for (case in testCases) { - for (sql in case.sqlStatements) { - log.info { "test case ${case.id}: executing $sql" } - connection.createStatement().use { stmt -> stmt.execute(sql) } - } - } - } - } - } - - data class TestCase( - val sqlType: String, - val sqlToAirbyte: Map, - val airbyteSchemaType: AirbyteSchemaType = LeafAirbyteSchemaType.STRING, - val cursor: Boolean = true, - val customDDL: List? = null, - ) { - val id: String - get() = - sqlType - .replace("[^a-zA-Z0-9]".toRegex(), " ") - .trim() - .replace(" +".toRegex(), "_") - .lowercase() - - val tableName: String - get() = "tbl_$id" - - val columnName: String - get() = "col_$id" - - val sqlStatements: List - get() { - return listOf( - "CREATE DATABASE IF NOT EXISTS test", - "USE test", - "CREATE TABLE IF NOT EXISTS $tableName " + "($columnName $sqlType PRIMARY KEY)", - "TRUNCATE TABLE $tableName", - ) - } - - val sqlInsertStatements: List - get() { - val result = - listOf("USE test;") + - sqlToAirbyte.keys.map { - "INSERT INTO $tableName ($columnName) VALUES ($it)" - } - return result - } - - val streamNamesToRecordData: Map> - get() { - val recordData: List = - sqlToAirbyte.values.map { Jsons.readTree("""{"${columnName}":$it}""") } - return mapOf(tableName to recordData) - } - } -} diff --git a/airbyte-integrations/connectors/source-mysql/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlContainerFactory.kt b/airbyte-integrations/connectors/source-mysql/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlContainerFactory.kt index ab092fe53967..14f3e4ab555a 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlContainerFactory.kt +++ b/airbyte-integrations/connectors/source-mysql/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlContainerFactory.kt @@ -25,6 +25,25 @@ object MysqlContainerFactory { } } + data object WithCdc : MysqlContainerModifier { + override fun modify(container: MySQLContainer<*>) { + container.start() + container.execAsRoot(GTID_ON) + container.execAsRoot(GRANT.format(container.username)) + container.execAsRoot("FLUSH PRIVILEGES;") + } + + const val GTID_ON = + "SET @@GLOBAL.ENFORCE_GTID_CONSISTENCY = 'ON';" + + "SET @@GLOBAL.GTID_MODE = 'OFF_PERMISSIVE';" + + "SET @@GLOBAL.GTID_MODE = 'ON_PERMISSIVE';" + + "SET @@GLOBAL.GTID_MODE = 'ON';" + + const val GRANT = + "GRANT SELECT, RELOAD, SHOW DATABASES, REPLICATION SLAVE, REPLICATION CLIENT " + + "ON *.* TO '%s'@'%%';" + } + data object WithCdcOff : MysqlContainerModifier { override fun modify(container: MySQLContainer<*>) { container.withCommand("--skip-log-bin") @@ -65,17 +84,7 @@ object MysqlContainerFactory { @JvmStatic fun cdcConfig(mySQLContainer: MySQLContainer<*>): MysqlSourceConfigurationSpecification = - MysqlSourceConfigurationSpecification().apply { - host = mySQLContainer.host - port = mySQLContainer.getMappedPort(MySQLContainer.MYSQL_PORT) - username = mySQLContainer.username - password = mySQLContainer.password - jdbcUrlParams = "" - database = "test" - checkpointTargetIntervalSeconds = 60 - concurrency = 1 - setMethodValue(CdcCursor()) - } + config(mySQLContainer).also { it.setMethodValue(CdcCursor()) } fun MySQLContainer<*>.execAsRoot(sql: String) { val cleanSql: String = sql.trim().removeSuffix(";") + ";" diff --git a/airbyte-integrations/connectors/source-mysql/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceDatatypeIntegrationTest.kt b/airbyte-integrations/connectors/source-mysql/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceDatatypeIntegrationTest.kt deleted file mode 100644 index d3d1c5c5c2d7..000000000000 --- a/airbyte-integrations/connectors/source-mysql/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceDatatypeIntegrationTest.kt +++ /dev/null @@ -1,466 +0,0 @@ -/* Copyright (c) 2024 Airbyte, Inc., all rights reserved. */ -package io.airbyte.integrations.source.mysql - -import com.fasterxml.jackson.databind.JsonNode -import io.airbyte.cdk.ClockFactory -import io.airbyte.cdk.command.CliRunner -import io.airbyte.cdk.data.AirbyteSchemaType -import io.airbyte.cdk.data.LeafAirbyteSchemaType -import io.airbyte.cdk.jdbc.JdbcConnectionFactory -import io.airbyte.cdk.output.BufferingOutputConsumer -import io.airbyte.cdk.util.Jsons -import io.airbyte.protocol.models.v0.AirbyteMessage -import io.airbyte.protocol.models.v0.AirbyteRecordMessage -import io.airbyte.protocol.models.v0.AirbyteStream -import io.airbyte.protocol.models.v0.AirbyteTraceMessage -import io.airbyte.protocol.models.v0.CatalogHelpers -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream -import io.airbyte.protocol.models.v0.SyncMode -import io.github.oshai.kotlinlogging.KotlinLogging -import java.sql.Connection -import org.junit.jupiter.api.Assertions -import org.junit.jupiter.api.BeforeAll -import org.junit.jupiter.api.DynamicContainer -import org.junit.jupiter.api.DynamicNode -import org.junit.jupiter.api.DynamicTest -import org.junit.jupiter.api.TestFactory -import org.junit.jupiter.api.Timeout -import org.testcontainers.containers.MySQLContainer - -private val log = KotlinLogging.logger {} - -class MysqlSourceDatatypeIntegrationTest { - @TestFactory - @Timeout(300) - fun syncTests(): Iterable { - val discover: DynamicNode = - DynamicTest.dynamicTest("discover") { - Assertions.assertFalse(LazyValues.actualStreams.isEmpty()) - } - val read: DynamicNode = - DynamicTest.dynamicTest("read") { - Assertions.assertFalse(LazyValues.actualReads.isEmpty()) - } - val cases: List = - allStreamNamesAndRecordData.keys.map { streamName: String -> - DynamicContainer.dynamicContainer( - streamName, - listOf( - DynamicTest.dynamicTest("discover") { discover(streamName) }, - DynamicTest.dynamicTest("records") { records(streamName) }, - ), - ) - } - return listOf(discover, read) + cases - } - - object LazyValues { - val actualStreams: Map by lazy { - val output: BufferingOutputConsumer = CliRunner.source("discover", config()).run() - output.catalogs().firstOrNull()?.streams?.filterNotNull()?.associateBy { it.name } - ?: mapOf() - } - - val configuredCatalog: ConfiguredAirbyteCatalog by lazy { - val configuredStreams: List = - allStreamNamesAndRecordData.keys - .mapNotNull { actualStreams[it] } - .map(CatalogHelpers::toDefaultConfiguredStream) - for (configuredStream in configuredStreams) { - if (configuredStream.stream.supportedSyncModes.contains(SyncMode.INCREMENTAL)) { - configuredStream.syncMode = SyncMode.INCREMENTAL - } - } - ConfiguredAirbyteCatalog().withStreams(configuredStreams) - } - - val allReadMessages: List by lazy { - CliRunner.source("read", config(), configuredCatalog).run().messages() - } - - val actualReads: Map by lazy { - val result: Map = - allStreamNamesAndRecordData.keys.associateWith { - BufferingOutputConsumer(ClockFactory().fixed()) - } - for (msg in allReadMessages) { - result[streamName(msg) ?: continue]?.accept(msg) - } - result - } - - fun streamName(msg: AirbyteMessage): String? = - when (msg.type) { - AirbyteMessage.Type.RECORD -> msg.record?.stream - AirbyteMessage.Type.STATE -> msg.state?.stream?.streamDescriptor?.name - AirbyteMessage.Type.TRACE -> - when (msg.trace?.type) { - AirbyteTraceMessage.Type.ERROR -> msg.trace?.error?.streamDescriptor?.name - AirbyteTraceMessage.Type.ESTIMATE -> msg.trace?.estimate?.name - AirbyteTraceMessage.Type.STREAM_STATUS -> - msg.trace?.streamStatus?.streamDescriptor?.name - AirbyteTraceMessage.Type.ANALYTICS -> null - null -> null - } - else -> null - } - } - - private fun discover(streamName: String) { - val actualStream: AirbyteStream? = LazyValues.actualStreams[streamName] - log.info { "discover result: ${LazyValues.actualStreams}" } - log.info { "streamName: $streamName" } - Assertions.assertNotNull(actualStream) - log.info { - "test case $streamName: discovered stream ${ - Jsons.valueToTree( - actualStream, - ) - }" - } - val testCase: TestCase = - testCases.find { it.streamNamesToRecordData.keys.contains(streamName) }!! - val isIncrementalSupported: Boolean = - actualStream!!.supportedSyncModes.contains(SyncMode.INCREMENTAL) - val jsonSchema: JsonNode = actualStream.jsonSchema?.get("properties")!! - if (streamName == testCase.tableName) { - val actualSchema: JsonNode = jsonSchema[testCase.columnName] - Assertions.assertNotNull(actualSchema) - val expectedSchema: JsonNode = testCase.airbyteSchemaType.asJsonSchema() - Assertions.assertEquals(expectedSchema, actualSchema) - if (testCase.cursor) { - Assertions.assertTrue(isIncrementalSupported) - } else { - Assertions.assertFalse(isIncrementalSupported) - } - } - } - - private fun records(streamName: String) { - val actualRead: BufferingOutputConsumer? = LazyValues.actualReads[streamName] - Assertions.assertNotNull(actualRead) - - fun sortedRecordData(data: List): JsonNode = - Jsons.createArrayNode().apply { addAll(data.sortedBy { it.toString() }) } - - val actualRecords: List = actualRead?.records() ?: listOf() - - val actual: JsonNode = sortedRecordData(actualRecords.mapNotNull { it.data }) - log.info { "test case $streamName: emitted records $actual" } - val expected: JsonNode = sortedRecordData(allStreamNamesAndRecordData[streamName]!!) - - Assertions.assertEquals(expected, actual) - } - - companion object { - lateinit var dbContainer: MySQLContainer<*> - - fun config(): MysqlSourceConfigurationSpecification = - MysqlContainerFactory.config(dbContainer) - - val connectionFactory: JdbcConnectionFactory by lazy { - JdbcConnectionFactory(MysqlSourceConfigurationFactory().make(config())) - } - - val bitValues = - mapOf( - "b'1'" to "true", - "b'0'" to "false", - ) - - val longBitValues = - mapOf( - "b'10101010'" to """-86""", - ) - - val stringValues = - mapOf( - "'abcdef'" to """"abcdef"""", - "'ABCD'" to """"ABCD"""", - "'OXBEEF'" to """"OXBEEF"""", - ) - - val jsonValues = mapOf("""'{"col1": "v1"}'""" to """"{\"col1\": \"v1\"}"""") - - val yearValues = - mapOf( - "1992" to """1992""", - "2002" to """2002""", - "70" to """1970""", - ) - - val decimalValues = - mapOf( - "0.2" to """0.2""", - ) - - val zeroPrecisionDecimalValues = - mapOf( - "2" to """2""", - ) - - val tinyintValues = - mapOf( - "10" to "10", - "4" to "4", - "2" to "2", - ) - - val intValues = - mapOf( - "10" to "10", - "100000000" to "100000000", - "200000000" to "200000000", - ) - - val dateValues = - mapOf( - "'2022-01-01'" to """"2022-01-01"""", - ) - - val timeValues = - mapOf( - "'14:30:00'" to """"14:30:00.000000"""", - ) - - val dateTimeValues = - mapOf( - "'2024-09-13 14:30:00'" to """"2024-09-13T14:30:00.000000"""", - "'2024-09-13T14:40:00+00:00'" to """"2024-09-13T14:40:00.000000"""" - ) - - val timestampValues = - mapOf( - "'2024-09-12 14:30:00'" to """"2024-09-12T14:30:00.000000Z"""", - "CONVERT_TZ('2024-09-12 14:30:00', 'America/Los_Angeles', 'UTC')" to - """"2024-09-12T21:30:00.000000Z"""", - ) - - val booleanValues = - mapOf( - "TRUE" to "true", - "FALSE" to "false", - ) - - val enumValues = - mapOf( - "'a'" to """"a"""", - "'b'" to """"b"""", - "'c'" to """"c"""", - ) - - // Encoded into base64 - val binaryValues = - mapOf( - "X'89504E470D0A1A0A0000000D49484452'" to """"iVBORw0KGgoAAAANSUhEUg=="""", - ) - - val testCases: List = - listOf( - TestCase( - "BOOLEAN", - booleanValues, - airbyteSchemaType = LeafAirbyteSchemaType.BOOLEAN, - cursor = false - ), - TestCase( - "VARCHAR(10)", - stringValues, - airbyteSchemaType = LeafAirbyteSchemaType.STRING - ), - TestCase( - "DECIMAL(10,2)", - decimalValues, - airbyteSchemaType = LeafAirbyteSchemaType.NUMBER - ), - TestCase( - "DECIMAL(10,2) UNSIGNED", - decimalValues, - airbyteSchemaType = LeafAirbyteSchemaType.NUMBER - ), - TestCase( - "DECIMAL UNSIGNED", - zeroPrecisionDecimalValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER - ), - TestCase("FLOAT", decimalValues, airbyteSchemaType = LeafAirbyteSchemaType.NUMBER), - TestCase( - "FLOAT(7,4)", - decimalValues, - airbyteSchemaType = LeafAirbyteSchemaType.NUMBER - ), - TestCase( - "FLOAT(53,8)", - decimalValues, - airbyteSchemaType = LeafAirbyteSchemaType.NUMBER - ), - TestCase("DOUBLE", decimalValues, airbyteSchemaType = LeafAirbyteSchemaType.NUMBER), - TestCase( - "DOUBLE UNSIGNED", - decimalValues, - airbyteSchemaType = LeafAirbyteSchemaType.NUMBER - ), - TestCase( - "TINYINT", - tinyintValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER - ), - TestCase( - "TINYINT UNSIGNED", - tinyintValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER - ), - TestCase( - "SMALLINT", - tinyintValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER - ), - TestCase( - "MEDIUMINT", - tinyintValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER - ), - TestCase("BIGINT", intValues, airbyteSchemaType = LeafAirbyteSchemaType.INTEGER), - TestCase( - "SMALLINT UNSIGNED", - tinyintValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER - ), - TestCase( - "MEDIUMINT UNSIGNED", - tinyintValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER - ), - TestCase( - "BIGINT UNSIGNED", - intValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER - ), - TestCase("INT", intValues, airbyteSchemaType = LeafAirbyteSchemaType.INTEGER), - TestCase( - "INT UNSIGNED", - intValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER - ), - TestCase("DATE", dateValues, airbyteSchemaType = LeafAirbyteSchemaType.DATE), - TestCase( - "TIMESTAMP", - timestampValues, - airbyteSchemaType = LeafAirbyteSchemaType.TIMESTAMP_WITH_TIMEZONE - ), - TestCase( - "DATETIME", - dateTimeValues, - airbyteSchemaType = LeafAirbyteSchemaType.TIMESTAMP_WITHOUT_TIMEZONE - ), - TestCase( - "TIME", - timeValues, - airbyteSchemaType = LeafAirbyteSchemaType.TIME_WITHOUT_TIMEZONE - ), - TestCase("YEAR", yearValues, airbyteSchemaType = LeafAirbyteSchemaType.INTEGER), - TestCase( - "VARBINARY(255)", - binaryValues, - airbyteSchemaType = LeafAirbyteSchemaType.BINARY, - cursor = true, - noPK = false - ), - TestCase( - "BIT", - bitValues, - airbyteSchemaType = LeafAirbyteSchemaType.BOOLEAN, - cursor = false - ), - TestCase( - "BIT(8)", - longBitValues, - airbyteSchemaType = LeafAirbyteSchemaType.INTEGER - ), - TestCase( - "JSON", - jsonValues, - airbyteSchemaType = LeafAirbyteSchemaType.STRING, - noPK = true - ), - TestCase( - "ENUM('a', 'b', 'c')", - enumValues, - airbyteSchemaType = LeafAirbyteSchemaType.STRING, - noPK = true - ), - ) - - val allStreamNamesAndRecordData: Map> = - testCases.flatMap { it.streamNamesToRecordData.toList() }.toMap() - - @JvmStatic - @BeforeAll - @Timeout(value = 300) - fun startAndProvisionTestContainer() { - dbContainer = - MysqlContainerFactory.exclusive( - "mysql:8.0", - MysqlContainerFactory.WithNetwork, - ) - connectionFactory - .get() - .also { it.isReadOnly = false } - .use { connection: Connection -> - for (case in testCases) { - for (sql in case.sqlStatements) { - log.info { "test case ${case.id}: executing $sql" } - connection.createStatement().use { stmt -> stmt.execute(sql) } - } - } - } - } - } - - data class TestCase( - val sqlType: String, - val sqlToAirbyte: Map, - val airbyteSchemaType: AirbyteSchemaType = LeafAirbyteSchemaType.STRING, - val cursor: Boolean = true, - val noPK: Boolean = false, - val customDDL: List? = null, - ) { - val id: String - get() = - sqlType - .replace("[^a-zA-Z0-9]".toRegex(), " ") - .trim() - .replace(" +".toRegex(), "_") - .lowercase() - - val tableName: String - get() = "tbl_$id" - - val columnName: String - get() = "col_$id" - - val sqlStatements: List - get() { - val ddl: List = - listOf( - "CREATE DATABASE IF NOT EXISTS test", - "USE test", - "CREATE TABLE IF NOT EXISTS $tableName " + - "($columnName $sqlType ${if (noPK) "" else "PRIMARY KEY"})", - "TRUNCATE TABLE $tableName", - ) - val dml: List = - sqlToAirbyte.keys.map { "INSERT INTO $tableName ($columnName) VALUES ($it)" } - - return ddl + dml - } - - val streamNamesToRecordData: Map> - get() { - val recordData: List = - sqlToAirbyte.values.map { Jsons.readTree("""{"${columnName}":$it}""") } - return mapOf(tableName to recordData) - } - } -} diff --git a/airbyte-integrations/connectors/source-pardot/manifest.yaml b/airbyte-integrations/connectors/source-pardot/manifest.yaml index 495ec71883a6..13ac303ce829 100644 --- a/airbyte-integrations/connectors/source-pardot/manifest.yaml +++ b/airbyte-integrations/connectors/source-pardot/manifest.yaml @@ -1,1226 +1,4795 @@ -version: 5.13.0 - -type: DeclarativeSource - -check: - type: CheckStream - stream_names: - - campaigns - -definitions: - streams: - campaigns: - type: DeclarativeStream - name: campaigns - primary_key: - - id - retriever: - type: SimpleRetriever - requester: - $ref: "#/definitions/base_requester" - path: campaign/version/4/do/query - http_method: GET - request_parameters: - format: json - sort_by: id - sort_order: ascending - created_after: "{{ format_datetime(config['start_date'], '%Y-%m-%dT%H:%M:%SZ') }}" - request_headers: - Pardot-Business-Unit-Id: "{{ config[\"pardot_business_unit_id\"] }}" - record_selector: - type: RecordSelector - extractor: - type: DpathExtractor - field_path: - - result - - campaign - paginator: - type: DefaultPaginator - page_token_option: - type: RequestOption - inject_into: request_parameter - field_name: id_greater_than - pagination_strategy: - type: CursorPagination - cursor_value: "{{ response.get(\"campaign\", {})[-1].get(\"id\", {}) }}" - stop_condition: "{{ not response.get(\"campaign\", {})[-1].get(\"id\", {}) }}" - schema_loader: - type: InlineSchemaLoader - schema: - $ref: "#/schemas/campaigns" - email_clicks: - type: DeclarativeStream - name: email_clicks - primary_key: - - id - retriever: - type: SimpleRetriever - requester: - $ref: "#/definitions/base_requester" - path: emailClick/version/4/do/query - http_method: GET - request_parameters: - format: json - created_after: "{{ format_datetime(config['start_date'], '%Y-%m-%dT%H:%M:%SZ') }}" - request_headers: - Pardot-Business-Unit-Id: "{{ config[\"pardot_business_unit_id\"] }}" - record_selector: - type: RecordSelector - extractor: - type: DpathExtractor - field_path: - - result - - emailClick - paginator: - type: DefaultPaginator - page_token_option: - type: RequestOption - inject_into: request_parameter - field_name: id_greater_than - pagination_strategy: - type: CursorPagination - cursor_value: "{{ response.get(\"emailClick\", {})[-1].get(\"id\", {}) }}" - stop_condition: "{{ not response.get(\"emailClick\", {})[-1].get(\"id\", {}) }}" - schema_loader: - type: InlineSchemaLoader - schema: - $ref: "#/schemas/email_clicks" - list_membership: - type: DeclarativeStream - name: list_membership - primary_key: - - id - retriever: - type: SimpleRetriever - requester: - $ref: "#/definitions/base_requester" - path: listMembership/version/4/do/query - http_method: GET - request_parameters: - format: json - sort_by: id - sort_order: ascending - created_after: "{{ format_datetime(config['start_date'], '%Y-%m-%dT%H:%M:%SZ') }}" - request_headers: - Pardot-Business-Unit-Id: "{{ config[\"pardot_business_unit_id\"] }}" - record_selector: - type: RecordSelector - extractor: - type: DpathExtractor - field_path: - - result - - list_membership - paginator: - type: DefaultPaginator - page_token_option: - type: RequestOption - inject_into: request_parameter - field_name: updated_after - pagination_strategy: - type: CursorPagination - cursor_value: >- - {{ response.get("list_membership", {})[-1].get("updated_at", {}) - }} - stop_condition: >- - {{ not response.get("list_membership", {})[-1].get("updated_at", - {}) }} - schema_loader: - type: InlineSchemaLoader - schema: - $ref: "#/schemas/list_membership" - lists: - type: DeclarativeStream - name: lists - primary_key: - - id - retriever: - type: SimpleRetriever - requester: - $ref: "#/definitions/base_requester" - path: list/version/4/do/query - http_method: GET - request_parameters: - format: json - sort_by: updated_at - sort_order: ascending - created_after: "{{ format_datetime(config['start_date'], '%Y-%m-%dT%H:%M:%SZ') }}" - request_headers: - Pardot-Business-Unit-Id: "{{ config[\"pardot_business_unit_id\"] }}" - record_selector: - type: RecordSelector - extractor: - type: DpathExtractor - field_path: - - result - - list - paginator: - type: DefaultPaginator - page_token_option: - type: RequestOption - inject_into: request_parameter - field_name: updated_after - pagination_strategy: - type: CursorPagination - cursor_value: "{{ response.get(\"list\", {})[-1].get(\"updated_at\", {}) }}" - stop_condition: "{{ not response.get(\"list\", {})[-1].get(\"updated_at\", {}) }}" - schema_loader: - type: InlineSchemaLoader - schema: - $ref: "#/schemas/lists" - prospect_accounts: - type: DeclarativeStream - name: prospect_accounts - primary_key: - - id - retriever: - type: SimpleRetriever - requester: - $ref: "#/definitions/base_requester" - path: prospectAccount/version/4/do/query - http_method: GET - request_parameters: - format: json - sort_by: updated_at - sort_order: ascending - created_after: "{{ format_datetime(config['start_date'], '%Y-%m-%dT%H:%M:%SZ') }}" - request_headers: - Pardot-Business-Unit-Id: "{{ config[\"pardot_business_unit_id\"] }}" - record_selector: - type: RecordSelector - extractor: - type: DpathExtractor - field_path: - - result - - prospectAccount - paginator: - type: DefaultPaginator - page_token_option: - type: RequestOption - inject_into: request_parameter - field_name: updated_after - pagination_strategy: - type: CursorPagination - cursor_value: >- - {{ response.get("prospectAccount", {})[-1].get("updated_at", {}) - }} - stop_condition: >- - {{ not response.get("prospectAccount", {})[-1].get("updated_at", - {}) }} - schema_loader: - type: InlineSchemaLoader - schema: - $ref: "#/schemas/prospect_accounts" - prospects: - type: DeclarativeStream - name: prospects - primary_key: - - id - retriever: - type: SimpleRetriever - requester: - $ref: "#/definitions/base_requester" - path: prospect/version/4/do/query - http_method: GET - request_parameters: - format: json - sort_by: updated_at - sort_order: ascending - created_after: "{{ format_datetime(config['start_date'], '%Y-%m-%dT%H:%M:%SZ') }}" - request_headers: - Pardot-Business-Unit-Id: "{{ config[\"pardot_business_unit_id\"] }}" - record_selector: - type: RecordSelector - extractor: - type: DpathExtractor - field_path: - - result - - prospect - paginator: - type: DefaultPaginator - page_token_option: - type: RequestOption - inject_into: request_parameter - field_name: updated_after - pagination_strategy: - type: CursorPagination - cursor_value: "{{ response.get(\"prospect\", {})[-1].get(\"updated_at\", {}) }}" - stop_condition: "{{ not response.get(\"prospect\", {})[-1].get(\"updated_at\", {}) }}" - schema_loader: - type: InlineSchemaLoader - schema: - $ref: "#/schemas/prospects" - users: - type: DeclarativeStream - name: users - primary_key: - - id - retriever: - type: SimpleRetriever - requester: - $ref: "#/definitions/base_requester" - path: user - http_method: GET - request_parameters: - format: json - created_after: "{{ format_datetime(config['start_date'], '%Y-%m-%dT%H:%M:%SZ') }}" - request_headers: - Pardot-Business-Unit-Id: "{{ config[\"pardot_business_unit_id\"] }}" - record_selector: - type: RecordSelector - extractor: - type: DpathExtractor - field_path: - - result - - user - paginator: - type: DefaultPaginator - page_token_option: - type: RequestOption - inject_into: request_parameter - field_name: created_after - pagination_strategy: - type: CursorPagination - cursor_value: "{{ response.get(\"user\", {})[-1].get(\"created_at\", {}) }}" - stop_condition: "{{ not response.get(\"user\", {})[-1].get(\"created_at\", {}) }}" - schema_loader: - type: InlineSchemaLoader - schema: - $ref: "#/schemas/users" - visitor_activities: - type: DeclarativeStream - name: visitor_activities - primary_key: - - id - retriever: - type: SimpleRetriever - requester: - $ref: "#/definitions/base_requester" - path: visitorActivity/version/4/do/query - http_method: GET - request_parameters: - format: json - created_after: "{{ format_datetime(config['start_date'], '%Y-%m-%dT%H:%M:%SZ') }}" - request_headers: - Pardot-Business-Unit-Id: "{{ config[\"pardot_business_unit_id\"] }}" - record_selector: - type: RecordSelector - extractor: - type: DpathExtractor - field_path: - - result - - visitor_activity - paginator: - type: DefaultPaginator - page_token_option: - type: RequestOption - inject_into: request_parameter - field_name: id_greater_than - pagination_strategy: - type: CursorPagination - cursor_value: "{{ response.get(\"visitor_activity\", {})[-1].get(\"id\", {}) }}" - stop_condition: "{{ not response.get(\"visitor_activity\", {})[-1].get(\"id\", {}) }}" - schema_loader: - type: InlineSchemaLoader - schema: - $ref: "#/schemas/visitor_activities" - visitors: - type: DeclarativeStream - name: visitors - primary_key: - - id - retriever: - type: SimpleRetriever - requester: - $ref: "#/definitions/base_requester" - path: visitor/version/4/do/query - http_method: GET - request_parameters: - format: json - sort_by: updated_at - sort_order: ascending - created_after: "{{ format_datetime(config['start_date'], '%Y-%m-%dT%H:%M:%SZ') }}" - request_headers: - Pardot-Business-Unit-Id: "{{ config[\"pardot_business_unit_id\"] }}" - record_selector: - type: RecordSelector - extractor: - type: DpathExtractor - field_path: - - result - - visitor - paginator: - type: DefaultPaginator - page_token_option: - type: RequestOption - inject_into: request_parameter - field_name: updated_after - pagination_strategy: - type: CursorPagination - cursor_value: "{{ response.get(\"visitor\", {})[-1].get(\"updated_at\", {}) }}" - stop_condition: "{{ not response.get(\"visitor\", {})[-1].get(\"updated_at\", {}) }}" - schema_loader: - type: InlineSchemaLoader - schema: - $ref: "#/schemas/visitors" - visits: - type: DeclarativeStream - name: visits - retriever: - type: SimpleRetriever - requester: - $ref: "#/definitions/base_requester" - path: visit/version/4/do/query - http_method: GET - request_parameters: - format: json - sort_order: ascending - created_after: "{{ format_datetime(config['start_date'], '%Y-%m-%dT%H:%M:%SZ') }}" - request_headers: - Pardot-Business-Unit-Id: "{{ config[\"pardot_business_unit_id\"] }}" - record_selector: - type: RecordSelector - extractor: - type: DpathExtractor - field_path: - - result - - visit - schema_loader: - type: InlineSchemaLoader - schema: - $ref: "#/schemas/visits" - opportunities: - type: DeclarativeStream - name: opportunities - primary_key: - - id - retriever: - type: SimpleRetriever - requester: - $ref: "#/definitions/base_requester" - path: opportunity/version/4/do/query - http_method: GET - request_parameters: - format: json - created_after: "{{ format_datetime(config['start_date'], '%Y-%m-%dT%H:%M:%SZ') }}" - request_headers: - Pardot-Business-Unit-Id: "{{ config[\"pardot_business_unit_id\"] }}" - record_selector: - type: RecordSelector - extractor: - type: DpathExtractor - field_path: - - result - - opportunity - paginator: - type: DefaultPaginator - page_token_option: - type: RequestOption - inject_into: request_parameter - field_name: created_after - pagination_strategy: - type: CursorPagination - cursor_value: "{{ response.get(\"opportunity\", {})[-1].get(\"created_at\", {}) }}" - stop_condition: >- - {{ not response.get("opportunity", {})[-1].get("created_at", {}) - }} - schema_loader: - type: InlineSchemaLoader - schema: - $ref: "#/schemas/opportunities" - base_requester: - type: HttpRequester - url_base: https://pi.pardot.com/api/ - authenticator: - type: OAuthAuthenticator - client_id: "{{ config[\"client_id\"] }}" - grant_type: refresh_token - client_secret: "{{ config[\"client_secret\"] }}" - refresh_token: "{{ config[\"client_refresh_token\"] }}" - refresh_request_body: {} - token_refresh_endpoint: >- - https://{{ 'test' if config['is_sandbox'] else 'login' - }}.salesforce.com/services/oauth2/token - -streams: - - $ref: "#/definitions/streams/campaigns" - - $ref: "#/definitions/streams/email_clicks" - - $ref: "#/definitions/streams/list_membership" - - $ref: "#/definitions/streams/lists" - - $ref: "#/definitions/streams/prospect_accounts" - - $ref: "#/definitions/streams/prospects" - - $ref: "#/definitions/streams/users" - - $ref: "#/definitions/streams/visitor_activities" - - $ref: "#/definitions/streams/visitors" - - $ref: "#/definitions/streams/visits" - # - $ref: "#/definitions/streams/opportunities" # Currently disabled because test account doesn't have any data - - -spec: - type: Spec - connection_specification: - type: object - $schema: http://json-schema.org/draft-07/schema# - required: - - client_id - - client_secret - - refresh_token - - pardot_business_unit_id - properties: - client_id: - type: string - description: The Consumer Key that can be found when viewing your app in Salesforce - airbyte_secret: true - order: 0 - is_sandbox: - type: boolean - description: >- - Whether or not the the app is in a Salesforce sandbox. If you do not - know what this, assume it is false. - default: false - order: 1 - start_date: - type: string - description: >- - UTC date and time in the format 2017-01-25T00:00:00Z. Any data before - this date will not be replicated. Leave blank to skip this filter - default: null - pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$ - examples: - - "2021-07-25T00:00:00Z" - order: 2 - client_secret: - type: string - description: >- - The Consumer Secret that can be found when viewing your app in - Salesforce - airbyte_secret: true - order: 3 - refresh_token: - type: string - description: >- - Salesforce Refresh Token used for Airbyte to access your Salesforce - account. If you don't know what this is, follow this guide - to retrieve it. - airbyte_secret: true - order: 4 - pardot_business_unit_id: - type: string - description: >- - Pardot Business ID, can be found at Setup > Pardot > Pardot Account - Setup - order: 5 - additionalProperties: true - -metadata: - autoImportSchema: - campaigns: false - email_clicks: false - list_membership: false - lists: false - prospect_accounts: false - prospects: false - users: false - visitor_activities: false - visitors: false - visits: false - opportunities: false - yamlComponents: - global: - - authenticator - testedStreams: - campaigns: - streamHash: ef6fbf0ab7ffa8f81749103d94235abd001d0f39 - email_clicks: - streamHash: 2d74c8aab6c19a77ed2315e36c2fcb18296b641a - list_membership: - streamHash: 33edc450360e922bd939bf7aea5bb756e8714d64 - lists: - streamHash: 984611ea793da473a3cfa6968fb01a4a36b6145f - prospect_accounts: - streamHash: 5938979e371a63739b281c64cd4f61064331d513 - prospects: - streamHash: 4bcf3735358cd3942c17750ef76e052faf5a034e - users: - streamHash: 21012c537f20aed85d8fca2a91c135a2ed9a8e34 - visitor_activities: - streamHash: 857eb94e1cf16eac22e0386450e79abfeffdf347 - visitors: - streamHash: 339bee895c4c803eccc98ba0b836fbc0cb36d937 - visits: - streamHash: c526ae3538c2c4f9b434e803521c509c0e5271a2 - opportunities: - streamHash: fe2484cadf31a482f5f5421ac4a70d14f03e8796 - assist: {} - -schemas: - campaigns: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - cost: - type: - - "null" - - integer - id: - type: - - integer - name: - type: - - "null" - - string - email_clicks: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - created_at: - type: - - "null" - - string - format: date-time - drip_program_action_id: - type: - - "null" - - integer - email_template_id: - type: - - "null" - - integer - id: - type: - - integer - list_email_id: - type: - - "null" - - integer - prospect_id: - type: - - "null" - - integer - tracker_redirect_id: - type: - - "null" - - integer - url: - type: - - "null" - - string - list_membership: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - created_at: - type: - - "null" - - string - format: date-time - id: - type: - - integer - list_id: - type: - - integer - opted_out: - type: - - "null" - - boolean - prospect_id: - type: - - integer - updated_at: - type: - - "null" - - string - format: date-time - lists: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - description: - type: - - "null" - - string - created_at: - type: - - "null" - - string - format: date-time - id: - type: - - integer - is_crm_visible: - type: - - "null" - - boolean - is_dynamic: - type: - - "null" - - boolean - is_public: - type: - - "null" - - boolean - name: - type: - - "null" - - string - title: - type: - - "null" - - string - updated_at: - type: - - "null" - - string - format: date-time - prospect_accounts: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - assigned_to: - type: - - "null" - - object - created_at: - type: - - "null" - - string - format: date-time - id: - type: - - integer - name: - type: - - "null" - - string - updated_at: - type: - - "null" - - string - format: date-time - prospects: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - address_one: - type: - - "null" - - string - address_two: - type: - - "null" - - string - annual_revenue: - type: - - "null" - - string - campaign_id: - type: - - "null" - - integer - city: - type: - - "null" - - string - comments: - type: - - "null" - - string - company: - type: - - "null" - - string - country: - type: - - "null" - - string - created_at: - type: - - "null" - - string - format: date-time - crm_account_fid: - type: - - "null" - - string - crm_contact_fid: - type: - - "null" - - string - crm_last_sync: - type: - - "null" - - string - format: date-time - crm_lead_fid: - type: - - "null" - - string - crm_owner_fid: - type: - - "null" - - string - crm_url: - type: - - "null" - - string - department: - type: - - "null" - - string - email: - type: - - "null" - - string - employees: - type: - - "null" - - string - fax: - type: - - "null" - - string - first_name: - type: - - "null" - - string - grade: - type: - - "null" - - string - id: - type: - - integer - industry: - type: - - "null" - - string - is_do_not_call: - type: - - "null" - - integer - is_do_not_email: - type: - - "null" - - integer - is_reviewed: - type: - - "null" - - integer - is_starred: - type: - - "null" - - integer - job_title: - type: - - "null" - - string - last_activity_at: - type: - - "null" - - string - format: date-time - last_name: - type: - - "null" - - string - notes: - type: - - "null" - - string - opted_out: - type: - - "null" - - integer - password: - type: - - "null" - - string - phone: - type: - - "null" - - string - prospect_account_id: - type: - - "null" - - integer - recent_interaction: - type: - - "null" - - string - salutation: - type: - - "null" - - string - score: - type: - - "null" - - integer - source: - type: - - "null" - - string - state: - type: - - "null" - - string - territory: - type: - - "null" - - string - updated_at: - type: - - "null" - - string - format: date-time - website: - type: - - "null" - - string - years_in_business: - type: - - "null" - - string - zip: - type: - - "null" - - string - users: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - created_at: - type: - - "null" - - string - format: date-time - email: - type: - - "null" - - string - first_name: - type: - - "null" - - string - id: - type: - - integer - job_title: - type: - - "null" - - string - last_name: - type: - - "null" - - string - role: - type: - - "null" - - string - updated_at: - type: - - "null" - - string - format: date-time - visitor_activities: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - type: - type: - - "null" - - integer - campaign: - type: - - "null" - - object - campaign_id: - type: - - "null" - - integer - created_at: - type: - - "null" - - string - format: date-time - details: - type: - - "null" - - string - email_id: - type: - - "null" - - integer - email_template_id: - type: - - "null" - - integer - file_id: - type: - - "null" - - integer - form_handler_id: - type: - - "null" - - integer - form_id: - type: - - "null" - - integer - id: - type: - - integer - landing_page_id: - type: - - "null" - - integer - list_email_id: - type: - - "null" - - integer - multivariate_test_variation_id: - type: - - "null" - - integer - paid_search_id_id: - type: - - "null" - - integer - prospect_id: - type: - - "null" - - integer - site_search_query_id: - type: - - "null" - - integer - type_name: - type: - - "null" - - string - updated_at: - type: - - "null" - - string - format: date-time - visitor_id: - type: - - "null" - - integer - visitor_page_view_id: - type: - - "null" - - integer - visitors: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - campaign_parameter: - type: - - "null" - - string - content_parameter: - type: - - "null" - - string - created_at: - type: - - "null" - - string - format: date-time - hostname: - type: - - "null" - - string - id: - type: - - integer - ip_address: - type: - - "null" - - string - medium_parameter: - type: - - "null" - - string - page_view_count: - type: - - "null" - - integer - source_parameter: - type: - - "null" - - string - term_parameter: - type: - - "null" - - string - updated_at: - type: - - "null" - - string - format: date-time - visits: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - type: - type: - - "null" - - integer - campaign: - type: - - "null" - - object - campaign_parameter: - type: - - "null" - - string - content_parameter: - type: - - "null" - - string - created_at: - type: - - "null" - - string - format: date-time - details: - type: - - "null" - - string - duration_in_seconds: - type: - - "null" - - integer - email: - type: - - "null" - - object - email_id: - type: - - "null" - - integer - email_template_id: - type: - - "null" - - integer - first_visitor_page_view_at: - type: - - "null" - - string - format: date-time - id: - type: - - integer - last_visitor_page_view_at: - type: - - "null" - - string - format: date-time - list_email_id: - type: - - "null" - - integer - medium_parameter: - type: - - "null" - - string - prospect_id: - type: - - "null" - - integer - source_parameter: - type: - - "null" - - string - term_parameter: - type: - - "null" - - string - type_name: - type: - - "null" - - string - updated_at: - type: - - "null" - - string - format: date-time - visit_id: - type: - - "null" - - integer - visitor_id: - type: - - "null" - - integer - visitor_page_view_count: - type: - - "null" - - integer - visitor_page_views: - type: - - "null" - - object - opportunities: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - type: - type: - - "null" - - string - campaign_id: - type: - - "null" - - integer - closed_at: - type: - - "null" - - string - format: date-time - created_at: - type: - - "null" - - string - format: date-time - id: - type: - - integer - name: - type: - - "null" - - string - probability: - type: - - "null" - - integer - stage: - type: - - "null" - - string - status: - type: - - "null" - - string - updated_at: - type: - - "null" - - string - format: date-time - value: - type: - - "null" - - number +version: 6.10.0 + +type: DeclarativeSource + +check: + type: CheckStream + stream_names: + - campaigns + +definitions: + streams: + campaigns: + type: DeclarativeStream + name: campaigns + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/campaigns + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,name,isDeleted,folderId,cost,parentCampaignId,createdById,updatedById,createdAt,updatedAt,salesforceId + deleted: "{{ 'all' if not next_page_token.next_page_token }}" + orderBy: "{{ 'updatedAt ASC' if not next_page_token.next_page_token }}" + updatedAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + updatedAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updatedAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/campaigns" + email_clicks: + type: DeclarativeStream + name: email_clicks + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: emailClick/version/4/do/query + http_method: GET + request_parameters: + format: json + output: bulk + sort_by: id + sort_order: ascending + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - result + - emailClick + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: request_parameter + field_name: id_greater_than + page_size_option: + type: RequestOption + field_name: limit + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + page_size: 200 + cursor_value: "{{ last_record.id }}" + stop_condition: >- + {{ not response.result.emailClick or + response.result.emailClick|length < 200 }} + incremental_sync: + type: DatetimeBasedCursor + cursor_field: created_at + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%d %H:%M:%S" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + start_time_option: + type: RequestOption + field_name: created_after + inject_into: request_parameter + end_time_option: + type: RequestOption + field_name: created_before + inject_into: request_parameter + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/email_clicks" + list_membership: + type: DeclarativeStream + name: list_membership + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/list-memberships + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,listId,prospectId,optedOut,createdAt,updatedAt,createdById,updatedById + deleted: "{{ 'all' if not next_page_token.next_page_token }}" + orderBy: "{{ 'updatedAt ASC' if not next_page_token.next_page_token }}" + updatedAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + updatedAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updatedAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/list_membership" + lists: + type: DeclarativeStream + name: lists + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/lists + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,name,title,description,isPublic,folderId,campaignId,isDeleted,isDynamic,createdAt,updatedAt,createdById,updatedById + deleted: "{{ 'all' if not next_page_token.next_page_token }}" + orderBy: "{{ 'updatedAt ASC' if not next_page_token.next_page_token }}" + updatedAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + updatedAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updatedAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/lists" + prospect_accounts: + type: DeclarativeStream + name: prospect_accounts + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/prospect-accounts + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,name,salesforceId,isDeleted,annualRevenue,billingAddressOne,billingAddressTwo,billingCity,billingCountry,billingState,billingZip,description,employees,fax,industry,number,ownership,phone,rating,shippingAddressOne,shippingAddressTwo,shippingCity,shippingCountry,shippingState,shippingZip,sic,site,tickerSymbol,type,website,createdAt,updatedAt,createdById,updatedById,assignedToId + deleted: "{{ 'all' if not next_page_token.next_page_token }}" + orderBy: "{{ 'id ASC' if not next_page_token.next_page_token }}" + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + partition_router: + type: ListPartitionRouter + values: + - "1" + cursor_field: >- + magic config: this is just a trick to make the next_page_token + object populate, which for some reason doesn't happen when only + Pagination is used (without Incremental or Param + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/prospect_accounts" + prospects: + type: DeclarativeStream + name: prospects + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/prospects + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,email,optedOut,isDeleted,isDoNotCall,isDoNotEmail,isEmailHardBounced,isReviewed,isStarred,doNotSell,prospectAccountId,campaignId,profileId,lifecycleStageId,userId,lastActivityAt,recentInteraction,firstName,lastName,salutation,jobTitle,emailBouncedAt,emailBouncedReason,source,sourceParameter,campaignParameter,mediumParameter,contentParameter,termParameter,firstActivityAt,firstAssignedAt,firstReferrerQuery,firstReferrerType,firstReferrerUrl,salesforceId,salesforceContactId,salesforceLeadId,salesforceAccountId,salesforceCampaignId,salesforceOwnerId,salesforceUrl,salesforceLastSync,assignedToId,convertedAt,convertedFromObjectName,convertedFromObjectType,grade,phone,fax,addressOne,addressTwo,city,state,zip,country,company,annualRevenue,website,industry,department,yearsInBusiness,employees,score,territory,comments,notes,createdAt,updatedAt,createdById,updatedById + deleted: "{{ 'all' if not next_page_token.next_page_token }}" + orderBy: "{{ 'updatedAt ASC' if not next_page_token.next_page_token }}" + updatedAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + updatedAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updatedAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/prospects" + users: + type: DeclarativeStream + name: users + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/users + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,email,username,isDeleted,firstName,jobTitle,role,roleName,salesforceId,tagReplacementLanguage,createdAt,updatedAt,createdById,updatedById + deleted: "{{ 'all' if not next_page_token.next_page_token }}" + orderBy: "{{ 'updatedAt ASC' if not next_page_token.next_page_token }}" + updatedAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + updatedAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updatedAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/users" + visitor_activities: + type: DeclarativeStream + name: visitor_activities + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/visitor-activities + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,visitId,prospectId,visitorId,emailId,type,typeName,details,campaignId,customRedirectId,emailTemplateId,fileId,formHandlerId,formId,landingPageId,listEmailId,multivariateTestVariationId,opportunityId,paidSearchAdId,siteSearchQueryId,visitorPageViewId,createdAt,updatedAt + orderBy: "{{ 'updatedAt ASC' if not next_page_token.next_page_token }}" + updatedAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + updatedAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updatedAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/visitor_activities" + visitors: + type: DeclarativeStream + name: visitors + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/visitors + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,prospectId,pageViewCount,isIdentified,doNotSell,hostname,ipAddress,campaignId,sourceParameter,campaignParameter,mediumParameter,contentParameter,termParameter,createdAt,updatedAt + deleted: "{{ 'all' if not next_page_token.next_page_token }}" + orderBy: "{{ 'updatedAt ASC' if not next_page_token.next_page_token }}" + updatedAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + updatedAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updatedAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/visitors" + visits: + type: DeclarativeStream + name: visits + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/visits + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,visitorId,prospectId,visitorPageViewCount,firstVisitorPageViewAt,lastVisitorPageViewAt,durationInSeconds,sourceParameter,campaignParameter,mediumParameter,contentParameter,termParameter,createdAt,updatedAt + orderBy: "{{ 'updatedAt ASC' if not next_page_token.next_page_token }}" + updatedAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + updatedAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updatedAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/visits" + folders: + type: DeclarativeStream + name: folders + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/folders + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,name,parentFolderId,path,usePermissions,createdAt,updatedAt,createdById,updatedById + orderBy: "{{ 'id ASC' if not next_page_token.next_page_token }}" + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + partition_router: + type: ListPartitionRouter + values: + - "1" + cursor_field: >- + magic config: this is just a trick to make the next_page_token + object populate, which for some reason doesn't happen when only + Pagination is used (without Incremental or Param + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/folders" + custom_redirects: + type: DeclarativeStream + name: custom_redirects + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/custom-redirects + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,name,url,destinationUrl,vanityUrl,campaignId,salesforceId,isDeleted,folderId,trackerDomainId,trackerDomain.domain,vanityUrlPath,trackedUrl,bitlyIsPersonalized,bitlyShortUrl,gaSource,gaMedium,gaTerm,gaContent,gaCampaign,createdAt,updatedAt,createdById,updatedById + deleted: "{{ 'all' if not next_page_token.next_page_token }}" + orderBy: "{{ 'id ASC' if not next_page_token.next_page_token }}" + updatedAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + updatedAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/custom_redirects" + emails: + type: DeclarativeStream + name: emails + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/emails + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,name,type,sentAt,subject,clientType,isOperational,campaignId,prospectId,folderId,listId,listEmailId,emailTemplateId,trackerDomainId,senderOptions.type,senderOptions.address,senderOptions.name,senderOptions.userId,senderOptions.prospectCustomFieldId,senderOptions.accountCustomFieldId,replyToOptions.type,replyToOptions.address,replyToOptions.userId,replyToOptions.prospectCustomFieldId,replyToOptions.accountCustomFieldId,createdById + orderBy: "{{ 'sentAt ASC' if not next_page_token.next_page_token }}" + sentAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + sentAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: sentAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: P1D + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/emails" + engagement_studio_programs: + type: DeclarativeStream + name: engagement_studio_programs + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/engagement-studio-programs + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,status,isDeleted,salesforceId,description,businessHours,prospectsMultipleEntry,schedule,scheduleCreatedById,recipientListIds,suppressionListIds,createdAt,updatedAt,createdById,updatedById + deleted: "{{ 'all' if not next_page_token.next_page_token }}" + orderBy: "{{ 'updatedAt ASC' if not next_page_token.next_page_token }}" + updatedAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + updatedAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updatedAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/engagement_studio_programs" + files: + type: DeclarativeStream + name: files + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/files + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,name,folderId,campaignId,salesforceId,trackerDomainId,vanityUrl,vanityUrlPath,url,size,isTracked,bitlyIsPersonalized,bitlyShortUrl,createdAt,updatedAt,createdById,updatedById + orderBy: "{{ 'id ASC' if not next_page_token.next_page_token }}" + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/files" + folder_contents: + type: DeclarativeStream + name: folder_contents + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/folder-contents + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,folderId,folderRef,objectType,objectId,objectName,objectRef,createdAt,updatedAt,createdById,updatedById + orderBy: "{{ 'updatedAt ASC' if not next_page_token.next_page_token }}" + updatedAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + updatedAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updatedAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/folder_contents" + forms: + type: DeclarativeStream + name: forms + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/forms + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,name,campaignId,layoutTemplateId,folderId,trackerDomainId,salesforceId,isDeleted,isUseRedirectLocation,isAlwaysDisplay,isCookieless,isCaptchaEnabled,showNotProspect,embedCode,submitButtonText,beforeFormContent,afterFormContent,thankYouContent,thankYouCode,redirectLocation,fontSize,fontFamily,fontColor,labelAlignment,radioAlignment,checkboxAlignment,requiredCharacter,createdById,updatedById,createdAt,updatedAt + deleted: "{{ 'all' if not next_page_token.next_page_token }}" + orderBy: "{{ 'id ASC' if not next_page_token.next_page_token }}" + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/forms" + form_fields: + type: DeclarativeStream + name: form_fields + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/form-fields + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,formId,prospectApiFieldId,type,dataFormat,sortOrder,hasDependents,hasProgressives,hasValues,label,errorMessage,cssClasses,isRequired,isAlwaysDisplay,isMaintainInitialValue,isDoNotPrefill,createdById,updatedById,createdAt,updatedAt + orderBy: "{{ 'updatedAt ASC' if not next_page_token.next_page_token }}" + updatedAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + updatedAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updatedAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/form_fields" + form_handlers: + type: DeclarativeStream + name: form_handlers + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/form-handlers + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,name,folderId,campaignId,trackerDomainId,isDataForwarded,successLocation,errorLocation,isAlwaysEmail,isCookieless,salesforceId,embedCode,createdAt,createdById,isDeleted,updatedById + deleted: "{{ 'all' if not next_page_token.next_page_token }}" + orderBy: "{{ 'id ASC' if not next_page_token.next_page_token }}" + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/form_handlers" + form_handler_fields: + type: DeclarativeStream + name: form_handler_fields + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/form-handler-fields + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,name,formHandlerId,dataFormat,prospectApiFieldId,isMaintainInitialValue,errorMessage,isRequired,createdAt,createdById + orderBy: "{{ 'id ASC' if not next_page_token.next_page_token }}" + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/form_handler_fields" + landing_pages: + type: DeclarativeStream + name: landing_pages + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/landing-pages + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,name,campaignId,salesforceId,isDeleted,layoutType,layoutTableBorder,isUseRedirectLocation,bitlyIsPersonalized,bitlyShortUrl,url,vanityUrl,folderId,formId,layoutTemplateId,title,description,isDoNotIndex,vanityUrlPath,redirectLocation,trackerDomainId,archiveDate,createdAt,updatedAt,createdById,updatedById + deleted: "{{ 'all' if not next_page_token.next_page_token }}" + orderBy: "{{ 'updatedAt ASC' if not next_page_token.next_page_token }}" + updatedAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + updatedAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updatedAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/landing_pages" + layout_templates: + type: DeclarativeStream + name: layout_templates + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/layout-templates + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,name,layoutContent,siteSearchContent,formContent,folderId,isDeleted,isIncludeDefaultCss,createdAt,updatedAt,createdById,updatedById + deleted: "{{ 'all' if not next_page_token.next_page_token }}" + orderBy: "{{ 'id ASC' if not next_page_token.next_page_token }}" + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/layout_templates" + lifecycle_stages: + type: DeclarativeStream + name: lifecycle_stages + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/lifecycle-stages + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: id,name,isDeleted,isLocked,position,matchType,createdAt,updatedAt + deleted: "{{ 'all' if not next_page_token.next_page_token }}" + orderBy: "{{ 'updatedAt ASC' if not next_page_token.next_page_token }}" + updatedAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + updatedAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updatedAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/lifecycle_stages" + lifecycle_histories: + type: DeclarativeStream + name: lifecycle_histories + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/lifecycle-histories + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: id,prospectId,previousStageId,nextStageId,secondsElapsed,createdAt + orderBy: "{{ 'createdAt ASC' if not next_page_token.next_page_token }}" + createdAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + createdAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: createdAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/lifecycle_histories" + list_emails: + type: DeclarativeStream + name: list_emails + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/list-emails + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,name,subject,isPaused,isSent,isDeleted,isOperational,sentAt,campaignId,clientType,senderOptions.type,senderOptions.address,senderOptions.name,senderOptions.userId,senderOptions.prospectCustomFieldId,senderOptions.accountCustomFieldId,replyToOptions.type,replyToOptions.address,replyToOptions.userId,replyToOptions.prospectCustomFieldId,replyToOptions.accountCustomFieldId,emailTemplateId,trackerDomainId,folderId,createdAt,updatedAt,createdById,updatedById + deleted: "{{ 'all' if not next_page_token.next_page_token }}" + orderBy: "{{ 'updatedAt ASC' if not next_page_token.next_page_token }}" + updatedAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + updatedAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updatedAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/list_emails" + opportunities: + type: DeclarativeStream + name: opportunities + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/opportunities + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,closedAt,name,type,stage,status,probability,value,campaignId,salesforceId,createdAt,updatedAt,createdById,updatedById + orderBy: "{{ 'updatedAt ASC' if not next_page_token.next_page_token }}" + updatedAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + updatedAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updatedAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/opportunities" + tags: + type: DeclarativeStream + name: tags + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/tags + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: id,name,objectCount,createdById,updatedById,createdAt,updatedAt + orderBy: "{{ 'updatedAt ASC' if not next_page_token.next_page_token }}" + updatedAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + updatedAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updatedAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/tags" + tracker_domains: + type: DeclarativeStream + name: tracker_domains + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/tracker-domains + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,domain,isPrimary,isDeleted,defaultCampaignId,httpsStatus,sslStatus,sslStatusDetails,sslRequestedById,validationStatus,validatedAt,vanityUrlStatus,trackingCode,createdAt,updatedAt,createdById,updatedById + deleted: "{{ 'all' if not next_page_token.next_page_token }}" + orderBy: "{{ 'id ASC' if not next_page_token.next_page_token }}" + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/tracker_domains" + visitor_page_views: + type: DeclarativeStream + name: visitor_page_views + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/visitor-page-views + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,url,title,visitorId,campaignId,visitId,durationInSeconds,salesforceId,createdAt + orderBy: "{{ 'createdAt ASC' if not next_page_token.next_page_token }}" + createdAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + createdAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: createdAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/visitor_page_views" + account: + type: DeclarativeStream + name: account + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/account + http_method: GET + request_parameters: + fields: >- + id,company,level,website,pluginCampaignId,addressOne,addressTwo,city,state,zip,territory,country,phone,fax,adminId,maximumDailyApiCalls,apiCallsUsed,createdAt,updatedAt,createdById,updatedById + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/account" + custom_fields: + type: DeclarativeStream + name: custom_fields + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/custom-fields + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,name,fieldId,type,salesforceId,isRequired,isRecordMultipleResponses,isUseValues,isAnalyticsSynced,createdAt,updatedAt,createdById,updatedById + orderBy: "{{ 'updatedAt ASC' if not next_page_token.next_page_token }}" + updatedAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + updatedAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updatedAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/custom_fields" + dynamic_contents: + type: DeclarativeStream + name: dynamic_contents + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/dynamic-contents + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: >- + id,name,basedOnProspectApiFieldId,embedCode,embedUrl,basedOn,tagReplacementLanguage,folderId,trackerDomainId,createdAt,updatedAt,isDeleted,createdById,updatedById + deleted: "{{ 'all' if not next_page_token.next_page_token }}" + orderBy: "{{ 'updatedAt ASC' if not next_page_token.next_page_token }}" + updatedAtAfterOrEqualTo: >- + {{ stream_interval.start_time if stream_interval.start_time and + not next_page_token.next_page_token }} + updatedAtBeforeOrEqualTo: >- + {{ stream_interval.end_time if stream_interval.end_time and not + next_page_token.next_page_token }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + incremental_sync: + type: DatetimeBasedCursor + cursor_field: updatedAt + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S%z" + - "%Y-%m-%dT%H:%M:%SZ" + - "%Y-%m-%d %H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%S-12:00" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + end_datetime: + type: MinMaxDatetime + datetime: "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: "{{ config.split_up_interval }}" + cursor_granularity: PT1S + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/dynamic_contents" + dynamic_content_variations: + type: DeclarativeStream + name: dynamic_content_variations + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: v5/objects/dynamic-content-variations + http_method: GET + request_parameters: + limit: "{{ config.page_size if not next_page_token.next_page_token }}" + fields: id,dynamicContentId,comparison,operator,value1,value2,content + orderBy: "{{ 'id ASC' if not next_page_token.next_page_token }}" + dynamicContentId: >- + {{ stream_partition.parent_id.dynamic_content_id if + stream_interval.start_time and not next_page_token.next_page_token + }} + request_headers: + Pardot-Business-Unit-Id: "{{ config.pardot_business_unit_id }}" + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - values + schema_normalization: Default + paginator: + type: DefaultPaginator + page_token_option: + type: RequestPath + pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get(\"nextPageUrl\", {}) }}" + stop_condition: "{{ not response.get(\"nextPageUrl\", {}) }}" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + parent_key: id + partition_field: dynamic_content_id + stream: + $ref: "#/definitions/streams/dynamic_contents" + incremental_dependency: true + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/dynamic_content_variations" + base_requester: + type: HttpRequester + url_base: https://pi.pardot.com/api/ + authenticator: + type: SessionTokenAuthenticator + login_requester: + type: HttpRequester + url_base: >- + https://{{ 'test' if config['is_sandbox'] else + 'login'}}.salesforce.com/services/oauth2 + path: token + authenticator: + type: NoAuth + http_method: POST + request_parameters: {} + request_headers: {} + request_body_data: + client_id: "'{{ config.client_id }}'" + grant_type: refresh_token + client_secret: "'{{ config.client_secret }}'" + refresh_token: "'{{ config.refresh_token }}'" + session_token_path: + - access_token + expiration_duration: PT24H + request_authentication: + type: Bearer + +streams: + - $ref: "#/definitions/streams/campaigns" + - $ref: "#/definitions/streams/email_clicks" + - $ref: "#/definitions/streams/list_membership" + - $ref: "#/definitions/streams/lists" + - $ref: "#/definitions/streams/prospect_accounts" + - $ref: "#/definitions/streams/prospects" + - $ref: "#/definitions/streams/users" + - $ref: "#/definitions/streams/visitor_activities" + - $ref: "#/definitions/streams/visitors" + - $ref: "#/definitions/streams/visits" + - $ref: "#/definitions/streams/folders" + - $ref: "#/definitions/streams/custom_redirects" + - $ref: "#/definitions/streams/emails" + - $ref: "#/definitions/streams/engagement_studio_programs" + - $ref: "#/definitions/streams/files" + - $ref: "#/definitions/streams/folder_contents" + - $ref: "#/definitions/streams/forms" + - $ref: "#/definitions/streams/form_fields" + - $ref: "#/definitions/streams/form_handlers" + - $ref: "#/definitions/streams/form_handler_fields" + - $ref: "#/definitions/streams/landing_pages" + - $ref: "#/definitions/streams/layout_templates" + - $ref: "#/definitions/streams/lifecycle_stages" + - $ref: "#/definitions/streams/lifecycle_histories" + - $ref: "#/definitions/streams/list_emails" + - $ref: "#/definitions/streams/opportunities" + - $ref: "#/definitions/streams/tags" + - $ref: "#/definitions/streams/tracker_domains" + - $ref: "#/definitions/streams/visitor_page_views" + - $ref: "#/definitions/streams/account" + - $ref: "#/definitions/streams/custom_fields" + - $ref: "#/definitions/streams/dynamic_contents" + - $ref: "#/definitions/streams/dynamic_content_variations" + +spec: + type: Spec + connection_specification: + type: object + $schema: http://json-schema.org/draft-07/schema# + required: + - pardot_business_unit_id + - client_id + - client_secret + - refresh_token + properties: + pardot_business_unit_id: + type: string + description: >- + Pardot Business ID, can be found at Setup > Pardot > Pardot Account + Setup + order: 0 + title: Pardot Business Unit ID + client_id: + type: string + description: The Consumer Key that can be found when viewing your app in Salesforce + order: 1 + title: Client ID + airbyte_secret: true + client_secret: + type: string + description: >- + The Consumer Secret that can be found when viewing your app in + Salesforce + order: 2 + title: Client Secret + airbyte_secret: true + refresh_token: + type: string + description: >- + Salesforce Refresh Token used for Airbyte to access your Salesforce + account. If you don't know what this is, follow this guide + to retrieve it. + order: 3 + title: Refresh Token + airbyte_secret: true + start_date: + type: string + description: >- + UTC date and time in the format 2000-01-01T00:00:00Z. Any data before + this date will not be replicated. Defaults to the year Pardot was + released. + order: 4 + title: Start Date + format: date-time + default: "2007-01-01T00:00:00Z" + pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$ + examples: + - "2021-07-25T00:00:00Z" + page_size: + type: string + description: The maximum number of records to return per request + order: 5 + title: Page Size Limit + default: "1000" + is_sandbox: + type: boolean + description: >- + Whether or not the the app is in a Salesforce sandbox. If you do not + know what this, assume it is false. + order: 6 + title: Is Sandbox App? + default: false + split_up_interval: + type: string + description: >- + If you're hitting the max pagination limit of the v5 API (100K), try + choosing a more granular value (e.g. P7D). Similarly for small + accounts unlikely to hit this limit, a less granular value (e.g. P1Y) + may speed up the sync. + enum: + - P1Y + - P6M + - P3M + - P1M + - P14D + - P7D + - P3D + - P1D + order: 7 + title: Default Split Up Interval + default: P3M + additionalProperties: true + +metadata: + autoImportSchema: + campaigns: false + email_clicks: false + list_membership: false + lists: false + prospect_accounts: false + prospects: false + users: false + visitor_activities: false + visitors: false + visits: false + folders: false + custom_redirects: false + emails: false + engagement_studio_programs: false + files: false + folder_contents: false + forms: false + form_fields: false + form_handlers: false + form_handler_fields: false + landing_pages: false + layout_templates: false + lifecycle_stages: false + lifecycle_histories: false + list_emails: false + opportunities: false + tags: false + tracker_domains: false + visitor_page_views: false + account: false + custom_fields: false + dynamic_contents: false + dynamic_content_variations: false + testedStreams: + campaigns: + hasRecords: true + streamHash: fc1d627201b08eec5c250804ed7dcd7f09f9675c + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + email_clicks: + hasRecords: true + streamHash: 411f4328e02a6d5ce0a3ec3a9a3cb54804e374e0 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + list_membership: + hasRecords: true + streamHash: e3d9c46036a8c5281ad1cec8c9a1b1246ae6df97 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + lists: + hasRecords: true + streamHash: d6ee1f6521a0fc63d809c213ea53613d50d4c178 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + prospect_accounts: + hasRecords: true + streamHash: 37a2ac73f3e6564eba2aad749645c81ee4b7e8ba + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + prospects: + hasRecords: true + streamHash: eaf5ca07271f45b4128dcb4e6da41df2dd853f5d + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + users: + hasRecords: true + streamHash: bfaff23353daea243780a306ede24a6abc60ded3 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + visitor_activities: + hasRecords: true + streamHash: 8544f58fbc35ed2e59f62caf60e6fdf7224d7cf8 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + visitors: + hasRecords: true + streamHash: 2fdf03a97b7ae2186d2a32a3dcae26abd2b6d06c + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + visits: + hasRecords: true + streamHash: 788619013dc2ed5e75320de887b02d3f0ffc0495 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + folders: + hasRecords: true + streamHash: 952ec71d864df0763936fcf9a48ca5ee2a3f9037 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + custom_redirects: + hasRecords: false + streamHash: 73ca21e4c8acb6ca7a4efad4517b31af4dfcb6f6 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + emails: + hasRecords: true + streamHash: 25e6dc61f25fb81825b764af684379d04e748854 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + engagement_studio_programs: + hasRecords: true + streamHash: 2108ab2546a6df889a2420e2d8cb3aac4b790b70 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + files: + hasRecords: false + streamHash: 9dd470ef4814f6fab7a1acf61671ef171b839da2 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + folder_contents: + hasRecords: true + streamHash: 80dc85cda30756bb57fb750432ad8e580d32a245 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + forms: + hasRecords: true + streamHash: a8143b403bf769d4d24dc771da7888a45cc8cf0c + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + form_fields: + hasRecords: true + streamHash: 6f148b11889ab260145dd5e865a142a85ee35d9f + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + form_handlers: + hasRecords: true + streamHash: 8621f40695e6f14df6b5ac1c5c90f6334fc5866a + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + form_handler_fields: + hasRecords: true + streamHash: 1827a46a501b04b0c21e6411d464c77a08dfb848 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + landing_pages: + hasRecords: true + streamHash: fc06118614b8de9f2c2c53e6c5bbe88745e28c55 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + layout_templates: + hasRecords: true + streamHash: 9088e72d125495ef1d72d881a8c6b72e4950e06d + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + lifecycle_stages: + hasRecords: true + streamHash: c0eab225bad7533145cfeac38e57bb70745f9a79 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + lifecycle_histories: + hasRecords: true + streamHash: 21a5786703917ee27352156ea80fa93cce846954 + hasResponse: true + primaryKeysAreUnique: true + primaryKeysArePresent: true + responsesAreSuccessful: true + list_emails: + streamHash: 1823761c89debf594562433f63c8661b3be55524 + hasResponse: true + responsesAreSuccessful: true + hasRecords: true + primaryKeysArePresent: true + primaryKeysAreUnique: true + opportunities: + streamHash: 0def746ae5fd7dde2e85858a1be8b72501dca4d5 + hasResponse: true + responsesAreSuccessful: true + hasRecords: true + primaryKeysArePresent: true + primaryKeysAreUnique: true + tags: + streamHash: 4ce0030af2ff46782dfdb5a1bea97e8b92e2a493 + hasResponse: true + responsesAreSuccessful: true + hasRecords: true + primaryKeysArePresent: true + primaryKeysAreUnique: true + tracker_domains: + streamHash: e441705cc25b37d24b9163afac9e22a734f2185f + hasResponse: true + responsesAreSuccessful: true + hasRecords: true + primaryKeysArePresent: true + primaryKeysAreUnique: true + visitor_page_views: + streamHash: 561cc8d52b41daa6f4697d2e2a3efdee3cc3a14d + hasResponse: true + responsesAreSuccessful: true + hasRecords: true + primaryKeysArePresent: true + primaryKeysAreUnique: true + account: + streamHash: 0893578c116978dad6459ef40fe2a52ea26a68e9 + hasResponse: true + responsesAreSuccessful: false + hasRecords: false + primaryKeysArePresent: true + primaryKeysAreUnique: true + custom_fields: + streamHash: 020b57ff80114642d70f122a63b22f05d63d8386 + hasResponse: true + responsesAreSuccessful: false + hasRecords: false + primaryKeysArePresent: true + primaryKeysAreUnique: true + dynamic_contents: + streamHash: 795500f19349e89d320fb4814aedf19f5bff9dff + hasResponse: true + responsesAreSuccessful: true + hasRecords: false + primaryKeysArePresent: true + primaryKeysAreUnique: true + dynamic_content_variations: + streamHash: null + assist: {} + +schemas: + campaigns: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + cost: + type: + - "null" + - integer + id: + type: + - integer + name: + type: + - "null" + - string + email_clicks: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + created_at: + type: + - "null" + - string + format: date-time + drip_program_action_id: + type: + - "null" + - integer + email_template_id: + type: + - "null" + - integer + id: + type: + - integer + list_email_id: + type: + - "null" + - integer + prospect_id: + type: + - "null" + - integer + tracker_redirect_id: + type: + - "null" + - integer + url: + type: + - "null" + - string + list_membership: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + createdAt: + type: + - "null" + - string + format: date-time + createdById: + type: + - "null" + - integer + id: + type: + - integer + listId: + type: + - integer + optedOut: + type: + - "null" + - boolean + prospectId: + type: + - integer + updatedAt: + type: + - "null" + - string + format: date-time + updatedById: + type: + - "null" + - integer + required: + - id + - updatedAt + lists: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + description: + type: + - "null" + - string + createdAt: + type: + - "null" + - string + format: date-time + createdById: + type: + - "null" + - integer + folderId: + type: + - "null" + - integer + id: + type: + - integer + isDeleted: + type: + - "null" + - boolean + isDynamic: + type: + - "null" + - boolean + isPublic: + type: + - "null" + - boolean + name: + type: + - "null" + - string + title: + type: + - "null" + - string + updatedAt: + type: + - "null" + - string + format: date-time + updatedById: + type: + - "null" + - integer + prospect_accounts: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + type: + type: + - "null" + - string + description: + type: + - "null" + - string + annualRevenue: + type: + - "null" + - string + assignedToId: + type: + - "null" + - integer + billingAddressOne: + type: + - "null" + - string + billingAddressTwo: + type: + - "null" + - string + billingCity: + type: + - "null" + - string + billingCountry: + type: + - "null" + - string + billingState: + type: + - "null" + - string + billingZip: + type: + - "null" + - string + createdAt: + type: + - "null" + - string + format: date-time + createdById: + type: + - "null" + - integer + employees: + type: + - "null" + - string + fax: + type: + - "null" + - string + id: + type: + - integer + industry: + type: + - "null" + - string + isDeleted: + type: + - "null" + - boolean + name: + type: + - "null" + - string + number: + type: + - "null" + - string + ownership: + type: + - "null" + - string + phone: + type: + - "null" + - string + rating: + type: + - "null" + - string + salesforceId: + type: + - "null" + - string + shippingAddressOne: + type: + - "null" + - string + shippingAddressTwo: + type: + - "null" + - string + shippingCity: + type: + - "null" + - string + shippingCountry: + type: + - "null" + - string + shippingState: + type: + - "null" + - string + shippingZip: + type: + - "null" + - string + sic: + type: + - "null" + - string + site: + type: + - "null" + - string + tickerSymbol: + type: + - "null" + - string + updatedAt: + type: + - "null" + - string + format: date-time + updatedById: + type: + - "null" + - integer + website: + type: + - "null" + - string + prospects: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + addressOne: + type: + - "null" + - string + addressTwo: + type: + - "null" + - string + annualRevenue: + type: + - "null" + - string + assignedToId: + type: + - "null" + - string + campaignId: + type: + - "null" + - integer + campaignParameter: + type: + - "null" + - string + city: + type: + - "null" + - string + comments: + type: + - "null" + - string + company: + type: + - "null" + - string + contentParameter: + type: + - "null" + - string + convertedAt: + type: + - "null" + - string + format: date-time + convertedFromObjectName: + type: + - "null" + - string + convertedFromObjectType: + type: + - "null" + - string + country: + type: + - "null" + - string + createdAt: + type: + - "null" + - string + format: date-time + createdById: + type: + - "null" + - integer + department: + type: + - "null" + - string + doNotSell: + type: + - "null" + - boolean + email: + type: + - "null" + - string + emailBouncedAt: + type: + - "null" + - string + format: date-time + emailBouncedReason: + type: + - "null" + - string + employees: + type: + - "null" + - string + fax: + type: + - "null" + - string + firstActivityAt: + type: + - "null" + - string + format: date-time + firstAssignedAt: + type: + - "null" + - string + format: date-time + firstName: + type: + - "null" + - string + firstReferrerQuery: + type: + - "null" + - string + firstReferrerType: + type: + - "null" + - string + firstReferrerUrl: + type: + - "null" + - string + grade: + type: + - "null" + - string + id: + type: + - integer + industry: + type: + - "null" + - string + isDeleted: + type: + - "null" + - boolean + isDoNotCall: + type: + - "null" + - boolean + isDoNotEmail: + type: + - "null" + - boolean + isEmailHardBounced: + type: + - "null" + - boolean + isReviewed: + type: + - "null" + - boolean + isStarred: + type: + - "null" + - boolean + jobTitle: + type: + - "null" + - string + lastActivityAt: + type: + - "null" + - string + format: date-time + lastName: + type: + - "null" + - string + lifecycleStageId: + type: + - "null" + - integer + mediumParameter: + type: + - "null" + - string + notes: + type: + - "null" + - string + optedOut: + type: + - "null" + - boolean + phone: + type: + - "null" + - string + profileId: + type: + - "null" + - integer + prospectAccountId: + type: + - "null" + - integer + recentInteraction: + type: + - "null" + - string + salesforceAccountId: + type: + - "null" + - string + salesforceCampaignId: + type: + - "null" + - string + salesforceContactId: + type: + - "null" + - string + salesforceId: + type: + - "null" + - string + salesforceLastSync: + type: + - "null" + - string + format: date-time + salesforceLeadId: + type: + - "null" + - string + salesforceOwnerId: + type: + - "null" + - string + salesforceUrl: + type: + - "null" + - string + salutation: + type: + - "null" + - string + score: + type: + - "null" + - string + source: + type: + - "null" + - string + sourceParameter: + type: + - "null" + - string + state: + type: + - "null" + - string + termParameter: + type: + - "null" + - string + territory: + type: + - "null" + - string + updatedAt: + type: + - "null" + - string + format: date-time + updatedById: + type: + - "null" + - integer + userId: + type: + - "null" + - integer + website: + type: + - "null" + - string + yearsInBusiness: + type: + - "null" + - string + zip: + type: + - "null" + - string + users: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + createdAt: + type: + - "null" + - string + format: date-time + createdById: + type: + - "null" + - integer + email: + type: + - "null" + - string + firstName: + type: + - "null" + - string + id: + type: + - integer + isDeleted: + type: + - "null" + - boolean + jobTitle: + type: + - "null" + - string + role: + type: + - "null" + - string + roleName: + type: + - "null" + - string + salesforceId: + type: + - "null" + - string + tagReplacementLanguage: + type: + - "null" + - string + updatedAt: + type: + - "null" + - string + format: date-time + updatedById: + type: + - "null" + - integer + username: + type: + - "null" + - string + visitor_activities: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + type: + type: + - "null" + - integer + campaignId: + type: + - "null" + - integer + createdAt: + type: + - "null" + - string + format: date-time + customRedirectId: + type: + - "null" + - integer + details: + type: + - "null" + - string + emailId: + type: + - "null" + - integer + emailTemplateId: + type: + - "null" + - integer + fileId: + type: + - "null" + - integer + formHandlerId: + type: + - "null" + - integer + formId: + type: + - "null" + - integer + id: + type: + - integer + landingPageId: + type: + - "null" + - integer + listEmailId: + type: + - "null" + - integer + multivariateTestVariationId: + type: + - "null" + - integer + opportunityId: + type: + - "null" + - integer + paidSearchAdId: + type: + - "null" + - integer + prospectId: + type: + - "null" + - integer + siteSearchQueryId: + type: + - "null" + - integer + typeName: + type: + - "null" + - string + updatedAt: + type: + - "null" + - string + format: date-time + visitId: + type: + - "null" + - integer + visitorId: + type: + - "null" + - integer + visitorPageViewId: + type: + - "null" + - integer + visitors: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + campaignId: + type: + - "null" + - integer + campaignParameter: + type: + - "null" + - string + contentParameter: + type: + - "null" + - string + createdAt: + type: + - "null" + - string + format: date-time + doNotSell: + type: + - "null" + - boolean + hostname: + type: + - "null" + - string + id: + type: + - integer + ipAddress: + type: + - "null" + - string + isIdentified: + type: + - "null" + - boolean + mediumParameter: + type: + - "null" + - string + pageViewCount: + type: + - "null" + - integer + prospectId: + type: + - "null" + - integer + sourceParameter: + type: + - "null" + - string + termParameter: + type: + - "null" + - string + updatedAt: + type: + - "null" + - string + format: date-time + visits: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + campaignParameter: + type: + - "null" + - string + contentParameter: + type: + - "null" + - string + createdAt: + type: + - "null" + - string + format: date-time + durationInSeconds: + type: + - "null" + - integer + firstVisitorPageViewAt: + type: + - "null" + - string + format: date-time + id: + type: + - integer + lastVisitorPageViewAt: + type: + - "null" + - string + format: date-time + mediumParameter: + type: + - "null" + - string + prospectId: + type: + - "null" + - integer + sourceParameter: + type: + - "null" + - string + termParameter: + type: + - "null" + - string + updatedAt: + type: + - "null" + - string + format: date-time + visitorId: + type: + - "null" + - integer + visitorPageViewCount: + type: + - "null" + - integer + folders: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + createdAt: + type: + - "null" + - string + createdById: + type: + - "null" + - integer + id: + type: integer + name: + type: + - "null" + - string + parentFolderId: + type: + - "null" + - integer + path: + type: + - "null" + - string + updatedAt: + type: + - "null" + - string + updatedById: + type: + - "null" + - integer + usePermissions: + type: + - "null" + - boolean + required: + - id + custom_redirects: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + bitlyIsPersonalized: + type: + - "null" + - boolean + bitlyShortUrl: + type: + - "null" + - string + campaignId: + type: + - "null" + - integer + createdAt: + type: + - "null" + - string + createdById: + type: + - "null" + - integer + destinationUrl: + type: + - "null" + - string + folderId: + type: + - "null" + - integer + gaCampaign: + type: + - "null" + - string + gaContent: + type: + - "null" + - string + gaMedium: + type: + - "null" + - string + gaSource: + type: + - "null" + - string + gaTerm: + type: + - "null" + - string + id: + type: integer + isDeleted: + type: + - "null" + - boolean + name: + type: + - "null" + - string + salesforceId: + type: + - "null" + - integer + trackedUrl: + type: + - "null" + - string + trackerDomain.domain: + type: + - "null" + - string + trackerDomainId: + type: + - "null" + - integer + updatedAt: + type: + - "null" + - string + updatedById: + type: + - "null" + - integer + url: + type: + - "null" + - string + vanityUrl: + type: + - "null" + - string + vanityUrlPath: + type: + - "null" + - string + required: + - id + emails: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + type: + type: + - string + - "null" + campaignId: + type: + - integer + - "null" + clientType: + type: + - string + - "null" + createdById: + type: + - integer + - "null" + emailTemplateId: + type: + - integer + - "null" + id: + type: integer + isOperational: + type: + - boolean + - "null" + listEmailId: + type: + - integer + - "null" + name: + type: + - string + - "null" + prospectId: + type: + - integer + - "null" + replyToOptions: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + type: + type: + - string + - "null" + accountCustomFieldId: + type: + - integer + - "null" + address: + type: + - string + - "null" + name: + type: + - string + - "null" + prospectCustomFieldId: + type: + - integer + - "null" + userId: + type: + - integer + - "null" + senderOptions: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + type: + type: + - string + - "null" + accountCustomFieldId: + type: + - integer + - "null" + address: + type: + - string + - "null" + name: + type: + - string + - "null" + prospectCustomFieldId: + type: + - integer + - "null" + userId: + type: + - integer + - "null" + sentAt: + type: + - "null" + - string + format: date-time + subject: + type: + - string + - "null" + required: + - id + - sentAt + engagement_studio_programs: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + description: + type: + - string + - "null" + businessHours: + type: + - object + - "null" + properties: + days: + type: + - array + - "null" + items: + type: + - string + - "null" + endTime: + type: + - string + - "null" + airbyte_type: time_without_timezone + format: time + startTime: + type: + - string + - "null" + airbyte_type: time_without_timezone + format: time + timezone: + type: + - string + - "null" + createdAt: + type: + - string + - "null" + format: date-time + createdById: + type: + - integer + - "null" + id: + type: integer + isDeleted: + type: + - boolean + - "null" + prospectsMultipleEntry: + type: + - object + - "null" + properties: + maximumEntries: + type: + - integer + - "null" + minimumDurationInDays: + type: + - integer + - "null" + recipientListIds: + type: + - array + - "null" + items: + type: + - integer + - "null" + salesforceId: + type: + - string + - "null" + schedule: + type: + - object + - "null" + properties: + createdAt: + type: + - string + - "null" + format: date-time + startOn: + type: + - string + - "null" + format: date-time + stopOn: + type: + - string + - "null" + format: date-time + scheduleCreatedById: + type: + - integer + - "null" + status: + type: + - string + - "null" + suppressionListIds: + type: + - array + - "null" + items: + type: + - integer + - "null" + updatedAt: + type: + - string + - "null" + format: date-time + updatedById: + type: + - integer + - "null" + required: + - id + - updatedAt + files: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + bitlyIsPersonalized: + type: + - boolean + - "null" + bitlyShortUrl: + type: + - string + - "null" + campaignId: + type: + - integer + - "null" + createdAt: + type: + - string + - "null" + format: date-time + createdById: + type: + - integer + - "null" + folderId: + type: + - integer + - "null" + id: + type: integer + isTracked: + type: + - boolean + - "null" + name: + type: + - string + - "null" + salesforceId: + type: + - string + - "null" + size: + type: + - integer + - "null" + trackerDomainId: + type: + - integer + - "null" + updatedAt: + type: + - string + - "null" + format: date-time + updatedById: + type: + - integer + - "null" + url: + type: + - string + - "null" + vanityUrl: + type: + - string + - "null" + vanityUrlPath: + type: + - string + - "null" + required: + - id + folder_contents: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + createdAt: + type: + - string + - "null" + format: date-time + createdById: + type: + - integer + - "null" + folderId: + type: + - integer + - "null" + folderRef: + type: + - string + - "null" + id: + type: integer + objectId: + type: + - integer + - "null" + objectName: + type: + - string + - "null" + objectRef: + type: + - string + - "null" + objectType: + type: + - string + - "null" + updatedAt: + type: + - string + - "null" + format: date-time + updatedById: + type: + - integer + - "null" + required: + - id + - updatedAt + forms: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + afterFormContent: + type: + - string + - "null" + beforeFormContent: + type: + - string + - "null" + campaignId: + type: + - integer + - "null" + checkboxAlignment: + type: + - string + - "null" + createdAt: + type: + - string + - "null" + format: date-time + createdById: + type: + - integer + - "null" + embedCode: + type: + - string + - "null" + folderId: + type: + - integer + - "null" + fontColor: + type: + - string + - "null" + fontFamily: + type: + - string + - "null" + fontSize: + type: + - string + - "null" + id: + type: integer + isAlwaysDisplay: + type: + - boolean + - "null" + isCaptchaEnabled: + type: + - boolean + - "null" + isCookieless: + type: + - boolean + - "null" + isDeleted: + type: + - boolean + - "null" + isUseRedirectLocation: + type: + - boolean + - "null" + labelAlignment: + type: + - string + - "null" + layoutTemplateId: + type: + - integer + - "null" + name: + type: + - string + - "null" + radioAlignment: + type: + - string + - "null" + redirectLocation: + type: + - string + - "null" + requiredCharacter: + type: + - string + - "null" + salesforceId: + type: + - string + - "null" + showNotProspect: + type: + - boolean + - "null" + submitButtonText: + type: + - string + - "null" + thankYouCode: + type: + - string + - "null" + thankYouContent: + type: + - string + - "null" + updatedAt: + type: + - string + - "null" + format: date-time + updatedById: + type: + - integer + - "null" + required: + - id + form_fields: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + type: + type: + - string + - "null" + createdAt: + type: + - string + - "null" + format: date-time + createdById: + type: + - integer + - "null" + cssClasses: + type: + - string + - "null" + dataFormat: + type: + - string + - "null" + errorMessage: + type: + - string + - "null" + formId: + type: + - integer + - "null" + hasDependents: + type: + - boolean + - "null" + hasProgressives: + type: + - boolean + - "null" + hasValues: + type: + - boolean + - "null" + id: + type: integer + isAlwaysDisplay: + type: + - boolean + - "null" + isDoNotPrefill: + type: + - boolean + - "null" + isMaintainInitialValue: + type: + - boolean + - "null" + isRequired: + type: + - boolean + - "null" + label: + type: + - string + - "null" + prospectApiFieldId: + type: + - string + - "null" + sortOrder: + type: + - integer + - "null" + updatedAt: + type: + - string + - "null" + format: date-time + updatedById: + type: + - integer + - "null" + required: + - id + - updatedAt + form_handlers: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + campaignId: + type: + - integer + - "null" + createdAt: + type: + - string + - "null" + format: date-time + createdById: + type: + - integer + - "null" + embedCode: + type: + - string + - "null" + errorLocation: + type: + - string + - "null" + folderId: + type: + - integer + - "null" + id: + type: integer + isAlwaysEmail: + type: + - boolean + - "null" + isCookieless: + type: + - boolean + - "null" + isDataForwarded: + type: + - boolean + - "null" + isDeleted: + type: + - boolean + - "null" + name: + type: + - string + - "null" + salesforceId: + type: + - string + - "null" + successLocation: + type: + - string + - "null" + trackerDomainId: + type: + - integer + - "null" + updatedById: + type: + - integer + - "null" + required: + - id + form_handler_fields: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + createdAt: + type: + - string + - "null" + format: date-time + createdById: + type: + - integer + - "null" + dataFormat: + type: + - string + - "null" + errorMessage: + type: + - string + - "null" + formHandlerId: + type: + - integer + - "null" + id: + type: integer + isMaintainInitialValue: + type: + - boolean + - "null" + isRequired: + type: + - boolean + - "null" + name: + type: + - string + - "null" + prospectApiFieldId: + type: + - string + - "null" + required: + - id + landing_pages: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + description: + type: + - string + - "null" + archiveDate: + type: + - string + - "null" + format: date + bitlyIsPersonalized: + type: + - boolean + - "null" + bitlyShortUrl: + type: + - string + - "null" + campaignId: + type: + - integer + - "null" + createdAt: + type: + - string + - "null" + format: date-time + createdById: + type: + - integer + - "null" + folderId: + type: + - integer + - "null" + formId: + type: + - integer + - "null" + id: + type: integer + isDeleted: + type: + - boolean + - "null" + isDoNotIndex: + type: + - boolean + - "null" + isUseRedirectLocation: + type: + - boolean + - "null" + layoutTableBorder: + type: + - integer + - "null" + layoutTemplateId: + type: + - integer + - "null" + layoutType: + type: + - string + - "null" + name: + type: + - string + - "null" + redirectLocation: + type: + - string + - "null" + salesforceId: + type: + - string + - "null" + title: + type: + - string + - "null" + trackerDomainId: + type: + - integer + - "null" + updatedAt: + type: + - string + - "null" + format: date-time + updatedById: + type: + - integer + - "null" + url: + type: + - string + - "null" + vanityUrl: + type: + - string + - "null" + vanityUrlPath: + type: + - string + - "null" + required: + - id + - updatedAt + layout_templates: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + createdAt: + type: + - string + - "null" + format: date-time + createdById: + type: + - integer + - "null" + folderId: + type: + - integer + - "null" + formContent: + type: + - string + - "null" + id: + type: integer + isDeleted: + type: + - boolean + - "null" + isIncludeDefaultCss: + type: + - boolean + - "null" + layoutContent: + type: + - string + - "null" + name: + type: + - string + - "null" + siteSearchContent: + type: + - string + - "null" + updatedAt: + type: + - string + - "null" + format: date-time + updatedById: + type: + - integer + - "null" + required: + - id + lifecycle_stages: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + createdAt: + type: + - string + - "null" + format: date-time + id: + type: integer + isDeleted: + type: + - boolean + - "null" + isLocked: + type: + - boolean + - "null" + matchType: + type: + - string + - "null" + name: + type: + - string + - "null" + position: + type: + - integer + - "null" + updatedAt: + type: + - string + - "null" + format: date-time + required: + - id + - updatedAt + lifecycle_histories: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + createdAt: + type: + - string + - "null" + format: date-time + id: + type: integer + nextStageId: + type: + - integer + - "null" + previousStageId: + type: + - integer + - "null" + prospectId: + type: + - integer + - "null" + secondsElapsed: + type: + - integer + - "null" + required: + - id + - createdAt + list_emails: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + campaignId: + type: + - integer + - "null" + clientType: + type: + - string + - "null" + createdAt: + type: + - string + - "null" + format: date-time + createdById: + type: + - integer + - "null" + emailTemplateId: + type: + - integer + - "null" + folderId: + type: + - integer + - "null" + id: + type: integer + isDeleted: + type: + - boolean + - "null" + isOperational: + type: + - boolean + - "null" + isPaused: + type: + - boolean + - "null" + isSent: + type: + - boolean + - "null" + name: + type: + - string + - "null" + replyToOptions: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + type: + type: + - string + - "null" + accountCustomFieldId: + type: + - integer + - "null" + address: + type: + - string + - "null" + prospectCustomFieldId: + type: + - integer + - "null" + userId: + type: + - integer + - "null" + senderOptions: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + type: + type: + - string + - "null" + accountCustomFieldId: + type: + - integer + - "null" + address: + type: + - string + - "null" + name: + type: + - string + - "null" + prospectCustomFieldId: + type: + - integer + - "null" + userId: + type: + - integer + - "null" + sentAt: + type: + - string + - "null" + subject: + type: + - string + - "null" + trackerDomainId: + type: + - integer + - "null" + updatedAt: + type: + - string + - "null" + format: date-time + updatedById: + type: + - integer + - "null" + required: + - id + - updatedAt + opportunities: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + type: + type: + - string + - "null" + campaignId: + type: + - integer + - "null" + closedAt: + type: + - string + - "null" + format: date-time + createdAt: + type: + - string + - "null" + format: date-time + createdById: + type: + - integer + - "null" + id: + type: integer + name: + type: + - string + - "null" + probability: + type: + - integer + - "null" + salesforceId: + type: + - string + - "null" + stage: + type: + - string + - "null" + status: + type: + - string + - "null" + updatedAt: + type: + - string + - "null" + format: date-time + updatedById: + type: + - integer + - "null" + value: + type: + - number + - "null" + required: + - id + - updatedAt + tags: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + createdAt: + type: + - string + - "null" + format: date-time + createdById: + type: + - integer + - "null" + id: + type: integer + name: + type: + - string + - "null" + objectCount: + type: + - integer + - "null" + updatedAt: + type: + - string + - "null" + format: date-time + updatedById: + type: + - integer + - "null" + required: + - id + - updatedAt + tracker_domains: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + createdAt: + type: + - string + - "null" + format: date-time + createdById: + type: + - integer + - "null" + defaultCampaignId: + type: + - integer + - "null" + domain: + type: + - string + - "null" + httpsStatus: + type: + - string + - "null" + id: + type: integer + isDeleted: + type: + - boolean + - "null" + isPrimary: + type: + - boolean + - "null" + sslRequestedById: + type: + - integer + - "null" + sslStatus: + type: + - string + - "null" + sslStatusDetails: + type: + - string + - "null" + trackingCode: + type: + - string + - "null" + updatedAt: + type: + - string + - "null" + format: date-time + updatedById: + type: + - integer + - "null" + validatedAt: + type: + - string + - "null" + format: date-time + validationStatus: + type: + - string + - "null" + vanityUrlStatus: + type: + - string + - "null" + required: + - id + visitor_page_views: + type: object + $schema: http://json-schema.org/schema# + additionalProperties: true + properties: + campaignId: + type: + - integer + - "null" + createdAt: + type: + - string + - "null" + format: date-time + durationInSeconds: + type: + - integer + - "null" + id: + type: integer + salesforceId: + type: + - string + - "null" + title: + type: + - string + - "null" + url: + type: + - string + - "null" + visitId: + type: + - integer + - "null" + visitorId: + type: + - integer + - "null" + required: + - id + - createdAt + account: + type: object + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + addressOne: + type: + - string + - "null" + addressTwo: + type: + - string + - "null" + adminId: + type: + - integer + - "null" + apiCallsUsed: + type: + - integer + - "null" + city: + type: + - string + - "null" + company: + type: + - string + - "null" + country: + type: + - string + - "null" + createdAt: + type: + - string + - "null" + format: date-time + createdById: + type: + - integer + - "null" + fax: + type: + - string + - "null" + id: + type: integer + level: + type: + - string + - "null" + maximumDailyApiCalls: + type: + - integer + - "null" + phone: + type: + - string + - "null" + pluginCampaignId: + type: + - integer + - "null" + state: + type: + - string + - "null" + territory: + type: + - string + - "null" + updatedAt: + type: + - string + - "null" + format: date-time + updatedById: + type: + - integer + - "null" + website: + type: + - string + - "null" + zip: + type: + - string + - "null" + required: + - id + - createdAt + - updatedAt + custom_fields: + type: object + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + type: + type: + - string + - "null" + createdAt: + type: + - string + - "null" + format: date-time + createdById: + type: + - integer + - "null" + fieldId: + type: + - integer + - "null" + id: + type: integer + isAnalyticsSynced: + type: + - boolean + - "null" + isRecordMultipleResponses: + type: + - boolean + - "null" + isRequired: + type: + - boolean + - "null" + isUseValues: + type: + - boolean + - "null" + name: + type: + - string + - "null" + salesforceId: + type: + - string + - "null" + updatedAt: + type: + - string + - "null" + format: date-time + updatedById: + type: + - integer + - "null" + required: + - id + - createdAt + - updatedAt + dynamic_contents: + type: object + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + basedOn: + type: + - string + - "null" + basedOnProspectApiFieldId: + type: + - integer + - "null" + createdAt: + type: + - string + - "null" + format: date-time + createdById: + type: + - integer + - "null" + embedCode: + type: + - string + - "null" + embedUrl: + type: + - string + - "null" + folderId: + type: + - integer + - "null" + id: + type: integer + isDeleted: + type: + - boolean + - "null" + name: + type: + - string + - "null" + tagReplacementLanguage: + type: + - string + - "null" + trackerDomainId: + type: + - integer + - "null" + updatedAt: + type: + - string + - "null" + format: date-time + updatedById: + type: + - integer + - "null" + required: + - id + - updatedAt + dynamic_content_variations: + type: object + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + id: + type: integer + dynamicContentId: + type: + - integer + - "null" + comparison: + type: + - string + - "null" + operator: + type: + - string + - "null" + value1: + type: + - string + - "null" + value2: + type: + - string + - "null" + content: + type: + - string + - "null" + required: + - id diff --git a/airbyte-integrations/connectors/source-pardot/metadata.yaml b/airbyte-integrations/connectors/source-pardot/metadata.yaml index 55e8e0bfbf5f..1596dd7938af 100644 --- a/airbyte-integrations/connectors/source-pardot/metadata.yaml +++ b/airbyte-integrations/connectors/source-pardot/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: api connectorType: source definitionId: ad15c7ba-72a7-440b-af15-b9a963dc1a8a - dockerImageTag: 0.2.0 + dockerImageTag: 1.0.0 dockerRepository: airbyte/source-pardot githubIssueLabel: source-pardot icon: salesforcepardot.svg @@ -18,6 +18,11 @@ data: oss: enabled: true releaseStage: alpha + releases: + breakingChanges: + 1.0.0: + message: Most streams have been migrated to use Pardot API V5 in this release. The authentication flow, which was previously broken, should now work for new connections using this version. + upgradeDeadline: "2024-12-26" documentationUrl: https://docs.airbyte.com/integrations/sources/pardot tags: - language:manifest-only @@ -29,5 +34,5 @@ data: connectorTestSuitesOptions: - suite: unitTests connectorBuildOptions: - baseImage: docker.io/airbyte/source-declarative-manifest:5.13.0@sha256:ffc5977f59e1f38bf3f5dd70b6fa0520c2450ebf85153c5a8df315b8c918d5c3 + baseImage: docker.io/airbyte/source-declarative-manifest:6.10.0@sha256:58722e84dbd06bb2af9250e37d24d1c448e247fc3a84d75ee4407d52771b6f03 metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-pennylane/manifest.yaml b/airbyte-integrations/connectors/source-pennylane/manifest.yaml index a35423e830d4..cddb5b96cbdf 100644 --- a/airbyte-integrations/connectors/source-pennylane/manifest.yaml +++ b/airbyte-integrations/connectors/source-pennylane/manifest.yaml @@ -497,10 +497,40 @@ streams: properties: amount: type: string + billing_subscription: + type: object + properties: + id: + type: integer + v2_id: + type: integer categories: type: array credit_notes: type: array + items: + type: object + properties: + amount: + type: string + currency: + type: string + currency_amount: + type: string + currency_price_before_tax: + type: string + currency_tax: + type: string + draft: + type: boolean + id: + type: string + invoice_number: + type: string + tax: + type: string + v2_id: + type: integer currency: type: string currency_amount: @@ -572,10 +602,19 @@ streams: type: string id: type: string + imputation_dates: + type: object + properties: + end_date: + type: string + start_date: + type: string invoice_number: type: string is_draft: type: boolean + is_estimate: + type: boolean label: type: string language: @@ -630,6 +669,17 @@ streams: type: array paid: type: boolean + payments: + type: array + items: + type: object + properties: + created_at: + type: string + currency_amount: + type: string + label: + type: string pdf_invoice_free_text: type: string pdf_invoice_subject: @@ -646,8 +696,19 @@ streams: type: string status: type: string + transactions_reference: + type: object + properties: + banking_provider: + type: string + provider_field_name: + type: string + provider_field_value: + type: string updated_at: type: string + v2_id: + type: integer retriever: type: SimpleRetriever requester: @@ -975,4 +1036,4 @@ metadata: customer_invoices: true products: true category_groups: true - categories: true + categories: true \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-pennylane/metadata.yaml b/airbyte-integrations/connectors/source-pennylane/metadata.yaml index fa8ef5183c79..226271c8c008 100644 --- a/airbyte-integrations/connectors/source-pennylane/metadata.yaml +++ b/airbyte-integrations/connectors/source-pennylane/metadata.yaml @@ -17,7 +17,7 @@ data: connectorSubtype: api connectorType: source definitionId: b9e4a306-4e3b-4387-a01d-c00d03d8c28c - dockerImageTag: 0.0.6 + dockerImageTag: 0.1.0 dockerRepository: airbyte/source-pennylane githubIssueLabel: source-pennylane icon: icon.svg diff --git a/airbyte-integrations/connectors/source-pipedrive/acceptance-test-config.yml b/airbyte-integrations/connectors/source-pipedrive/acceptance-test-config.yml index 303f046105c3..8f4edb5f0c74 100644 --- a/airbyte-integrations/connectors/source-pipedrive/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-pipedrive/acceptance-test-config.yml @@ -17,6 +17,9 @@ acceptance_tests: basic_read: tests: - config_path: "secrets/config.json" + expect_records: + path: "integration_tests/expected_records.jsonl" + exact_order: no configured_catalog_path: "integration_tests/configured_catalog.json" fail_on_extra_columns: false empty_streams: @@ -30,13 +33,12 @@ acceptance_tests: - name: stages - name: deal_products - name: mail + - name: deals + - name: users + - name: persons incremental: - tests: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - future_state: - future_state_path: "integration_tests/abnormal_state.json" + bypass_reason: "All incremental streams are empty in sandbox account." full_refresh: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-pipedrive/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-pipedrive/integration_tests/expected_records.jsonl new file mode 100644 index 000000000000..e97465c57468 --- /dev/null +++ b/airbyte-integrations/connectors/source-pipedrive/integration_tests/expected_records.jsonl @@ -0,0 +1,34 @@ +{"stream":"deal_fields","data":{"id":12488,"key":"eae9c2a5b618934581aebed0747cc33cd681379e","name":"Test field 3","order_nr":4,"field_type":"enum","json_column_flag":true,"add_time":"2023-02-22 11:04:13","update_time":"2023-02-22 11:04:13","last_updated_by_user_id":11884360,"edit_flag":true,"details_visible_flag":true,"add_visible_flag":true,"important_flag":true,"bulk_edit_allowed":true,"filtering_allowed":true,"sortable_flag":true,"mandatory_flag":false,"searchable_flag":true,"active_flag":true,"projects_detail_visible_flag":false,"show_in_pipelines":{"show_in_all":true,"pipeline_ids":[]},"options":[{"id":23,"label":"Test"}]},"emitted_at":1733742439563} +{"stream":"deal_fields","data":{"id":12489,"key":"bed1d9f4cfdaf761fab04b38df20144b0fd156d6","name":"Test field 4","order_nr":5,"field_type":"varchar","json_column_flag":true,"add_time":"2023-02-22 11:04:30","update_time":"2023-02-22 11:04:30","last_updated_by_user_id":11884360,"edit_flag":true,"details_visible_flag":true,"add_visible_flag":true,"important_flag":true,"bulk_edit_allowed":true,"filtering_allowed":true,"sortable_flag":true,"mandatory_flag":false,"searchable_flag":true,"active_flag":true,"projects_detail_visible_flag":false,"show_in_pipelines":{"show_in_all":true,"pipeline_ids":[]}},"emitted_at":1733742439563} +{"stream":"deal_fields","data":{"id":12490,"key":"41505adc22569bf93214dd7f7eaa10eaa387947d","name":"Test field 5","order_nr":6,"field_type":"varchar","json_column_flag":true,"add_time":"2023-02-22 11:04:43","update_time":"2023-02-22 11:04:43","last_updated_by_user_id":11884360,"edit_flag":true,"details_visible_flag":true,"add_visible_flag":true,"important_flag":true,"bulk_edit_allowed":true,"filtering_allowed":true,"sortable_flag":true,"mandatory_flag":false,"searchable_flag":true,"active_flag":true,"projects_detail_visible_flag":false,"show_in_pipelines":{"show_in_all":true,"pipeline_ids":[]}},"emitted_at":1733742439563} +{"stream":"goals","data":{"id":"404e9d01204e052dff21ec32121f0270","owner_id":11884360,"title":"Activities added Team Airbyte","type":{"name":"activities_added","params":{"pipeline_id":null,"activity_type_id":[1]}},"assignee":{"type":"person","id":11884360},"expected_outcome":{"tracking_metric":"quantity","target":100},"interval":"weekly","duration":{"start":"2023-10-15","end":"2023-11-03"},"report_ids":["389fb0576579ea5b28761aef85af7e8a"],"is_active":false},"emitted_at":1733742439864} +{"stream":"goals","data":{"id":"b1b124b30aea9704b28dde37a41b1cb2","owner_id":11884360,"title":"Activities completed Team Airbyte","type":{"name":"activities_completed","params":{"pipeline_id":null,"activity_type_id":[1]}},"assignee":{"type":"person","id":11884360},"expected_outcome":{"tracking_metric":"quantity","target":10},"interval":"weekly","duration":{"start":"2023-09-01","end":"2023-10-10"},"report_ids":["cf970db4608d730d53dc87875ebb2302"],"is_active":false},"emitted_at":1733742439890} +{"stream":"goals","data":{"id":"b1b124b30aea9704b28dde37a41b2bcb","owner_id":11884360,"title":"Deals won Team Airbyte","type":{"name":"deals_won","params":{"pipeline_id":[1]}},"assignee":{"type":"person","id":11884360},"expected_outcome":{"tracking_metric":"sum","currency_id":148,"target":100},"interval":"monthly","duration":{"start":"2023-10-01","end":null},"report_ids":["ddbcc8dc48e84aea5e28a2db3488e89d"],"is_active":true},"emitted_at":1733742439890} +{"stream":"lead_labels","data":{"id":"aecece60-c069-11eb-93bf-b59c4f1731e6","name":"Hot","color":"red","add_time":"2021-05-29T10:36:10.182Z","update_time":"2021-05-29T10:36:10.182Z"},"emitted_at":1733742439909} +{"stream":"lead_labels","data":{"id":"aecece61-c069-11eb-93bf-b59c4f1731e6","name":"Warm","color":"yellow","add_time":"2021-05-29T10:36:10.182Z","update_time":"2021-05-29T10:36:10.182Z"},"emitted_at":1733742439910} +{"stream":"lead_labels","data":{"id":"aecece62-c069-11eb-93bf-b59c4f1731e6","name":"Cold","color":"blue","add_time":"2021-05-29T10:36:10.182Z","update_time":"2021-05-29T10:36:10.182Z"},"emitted_at":1733742439910} +{"stream":"activity_types","data":{"id":5,"order_nr":5,"name":"Email","key_string":"email","icon_key":"email","active_flag":true,"is_custom_flag":false,"add_time":"2020-12-10 07:23:48"},"emitted_at":1733742439922} +{"stream":"activity_types","data":{"id":6,"order_nr":6,"name":"Lunch","key_string":"lunch","icon_key":"lunch","active_flag":true,"is_custom_flag":false,"add_time":"2020-12-10 07:23:48"},"emitted_at":1733742439922} +{"stream":"activity_types","data":{"id":7,"order_nr":7,"name":"Test 1","key_string":"test_1","icon_key":"car","active_flag":true,"is_custom_flag":true,"add_time":"2023-02-22 11:13:54","update_time":"2023-02-22 11:13:54"},"emitted_at":1733742439922} +{"stream":"leads","data":{"id":"56b65210-b28f-11ed-8345-ef71fee43bbf","title":"Test Organization 6 lead","owner_id":11884360,"creator_id":11884360,"label_ids":["aecece60-c069-11eb-93bf-b59c4f1731e6"],"value":{"amount":800,"currency":"USD"},"expected_close_date":"2023-03-30","person_id":7,"organization_id":6,"is_archived":false,"source_name":"Manually created","origin":"ManuallyCreated","was_seen":true,"next_activity_id":30,"add_time":"2023-02-22T09:00:22.321Z","update_time":"2023-02-22T11:48:49.834Z","visible_to":"3","cc_email":"airbyte-sandbox+7780468+lead54t3wth3rdv6nou8zygue80qn@pipedrivemail.com"},"emitted_at":1733742439928} +{"stream":"leads","data":{"id":"11761620-b29b-11ed-83bc-c5d5bb21e359","title":"Test Organization 8 lead","owner_id":11884360,"creator_id":11884360,"label_ids":["aecece61-c069-11eb-93bf-b59c4f1731e6"],"value":{"amount":3000,"currency":"USD"},"expected_close_date":"2023-04-28","person_id":9,"organization_id":8,"is_archived":false,"source_name":"Manually created","origin":"ManuallyCreated","was_seen":true,"next_activity_id":31,"add_time":"2023-02-22T10:24:20.098Z","update_time":"2023-02-22T11:49:10.328Z","visible_to":"3","cc_email":"airbyte-sandbox+7780468+lead117qmm8o4y5irltqaigfmfdcp@pipedrivemail.com"},"emitted_at":1733742439928} +{"stream":"leads","data":{"id":"98b9c5a0-b29b-11ed-83b4-fd61bd275e86","title":"Test Organization 10 lead","owner_id":11884360,"creator_id":11884360,"label_ids":["aecece60-c069-11eb-93bf-b59c4f1731e6"],"value":{"amount":2000,"currency":"USD"},"expected_close_date":"2023-05-30","person_id":11,"organization_id":10,"is_archived":false,"source_name":"Manually created","origin":"ManuallyCreated","was_seen":true,"next_activity_id":32,"add_time":"2023-02-22T10:28:07.034Z","update_time":"2023-02-22T11:49:24.853Z","visible_to":"3","cc_email":"airbyte-sandbox+7780468+lead91i2xdc9n1af14fee3k0llapy@pipedrivemail.com"},"emitted_at":1733742439928} +{"stream":"organizations","data":{"id":12,"company_id":7780468,"owner_id":{"id":11884360,"name":"Team Airbyte","email":"integration-test@airbyte.io","has_pic":0,"pic_hash":null,"active_flag":true,"value":11884360},"name":"Test Organization 7","open_deals_count":0,"related_open_deals_count":0,"closed_deals_count":0,"related_closed_deals_count":0,"email_messages_count":0,"people_count":0,"activities_count":0,"done_activities_count":0,"undone_activities_count":0,"files_count":0,"notes_count":0,"followers_count":1,"won_deals_count":0,"related_won_deals_count":0,"lost_deals_count":0,"related_lost_deals_count":0,"active_flag":true,"first_char":"t","update_time":"2023-02-22 08:21:58","add_time":"2023-02-22 08:18:16","visible_to":"3","label":5,"label_ids":[5],"address":"DY Patil College, Sant Tukaram Nagar, Pimpri Colony, Pimpri-Chinchwad, Maharashtra, India","address_subpremise":"","address_street_number":"","address_route":"","address_sublocality":"Pimpri Colony","address_locality":"Pimpri-Chinchwad","address_admin_area_level_1":"Maharashtra","address_admin_area_level_2":"Pune Division","address_country":"India","address_postal_code":"411018","address_formatted_address":"DY Patil College, DR. D Y PATIL MEDICAL COLLEGE, Sant Tukaram Nagar, Pimpri Colony, Pimpri-Chinchwad, Maharashtra 411018, India","owner_name":"Team Airbyte","cc_email":"airbyte-sandbox@pipedrivemail.com"},"emitted_at":1733742440202} +{"stream":"organizations","data":{"id":13,"company_id":7780468,"owner_id":{"id":11884360,"name":"Team Airbyte","email":"integration-test@airbyte.io","has_pic":0,"pic_hash":null,"active_flag":true,"value":11884360},"name":"Test Organization 6","open_deals_count":0,"related_open_deals_count":0,"closed_deals_count":0,"related_closed_deals_count":0,"email_messages_count":0,"people_count":0,"activities_count":0,"done_activities_count":0,"undone_activities_count":0,"files_count":0,"notes_count":0,"followers_count":1,"won_deals_count":0,"related_won_deals_count":0,"lost_deals_count":0,"related_lost_deals_count":0,"active_flag":true,"first_char":"t","update_time":"2023-02-22 08:23:17","add_time":"2023-02-22 08:18:33","visible_to":"3","label":5,"label_ids":[5],"address":"Anand Vihar Railway Station, Block D, Anand Vihar, Delhi, Uttar Pradesh, India","address_subpremise":"","address_street_number":"","address_route":"","address_sublocality":"Anand Vihar","address_locality":"Delhi","address_admin_area_level_1":"Uttar Pradesh","address_admin_area_level_2":"Delhi Division","address_country":"India","address_postal_code":"261205","address_formatted_address":"J8X8+F33, Block D, Anand Vihar, Delhi, Uttar Pradesh 261205, India","owner_name":"Team Airbyte","cc_email":"airbyte-sandbox@pipedrivemail.com"},"emitted_at":1733742440202} +{"stream":"organizations","data":{"id":14,"company_id":7780468,"owner_id":{"id":11884360,"name":"Team Airbyte","email":"integration-test@airbyte.io","has_pic":0,"pic_hash":null,"active_flag":true,"value":11884360},"name":"Test Organization 8","open_deals_count":1,"related_open_deals_count":0,"closed_deals_count":0,"related_closed_deals_count":0,"email_messages_count":0,"people_count":1,"activities_count":0,"done_activities_count":0,"undone_activities_count":0,"files_count":0,"notes_count":0,"followers_count":1,"won_deals_count":0,"related_won_deals_count":0,"lost_deals_count":0,"related_lost_deals_count":0,"active_flag":true,"first_char":"t","update_time":"2023-10-13 13:25:00","add_time":"2023-02-22 08:18:50","visible_to":"3","label":5,"label_ids":[5],"address":"London Eye, London, UK","address_subpremise":"Riverside Building","address_street_number":"","address_route":"","address_sublocality":"","address_locality":"","address_admin_area_level_1":"","address_admin_area_level_2":"Greater London","address_country":"United Kingdom","address_postal_code":"SE1 7PB","address_formatted_address":"Riverside Building, County Hall, London SE1 7PB, UK","owner_name":"Team Airbyte","16a9e1fcbccc7f7a5f429a1840c9f66db9ee901a":"My Custom Field Value","cc_email":"airbyte-sandbox@pipedrivemail.com"},"emitted_at":1733742440202} +{"stream":"organization_fields","data":{"key":"address_formatted_address","name":"Full/combined address of Address","field_type":"varchar","edit_flag":false,"active_flag":true,"is_subfield":true,"mandatory_flag":false,"parent_id":4021,"id_suffix":"formatted_address"},"emitted_at":1733742440211} +{"stream":"organization_fields","data":{"id":4023,"key":"label_ids","name":"Labels","order_nr":0,"field_type":"set","json_column_flag":false,"add_time":"2024-04-29 09:28:14","edit_flag":false,"details_visible_flag":true,"add_visible_flag":true,"important_flag":true,"bulk_edit_allowed":true,"filtering_allowed":true,"sortable_flag":true,"mandatory_flag":false,"searchable_flag":false,"active_flag":true,"options":[{"id":5,"label":"Customer","color":"green"},{"id":6,"label":"Hot lead","color":"red"},{"id":7,"label":"Warm lead","color":"yellow"},{"id":8,"label":"Cold lead","color":"blue"}]},"emitted_at":1733742440211} +{"stream":"organization_fields","data":{"id":4022,"key":"16a9e1fcbccc7f7a5f429a1840c9f66db9ee901a","name":"Pipedrive Custom Fields","order_nr":1,"field_type":"varchar","json_column_flag":true,"add_time":"2023-10-13 13:23:22","update_time":"2023-10-13 13:23:22","last_updated_by_user_id":11884360,"edit_flag":true,"details_visible_flag":true,"add_visible_flag":true,"important_flag":false,"bulk_edit_allowed":true,"filtering_allowed":true,"sortable_flag":true,"mandatory_flag":false,"searchable_flag":true,"active_flag":true},"emitted_at":1733742440211} +{"stream":"permission_sets","data":{"id":"fa17fff0-db56-11ec-93f1-e9cfc58fcd59","name":"Global admin","assignment_count":1,"app":"global","type":"admin"},"emitted_at":1733742440211} +{"stream":"permission_sets","data":{"id":"fa287ab0-db56-11ec-93f1-e9cfc58fcd59","name":"Global regular user","assignment_count":4,"app":"global","type":"regular"},"emitted_at":1733742440211} +{"stream":"permission_sets","data":{"id":"57b60400-ed6c-11ec-88fb-4fe88bf2db36","name":"Account settings","assignment_count":1,"app":"account_settings","type":"admin"},"emitted_at":1733742440211} +{"stream":"person_fields","data":{"id":9065,"key":"last_name","name":"Last name","order_nr":0,"field_type":"varchar","json_column_flag":false,"add_time":"2020-12-10 07:23:49","update_time":"2023-07-20 09:24:05","last_updated_by_user_id":0,"edit_flag":false,"details_visible_flag":true,"add_visible_flag":false,"important_flag":true,"bulk_edit_allowed":true,"filtering_allowed":true,"sortable_flag":true,"mandatory_flag":false,"searchable_flag":false,"active_flag":true},"emitted_at":1733742440213} +{"stream":"person_fields","data":{"id":9067,"key":"label_ids","name":"Labels","order_nr":0,"field_type":"set","json_column_flag":false,"add_time":"2024-04-29 09:28:14","edit_flag":false,"details_visible_flag":true,"add_visible_flag":true,"important_flag":true,"bulk_edit_allowed":true,"filtering_allowed":true,"sortable_flag":true,"mandatory_flag":false,"searchable_flag":false,"active_flag":true,"options":[{"id":1,"label":"Customer","color":"green"},{"id":2,"label":"Hot lead","color":"red"},{"id":3,"label":"Warm lead","color":"yellow"},{"id":4,"label":"Cold lead","color":"blue"}]},"emitted_at":1733742440213} +{"stream":"person_fields","data":{"id":9066,"key":"aa02d059909fdc632d590bd578d7b3baf4bf9780","name":"Custom Field 1","order_nr":1,"field_type":"varchar","json_column_flag":true,"add_time":"2023-07-12 17:53:46","update_time":"2023-07-12 17:53:46","last_updated_by_user_id":11884360,"edit_flag":true,"details_visible_flag":true,"add_visible_flag":false,"important_flag":false,"bulk_edit_allowed":true,"filtering_allowed":true,"sortable_flag":true,"mandatory_flag":false,"searchable_flag":true,"active_flag":true},"emitted_at":1733742440213} +{"stream":"currencies","data":{"id":166,"code":"ZMK","name":"Zambian Kwacha","symbol":"ZMK","decimal_points":2,"active_flag":true,"is_custom_flag":false},"emitted_at":1733742440218} +{"stream":"currencies","data":{"id":201,"code":"ZMW","name":"Zambian Kwacha","symbol":"ZMW","decimal_points":2,"active_flag":true,"is_custom_flag":false},"emitted_at":1733742440218} +{"stream":"currencies","data":{"id":167,"code":"ZWL","name":"Zimbabwe Dollar","symbol":"ZWL","decimal_points":2,"active_flag":true,"is_custom_flag":false},"emitted_at":1733742440219} +{"stream":"roles","data":{"id":1,"name":"(Unassigned users)","active_flag":true,"assignment_count":"5","sub_role_count":"0","level":1,"description":"This is the default group for managing your visibility settings. New users are added automatically unless you change their group when you invite them."},"emitted_at":1733742444262} +{"stream":"product_fields","data":{"id":26,"key":"category","name":"Category","order_nr":0,"field_type":"enum","json_column_flag":false,"add_time":"2020-12-10 07:23:48","update_time":"2023-07-20 09:24:08","last_updated_by_user_id":11884360,"edit_flag":false,"details_visible_flag":true,"add_visible_flag":true,"important_flag":true,"bulk_edit_allowed":true,"filtering_allowed":true,"sortable_flag":true,"mandatory_flag":false,"searchable_flag":false,"active_flag":true,"options":[{"id":12,"label":"Food"}]},"emitted_at":1733742444467} +{"stream":"product_fields","data":{"id":27,"key":"description","name":"Description","order_nr":0,"field_type":"text","json_column_flag":false,"add_time":"2020-12-10 07:23:48","update_time":"2023-07-20 09:24:08","last_updated_by_user_id":0,"edit_flag":false,"details_visible_flag":true,"add_visible_flag":false,"important_flag":true,"bulk_edit_allowed":true,"filtering_allowed":true,"sortable_flag":true,"mandatory_flag":false,"searchable_flag":false,"active_flag":true},"emitted_at":1733742444468} +{"stream":"product_fields","data":{"id":28,"key":"unit_prices","name":"Unit prices","order_nr":0,"field_type":"double","json_column_flag":false,"add_time":"2020-12-10 07:23:48","update_time":"2023-09-21 07:51:19","last_updated_by_user_id":0,"edit_flag":false,"details_visible_flag":false,"add_visible_flag":true,"important_flag":true,"bulk_edit_allowed":false,"filtering_allowed":true,"sortable_flag":true,"mandatory_flag":true,"searchable_flag":false,"active_flag":true},"emitted_at":1733742444468} diff --git a/airbyte-integrations/connectors/source-pipedrive/metadata.yaml b/airbyte-integrations/connectors/source-pipedrive/metadata.yaml index d2b7cd6c1078..6233ea2e546e 100644 --- a/airbyte-integrations/connectors/source-pipedrive/metadata.yaml +++ b/airbyte-integrations/connectors/source-pipedrive/metadata.yaml @@ -17,7 +17,7 @@ data: connectorSubtype: api connectorType: source definitionId: d8286229-c680-4063-8c59-23b9b391c700 - dockerImageTag: 2.2.28 + dockerImageTag: 2.3.0 dockerRepository: airbyte/source-pipedrive documentationUrl: https://docs.airbyte.com/integrations/sources/pipedrive githubIssueLabel: source-pipedrive diff --git a/airbyte-integrations/connectors/source-pipedrive/poetry.lock b/airbyte-integrations/connectors/source-pipedrive/poetry.lock index 3ae23dbdd91a..a6360eb9a5bb 100644 --- a/airbyte-integrations/connectors/source-pipedrive/poetry.lock +++ b/airbyte-integrations/connectors/source-pipedrive/poetry.lock @@ -1,58 +1,75 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.90.0" +version = "6.11.1" description = "A framework for writing Airbyte Connectors." optional = false -python-versions = "<4.0,>=3.9" +python-versions = "<3.13,>=3.10" files = [ - {file = "airbyte_cdk-0.90.0-py3-none-any.whl", hash = "sha256:bd0aa5843cdc4901f2e482f0e86695ca4e6db83b65c5017799255dd20535cf56"}, - {file = "airbyte_cdk-0.90.0.tar.gz", hash = "sha256:25cefc010718bada5cce3f87e7ae93068630732c0d34ce5145f8ddf7457d4d3c"}, + {file = "airbyte_cdk-6.11.1-py3-none-any.whl", hash = "sha256:e48cfae5d3fb12e3fa32b0aabb0f59f75417e185f9fbeb421f73088b3f6e2120"}, + {file = "airbyte_cdk-6.11.1.tar.gz", hash = "sha256:8a19e8e96a165610e6d6d52846ef41dac88a8afdd55f6ada894f71e33d4782e5"}, ] [package.dependencies] -airbyte-protocol-models = ">=0.9.0,<1.0" +airbyte-protocol-models-dataclasses = ">=0.14,<0.15" backoff = "*" cachetools = "*" -cryptography = ">=42.0.5,<43.0.0" -Deprecated = ">=1.2,<1.3" -dpath = ">=2.0.1,<2.1.0" -genson = "1.2.2" +cryptography = ">=42.0.5,<44.0.0" +dpath = ">=2.1.6,<3.0.0" +dunamai = ">=1.22.0,<2.0.0" +genson = "1.3.0" isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" jsonref = ">=0.2,<0.3" -jsonschema = ">=3.2.0,<3.3.0" +jsonschema = ">=4.17.3,<4.18.0" langchain_core = "0.1.42" +nltk = "3.9.1" +numpy = "<2" +orjson = ">=3.10.7,<4.0.0" +pandas = "2.2.2" pendulum = "<3.0.0" -pydantic = ">=1.10.8,<2.0.0" +psutil = "6.1.0" +pydantic = ">=2.7,<3.0" pyjwt = ">=2.8.0,<3.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" +python-ulid = ">=3.0.0,<4.0.0" pytz = "2024.1" PyYAML = ">=6.0.1,<7.0.0" +rapidfuzz = ">=3.10.1,<4.0.0" requests = "*" requests_cache = "*" -wcmatch = "8.4" +serpyco-rs = ">=1.10.2,<2.0.0" +wcmatch = "10.0" +xmltodict = ">=0.13.0,<0.14.0" [package.extras] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] -vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "python-calamine (==0.2.3)", "python-snappy (==0.7.3)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sql = ["sqlalchemy (>=2.0,!=2.0.36,<3.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.8.0)"] [[package]] -name = "airbyte-protocol-models" +name = "airbyte-protocol-models-dataclasses" version = "0.14.1" -description = "Declares the Airbyte Protocol." +description = "Declares the Airbyte Protocol using Python Dataclasses. Dataclasses in Python have less performance overhead compared to Pydantic models, making them a more efficient choice for scenarios where speed and memory usage are critical" optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.14.1-py3-none-any.whl", hash = "sha256:851a9a7864191a05f7f0942e05eb7b0e36e3395be8db074f75a43b9098186089"}, - {file = "airbyte_protocol_models-0.14.1.tar.gz", hash = "sha256:bcb31493081fc7a2cb923b975eb6a46bc471fe1d82ac645ca5e551bb63731ffa"}, + {file = "airbyte_protocol_models_dataclasses-0.14.1-py3-none-any.whl", hash = "sha256:dfe10b32ee09e6ba9b4f17bd309e841b61cbd61ec8f80b1937ff104efd6209a9"}, + {file = "airbyte_protocol_models_dataclasses-0.14.1.tar.gz", hash = "sha256:f62a46556b82ea0d55de144983141639e8049d836dd4e0a9d7234c5b2e103c08"}, ] -[package.dependencies] -pydantic = ">=1.9.2,<2.0.0" +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] [[package]] name = "anyio" @@ -86,21 +103,32 @@ files = [ {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, ] +[[package]] +name = "attributes-doc" +version = "0.4.0" +description = "PEP 224 implementation" +optional = false +python-versions = ">=3.8" +files = [ + {file = "attributes-doc-0.4.0.tar.gz", hash = "sha256:b1576c94a714e9fc2c65c47cf10d0c8e1a5f7c4f5ae7f69006be108d95cbfbfb"}, + {file = "attributes_doc-0.4.0-py2.py3-none-any.whl", hash = "sha256:4c3007d9e58f3a6cb4b9c614c4d4ce2d92161581f28e594ddd8241cc3a113bdd"}, +] + [[package]] name = "attrs" -version = "24.2.0" +version = "24.3.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, + {file = "attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308"}, + {file = "attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff"}, ] [package.extras] benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] @@ -368,6 +396,20 @@ files = [ {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + [[package]] name = "colorama" version = "0.4.6" @@ -381,43 +423,38 @@ files = [ [[package]] name = "cryptography" -version = "42.0.8" +version = "43.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, - {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, - {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, - {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, - {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, - {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, - {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, + {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, + {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, + {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, + {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, + {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, + {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, + {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, ] [package.dependencies] @@ -430,37 +467,34 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] -name = "deprecated" -version = "1.2.15" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +name = "dpath" +version = "2.2.0" +description = "Filesystem-like pathing and searching for dictionaries" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +python-versions = ">=3.7" files = [ - {file = "Deprecated-1.2.15-py2.py3-none-any.whl", hash = "sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320"}, - {file = "deprecated-1.2.15.tar.gz", hash = "sha256:683e561a90de76239796e6b6feac66b99030d2dd3fcf61ef996330f14bbb9b0d"}, + {file = "dpath-2.2.0-py3-none-any.whl", hash = "sha256:b330a375ded0a0d2ed404440f6c6a715deae5313af40bbb01c8a41d891900576"}, + {file = "dpath-2.2.0.tar.gz", hash = "sha256:34f7e630dc55ea3f219e555726f5da4b4b25f2200319c8e6902c394258dd6a3e"}, ] -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "jinja2 (>=3.0.3,<3.1.0)", "setuptools", "sphinx (<2)", "tox"] - [[package]] -name = "dpath" -version = "2.0.8" -description = "Filesystem-like pathing and searching for dictionaries" +name = "dunamai" +version = "1.23.0" +description = "Dynamic version generation" optional = false -python-versions = ">=3.7" +python-versions = ">=3.5" files = [ - {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, - {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, + {file = "dunamai-1.23.0-py3-none-any.whl", hash = "sha256:a0906d876e92441793c6a423e16a4802752e723e9c9a5aabdc5535df02dbe041"}, + {file = "dunamai-1.23.0.tar.gz", hash = "sha256:a163746de7ea5acb6dacdab3a6ad621ebc612ed1e528aaa8beedb8887fccd2c4"}, ] +[package.dependencies] +packaging = ">=20.9" + [[package]] name = "exceptiongroup" version = "1.2.2" @@ -477,12 +511,13 @@ test = ["pytest (>=6)"] [[package]] name = "genson" -version = "1.2.2" +version = "1.3.0" description = "GenSON is a powerful, user-friendly JSON Schema generator." optional = false python-versions = "*" files = [ - {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, + {file = "genson-1.3.0-py3-none-any.whl", hash = "sha256:468feccd00274cc7e4c09e84b08704270ba8d95232aa280f65b986139cec67f7"}, + {file = "genson-1.3.0.tar.gz", hash = "sha256:e02db9ac2e3fd29e65b5286f7135762e2cd8a986537c075b06fc5f1517308e37"}, ] [[package]] @@ -597,6 +632,17 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "joblib" +version = "1.4.2" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, + {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, +] + [[package]] name = "jsonpatch" version = "1.33" @@ -635,24 +681,22 @@ files = [ [[package]] name = "jsonschema" -version = "3.2.0" +version = "4.17.3" description = "An implementation of JSON Schema validation for Python" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, - {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, + {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, + {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, ] [package.dependencies] attrs = ">=17.4.0" -pyrsistent = ">=0.14.0" -setuptools = "*" -six = ">=1.11.0" +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" [package.extras] -format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] -format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] [[package]] name = "langchain-core" @@ -767,6 +811,76 @@ files = [ {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] +[[package]] +name = "nltk" +version = "3.9.1" +description = "Natural Language Toolkit" +optional = false +python-versions = ">=3.8" +files = [ + {file = "nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1"}, + {file = "nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868"}, +] + +[package.dependencies] +click = "*" +joblib = "*" +regex = ">=2021.8.3" +tqdm = "*" + +[package.extras] +all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] +corenlp = ["requests"] +machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] +plot = ["matplotlib"] +tgrep = ["pyparsing"] +twitter = ["twython"] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + [[package]] name = "orjson" version = "3.10.12" @@ -862,6 +976,78 @@ files = [ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] +[[package]] +name = "pandas" +version = "2.2.2" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + [[package]] name = "pendulum" version = "2.1.2" @@ -927,6 +1113,36 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "psutil" +version = "6.1.0" +description = "Cross-platform lib for process and system monitoring in Python." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "psutil-6.1.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ff34df86226c0227c52f38b919213157588a678d049688eded74c76c8ba4a5d0"}, + {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c0e0c00aa18ca2d3b2b991643b799a15fc8f0563d2ebb6040f64ce8dc027b942"}, + {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:000d1d1ebd634b4efb383f4034437384e44a6d455260aaee2eca1e9c1b55f047"}, + {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5cd2bcdc75b452ba2e10f0e8ecc0b57b827dd5d7aaffbc6821b2a9a242823a76"}, + {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:045f00a43c737f960d273a83973b2511430d61f283a44c96bf13a6e829ba8fdc"}, + {file = "psutil-6.1.0-cp27-none-win32.whl", hash = "sha256:9118f27452b70bb1d9ab3198c1f626c2499384935aaf55388211ad982611407e"}, + {file = "psutil-6.1.0-cp27-none-win_amd64.whl", hash = "sha256:a8506f6119cff7015678e2bce904a4da21025cc70ad283a53b099e7620061d85"}, + {file = "psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688"}, + {file = "psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a"}, + {file = "psutil-6.1.0-cp36-cp36m-win32.whl", hash = "sha256:6d3fbbc8d23fcdcb500d2c9f94e07b1342df8ed71b948a2649b5cb060a7c94ca"}, + {file = "psutil-6.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1209036fbd0421afde505a4879dee3b2fd7b1e14fee81c0069807adcbbcca747"}, + {file = "psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e"}, + {file = "psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be"}, + {file = "psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a"}, +] + +[package.extras] +dev = ["black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "wheel"] +test = ["pytest", "pytest-xdist", "setuptools"] + [[package]] name = "py" version = "1.11.0" @@ -951,62 +1167,135 @@ files = [ [[package]] name = "pydantic" -version = "1.10.19" -description = "Data validation and settings management using python type hints" +version = "2.10.3" +description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-1.10.19-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a415b9e95fa602b10808113967f72b2da8722061265d6af69268c111c254832d"}, - {file = "pydantic-1.10.19-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:11965f421f7eb026439d4eb7464e9182fe6d69c3d4d416e464a4485d1ba61ab6"}, - {file = "pydantic-1.10.19-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5bb81fcfc6d5bff62cd786cbd87480a11d23f16d5376ad2e057c02b3b44df96"}, - {file = "pydantic-1.10.19-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83ee8c9916689f8e6e7d90161e6663ac876be2efd32f61fdcfa3a15e87d4e413"}, - {file = "pydantic-1.10.19-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0399094464ae7f28482de22383e667625e38e1516d6b213176df1acdd0c477ea"}, - {file = "pydantic-1.10.19-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8b2cf5e26da84f2d2dee3f60a3f1782adedcee785567a19b68d0af7e1534bd1f"}, - {file = "pydantic-1.10.19-cp310-cp310-win_amd64.whl", hash = "sha256:1fc8cc264afaf47ae6a9bcbd36c018d0c6b89293835d7fb0e5e1a95898062d59"}, - {file = "pydantic-1.10.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d7a8a1dd68bac29f08f0a3147de1885f4dccec35d4ea926e6e637fac03cdb4b3"}, - {file = "pydantic-1.10.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07d00ca5ef0de65dd274005433ce2bb623730271d495a7d190a91c19c5679d34"}, - {file = "pydantic-1.10.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad57004e5d73aee36f1e25e4e73a4bc853b473a1c30f652dc8d86b0a987ffce3"}, - {file = "pydantic-1.10.19-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dce355fe7ae53e3090f7f5fa242423c3a7b53260747aa398b4b3aaf8b25f41c3"}, - {file = "pydantic-1.10.19-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0d32227ea9a3bf537a2273fd2fdb6d64ab4d9b83acd9e4e09310a777baaabb98"}, - {file = "pydantic-1.10.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e351df83d1c9cffa53d4e779009a093be70f1d5c6bb7068584086f6a19042526"}, - {file = "pydantic-1.10.19-cp311-cp311-win_amd64.whl", hash = "sha256:d8d72553d2f3f57ce547de4fa7dc8e3859927784ab2c88343f1fc1360ff17a08"}, - {file = "pydantic-1.10.19-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d5b5b7c6bafaef90cbb7dafcb225b763edd71d9e22489647ee7df49d6d341890"}, - {file = "pydantic-1.10.19-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:570ad0aeaf98b5e33ff41af75aba2ef6604ee25ce0431ecd734a28e74a208555"}, - {file = "pydantic-1.10.19-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0890fbd7fec9e151c7512941243d830b2d6076d5df159a2030952d480ab80a4e"}, - {file = "pydantic-1.10.19-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec5c44e6e9eac5128a9bfd21610df3b8c6b17343285cc185105686888dc81206"}, - {file = "pydantic-1.10.19-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6eb56074b11a696e0b66c7181da682e88c00e5cebe6570af8013fcae5e63e186"}, - {file = "pydantic-1.10.19-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9d7d48fbc5289efd23982a0d68e973a1f37d49064ccd36d86de4543aff21e086"}, - {file = "pydantic-1.10.19-cp312-cp312-win_amd64.whl", hash = "sha256:fd34012691fbd4e67bdf4accb1f0682342101015b78327eaae3543583fcd451e"}, - {file = "pydantic-1.10.19-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a5d5b877c7d3d9e17399571a8ab042081d22fe6904416a8b20f8af5909e6c8f"}, - {file = "pydantic-1.10.19-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c46f58ef2df958ed2ea7437a8be0897d5efe9ee480818405338c7da88186fb3"}, - {file = "pydantic-1.10.19-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d8a38a44bb6a15810084316ed69c854a7c06e0c99c5429f1d664ad52cec353c"}, - {file = "pydantic-1.10.19-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a82746c6d6e91ca17e75f7f333ed41d70fce93af520a8437821dec3ee52dfb10"}, - {file = "pydantic-1.10.19-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:566bebdbe6bc0ac593fa0f67d62febbad9f8be5433f686dc56401ba4aab034e3"}, - {file = "pydantic-1.10.19-cp37-cp37m-win_amd64.whl", hash = "sha256:22a1794e01591884741be56c6fba157c4e99dcc9244beb5a87bd4aa54b84ea8b"}, - {file = "pydantic-1.10.19-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:076c49e24b73d346c45f9282d00dbfc16eef7ae27c970583d499f11110d9e5b0"}, - {file = "pydantic-1.10.19-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5d4320510682d5a6c88766b2a286d03b87bd3562bf8d78c73d63bab04b21e7b4"}, - {file = "pydantic-1.10.19-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e66aa0fa7f8aa9d0a620361834f6eb60d01d3e9cea23ca1a92cda99e6f61dac"}, - {file = "pydantic-1.10.19-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d216f8d0484d88ab72ab45d699ac669fe031275e3fa6553e3804e69485449fa0"}, - {file = "pydantic-1.10.19-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9f28a81978e936136c44e6a70c65bde7548d87f3807260f73aeffbf76fb94c2f"}, - {file = "pydantic-1.10.19-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d3449633c207ec3d2d672eedb3edbe753e29bd4e22d2e42a37a2c1406564c20f"}, - {file = "pydantic-1.10.19-cp38-cp38-win_amd64.whl", hash = "sha256:7ea24e8614f541d69ea72759ff635df0e612b7dc9d264d43f51364df310081a3"}, - {file = "pydantic-1.10.19-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:573254d844f3e64093f72fcd922561d9c5696821ff0900a0db989d8c06ab0c25"}, - {file = "pydantic-1.10.19-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ff09600cebe957ecbb4a27496fe34c1d449e7957ed20a202d5029a71a8af2e35"}, - {file = "pydantic-1.10.19-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4739c206bfb6bb2bdc78dcd40bfcebb2361add4ceac6d170e741bb914e9eff0f"}, - {file = "pydantic-1.10.19-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bfb5b378b78229119d66ced6adac2e933c67a0aa1d0a7adffbe432f3ec14ce4"}, - {file = "pydantic-1.10.19-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7f31742c95e3f9443b8c6fa07c119623e61d76603be9c0d390bcf7e888acabcb"}, - {file = "pydantic-1.10.19-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c6444368b651a14c2ce2fb22145e1496f7ab23cbdb978590d47c8d34a7bc0289"}, - {file = "pydantic-1.10.19-cp39-cp39-win_amd64.whl", hash = "sha256:945407f4d08cd12485757a281fca0e5b41408606228612f421aa4ea1b63a095d"}, - {file = "pydantic-1.10.19-py3-none-any.whl", hash = "sha256:2206a1752d9fac011e95ca83926a269fb0ef5536f7e053966d058316e24d929f"}, - {file = "pydantic-1.10.19.tar.gz", hash = "sha256:fea36c2065b7a1d28c6819cc2e93387b43dd5d3cf5a1e82d8132ee23f36d1f10"}, + {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"}, + {file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"}, ] [package.dependencies] -typing-extensions = ">=4.2.0" +annotated-types = ">=0.6.0" +pydantic-core = "2.27.1" +typing-extensions = ">=4.12.2" [package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] + +[[package]] +name = "pydantic-core" +version = "2.27.1" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, + {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, + {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, + {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, + {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, + {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, + {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, + {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, + {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, + {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, + {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, + {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, + {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, + {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, + {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, + {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, + {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pyjwt" @@ -1136,6 +1425,20 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "python-ulid" +version = "3.0.0" +description = "Universally unique lexicographically sortable identifier" +optional = false +python-versions = ">=3.9" +files = [ + {file = "python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"}, + {file = "python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"}, +] + +[package.extras] +pydantic = ["pydantic (>=2.0)"] + [[package]] name = "pytz" version = "2024.1" @@ -1220,6 +1523,209 @@ files = [ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] +[[package]] +name = "rapidfuzz" +version = "3.10.1" +description = "rapid fuzzy string matching" +optional = false +python-versions = ">=3.9" +files = [ + {file = "rapidfuzz-3.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f17d9f21bf2f2f785d74f7b0d407805468b4c173fa3e52c86ec94436b338e74a"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b31f358a70efc143909fb3d75ac6cd3c139cd41339aa8f2a3a0ead8315731f2b"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f4f43f2204b56a61448ec2dd061e26fd344c404da99fb19f3458200c5874ba2"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d81bf186a453a2757472133b24915768abc7c3964194406ed93e170e16c21cb"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3611c8f45379a12063d70075c75134f2a8bd2e4e9b8a7995112ddae95ca1c982"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c3b537b97ac30da4b73930fa8a4fe2f79c6d1c10ad535c5c09726612cd6bed9"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:231ef1ec9cf7b59809ce3301006500b9d564ddb324635f4ea8f16b3e2a1780da"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ed4f3adc1294834955b7e74edd3c6bd1aad5831c007f2d91ea839e76461a5879"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:7b6015da2e707bf632a71772a2dbf0703cff6525732c005ad24987fe86e8ec32"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1b35a118d61d6f008e8e3fb3a77674d10806a8972c7b8be433d6598df4d60b01"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:bc308d79a7e877226f36bdf4e149e3ed398d8277c140be5c1fd892ec41739e6d"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f017dbfecc172e2d0c37cf9e3d519179d71a7f16094b57430dffc496a098aa17"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-win32.whl", hash = "sha256:36c0e1483e21f918d0f2f26799fe5ac91c7b0c34220b73007301c4f831a9c4c7"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:10746c1d4c8cd8881c28a87fd7ba0c9c102346dfe7ff1b0d021cdf093e9adbff"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-win_arm64.whl", hash = "sha256:dfa64b89dcb906835e275187569e51aa9d546a444489e97aaf2cc84011565fbe"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:92958ae075c87fef393f835ed02d4fe8d5ee2059a0934c6c447ea3417dfbf0e8"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ba7521e072c53e33c384e78615d0718e645cab3c366ecd3cc8cb732befd94967"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d02cbd75d283c287471b5b3738b3e05c9096150f93f2d2dfa10b3d700f2db9"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:efa1582a397da038e2f2576c9cd49b842f56fde37d84a6b0200ffebc08d82350"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f12912acee1f506f974f58de9fdc2e62eea5667377a7e9156de53241c05fdba8"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666d5d8b17becc3f53447bcb2b6b33ce6c2df78792495d1fa82b2924cd48701a"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26f71582c0d62445067ee338ddad99b655a8f4e4ed517a90dcbfbb7d19310474"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8a2ef08b27167bcff230ffbfeedd4c4fa6353563d6aaa015d725dd3632fc3de7"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:365e4fc1a2b95082c890f5e98489b894e6bf8c338c6ac89bb6523c2ca6e9f086"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1996feb7a61609fa842e6b5e0c549983222ffdedaf29644cc67e479902846dfe"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:cf654702f144beaa093103841a2ea6910d617d0bb3fccb1d1fd63c54dde2cd49"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ec108bf25de674781d0a9a935030ba090c78d49def3d60f8724f3fc1e8e75024"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-win32.whl", hash = "sha256:031f8b367e5d92f7a1e27f7322012f3c321c3110137b43cc3bf678505583ef48"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:f98f36c6a1bb9a6c8bbec99ad87c8c0e364f34761739b5ea9adf7b48129ae8cf"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-win_arm64.whl", hash = "sha256:f1da2028cb4e41be55ee797a82d6c1cf589442504244249dfeb32efc608edee7"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1340b56340896bede246f612b6ecf685f661a56aabef3d2512481bfe23ac5835"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2316515169b7b5a453f0ce3adbc46c42aa332cae9f2edb668e24d1fc92b2f2bb"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e06fe6a12241ec1b72c0566c6b28cda714d61965d86569595ad24793d1ab259"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d99c1cd9443b19164ec185a7d752f4b4db19c066c136f028991a480720472e23"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1d9aa156ed52d3446388ba4c2f335e312191d1ca9d1f5762ee983cf23e4ecf6"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:54bcf4efaaee8e015822be0c2c28214815f4f6b4f70d8362cfecbd58a71188ac"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0c955e32afdbfdf6e9ee663d24afb25210152d98c26d22d399712d29a9b976b"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:191633722203f5b7717efcb73a14f76f3b124877d0608c070b827c5226d0b972"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:195baad28057ec9609e40385991004e470af9ef87401e24ebe72c064431524ab"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0fff4a6b87c07366662b62ae994ffbeadc472e72f725923f94b72a3db49f4671"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4ffed25f9fdc0b287f30a98467493d1e1ce5b583f6317f70ec0263b3c97dbba6"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d02cf8e5af89a9ac8f53c438ddff6d773f62c25c6619b29db96f4aae248177c0"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-win32.whl", hash = "sha256:f3bb81d4fe6a5d20650f8c0afcc8f6e1941f6fecdb434f11b874c42467baded0"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:aaf83e9170cb1338922ae42d320699dccbbdca8ffed07faeb0b9257822c26e24"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-win_arm64.whl", hash = "sha256:c5da802a0d085ad81b0f62828fb55557996c497b2d0b551bbdfeafd6d447892f"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fc22d69a1c9cccd560a5c434c0371b2df0f47c309c635a01a913e03bbf183710"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38b0dac2c8e057562b8f0d8ae5b663d2d6a28c5ab624de5b73cef9abb6129a24"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fde3bbb14e92ce8fcb5c2edfff72e474d0080cadda1c97785bf4822f037a309"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9141fb0592e55f98fe9ac0f3ce883199b9c13e262e0bf40c5b18cdf926109d16"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:237bec5dd1bfc9b40bbd786cd27949ef0c0eb5fab5eb491904c6b5df59d39d3c"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18123168cba156ab5794ea6de66db50f21bb3c66ae748d03316e71b27d907b95"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b75fe506c8e02769cc47f5ab21ce3e09b6211d3edaa8f8f27331cb6988779be"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9da82aa4b46973aaf9e03bb4c3d6977004648c8638febfc0f9d237e865761270"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c34c022d5ad564f1a5a57a4a89793bd70d7bad428150fb8ff2760b223407cdcf"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1e96c84d6c2a0ca94e15acb5399118fff669f4306beb98a6d8ec6f5dccab4412"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e8e154b84a311263e1aca86818c962e1fa9eefdd643d1d5d197fcd2738f88cb9"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:335fee93188f8cd585552bb8057228ce0111bd227fa81bfd40b7df6b75def8ab"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-win32.whl", hash = "sha256:6729b856166a9e95c278410f73683957ea6100c8a9d0a8dbe434c49663689255"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-win_amd64.whl", hash = "sha256:0e06d99ad1ad97cb2ef7f51ec6b1fedd74a3a700e4949353871cf331d07b382a"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-win_arm64.whl", hash = "sha256:8d1b7082104d596a3eb012e0549b2634ed15015b569f48879701e9d8db959dbb"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:779027d3307e1a2b1dc0c03c34df87a470a368a1a0840a9d2908baf2d4067956"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:440b5608ab12650d0390128d6858bc839ae77ffe5edf0b33a1551f2fa9860651"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82cac41a411e07a6f3dc80dfbd33f6be70ea0abd72e99c59310819d09f07d945"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:958473c9f0bca250590200fd520b75be0dbdbc4a7327dc87a55b6d7dc8d68552"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ef60dfa73749ef91cb6073be1a3e135f4846ec809cc115f3cbfc6fe283a5584"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7fbac18f2c19fc983838a60611e67e3262e36859994c26f2ee85bb268de2355"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a0d519ff39db887cd73f4e297922786d548f5c05d6b51f4e6754f452a7f4296"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bebb7bc6aeb91cc57e4881b222484c26759ca865794187217c9dcea6c33adae6"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fe07f8b9c3bb5c5ad1d2c66884253e03800f4189a60eb6acd6119ebaf3eb9894"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:bfa48a4a2d45a41457f0840c48e579db157a927f4e97acf6e20df8fc521c79de"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2cf44d01bfe8ee605b7eaeecbc2b9ca64fc55765f17b304b40ed8995f69d7716"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e6bbca9246d9eedaa1c84e04a7f555493ba324d52ae4d9f3d9ddd1b740dcd87"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-win32.whl", hash = "sha256:567f88180f2c1423b4fe3f3ad6e6310fc97b85bdba574801548597287fc07028"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6b2cd7c29d6ecdf0b780deb587198f13213ac01c430ada6913452fd0c40190fc"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-win_arm64.whl", hash = "sha256:9f912d459e46607ce276128f52bea21ebc3e9a5ccf4cccfef30dd5bddcf47be8"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ac4452f182243cfab30ba4668ef2de101effaedc30f9faabb06a095a8c90fd16"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:565c2bd4f7d23c32834652b27b51dd711814ab614b4e12add8476be4e20d1cf5"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:187d9747149321607be4ccd6f9f366730078bed806178ec3eeb31d05545e9e8f"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:616290fb9a8fa87e48cb0326d26f98d4e29f17c3b762c2d586f2b35c1fd2034b"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:073a5b107e17ebd264198b78614c0206fa438cce749692af5bc5f8f484883f50"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:39c4983e2e2ccb9732f3ac7d81617088822f4a12291d416b09b8a1eadebb3e29"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ac7adee6bcf0c6fee495d877edad1540a7e0f5fc208da03ccb64734b43522d7a"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:425f4ac80b22153d391ee3f94bc854668a0c6c129f05cf2eaf5ee74474ddb69e"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65a2fa13e8a219f9b5dcb9e74abe3ced5838a7327e629f426d333dfc8c5a6e66"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75561f3df9a906aaa23787e9992b228b1ab69007932dc42070f747103e177ba8"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:edd062490537e97ca125bc6c7f2b7331c2b73d21dc304615afe61ad1691e15d5"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfcc8feccf63245a22dfdd16e222f1a39771a44b870beb748117a0e09cbb4a62"}, + {file = "rapidfuzz-3.10.1.tar.gz", hash = "sha256:5a15546d847a915b3f42dc79ef9b0c78b998b4e2c53b252e7166284066585979"}, +] + +[package.extras] +all = ["numpy"] + +[[package]] +name = "regex" +version = "2024.11.6" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.8" +files = [ + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"}, + {file = "regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"}, + {file = "regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"}, + {file = "regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"}, + {file = "regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"}, + {file = "regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"}, + {file = "regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d"}, + {file = "regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff"}, + {file = "regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3a51ccc315653ba012774efca4f23d1d2a8a8f278a6072e29c7147eee7da446b"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ad182d02e40de7459b73155deb8996bbd8e96852267879396fb274e8700190e3"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba9b72e5643641b7d41fa1f6d5abda2c9a263ae835b917348fc3c928182ad467"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40291b1b89ca6ad8d3f2b82782cc33807f1406cf68c8d440861da6304d8ffbbd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf58d0e516ee426a48f7b2c03a332a4114420716d55769ff7108c37a09951bf"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a36fdf2af13c2b14738f6e973aba563623cb77d753bbbd8d414d18bfaa3105dd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cee317bfc014c2419a76bcc87f071405e3966da434e03e13beb45f8aced1a6"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50153825ee016b91549962f970d6a4442fa106832e14c918acd1c8e479916c4f"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea1bfda2f7162605f6e8178223576856b3d791109f15ea99a9f95c16a7636fb5"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:df951c5f4a1b1910f1a99ff42c473ff60f8225baa1cdd3539fe2819d9543e9df"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:072623554418a9911446278f16ecb398fb3b540147a7828c06e2011fa531e773"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f654882311409afb1d780b940234208a252322c24a93b442ca714d119e68086c"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:89d75e7293d2b3e674db7d4d9b1bee7f8f3d1609428e293771d1a962617150cc"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f65557897fc977a44ab205ea871b690adaef6b9da6afda4790a2484b04293a5f"}, + {file = "regex-2024.11.6-cp38-cp38-win32.whl", hash = "sha256:6f44ec28b1f858c98d3036ad5d7d0bfc568bdd7a74f9c24e25f41ef1ebfd81a4"}, + {file = "regex-2024.11.6-cp38-cp38-win_amd64.whl", hash = "sha256:bb8f74f2f10dbf13a0be8de623ba4f9491faf58c24064f32b65679b021ed0001"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b"}, + {file = "regex-2024.11.6-cp39-cp39-win32.whl", hash = "sha256:41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57"}, + {file = "regex-2024.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983"}, + {file = "regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519"}, +] + [[package]] name = "requests" version = "2.32.3" @@ -1286,24 +1792,58 @@ files = [ requests = ">=2.0.1,<3.0.0" [[package]] -name = "setuptools" -version = "75.6.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" +name = "serpyco-rs" +version = "1.11.0" +description = "" optional = false python-versions = ">=3.9" files = [ - {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"}, - {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:4b2bd933539bd8c84315e2fb5ae52ef7a58ace5a6dfe3f8b73f74dc71216779e"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:627f957889ff73c4d2269fc7b6bba93212381befe03633e7cb5495de66ba9a33"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b0933620abc01434023e0e3e22255b7e4ab9b427b5a9a5ee00834656d792377a"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9ce46683d92e34abb20304817fc5ac6cb141a06fc7468dedb1d8865a8a9682f6"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bda437d86e8859bf91c189c1f4650899822f6d6d7b02b48f5729da904eb7bb7d"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a72bfbd282af17ebe76d122639013e802c09902543fdbbd828fb2159ec9755e"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d4808df5384e3e8581e31a90ba7a1fa501c0837b1f174284bb8a4555b6864ea"}, + {file = "serpyco_rs-1.11.0-cp310-none-win_amd64.whl", hash = "sha256:c7b60aef4c16d68efb0d6241f05d0a434d873d98449cbb4366b0d385f0a7172b"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:8d47ee577cf4d69b53917615cb031ad8708eb2f59fe78194b1968c13130fc2f7"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6090d9a1487237cdd4e9362a823eede23249602019b917e7bd57846179286e79"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7192eb3df576386fefd595ea31ae25c62522841ffec7e7aeb37a80b55bdc3213"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b52ef8affb7e71b9b98a7d5216d6a7ad03b04e990acb147cd9211c8b931c5487"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3480e09e473560c60e74aaa789e6b4d079637371aae0a98235440111464bbba7"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c92e36b0ab6fe866601c2331f7e99c809a126d21963c03d8a5c29331526deed"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84f497361952d4566bc1f77e9e15a84a2614f593cc671fbf0a0fa80046f9c3d7"}, + {file = "serpyco_rs-1.11.0-cp311-none-win_amd64.whl", hash = "sha256:37fc1cf192bef9784fbf1f4e03cec21750b9e704bef55cc0442f71a715eee920"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:3ea93d485f03dc8b0cfb0d477f0ad2e86e78f0461b53010656ab5b4db1b41fb0"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7772410d15694b03f9c5500a2c47d62eed76e191bea4087ad042250346b1a38e"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42118463c1679846cffd2f06f47744c9b9eb33c5d0448afd88ea19e1a81a8ddd"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:79481a455b76cc56021dc55bb6d5bdda1b2b32bcb6a1ee711b597140d112e9b1"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8fd79051f9af9591fc03cf7d3033ff180416301f6a4fd3d1e3d92ebd2d68697"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d29c8f9aeed734a3b51f7349d04ec9063516ffa4e10b632d75e9b1309e4930e4"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15609158b0d9591ffa118302cd9d0039970cb3faf91dce32975f7d276e7411d5"}, + {file = "serpyco_rs-1.11.0-cp312-none-win_amd64.whl", hash = "sha256:00081eae77fbf4c5d88371c5586317ab02ccb293a330b460869a283edf2b7b69"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:3028893366a1985adcedb13fa8f6f98c087c185efc427f94c2ccdafa40f45832"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c18bf511316f3abf648a68ee62ef88617bec57d3fcde69466b4361102715ae5"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7dde9ef09cdfaf7c62378186b9e29f54ec76114be4c347be6a06dd559c5681e"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:18500ebc5e75285841e35585a238629a990b709e14f68933233640d15ca17d5f"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f47c23132d4e03982703a7630aa09877b41e499722142f76b6153f6619b612f3"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5f8e6ba499f6a0825bee0d8f8764569d367af871b563fc6512c171474e8e5383"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15438a076047c34cff6601a977df54948e8d39d1a86f89d05c48bc60f4c12a61"}, + {file = "serpyco_rs-1.11.0-cp313-none-win_amd64.whl", hash = "sha256:84ee2c109415bd81904fc9abb9aec86a5dd13166808c21142cf23ec639f683bd"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5c97c16c865261577fac4effeccc7ef5e0a1e8e35e7a3ee6c90c77c3a4cd7ff9"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47825e70f86fd6ef7c4a835dea3d6e8eef4fee354ed7b39ced99f31aba74a86e"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24d220220365110edba2f778f41ab3cf396883da0f26e1361a3ada9bd0227f73"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3a46f334af5a9d77acc6e1e58f355ae497900a2798929371f0545e274f6e6166"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d72b748acce4b4e3c7c9724e1eb33d033a1c26b08a698b393e0288060e0901"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2b8b6f205e8cc038d4d30dd0e70eece7bbecc816eb2f3787c330dc2218e232d"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:038d748bfff31f150f0c3edab2766b8843edb952cb1bd3bf547886beb0912dae"}, + {file = "serpyco_rs-1.11.0-cp39-none-win_amd64.whl", hash = "sha256:0fee1c89ec2cb013dc232e4ebef88e2844357ce8631063b56639dbfb83762f20"}, + {file = "serpyco_rs-1.11.0.tar.gz", hash = "sha256:70a844615ffb229e6e89c204b3ab7404aacaf2838911814c7d847969b8da2e3a"}, ] -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] -core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] +[package.dependencies] +attributes-doc = "*" +typing-extensions = "*" [[package]] name = "six" @@ -1353,6 +1893,27 @@ files = [ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] +[[package]] +name = "tqdm" +version = "4.67.1" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, + {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"] +discord = ["requests"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + [[package]] name = "typing-extensions" version = "4.12.2" @@ -1364,6 +1925,17 @@ files = [ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] +[[package]] +name = "tzdata" +version = "2024.2" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, +] + [[package]] name = "url-normalize" version = "1.4.3" @@ -1397,93 +1969,30 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "wcmatch" -version = "8.4" +version = "10.0" description = "Wildcard/glob file name matcher." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, - {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, + {file = "wcmatch-10.0-py3-none-any.whl", hash = "sha256:0dd927072d03c0a6527a20d2e6ad5ba8d0380e60870c383bc533b71744df7b7a"}, + {file = "wcmatch-10.0.tar.gz", hash = "sha256:e72f0de09bba6a04e0de70937b0cf06e55f36f37b3deb422dfaf854b867b840a"}, ] [package.dependencies] bracex = ">=2.1.1" [[package]] -name = "wrapt" -version = "1.17.0" -description = "Module for decorators, wrappers and monkey patching." +name = "xmltodict" +version = "0.13.0" +description = "Makes working with XML feel like you are working with JSON" optional = false -python-versions = ">=3.8" +python-versions = ">=3.4" files = [ - {file = "wrapt-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a0c23b8319848426f305f9cb0c98a6e32ee68a36264f45948ccf8e7d2b941f8"}, - {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ca5f060e205f72bec57faae5bd817a1560fcfc4af03f414b08fa29106b7e2d"}, - {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e185ec6060e301a7e5f8461c86fb3640a7beb1a0f0208ffde7a65ec4074931df"}, - {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb90765dd91aed05b53cd7a87bd7f5c188fcd95960914bae0d32c5e7f899719d"}, - {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:879591c2b5ab0a7184258274c42a126b74a2c3d5a329df16d69f9cee07bba6ea"}, - {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fce6fee67c318fdfb7f285c29a82d84782ae2579c0e1b385b7f36c6e8074fffb"}, - {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0698d3a86f68abc894d537887b9bbf84d29bcfbc759e23f4644be27acf6da301"}, - {file = "wrapt-1.17.0-cp310-cp310-win32.whl", hash = "sha256:69d093792dc34a9c4c8a70e4973a3361c7a7578e9cd86961b2bbf38ca71e4e22"}, - {file = "wrapt-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:f28b29dc158ca5d6ac396c8e0a2ef45c4e97bb7e65522bfc04c989e6fe814575"}, - {file = "wrapt-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:74bf625b1b4caaa7bad51d9003f8b07a468a704e0644a700e936c357c17dd45a"}, - {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f2a28eb35cf99d5f5bd12f5dd44a0f41d206db226535b37b0c60e9da162c3ed"}, - {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81b1289e99cf4bad07c23393ab447e5e96db0ab50974a280f7954b071d41b489"}, - {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2939cd4a2a52ca32bc0b359015718472d7f6de870760342e7ba295be9ebaf9"}, - {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a9653131bda68a1f029c52157fd81e11f07d485df55410401f745007bd6d339"}, - {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4e4b4385363de9052dac1a67bfb535c376f3d19c238b5f36bddc95efae15e12d"}, - {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bdf62d25234290db1837875d4dceb2151e4ea7f9fff2ed41c0fde23ed542eb5b"}, - {file = "wrapt-1.17.0-cp311-cp311-win32.whl", hash = "sha256:5d8fd17635b262448ab8f99230fe4dac991af1dabdbb92f7a70a6afac8a7e346"}, - {file = "wrapt-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:92a3d214d5e53cb1db8b015f30d544bc9d3f7179a05feb8f16df713cecc2620a"}, - {file = "wrapt-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:89fc28495896097622c3fc238915c79365dd0ede02f9a82ce436b13bd0ab7569"}, - {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875d240fdbdbe9e11f9831901fb8719da0bd4e6131f83aa9f69b96d18fae7504"}, - {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ed16d95fd142e9c72b6c10b06514ad30e846a0d0917ab406186541fe68b451"}, - {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18b956061b8db634120b58f668592a772e87e2e78bc1f6a906cfcaa0cc7991c1"}, - {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:daba396199399ccabafbfc509037ac635a6bc18510ad1add8fd16d4739cdd106"}, - {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4d63f4d446e10ad19ed01188d6c1e1bb134cde8c18b0aa2acfd973d41fcc5ada"}, - {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8a5e7cc39a45fc430af1aefc4d77ee6bad72c5bcdb1322cfde852c15192b8bd4"}, - {file = "wrapt-1.17.0-cp312-cp312-win32.whl", hash = "sha256:0a0a1a1ec28b641f2a3a2c35cbe86c00051c04fffcfcc577ffcdd707df3f8635"}, - {file = "wrapt-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:3c34f6896a01b84bab196f7119770fd8466c8ae3dfa73c59c0bb281e7b588ce7"}, - {file = "wrapt-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:714c12485aa52efbc0fc0ade1e9ab3a70343db82627f90f2ecbc898fdf0bb181"}, - {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da427d311782324a376cacb47c1a4adc43f99fd9d996ffc1b3e8529c4074d393"}, - {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba1739fb38441a27a676f4de4123d3e858e494fac05868b7a281c0a383c098f4"}, - {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e711fc1acc7468463bc084d1b68561e40d1eaa135d8c509a65dd534403d83d7b"}, - {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:140ea00c87fafc42739bd74a94a5a9003f8e72c27c47cd4f61d8e05e6dec8721"}, - {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:73a96fd11d2b2e77d623a7f26e004cc31f131a365add1ce1ce9a19e55a1eef90"}, - {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0b48554952f0f387984da81ccfa73b62e52817a4386d070c75e4db7d43a28c4a"}, - {file = "wrapt-1.17.0-cp313-cp313-win32.whl", hash = "sha256:498fec8da10e3e62edd1e7368f4b24aa362ac0ad931e678332d1b209aec93045"}, - {file = "wrapt-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd136bb85f4568fffca995bd3c8d52080b1e5b225dbf1c2b17b66b4c5fa02838"}, - {file = "wrapt-1.17.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:17fcf043d0b4724858f25b8826c36e08f9fb2e475410bece0ec44a22d533da9b"}, - {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4a557d97f12813dc5e18dad9fa765ae44ddd56a672bb5de4825527c847d6379"}, - {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0229b247b0fc7dee0d36176cbb79dbaf2a9eb7ecc50ec3121f40ef443155fb1d"}, - {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8425cfce27b8b20c9b89d77fb50e368d8306a90bf2b6eef2cdf5cd5083adf83f"}, - {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c900108df470060174108012de06d45f514aa4ec21a191e7ab42988ff42a86c"}, - {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:4e547b447073fc0dbfcbff15154c1be8823d10dab4ad401bdb1575e3fdedff1b"}, - {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:914f66f3b6fc7b915d46c1cc424bc2441841083de01b90f9e81109c9759e43ab"}, - {file = "wrapt-1.17.0-cp313-cp313t-win32.whl", hash = "sha256:a4192b45dff127c7d69b3bdfb4d3e47b64179a0b9900b6351859f3001397dabf"}, - {file = "wrapt-1.17.0-cp313-cp313t-win_amd64.whl", hash = "sha256:4f643df3d4419ea3f856c5c3f40fec1d65ea2e89ec812c83f7767c8730f9827a"}, - {file = "wrapt-1.17.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:69c40d4655e078ede067a7095544bcec5a963566e17503e75a3a3e0fe2803b13"}, - {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f495b6754358979379f84534f8dd7a43ff8cff2558dcdea4a148a6e713a758f"}, - {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:baa7ef4e0886a6f482e00d1d5bcd37c201b383f1d314643dfb0367169f94f04c"}, - {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fc931382e56627ec4acb01e09ce66e5c03c384ca52606111cee50d931a342d"}, - {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8f8909cdb9f1b237786c09a810e24ee5e15ef17019f7cecb207ce205b9b5fcce"}, - {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ad47b095f0bdc5585bced35bd088cbfe4177236c7df9984b3cc46b391cc60627"}, - {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:948a9bd0fb2c5120457b07e59c8d7210cbc8703243225dbd78f4dfc13c8d2d1f"}, - {file = "wrapt-1.17.0-cp38-cp38-win32.whl", hash = "sha256:5ae271862b2142f4bc687bdbfcc942e2473a89999a54231aa1c2c676e28f29ea"}, - {file = "wrapt-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:f335579a1b485c834849e9075191c9898e0731af45705c2ebf70e0cd5d58beed"}, - {file = "wrapt-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d751300b94e35b6016d4b1e7d0e7bbc3b5e1751e2405ef908316c2a9024008a1"}, - {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7264cbb4a18dc4acfd73b63e4bcfec9c9802614572025bdd44d0721983fc1d9c"}, - {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33539c6f5b96cf0b1105a0ff4cf5db9332e773bb521cc804a90e58dc49b10578"}, - {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c30970bdee1cad6a8da2044febd824ef6dc4cc0b19e39af3085c763fdec7de33"}, - {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bc7f729a72b16ee21795a943f85c6244971724819819a41ddbaeb691b2dd85ad"}, - {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6ff02a91c4fc9b6a94e1c9c20f62ea06a7e375f42fe57587f004d1078ac86ca9"}, - {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dfb7cff84e72e7bf975b06b4989477873dcf160b2fd89959c629535df53d4e0"}, - {file = "wrapt-1.17.0-cp39-cp39-win32.whl", hash = "sha256:2399408ac33ffd5b200480ee858baa58d77dd30e0dd0cab6a8a9547135f30a88"}, - {file = "wrapt-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:4f763a29ee6a20c529496a20a7bcb16a73de27f5da6a843249c7047daf135977"}, - {file = "wrapt-1.17.0-py3-none-any.whl", hash = "sha256:d2c63b93548eda58abf5188e505ffed0229bf675f7c3090f8e36ad55b8cbc371"}, - {file = "wrapt-1.17.0.tar.gz", hash = "sha256:16187aa2317c731170a88ef35e8937ae0f533c402872c1ee5e6d079fcf320801"}, + {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, + {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, ] [metadata] lock-version = "2.0" -python-versions = "^3.9,<3.12" -content-hash = "b45c7da2b07fd3a9a79c2ffac21f4db7af48b0884a6e1c9f41f17035161a5fab" +python-versions = "^3.10,<3.12" +content-hash = "8060c30cd8661d53311681f3681fba0d293f827a08512361393d1721da608a67" diff --git a/airbyte-integrations/connectors/source-pipedrive/pyproject.toml b/airbyte-integrations/connectors/source-pipedrive/pyproject.toml index 97c49ff2b661..588cb5d6444d 100644 --- a/airbyte-integrations/connectors/source-pipedrive/pyproject.toml +++ b/airbyte-integrations/connectors/source-pipedrive/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "2.2.28" +version = "2.3.0" name = "source-pipedrive" description = "Source implementation for Pipedrive." authors = [ "Airbyte ",] @@ -16,8 +16,8 @@ repository = "https://github.com/airbytehq/airbyte" include = "source_pipedrive" [tool.poetry.dependencies] -python = "^3.9,<3.12" -airbyte-cdk = "^0" +python = "^3.10,<3.12" +airbyte-cdk = "^6" [tool.poetry.scripts] source-pipedrive = "source_pipedrive.run:run" diff --git a/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/manifest.yaml b/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/manifest.yaml index 8b02277d3d67..a49dd6bd9064 100644 --- a/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/manifest.yaml +++ b/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/manifest.yaml @@ -411,3 +411,46 @@ spec: examples: - "2017-01-25 00:00:00Z" type: string + num_workers: + title: Number of concurrent workers + description: Number of concurrent workers to be used in the sync. This number should be set according to your Pipedrive subscription plan and its rate limits, which coule be found here on Pipedrive Rate limiting page. Please note that Pipedrive API rate limits are reset daily. If you experience rate limiting issues, please lower the number of workers according to your plan. + type: integer + default: 10 + minimum: 1 + maximum: 40 + +# rate limiting https://pipedrive.readme.io/docs/core-api-concepts-rate-limiting + +# Tokens Daily Budget: +# Each company account is allocated a daily API token budget, which is shared among all users within that account. +# This budget is exclusively for API traffic authenticated by API tokens or OAuth tokens, +# and it does not impact actions performed directly within the Pipedrive user interface. +# 30,000 base tokens × subscription plan multiplier × number of seats + +#| Plan | Plan multiplier | +#|:-------------|-----------------| +#| Essential | 1 | +#| Advanced | 2 | +#| Professional | 3 | +#| Power | 5 | +#| Enterprise | 7 | + +# API Requests: + +# Each API request consumes a specific number of tokens, with each API endpoint assigned cost in tokens based on the complexity and resource demand of the endpoint. +# When a request is made, the corresponding token cost is deducted from the company’s daily API budget. +# Lightweight endpoints consume fewer tokens, while more complex or data-intensive endpoints require a higher token cost. + +# | API Endpoint type | Cost in tokens | +# |---------------------------------------------| +# | Get list of entities | 20 | + +# We assume that airbyte users mostly on Essential plan, so they have 30k(Tokens Daily Budget) * 20(Cost in tokens) requests per day, which should be enough with 10 workers. +# If it's not, users can increase/decrease the number of workers. + +# Max value of workers: 40, Min value of workers: 1, Default: 10. + +concurrency_level: + type: ConcurrencyLevel + default_concurrency: "{{ config.get('num_workers', 10)}}" + max_concurrency: 40 diff --git a/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/run.py b/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/run.py index 2ff2b80c12a8..68d288dbaeef 100644 --- a/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/run.py +++ b/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/run.py @@ -1,14 +1,53 @@ # -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. # - import sys +import traceback +from datetime import datetime +from typing import List -from airbyte_cdk.entrypoint import launch +from airbyte_cdk.entrypoint import AirbyteEntrypoint, launch, logger +from airbyte_cdk.exception_handler import init_uncaught_exception_handler +from airbyte_cdk.models import AirbyteErrorTraceMessage, AirbyteMessage, AirbyteMessageSerializer, AirbyteTraceMessage, TraceType, Type +from orjson import orjson from source_pipedrive import SourcePipedrive -def run(): - source = SourcePipedrive() - launch(source, sys.argv[1:]) +def _get_source(args: List[str]): + catalog_path = AirbyteEntrypoint.extract_catalog(args) + config_path = AirbyteEntrypoint.extract_config(args) + state_path = AirbyteEntrypoint.extract_state(args) + try: + return SourcePipedrive( + SourcePipedrive.read_catalog(catalog_path) if catalog_path else None, + SourcePipedrive.read_config(config_path) if config_path else None, + SourcePipedrive.read_state(state_path) if state_path else None, + ) + except Exception as error: + print( + orjson.dumps( + AirbyteMessageSerializer.dump( + AirbyteMessage( + type=Type.TRACE, + trace=AirbyteTraceMessage( + type=TraceType.ERROR, + emitted_at=int(datetime.now().timestamp() * 1000), + error=AirbyteErrorTraceMessage( + message=f"Error starting the sync. This could be due to an invalid configuration or catalog. Please contact Support for assistance. Error: {error}", + stack_trace=traceback.format_exc(), + ), + ), + ) + ) + ).decode() + ) + return None + + +def run() -> None: + init_uncaught_exception_handler(logger) + _args = sys.argv[1:] + source = _get_source(_args) + if source: + launch(source, _args) diff --git a/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/source.py b/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/source.py index 32dd3a077d5d..8070fbcfcc40 100644 --- a/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/source.py +++ b/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/source.py @@ -1,8 +1,11 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # +from typing import Any, Mapping, Optional +from airbyte_cdk.models import ConfiguredAirbyteCatalog from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource +from airbyte_cdk.sources.source import TState """ This file provides the necessary constructs to interpret a provided declarative YAML configuration file into @@ -14,5 +17,5 @@ # Declarative Source class SourcePipedrive(YamlDeclarativeSource): - def __init__(self): - super().__init__(**{"path_to_yaml": "manifest.yaml"}) + def __init__(self, catalog: Optional[ConfiguredAirbyteCatalog], config: Optional[Mapping[str, Any]], state: TState, **kwargs): + super().__init__(catalog=catalog, config=config, state=state, **{"path_to_yaml": "manifest.yaml"}) diff --git a/airbyte-integrations/connectors/source-postgres/metadata.yaml b/airbyte-integrations/connectors/source-postgres/metadata.yaml index b5eaef3aa7f5..ad14df0875a6 100644 --- a/airbyte-integrations/connectors/source-postgres/metadata.yaml +++ b/airbyte-integrations/connectors/source-postgres/metadata.yaml @@ -9,7 +9,7 @@ data: connectorSubtype: database connectorType: source definitionId: decd338e-5647-4c0b-adf4-da0e75f5a750 - dockerImageTag: 3.6.23 + dockerImageTag: 3.6.24 dockerRepository: airbyte/source-postgres documentationUrl: https://docs.airbyte.com/integrations/sources/postgres githubIssueLabel: source-postgres diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresQueryUtils.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresQueryUtils.java index 4cbac03d2115..8aaf42b508a8 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresQueryUtils.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresQueryUtils.java @@ -95,13 +95,13 @@ SELECT pg_relation_filenode('%s') public static final String TOTAL_BYTES_RESULT_COL = "totalbytes"; /** - * Query returns the size table data takes on DB server disk (not incling any index or other + * Query returns the size table data takes on DB server disk (not including any index or other * metadata) And the size of each page used in (page, tuple) ctid. This helps us evaluate how many * pages we need to read to traverse the entire table. */ public static final String CTID_TABLE_BLOCK_SIZE = """ - WITH block_sz AS (SELECT current_setting('block_size')::int), rel_sz AS (select pg_relation_size('%s')) SELECT * from block_sz, rel_sz + SELECT current_setting('block_size')::int, pg_relation_size('%s') """; /** diff --git a/airbyte-integrations/connectors/source-s3/metadata.yaml b/airbyte-integrations/connectors/source-s3/metadata.yaml index f7667560b388..0161b63420fb 100644 --- a/airbyte-integrations/connectors/source-s3/metadata.yaml +++ b/airbyte-integrations/connectors/source-s3/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: file connectorType: source definitionId: 69589781-7828-43c5-9f63-8925b1c1ccc2 - dockerImageTag: 4.10.2 + dockerImageTag: 4.11.0 dockerRepository: airbyte/source-s3 documentationUrl: https://docs.airbyte.com/integrations/sources/s3 githubIssueLabel: source-s3 diff --git a/airbyte-integrations/connectors/source-s3/pyproject.toml b/airbyte-integrations/connectors/source-s3/pyproject.toml index 5e8804d5db8e..cb8312de4f9c 100644 --- a/airbyte-integrations/connectors/source-s3/pyproject.toml +++ b/airbyte-integrations/connectors/source-s3/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "4.10.2" +version = "4.11.0" name = "source-s3" description = "Source implementation for S3." authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-s3/source_s3/v4/stream_reader.py b/airbyte-integrations/connectors/source-s3/source_s3/v4/stream_reader.py index b92cf14077b6..aff8c257686d 100644 --- a/airbyte-integrations/connectors/source-s3/source_s3/v4/stream_reader.py +++ b/airbyte-integrations/connectors/source-s3/source_s3/v4/stream_reader.py @@ -31,7 +31,7 @@ class SourceS3StreamReader(AbstractFileBasedStreamReader): - FILE_SIZE_LIMIT = 1_000_000_000 + FILE_SIZE_LIMIT = 1_500_000_000 def __init__(self): super().__init__() diff --git a/airbyte-integrations/connectors/source-sftp-bulk/metadata.yaml b/airbyte-integrations/connectors/source-sftp-bulk/metadata.yaml index bc47478a304f..42babf16651e 100644 --- a/airbyte-integrations/connectors/source-sftp-bulk/metadata.yaml +++ b/airbyte-integrations/connectors/source-sftp-bulk/metadata.yaml @@ -7,7 +7,7 @@ data: connectorSubtype: file connectorType: source definitionId: 31e3242f-dee7-4cdc-a4b8-8e06c5458517 - dockerImageTag: 1.5.0 + dockerImageTag: 1.6.0 dockerRepository: airbyte/source-sftp-bulk documentationUrl: https://docs.airbyte.com/integrations/sources/sftp-bulk githubIssueLabel: source-sftp-bulk diff --git a/airbyte-integrations/connectors/source-sftp-bulk/pyproject.toml b/airbyte-integrations/connectors/source-sftp-bulk/pyproject.toml index 1e61e307b0ff..88a8b2a1197c 100644 --- a/airbyte-integrations/connectors/source-sftp-bulk/pyproject.toml +++ b/airbyte-integrations/connectors/source-sftp-bulk/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "1.5.0" +version = "1.6.0" name = "source-sftp-bulk" description = "Source implementation for SFTP Bulk." authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/stream_reader.py b/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/stream_reader.py index 9fe264932fd7..10d075f001e6 100644 --- a/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/stream_reader.py +++ b/airbyte-integrations/connectors/source-sftp-bulk/source_sftp_bulk/stream_reader.py @@ -19,7 +19,7 @@ class SourceSFTPBulkStreamReader(AbstractFileBasedStreamReader): - FILE_SIZE_LIMIT = 1_000_000_000 + FILE_SIZE_LIMIT = 1_500_000_000 def __init__(self): super().__init__() diff --git a/airbyte-integrations/connectors/source-stripe/erd/discovered_catalog.json b/airbyte-integrations/connectors/source-stripe/erd/discovered_catalog.json index bc328de3e6cb..230c8b9161cf 100644 --- a/airbyte-integrations/connectors/source-stripe/erd/discovered_catalog.json +++ b/airbyte-integrations/connectors/source-stripe/erd/discovered_catalog.json @@ -850,10 +850,6 @@ "description": "The date and time when the transaction was created", "type": ["null", "integer"] }, - "updated": { - "description": "The date and time when the transaction was created", - "type": ["null", "integer"] - }, "credit_note": { "description": "Credit note related to the balance transaction", "type": ["null", "string"] @@ -893,7 +889,9 @@ } } }, - "supported_sync_modes": ["full_refresh"], + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["created"], "source_defined_primary_key": [["id"]], "is_resumable": true }, @@ -3975,6 +3973,48 @@ "default_cursor_field": ["created"], "source_defined_primary_key": [["id"]] }, + { + "name": "payout_balance_transactions", + "json_schema": { + "type": ["null", "object"], + "properties": { + "fee": { "type": ["null", "integer"] }, + "currency": { "type": ["null", "string"] }, + "source": { "type": ["null", "string"] }, + "fee_details": { + "type": ["null", "array"], + "items": { + "properties": { + "application": { "type": ["null", "string"] }, + "type": { "type": ["null", "string"] }, + "description": { "type": ["null", "string"] }, + "amount": { "type": ["null", "integer"] }, + "currency": { "type": ["null", "string"] } + }, + "type": ["null", "object"] + } + }, + "available_on": { "type": ["null", "integer"] }, + "status": { "type": ["null", "string"] }, + "description": { "type": ["null", "string"] }, + "net": { "type": ["null", "integer"] }, + "exchange_rate": { "type": ["null", "number"] }, + "type": { "type": ["null", "string"] }, + "id": { "type": ["null", "string"] }, + "object": { "type": ["null", "string"] }, + "created": { "type": ["null", "integer"] }, + "updated": { "type": ["null", "integer"] }, + "amount": { "type": ["null", "integer"] }, + "reporting_category": { "type": ["null", "string"] }, + "payout": { "type": ["null", "string"] } + } + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated"], + "source_defined_primary_key": [["payout"], ["id"]], + "user_defined_primary_key": [["payout"], ["id"]] + }, { "name": "files", "json_schema": { diff --git a/airbyte-integrations/connectors/source-stripe/erd/source.dbml b/airbyte-integrations/connectors/source-stripe/erd/source.dbml index 4fe9e3952ae4..53bc467b87b7 100644 --- a/airbyte-integrations/connectors/source-stripe/erd/source.dbml +++ b/airbyte-integrations/connectors/source-stripe/erd/source.dbml @@ -68,7 +68,6 @@ Table "customer_balance_transactions" { "livemode" boolean "metadata" object "type" string - "updated" integer } Table "events" { @@ -239,6 +238,30 @@ Table "balance_transactions" { "reporting_category" string } +Table "payout_balance_transactions" { + "payout" string + "fee" integer + "currency" string + "source" string + "fee_details" array + "available_on" integer + "status" string + "description" string + "net" integer + "exchange_rate" number + "type" string + "id" string + "object" string + "created" integer + "updated" integer + "amount" integer + "reporting_category" string + + indexes { + (payout, id) [pk] + } +} + Table "files" { "id" string [pk] "purpose" string diff --git a/airbyte-integrations/connectors/source-stripe/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-stripe/integration_tests/abnormal_state.json index 5ef52ef8bb90..2f1fc983ccb8 100644 --- a/airbyte-integrations/connectors/source-stripe/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-stripe/integration_tests/abnormal_state.json @@ -296,7 +296,7 @@ { "type": "STREAM", "stream": { - "stream_state": { "updated": 10000000000 }, + "stream_state": { "created": 10000000000 }, "stream_descriptor": { "name": "customer_balance_transactions" } } }, @@ -313,5 +313,12 @@ "stream_state": { "created": 10000000000 }, "stream_descriptor": { "name": "transfer_reversals" } } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { "updated": 10000000000 }, + "stream_descriptor": { "name": "payout_balance_transactions" } + } } ] diff --git a/airbyte-integrations/connectors/source-stripe/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-stripe/integration_tests/configured_catalog.json index 879a113db53c..da8999b4a344 100644 --- a/airbyte-integrations/connectors/source-stripe/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-stripe/integration_tests/configured_catalog.json @@ -187,7 +187,7 @@ "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["updated"], + "default_cursor_field": ["created"], "source_defined_primary_key": [["id"]] }, "primary_key": [["id"]], @@ -376,6 +376,20 @@ "sync_mode": "full_refresh", "destination_sync_mode": "overwrite" }, + { + "stream": { + "name": "payout_balance_transactions", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated"], + "source_defined_primary_key": [["id"]] + }, + "primary_key": [["id"]], + "cursor_field": ["updated"], + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, { "stream": { "name": "payouts", @@ -603,6 +617,7 @@ { "stream": { "name": "transfer_reversals", + "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, "default_cursor_field": ["created"], diff --git a/airbyte-integrations/connectors/source-stripe/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-stripe/integration_tests/expected_records.jsonl index 5f12404b5c5b..87f54ffd035c 100644 --- a/airbyte-integrations/connectors/source-stripe/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-stripe/integration_tests/expected_records.jsonl @@ -57,6 +57,8 @@ {"stream": "payment_intents", "data": {"id": "pi_3K9FSOEcXtiJtvvh0AEIFllC", "object": "payment_intent", "amount": 5300, "amount_capturable": 0, "amount_details": {"tip": {}}, "amount_received": 5300, "application": null, "application_fee_amount": null, "automatic_payment_methods": null, "canceled_at": null, "cancellation_reason": null, "capture_method": "automatic", "client_secret": "pi_3K9FSOEcXtiJtvvh0AEIFllC_secret_uPUtIaSltgtW0qK7mLD0uF2Mr", "confirmation_method": "automatic", "created": 1640120472, "currency": "usd", "customer": null, "description": null, "invoice": null, "last_payment_error": null, "latest_charge": "ch_3K9FSOEcXtiJtvvh0zxb7clc", "livemode": false, "metadata": {}, "next_action": null, "on_behalf_of": null, "payment_method": null, "payment_method_configuration_details": null, "payment_method_options": {"card": {"installments": null, "mandate_options": null, "network": null, "request_three_d_secure": "automatic"}}, "payment_method_types": ["card"], "processing": null, "receipt_email": null, "review": null, "setup_future_usage": null, "shipping": null, "source": "src_1K9FSOEcXtiJtvvhHGu1qtOx", "statement_descriptor": "airbyte.io", "statement_descriptor_suffix": null, "status": "succeeded", "transfer_data": null, "transfer_group": null, "updated": 1640120472}, "emitted_at": 1697627315508} {"stream": "payment_intents", "data": {"id": "pi_3K9F5DEcXtiJtvvh16scJMp6", "object": "payment_intent", "amount": 4200, "amount_capturable": 0, "amount_details": {"tip": {}}, "amount_received": 4200, "application": null, "application_fee_amount": null, "automatic_payment_methods": null, "canceled_at": null, "cancellation_reason": null, "capture_method": "automatic", "client_secret": "pi_3K9F5DEcXtiJtvvh16scJMp6_secret_YwhzCTpXtfcKYeklXnPnysRRi", "confirmation_method": "automatic", "created": 1640119035, "currency": "usd", "customer": null, "description": "edgao test", "invoice": null, "last_payment_error": null, "latest_charge": "ch_3K9F5DEcXtiJtvvh1w2MaTpj", "livemode": false, "metadata": {}, "next_action": null, "on_behalf_of": null, "payment_method": null, "payment_method_configuration_details": null, "payment_method_options": {"card": {"installments": null, "mandate_options": null, "network": null, "request_three_d_secure": "automatic"}}, "payment_method_types": ["card"], "processing": null, "receipt_email": null, "review": null, "setup_future_usage": null, "shipping": null, "source": "src_1K9F5CEcXtiJtvvhrsZdur8Y", "statement_descriptor": "airbyte.io", "statement_descriptor_suffix": null, "status": "succeeded", "transfer_data": null, "transfer_group": null, "updated": 1640119035}, "emitted_at": 1697627315511} {"stream": "payment_intents", "data": {"id": "pi_3K9F4mEcXtiJtvvh18NKhEuo", "object": "payment_intent", "amount": 4200, "amount_capturable": 0, "amount_details": {"tip": {}}, "amount_received": 0, "application": null, "application_fee_amount": null, "automatic_payment_methods": null, "canceled_at": null, "cancellation_reason": null, "capture_method": "automatic", "client_secret": "pi_3K9F4mEcXtiJtvvh18NKhEuo_secret_pfUt7CTkPjVdJacycm0bMpdLt", "confirmation_method": "automatic", "created": 1640119008, "currency": "usd", "customer": null, "description": "edgao test", "invoice": null, "last_payment_error": {"charge": "ch_3K9F4mEcXtiJtvvh1kUzxjwN", "code": "card_declined", "decline_code": "test_mode_live_card", "doc_url": "https://stripe.com/docs/error-codes/card-declined", "message": "Your card was declined. Your request was in test mode, but used a non test (live) card. For a list of valid test cards, visit: https://stripe.com/docs/testing.", "source": {"id": "src_1K9F4hEcXtiJtvvhrUEwvCyi", "object": "source", "amount": null, "card": {"address_line1_check": null, "address_zip_check": null, "brand": "Visa", "country": "US", "cvc_check": "unchecked", "dynamic_last4": null, "exp_month": 9, "exp_year": 2028, "fingerprint": "Re3p4j8issXA77iI", "funding": "credit", "last4": "8097", "name": null, "three_d_secure": "optional", "tokenization_method": null}, "client_secret": "src_client_secret_b3v8YqNMLGykB120fqv2Tjhq", "created": 1640119003, "currency": null, "flow": "none", "livemode": false, "metadata": {}, "owner": {"address": null, "email": null, "name": null, "phone": null, "verified_address": null, "verified_email": null, "verified_name": null, "verified_phone": null}, "statement_descriptor": null, "status": "consumed", "type": "card", "usage": "reusable"}, "type": "card_error"}, "latest_charge": "ch_3K9F4mEcXtiJtvvh1kUzxjwN", "livemode": false, "metadata": {}, "next_action": null, "on_behalf_of": null, "payment_method": null, "payment_method_configuration_details": null, "payment_method_options": {"card": {"installments": null, "mandate_options": null, "network": null, "request_three_d_secure": "automatic"}}, "payment_method_types": ["card"], "processing": null, "receipt_email": null, "review": null, "setup_future_usage": null, "shipping": null, "source": null, "statement_descriptor": "airbyte.io", "statement_descriptor_suffix": null, "status": "requires_payment_method", "transfer_data": null, "transfer_group": null, "updated": 1640119008}, "emitted_at": 1697627315513} +{"stream":"payout_balance_transactions","data":{"payout":"po_1MTErVEcXtiJtvvhPP5x9VRX","updated":1674437417,"id":"txn_1MSI78EcXtiJtvvhAGjxP1UM","object":"balance_transaction","amount":-700,"available_on":1674518400,"created":1674211590,"currency":"usd","description":"Chargeback withdrawal for ch_3MSI77EcXtiJtvvh1GzoukUC","fee":1500,"fee_details":[{"amount":1500,"application":null,"currency":"usd","description":"Dispute fee","type":"stripe_fee"}],"net":-2200,"reporting_category":"dispute","source":"dp_1MSI78EcXtiJtvvhxC77m2kh","status":"available","type":"adjustment"},"emitted_at":1734032137962} +{"stream":"payout_balance_transactions","data":{"payout":"po_1MTErVEcXtiJtvvhPP5x9VRX","updated":1674437417,"id":"txn_3MSI77EcXtiJtvvh1vvRye5q","object":"balance_transaction","amount":700,"available_on":1674518400,"created":1674211589,"currency":"usd","description":"Test","fee":54,"fee_details":[{"amount":54,"application":null,"currency":"usd","description":"Stripe processing fees","type":"stripe_fee"}],"net":646,"reporting_category":"charge","source":"ch_3MSI77EcXtiJtvvh1GzoukUC","status":"available","type":"charge"},"emitted_at":1734032137962} {"stream": "promotion_codes", "data": {"id": "promo_1MVtmyEcXtiJtvvhkV5jPFPU", "object": "promotion_code", "active": true, "code": "g20", "coupon": {"id": "iJ6qlwM5", "object": "coupon", "amount_off": null, "created": 1674208993, "currency": null, "duration": "forever", "duration_in_months": null, "livemode": false, "max_redemptions": null, "metadata": {}, "name": "\u0415\u0443\u0456\u0435", "percent_off": 10.0, "redeem_by": null, "times_redeemed": 3, "valid": true}, "created": 1675071396, "customer": null, "expires_at": null, "livemode": false, "max_redemptions": null, "metadata": {}, "restrictions": {"first_time_transaction": false, "minimum_amount": null, "minimum_amount_currency": null}, "times_redeemed": 0, "updated": 1675071396}, "emitted_at": 1697627317910} {"stream": "promotion_codes", "data": {"id": "promo_1MVtmkEcXtiJtvvht0RA3MKg", "object": "promotion_code", "active": true, "code": "FRIENDS20", "coupon": {"id": "iJ6qlwM5", "object": "coupon", "amount_off": null, "created": 1674208993, "currency": null, "duration": "forever", "duration_in_months": null, "livemode": false, "max_redemptions": null, "metadata": {}, "name": "\u0415\u0443\u0456\u0435", "percent_off": 10.0, "redeem_by": null, "times_redeemed": 3, "valid": true}, "created": 1675071382, "customer": null, "expires_at": null, "livemode": false, "max_redemptions": null, "metadata": {}, "restrictions": {"first_time_transaction": true, "minimum_amount": 10000, "minimum_amount_currency": "usd"}, "times_redeemed": 0, "updated": 1675071382}, "emitted_at": 1697627317911} {"stream": "setup_intents", "data": {"id": "seti_1KnfIjEcXtiJtvvhPw5znVKY", "object": "setup_intent", "application": null, "automatic_payment_methods": null, "cancellation_reason": null, "client_secret": "seti_1KnfIjEcXtiJtvvhPw5znVKY_secret_LUebPsqMz6AF4ivxIg4LMaAT0OdZF5L", "created": 1649752937, "customer": null, "description": null, "flow_directions": null, "last_setup_error": null, "latest_attempt": "setatt_1KnfIjEcXtiJtvvhqDfSlpM4", "livemode": false, "mandate": null, "metadata": {}, "next_action": null, "on_behalf_of": null, "payment_method": "pm_1KnfIj2eZvKYlo2CAlv2Vhqc", "payment_method_configuration_details": null, "payment_method_options": {"acss_debit": {"currency": "cad", "mandate_options": {"interval_description": "First day of every month", "payment_schedule": "interval", "transaction_type": "personal"}, "verification_method": "automatic"}}, "payment_method_types": ["acss_debit"], "single_use_mandate": null, "status": "succeeded", "usage": "off_session", "updated": 1649752937}, "emitted_at": 1697627319186} diff --git a/airbyte-integrations/connectors/source-stripe/metadata.yaml b/airbyte-integrations/connectors/source-stripe/metadata.yaml index 0a3ad7181159..d823cbea97bf 100644 --- a/airbyte-integrations/connectors/source-stripe/metadata.yaml +++ b/airbyte-integrations/connectors/source-stripe/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: api connectorType: source definitionId: e094cb9a-26de-4645-8761-65c0c425d1de - dockerImageTag: 5.8.0 + dockerImageTag: 5.8.2 dockerRepository: airbyte/source-stripe documentationUrl: https://docs.airbyte.com/integrations/sources/stripe erdUrl: https://dbdocs.io/airbyteio/source-stripe?view=relationships diff --git a/airbyte-integrations/connectors/source-stripe/pyproject.toml b/airbyte-integrations/connectors/source-stripe/pyproject.toml index 600987c814af..86b794867c91 100644 --- a/airbyte-integrations/connectors/source-stripe/pyproject.toml +++ b/airbyte-integrations/connectors/source-stripe/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "5.8.0" +version = "5.8.2" name = "source-stripe" description = "Source implementation for Stripe." authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-stripe/source_stripe/schemas/payout_balance_transactions.json b/airbyte-integrations/connectors/source-stripe/source_stripe/schemas/payout_balance_transactions.json new file mode 100644 index 000000000000..6e33b8c2bd48 --- /dev/null +++ b/airbyte-integrations/connectors/source-stripe/source_stripe/schemas/payout_balance_transactions.json @@ -0,0 +1,78 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema#", + "additionalProperties": true, + "type": ["object", "null"], + "properties": { + "payout": { + "type": ["null", "string"] + }, + "fee": { + "type": ["null", "integer"] + }, + "currency": { + "type": ["null", "string"] + }, + "source": { + "type": ["null", "string"] + }, + "fee_details": { + "type": ["null", "array"], + "items": { + "properties": { + "application": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "amount": { + "type": ["null", "integer"] + }, + "currency": { + "type": ["null", "string"] + } + }, + "type": ["null", "object"] + } + }, + "available_on": { + "type": ["null", "integer"] + }, + "status": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "net": { + "type": ["null", "integer"] + }, + "exchange_rate": { + "type": ["null", "number"] + }, + "type": { + "type": ["null", "string"] + }, + "id": { + "type": ["null", "string"] + }, + "object": { + "type": ["null", "string"] + }, + "created": { + "type": ["null", "integer"] + }, + "updated": { + "type": ["null", "integer"] + }, + "amount": { + "type": ["null", "integer"] + }, + "reporting_category": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-stripe/source_stripe/source.py b/airbyte-integrations/connectors/source-stripe/source_stripe/source.py index 04d97cd6a738..5ce96c24cc82 100644 --- a/airbyte-integrations/connectors/source-stripe/source_stripe/source.py +++ b/airbyte-integrations/connectors/source-stripe/source_stripe/source.py @@ -274,6 +274,20 @@ def streams(self, config: MutableMapping[str, Any]) -> List[Stream]: **args, ) + payouts = IncrementalStripeStream( + name="payouts", + path="payouts", + event_types=[ + "payout.canceled", + "payout.created", + "payout.failed", + "payout.paid", + "payout.reconciliation_completed", + "payout.updated", + ], + **args, + ) + streams = [ checkout_sessions, Events(**incremental_args), @@ -400,17 +414,18 @@ def streams(self, config: MutableMapping[str, Any]) -> List[Stream]: ], **args, ), - IncrementalStripeStream( - name="payouts", - path="payouts", - event_types=[ - "payout.canceled", - "payout.created", - "payout.failed", - "payout.paid", - "payout.reconciliation_completed", - "payout.updated", - ], + payouts, + ParentIncrementalStripeSubStream( + name="payout_balance_transactions", + path=lambda self, stream_slice, *args, **kwargs: "balance_transactions", + parent=payouts, + cursor_field="updated", + slice_data_retriever=lambda record, stream_slice: { + "payout": stream_slice["parent"]["id"], + "updated": stream_slice["parent"]["updated"], + **record, + }, + extra_request_params=lambda self, stream_slice, *args, **kwargs: {"payout": f"{stream_slice['parent']['id']}"}, **args, ), IncrementalStripeStream( @@ -489,12 +504,11 @@ def streams(self, config: MutableMapping[str, Any]) -> List[Stream]: event_types=["topup.canceled", "topup.created", "topup.failed", "topup.reversed", "topup.succeeded"], **args, ), - UpdatedCursorIncrementalStripeSubStream( + ParentIncrementalStripeSubStream( name="customer_balance_transactions", path=lambda self, stream_slice, *args, **kwargs: f"customers/{stream_slice['parent']['id']}/balance_transactions", parent=self.customers(**args), - legacy_cursor_field="created", - event_types=["customer_cash_balance_transaction.*"], + cursor_field="created", **args, ), UpdatedCursorIncrementalStripeLazySubStream( diff --git a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/request_builder.py b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/request_builder.py index d45ab6e3b5b2..336179ea05b3 100644 --- a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/request_builder.py +++ b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/request_builder.py @@ -20,6 +20,10 @@ def application_fees_endpoint(cls, account_id: str, client_secret: str) -> "Stri def application_fees_refunds_endpoint(cls, application_fee_id: str, account_id: str, client_secret: str) -> "StripeRequestBuilder": return cls(f"application_fees/{application_fee_id}/refunds", account_id, client_secret) + @classmethod + def balance_transactions_endpoint(cls, account_id: str, client_secret: str) -> "StripeRequestBuilder": + return cls("balance_transactions", account_id, client_secret) + @classmethod def customers_endpoint(cls, account_id: str, client_secret: str) -> "StripeRequestBuilder": return cls("customers", account_id, client_secret) @@ -52,6 +56,10 @@ def issuing_transactions_endpoint(cls, account_id: str, client_secret: str) -> " def payment_methods_endpoint(cls, customer_id: str, account_id: str, client_secret: str) -> "StripeRequestBuilder": return cls(f"customers/{customer_id}/payment_methods", account_id, client_secret) + @classmethod + def payouts_endpoint(cls, account_id: str, client_secret: str) -> "StripeRequestBuilder": + return cls("payouts", account_id, client_secret) + @classmethod def persons_endpoint( cls, @@ -82,6 +90,7 @@ def __init__(self, resource: str, account_id: str, client_secret: str) -> None: self._created_lte: Optional[datetime] = None self._limit: Optional[int] = None self._object: Optional[str] = None + self._payout: Optional[str] = None self._starting_after_id: Optional[str] = None self._types: List[str] = [] self._expands: List[str] = [] @@ -118,6 +127,10 @@ def with_expands(self, expands: List[str]) -> "StripeRequestBuilder": self._expands = expands return self + def with_payout(self, payout: str) -> "StripeRequestBuilder": + self._payout = payout + return self + def build(self) -> HttpRequest: query_params = {} if self._created_gte: @@ -135,6 +148,8 @@ def build(self) -> HttpRequest: query_params["type"] = self._types if self._object: query_params["object"] = self._object + if self._payout: + query_params["payout"] = self._payout if self._expands: query_params["expand[]"] = self._expands diff --git a/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_payout_balance_transactions.py b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_payout_balance_transactions.py new file mode 100644 index 000000000000..f1a96ed48693 --- /dev/null +++ b/airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_payout_balance_transactions.py @@ -0,0 +1,174 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from datetime import datetime, timedelta, timezone +from unittest import TestCase + +import freezegun +from airbyte_cdk.models import ConfiguredAirbyteCatalog, SyncMode +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import read +from airbyte_cdk.test.mock_http import HttpMocker +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_cdk.test.state_builder import StateBuilder +from integration.config import ConfigBuilder +from integration.pagination import StripePaginationStrategy +from integration.request_builder import StripeRequestBuilder +from source_stripe import SourceStripe + +_STREAM_NAME = "payout_balance_transactions" +_A_PAYOUT_ID = "a_payout_id" +_ANOTHER_PAYOUT_ID = "another_payout_id" +_ACCOUNT_ID = "acct_1G9HZLIEn49ers" +_AVOIDING_INCLUSIVE_BOUNDARIES = timedelta(seconds=1) +_CLIENT_SECRET = "ConfigBuilder default client secret" +_NOW = datetime.now(timezone.utc) +_START_DATE = _NOW - timedelta(days=75) +_STATE_DATE = _NOW - timedelta(days=10) +_NO_STATE = StateBuilder().build() +_AVOIDING_INCLUSIVE_BOUNDARIES = timedelta(seconds=1) + +_DATA_FIELD = NestedPath(["data", "object"]) +_EVENT_TYPES = [ + "payout.canceled", + "payout.created", + "payout.failed", + "payout.paid", + "payout.reconciliation_completed", + "payout.updated", +] + + +def _config() -> ConfigBuilder: + return ConfigBuilder().with_account_id(_ACCOUNT_ID).with_client_secret(_CLIENT_SECRET) + + +def _create_catalog(sync_mode: SyncMode = SyncMode.full_refresh) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(name=_STREAM_NAME, sync_mode=sync_mode).build() + + +def _balance_transactions_request() -> StripeRequestBuilder: + return StripeRequestBuilder.balance_transactions_endpoint(_ACCOUNT_ID, _CLIENT_SECRET) + + +def _events_request() -> StripeRequestBuilder: + return StripeRequestBuilder.events_endpoint(_ACCOUNT_ID, _CLIENT_SECRET) + + +def _payouts_request() -> StripeRequestBuilder: + return StripeRequestBuilder.payouts_endpoint(_ACCOUNT_ID, _CLIENT_SECRET) + + +def _balance_transaction_record() -> RecordBuilder: + return create_record_builder( + find_template("balance_transactions", __file__), + FieldPath("data"), + record_id_path=FieldPath("id"), + ) + + +def _balance_transactions_response() -> HttpResponseBuilder: + return create_response_builder( + response_template=find_template("balance_transactions", __file__), + records_path=FieldPath("data"), + pagination_strategy=StripePaginationStrategy(), + ) + + +def _event_record() -> RecordBuilder: + return create_record_builder( + find_template("events", __file__), + FieldPath("data"), + record_id_path=FieldPath("id"), + record_cursor_path=FieldPath("created"), + ) + + +def _events_response() -> HttpResponseBuilder: + return create_response_builder(find_template("events", __file__), FieldPath("data"), pagination_strategy=StripePaginationStrategy()) + + +def _create_payout_record() -> RecordBuilder: + return create_record_builder( + find_template("payouts", __file__), + FieldPath("data"), + record_id_path=FieldPath("id"), + ) + + +def _payouts_response() -> HttpResponseBuilder: + return create_response_builder( + response_template=find_template("payouts", __file__), + records_path=FieldPath("data"), + pagination_strategy=StripePaginationStrategy(), + ) + + +@freezegun.freeze_time(_NOW.isoformat()) +class PayoutBalanceTransactionsFullRefreshTest(TestCase): + @HttpMocker() + def test_given_multiple_parents_when_read_then_extract_from_all_children(self, http_mocker: HttpMocker) -> None: + config = _config().with_start_date(_START_DATE).build() + http_mocker.get( + _payouts_request().with_created_gte(_START_DATE).with_created_lte(_NOW).with_limit(100).build(), + _payouts_response().with_record(_create_payout_record().with_id(_A_PAYOUT_ID)).with_record(_create_payout_record().with_id(_ANOTHER_PAYOUT_ID)).build(), + ) + http_mocker.get( + _balance_transactions_request().with_limit(100).with_payout(_A_PAYOUT_ID).build(), + _balance_transactions_response().with_record(_balance_transaction_record()).build(), + ) + http_mocker.get( + _balance_transactions_request().with_limit(100).with_payout(_ANOTHER_PAYOUT_ID).build(), + _balance_transactions_response().with_record(_balance_transaction_record()).with_record(_balance_transaction_record()).build(), + ) + + source = SourceStripe(config=config, catalog=_create_catalog(), state=_NO_STATE) + output = read(source, config=config, catalog=_create_catalog()) + + assert len(output.records) == 3 + + @HttpMocker() + def test_when_read_then_add_payout_field(self, http_mocker: HttpMocker) -> None: + config = _config().with_start_date(_START_DATE).build() + http_mocker.get( + _payouts_request().with_created_gte(_START_DATE).with_created_lte(_NOW).with_limit(100).build(), + _payouts_response().with_record(_create_payout_record().with_id(_A_PAYOUT_ID)).build(), + ) + http_mocker.get( + _balance_transactions_request().with_limit(100).with_payout(_A_PAYOUT_ID).build(), + _balance_transactions_response().with_record(_balance_transaction_record()).build(), + ) + + source = SourceStripe(config=config, catalog=_create_catalog(), state=_NO_STATE) + output = read(source, config=config, catalog=_create_catalog()) + + assert output.records[0].record.data["payout"] + + +@freezegun.freeze_time(_NOW.isoformat()) +class PayoutBalanceTransactionsIncrementalTest(TestCase): + @HttpMocker() + def test_when_read_then_fetch_from_updated_payouts(self, http_mocker: HttpMocker) -> None: + config = _config().with_start_date(_START_DATE).build() + state = StateBuilder().with_stream_state(_STREAM_NAME, {"updated": int(_STATE_DATE.timestamp())}).build() + catalog = _create_catalog(SyncMode.incremental) + http_mocker.get( + _events_request().with_created_gte(_STATE_DATE + _AVOIDING_INCLUSIVE_BOUNDARIES).with_created_lte(_NOW).with_limit(100).with_types(_EVENT_TYPES).build(), + _events_response().with_record(_event_record().with_field(_DATA_FIELD, _create_payout_record().with_id(_A_PAYOUT_ID).build())).build(), + ) + http_mocker.get( + _balance_transactions_request().with_limit(100).with_payout(_A_PAYOUT_ID).build(), + _balance_transactions_response().with_record(_balance_transaction_record()).build(), + ) + + source = SourceStripe(config=config, catalog=catalog, state=state) + output = read(source, config=config, catalog=catalog, state=state) + + assert len(output.records) == 1 diff --git a/airbyte-integrations/connectors/source-stripe/unit_tests/resource/http/response/balance_transactions.json b/airbyte-integrations/connectors/source-stripe/unit_tests/resource/http/response/balance_transactions.json new file mode 100644 index 000000000000..c0b625098640 --- /dev/null +++ b/airbyte-integrations/connectors/source-stripe/unit_tests/resource/http/response/balance_transactions.json @@ -0,0 +1,24 @@ +{ + "object": "list", + "url": "/v1/balance_transactions", + "has_more": false, + "data": [ + { + "id": "txn_1MiN3gLkdIwHu7ixxapQrznl", + "object": "balance_transaction", + "amount": -400, + "available_on": 1678043844, + "created": 1678043844, + "currency": "usd", + "description": null, + "exchange_rate": null, + "fee": 0, + "fee_details": [], + "net": -400, + "reporting_category": "transfer", + "source": "tr_1MiN3gLkdIwHu7ixNCZvFdgA", + "status": "available", + "type": "transfer" + } + ] +} diff --git a/airbyte-integrations/connectors/source-stripe/unit_tests/resource/http/response/payouts.json b/airbyte-integrations/connectors/source-stripe/unit_tests/resource/http/response/payouts.json new file mode 100644 index 000000000000..f4bb44a3ed7e --- /dev/null +++ b/airbyte-integrations/connectors/source-stripe/unit_tests/resource/http/response/payouts.json @@ -0,0 +1,32 @@ +{ + "object": "list", + "url": "/v1/payouts", + "has_more": false, + "data": [ + { + "id": "po_1OaFDbEcg9tTZuTgNYmX0PKB", + "object": "payout", + "amount": 1100, + "arrival_date": 1680652800, + "automatic": false, + "balance_transaction": "txn_1OaFDcEcg9tTZuTgYMR25tSe", + "created": 1680648691, + "currency": "usd", + "description": null, + "destination": "ba_1MtIhL2eZvKYlo2CAElKwKu2", + "failure_balance_transaction": null, + "failure_code": null, + "failure_message": null, + "livemode": false, + "metadata": {}, + "method": "standard", + "original_payout": null, + "reconciliation_status": "not_applicable", + "reversed_by": null, + "source_type": "card", + "statement_descriptor": null, + "status": "pending", + "type": "bank_account" + } + ] +} diff --git a/airbyte-integrations/connectors/source-stripe/unit_tests/test_source.py b/airbyte-integrations/connectors/source-stripe/unit_tests/test_source.py index f386415db130..1567a930ebbe 100644 --- a/airbyte-integrations/connectors/source-stripe/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-stripe/unit_tests/test_source.py @@ -49,7 +49,7 @@ def _a_valid_config(): def test_streams_are_unique(config): stream_names = [s.name for s in SourceStripe(_ANY_CATALOG, _ANY_CONFIG, _NO_STATE).streams(config=config)] - assert len(stream_names) == len(set(stream_names)) == 46 + assert len(stream_names) == len(set(stream_names)) == 47 @pytest.mark.parametrize( diff --git a/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml b/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml index 49fc72f026c8..85570a016627 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml +++ b/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml @@ -11,7 +11,7 @@ data: connectorSubtype: api connectorType: source definitionId: 79c1aa37-dae3-42ae-b333-d1c105477715 - dockerImageTag: 4.4.0 + dockerImageTag: 4.4.1 dockerRepository: airbyte/source-zendesk-support documentationUrl: https://docs.airbyte.com/integrations/sources/zendesk-support githubIssueLabel: source-zendesk-support diff --git a/airbyte-integrations/connectors/source-zendesk-support/pyproject.toml b/airbyte-integrations/connectors/source-zendesk-support/pyproject.toml index aa3074a6bee4..20928f1debee 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/pyproject.toml +++ b/airbyte-integrations/connectors/source-zendesk-support/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "4.4.0" +version = "4.4.1" name = "source-zendesk-support" description = "Source implementation for Zendesk Support." authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py index 5ca18a285112..15d04a0d21dc 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py @@ -83,8 +83,8 @@ def check_connection(self, logger, config) -> Tuple[bool, any]: """ auth = self.get_authenticator(config) try: - datetime.strptime(config["start_date"], DATETIME_FORMAT) - settings = UserSettingsStream(config["subdomain"], authenticator=auth, start_date=None).get_settings() + start_date = datetime.strptime(config["start_date"], DATETIME_FORMAT) if config["start_date"] else None + settings = UserSettingsStream(config["subdomain"], authenticator=auth, start_date=start_date).get_settings() except Exception as e: return False, e active_features = [k for k, v in settings.get("active_features", {}).items() if v] diff --git a/docs/enterprise-setup/assets/enterprise-connectors/service-now-setup.png b/docs/enterprise-setup/assets/enterprise-connectors/service-now-setup.png new file mode 100644 index 000000000000..813fdffc40a3 Binary files /dev/null and b/docs/enterprise-setup/assets/enterprise-connectors/service-now-setup.png differ diff --git a/docs/integrations/destinations/dev-null.md b/docs/integrations/destinations/dev-null.md index 98750b53d860..3e2fac2c2000 100644 --- a/docs/integrations/destinations/dev-null.md +++ b/docs/integrations/destinations/dev-null.md @@ -49,7 +49,8 @@ The OSS and Cloud variants have the same version number starting from version `0 | Version | Date | Pull Request | Subject | |:------------|:-----------|:---------------------------------------------------------|:---------------------------------------------------------------------------------------------| -| 0.7.12 | 2024-12-04 | [48794](https://github.com/airbytehq/airbyte/pull/48794) | Promoting release candidate 0.7.12-rc.2 to a main version. | +| 0.7.13 | 2024-12-16 | [49819](https://github.com/airbytehq/airbyte/pull/49819) | Picked up CDK changes. | +| 0.7.12 | 2024-12-04 | [48794](https://github.com/airbytehq/airbyte/pull/48794) | Promoting release candidate 0.7.12-rc.2 to a main version. | | 0.7.12-rc.2 | 2024-11-26 | [48693](https://github.com/airbytehq/airbyte/pull/48693) | Update for testing progressive rollout | | 0.7.12-rc.1 | 2024-11-25 | [48693](https://github.com/airbytehq/airbyte/pull/48693) | Update for testing progressive rollout | | 0.7.11 | 2024-11-18 | [48468](https://github.com/airbytehq/airbyte/pull/48468) | Implement File CDk | diff --git a/docs/integrations/destinations/mssql-v2.md b/docs/integrations/destinations/mssql-v2.md new file mode 100644 index 000000000000..0e168a4ea5b7 --- /dev/null +++ b/docs/integrations/destinations/mssql-v2.md @@ -0,0 +1,12 @@ +# MSSQL (V2) + +## Changelog + +
+ Expand to review + +| Version | Date | Pull Request | Subject | +|:--------|:-----------| :--------------------------------------------------------- |:---------------| +| 0.1.0 | 2024-12-16 | [\#49460](https://github.com/airbytehq/airbyte/pull/49460) | Initial commit | + +
\ No newline at end of file diff --git a/docs/integrations/enterprise-connectors/source-service-now.md b/docs/integrations/enterprise-connectors/source-service-now.md new file mode 100644 index 000000000000..86bcf0ce46b0 --- /dev/null +++ b/docs/integrations/enterprise-connectors/source-service-now.md @@ -0,0 +1,62 @@ +# Source ServiceNow + +:::info +Airbyte Enterprise Connectors are a selection of premium connectors available exclusively for Airbyte Self-Managed Enterprise and Airbyte Teams customers. These connectors, built and maintained by the Airbyte team, provide enhanced capabilities and support for critical enterprise systems. To learn more about enterprise connectors, please [talk to our sales team](https://airbyte.com/company/talk-to-sales). +::: + +Airbyte’s incubating ServiceNow enterprise source connector currently offers Full Refresh syncs for streams that are part of Software Asset Management and Configuration Management Database applications. + +## Features + +| Feature | Supported?\(Yes/No\) | Notes | +| :---------------- | :------------------- | :---- | +| Full Refresh Sync | Yes | | +| Incremental Sync | No | | + +## Setup Guide + +1. Enter your ServiceNow environment as the Base URL. +2. Enter the username and password for a ServiceNow user account that has access to all tables that you want to include in the connection. + +![ServiceNow Connector setup with credentials](https://raw.githubusercontent.com/airbytehq/airbyte/refs/heads/master/docs/enterprise-setup/assets/enterprise-connectors/service-now-setup.png) + +## Supported streams + +### Configuration Management Database (CMDB) + +- cmdb_ci_wap_network +- cmdb_ci_ip_router +- cmdb_ci_ip_switch +- cmdb_ci_lb_bigip +- cmdb_ci_ip_firewall +- cmdb_ci_printer +- cmdb_ci_scanner +- cmdb_ci_linux_server +- cmdb_ci_comm +- cmdb_ci_win_server +- cmdb_ci_ucs_chassis +- cmdb_ci_storage_switch +- cmdb_ci_pc_hardware +- cmdb_ci_esx_server +- cmdb_ci_aix_server +- cmdb_ci_solaris_server +- cmdb_ci_chassis_server +- cmdb_ci_server +- cmdb_ci_net_app_server + +### Software Asset Management (SAM) + +- cmdb_model_category +- sam_sw_product_lifecycle +- alm_license + +## Changelog + +
+ Expand to review + +The connector is still incubating; this section exists to satisfy Airbyte's QA checks. + +- 0.1.0 + +
diff --git a/docs/integrations/sources/google-analytics-data-api.md b/docs/integrations/sources/google-analytics-data-api.md index 80c627be74e2..5b9dbc37a451 100644 --- a/docs/integrations/sources/google-analytics-data-api.md +++ b/docs/integrations/sources/google-analytics-data-api.md @@ -272,6 +272,7 @@ The Google Analytics connector is subject to Google Analytics Data API quotas. P | Version | Date | Pull Request | Subject | |:--------|:-----------| :------------------------------------------------------- |:---------------------------------------------------------------------------------------| +| 2.6.2 | 2024-12-14 | [48649](https://github.com/airbytehq/airbyte/pull/48649) | Starting with this version, the Docker image is now rootless. Please note that this and future versions will not be compatible with Airbyte versions earlier than 0.64 | | 2.6.1 | 2024-10-29 | [47899](https://github.com/airbytehq/airbyte/pull/47899) | Update dependencies | | 2.6.0 | 2024-10-28 | [47013](https://github.com/airbytehq/airbyte/pull/47013) | Migrate to CDK v5 | | 2.5.13 | 2024-10-28 | [47061](https://github.com/airbytehq/airbyte/pull/47061) | Update dependencies | diff --git a/docs/integrations/sources/hoorayhr.md b/docs/integrations/sources/hoorayhr.md new file mode 100644 index 000000000000..3fae8fbfbb53 --- /dev/null +++ b/docs/integrations/sources/hoorayhr.md @@ -0,0 +1,33 @@ +# HoorayHR + +Source connector for HoorayHR (https://hoorayhr.io). The connector uses https://api.hoorayhr.io + +## Configuration + +Use the credentials of your HoorayHR account to configure the connector. Make sure MFA is disabled. Currently this is a limitation of the HoorayHR API. + +| Input | Type | Description | Default Value | +| ------------------ | -------- | ------------------ | ------------- | +| `hoorayhrusername` | `string` | HoorayHR Username. | | +| `hoorayhrpassword` | `string` | HoorayHR Password. | | + +## Streams + +| Stream Name | Primary Key | Pagination | Supports Full Sync | Supports Incremental | +| ----------- | ----------- | ------------- | ------------------ | -------------------- | +| sick-leaves | id | No pagination | ✅ | ❌ | +| time-off | id | No pagination | ✅ | ❌ | +| leave-types | id | No pagination | ✅ | ❌ | +| users | id | No pagination | ✅ | ❌ | + +## Changelog + +
+ Expand to review + +| Version | Date | Pull Request | Subject | +| ------- | ---------- | ------------ | --------------------------------------------------------------------------------------------------- | +| 0.1.0 | 2024-12-17 | | Added some more documentation and icon for HoorayHR by [@JoeriSmits](https://github.com/JoeriSmits) | +| 0.0.1 | 2024-12-17 | | Initial release by [@JoeriSmits](https://github.com/JoeriSmits) via Connector Builder | + +
diff --git a/docs/integrations/sources/intercom.md b/docs/integrations/sources/intercom.md index 6f69d1797bf6..301f8f5cc8e9 100644 --- a/docs/integrations/sources/intercom.md +++ b/docs/integrations/sources/intercom.md @@ -96,6 +96,7 @@ The Intercom connector should not run into Intercom API limitations under normal | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------| +| 0.9.0-rc.1 | 2024-12-17 | [47240](https://github.com/airbytehq/airbyte/pull/47240) | Migrate to manifest-only format | | 0.8.3 | 2024-12-12 | [48979](https://github.com/airbytehq/airbyte/pull/48979) | Update dependencies | | 0.8.2 | 2024-10-29 | [47919](https://github.com/airbytehq/airbyte/pull/47919) | Update dependencies | | 0.8.1 | 2024-10-28 | [47537](https://github.com/airbytehq/airbyte/pull/47537) | Update dependencies | diff --git a/docs/integrations/sources/mixmax.md b/docs/integrations/sources/mixmax.md index b2724f8859d5..4f18e9478a99 100644 --- a/docs/integrations/sources/mixmax.md +++ b/docs/integrations/sources/mixmax.md @@ -44,6 +44,7 @@ Visit `https://developer.mixmax.com/reference/getting-started-with-the-api` for | Version | Date | Pull Request | Subject | | ------------------ | ------------ | --- | ---------------- | +| 0.0.7 | 2024-12-14 | [49604](https://github.com/airbytehq/airbyte/pull/49604) | Update dependencies | | 0.0.6 | 2024-12-12 | [49267](https://github.com/airbytehq/airbyte/pull/49267) | Update dependencies | | 0.0.5 | 2024-12-11 | [48986](https://github.com/airbytehq/airbyte/pull/48986) | Starting with this version, the Docker image is now rootless. Please note that this and future versions will not be compatible with Airbyte versions earlier than 0.64 | | 0.0.4 | 2024-11-04 | [48160](https://github.com/airbytehq/airbyte/pull/48160) | Update dependencies | diff --git a/docs/integrations/sources/mssql.md b/docs/integrations/sources/mssql.md index 5e1ceaa3778e..689e83f07c22 100644 --- a/docs/integrations/sources/mssql.md +++ b/docs/integrations/sources/mssql.md @@ -101,7 +101,9 @@ approaches CDC. - The SQL Server CDC feature processes changes that occur in user-created tables only. You cannot enable CDC on the SQL Server master database. - Using variables with partition switching on databases or tables with change data capture \(CDC\) - is not supported for the `ALTER TABLE` ... `SWITCH TO` ... `PARTITION` ... statement + is not supported for the `ALTER TABLE` ... `SWITCH TO` ... `PARTITION` ... statement. +- CDC incremental syncing is only available for tables with at least one primary key. Tables without primary keys can still be replicated by CDC but only in Full Refresh mode. + For more information on CDC limitations, refer to our [CDC Limitations doc](https://docs.airbyte.com/understanding-airbyte/cdc#limitations). - Our CDC implementation uses at least once delivery for all change records. - Read more on CDC limitations in the [Microsoft docs](https://docs.microsoft.com/en-us/sql/relational-databases/track-changes/about-change-data-capture-sql-server?view=sql-server-2017#limitations). diff --git a/docs/integrations/sources/mysql.md b/docs/integrations/sources/mysql.md index cb4102703bdd..9c61f0aaf679 100644 --- a/docs/integrations/sources/mysql.md +++ b/docs/integrations/sources/mysql.md @@ -226,8 +226,9 @@ Any database or table encoding combination of charset and collation is supported | Version | Date | Pull Request | Subject | |:---------|:-----------|:-----------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------| -| 3.9.1 | 2024-12-12 | [49456](https://github.com/airbytehq/airbyte/pull/49456) | Bump version to re-relase | -| 3.9.0 | 2024-12-12 | [49423](https://github.com/airbytehq/airbyte/pull/49423) | Promoting release candidate 3.9.0-rc.27 to a main version. | +| 3.9.2 | 2024-12-16 | [49830](https://github.com/airbytehq/airbyte/pull/49830) | Fixes an issue with auto generated tinyint columns | +| 3.9.1 | 2024-12-12 | [49456](https://github.com/airbytehq/airbyte/pull/49456) | Bump version to re-relase | +| 3.9.0 | 2024-12-12 | [49423](https://github.com/airbytehq/airbyte/pull/49423) | Promoting release candidate 3.9.0-rc.27 to a main version. | | 3.9.0-rc | 2024-11-05 | [48369](https://github.com/airbytehq/airbyte/pull/48369) | Progressive rollout test. | | 3.7.3 | 2024-09-17 | [45639](https://github.com/airbytehq/airbyte/pull/45639) | Adopt latest CDK to use the latest apache sshd mina to handle tcpkeepalive requests. | | 3.7.2 | 2024-09-05 | [45181](https://github.com/airbytehq/airbyte/pull/45181) | Fix incorrect categorizing resumable/nonresumable full refresh streams. | diff --git a/docs/integrations/sources/mysql/mysql-troubleshooting.md b/docs/integrations/sources/mysql/mysql-troubleshooting.md index b733ef331181..84e403f91323 100644 --- a/docs/integrations/sources/mysql/mysql-troubleshooting.md +++ b/docs/integrations/sources/mysql/mysql-troubleshooting.md @@ -10,6 +10,8 @@ - Make sure to read our [CDC docs](../../../understanding-airbyte/cdc.md) to see limitations that impact all databases using CDC replication. - Our CDC implementation uses at least once delivery for all change records. +- To enable CDC with incremental sync, ensure the table has at least one primary key. + Tables without primary keys can still be replicated by CDC but only in Full Refresh mode. ### Vendor-Specific Connector Limitations diff --git a/docs/integrations/sources/pardot-migrations.md b/docs/integrations/sources/pardot-migrations.md new file mode 100644 index 000000000000..a6dfcef4abaa --- /dev/null +++ b/docs/integrations/sources/pardot-migrations.md @@ -0,0 +1,14 @@ +# Pardot Migration Guide + +## Upgrading to 1.0.0 + +Version 1.0.0 contains a number of fixes and updates to the Pardot source connector: + +- Fixed authentication +- Migrate all existing streams to Pardot v5 API (except email_clicks which is only available in v4) +- Re-implement incremental syncs for existing streams where possible +- Add 23 new streams from the v5 API (folders, emails, engagement_studio_programs, folder_contents, forms, form_fields, form_handlers, form_handler_fields, landing_pages, layout_templates, lifecycle_stages, lifecycle_histories, list_emails, opportunities, tags, tracker_domains, visitor_page_views) +- Add additional configuration options to better handle large accounts (e.g. adjustable split-up windows, page size) +- Align to Pardot-recommended sort/filter/pagination conventions to avoid timeouts (based on Pardot support case #469072278) + +The previous implementation of the authentication flow was no longer functional, preventing the instantiation of new sources. All users with existing connections should reconfigure the source and go through the authentication flow before attempting to sync with this connector. OSS users should be sure to manually update their source version to >=1.0.0 before attempting to configure this source. diff --git a/docs/integrations/sources/pardot.md b/docs/integrations/sources/pardot.md index f9907d617348..5c90ca581663 100644 --- a/docs/integrations/sources/pardot.md +++ b/docs/integrations/sources/pardot.md @@ -1,66 +1,101 @@ -# Pardot +# Pardot (Salesforce Marketing Cloud Account Engagement) ## Overview -The Airbyte Source for [Salesforce Pardot](https://www.pardot.com/) +This page contains the setup guide and reference information for the [Pardot (Salesforce Marketing Cloud Account Engagement)](https://www.salesforce.com/marketing/b2b-automation/) source connector. -The Pardot supports full refresh syncs +## Prerequisites -### Output schema +- Pardot/Marketing Cloud Account Engagement account +- Pardot Business Unit ID +- Client ID +- Client Secret +- Refresh Token -Several output streams are available from this source: +## Setup Guide -- [Campaigns](https://developer.salesforce.com/docs/marketing/pardot/guide/campaigns-v4.html) -- [EmailClicks](https://developer.salesforce.com/docs/marketing/pardot/guide/batch-email-clicks-v4.html) -- [ListMembership](https://developer.salesforce.com/docs/marketing/pardot/guide/list-memberships-v4.html) -- [Lists](https://developer.salesforce.com/docs/marketing/pardot/guide/lists-v4.html) -- [ProspectAccounts](https://developer.salesforce.com/docs/marketing/pardot/guide/prospect-accounts-v4.html) -- [Prospects](https://developer.salesforce.com/docs/marketing/pardot/guide/prospects-v4.html) -- [Users](https://developer.salesforce.com/docs/marketing/pardot/guide/users-v4.html) -- [VisitorActivities](https://developer.salesforce.com/docs/marketing/pardot/guide/visitor-activities-v4.html) -- [Visitors](https://developer.salesforce.com/docs/marketing/pardot/guide/visitors-v4.html) -- [Visits](https://developer.salesforce.com/docs/marketing/pardot/guide/visits-v4.html) +### Required configuration options +- **Pardot Business Unit ID** (`pardot_business_unit_id`): This value uniquely identifies your account, and can be found at Setup > Pardot > Pardot Account Setup -If there are more endpoints you'd like Airbyte to support, please [create an issue.](https://github.com/airbytehq/airbyte/issues/new/choose) +- **Client ID** (`client_id`): The Consumer Key that can be found when viewing your app in Salesforce -### Features +- **Client Secret** (`client_secret`): The Consumer Secret that can be found when viewing your app in Salesforce -| Feature | Supported? | -| :---------------- | :--------- | -| Full Refresh Sync | Yes | -| Incremental Sync | No | -| SSL connection | No | -| Namespaces | No | +- **Refresh Token** (`refresh_token`): Salesforce Refresh Token used for Airbyte to access your Salesforce account. If you don't know what this is, follow [this guide](https://medium.com/@bpmmendis94/obtain-access-refresh-tokens-from-salesforce-rest-api-a324fe4ccd9b) to retrieve it. -### Performance considerations +### Optional configuration options +- **Start Date** (`start_date`): UTC date and time in the format `2020-01-25T00:00:00Z`. Any data before this date will not be replicated. Defaults to `2007-01-01T00:00:00Z` (the year Pardot was launched) -The Pardot connector should not run into Pardot API limitations under normal usage. Please [create an issue](https://github.com/airbytehq/airbyte/issues) if you see any rate limit issues that are not automatically retried successfully. +- **Page Size Limit** (`page_size`): The default page size to return; defaults to `1000` (which is Pardot's maximum). Does not apply to the Email Clicks stream which uses the v4 API and is limited to 200 per page. -## Getting started +- **Default Split Up Interval** (`split_up_interval`): The default split up interval is used on incremental streams to prevent hitting Pardots limit of 100 pages per result (effectively 100K records on most endpoints). The default is `P3M`, which will break incremental requests into three-month groupings. If you expect more than 100K records to be modified between syncs in a single endpoint, you can increase the granularity to `P1M`, `P14D`, `P7D`, `P3D`, or `P1D` to reduce the maximum records to be paged through per split. For small accounts unlikely to hit the limit, decreasing the granularity to `P6M` or `P1Y` may increase the speed of those syncs, especially the initial backfill. -### Requirements +- **Is Sandbox App?** (`is_sandbox`): Whether or not the app is in a Salesforce sandbox. If you do not know what this is, assume it is false. -- Pardot Account -- Pardot Business Unit ID -- Client ID -- Client Secret -- Refresh Token -- Start Date -- Is Sandbox environment? +## Supported Sync Modes + +The Pardot source connector supports the following [sync modes](https://docs.airbyte.com/cloud/core-concepts/#connection-sync-modes): + +- Full Refresh +- Incremental -### Setup guide +Incremental streams are based on the Pardot API's `UpdatedAt` field when the object is updateable and the API supports it; otherwise `CreatedAt` or `Id` are used in that order of preference. + +### Performance Considerations + +The Pardot connector should not run into Pardot API limitations under normal usage. Please [create an issue](https://github.com/airbytehq/airbyte/issues) if you see any rate limit issues that are not automatically retried successfully. -- `pardot_business_unit_id`: Pardot Business ID, can be found at Setup > Pardot > Pardot Account Setup -- `client_id`: The Consumer Key that can be found when viewing your app in Salesforce -- `client_secret`: The Consumer Secret that can be found when viewing your app in Salesforce -- `refresh_token`: Salesforce Refresh Token used for Airbyte to access your Salesforce account. If you don't know what this is, follow [this guide](https://medium.com/@bpmmendis94/obtain-access-refresh-tokens-from-salesforce-rest-api-a324fe4ccd9b) to retrieve it. -- `start_date`: UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. Leave blank to skip this filter -- `is_sandbox`: Whether or not the app is in a Salesforce sandbox. If you do not know what this is, assume it is false. +:::tip + +Due to timeouts on large accounts, the Split Up Interval (the time period used to page through incrementally) is surfaced as a configuration option. While the default should work for most accounts, large accounts may need to increase the granularity from the default. Similarly, small accounts may see faster initial syncs with a longer interval. See the Optional Configuration Options section for more details. + +::: + +## Supported Streams + +Several output streams are available from this source. Unless noted otherwise, streams are from Pardot's v5 API: + +- [Account (Metadata)](https://developer.salesforce.com/docs/marketing/pardot/guide/account-v5.html) (full refresh) +- [Campaigns](https://developer.salesforce.com/docs/marketing/pardot/guide/campaign-v5.html) (incremental) +- [Custom Fields](https://developer.salesforce.com/docs/marketing/pardot/guide/custom-field-v5.html) (incremental) +- [Custom Redirects](https://developer.salesforce.com/docs/marketing/pardot/guide/custom-redirect-v5.html) (full refresh) +- [Dynamic Content](https://developer.salesforce.com/docs/marketing/pardot/guide/dynamic-content-v5.html) (incremental) +- [Dynamic Content Variations](https://developer.salesforce.com/docs/marketing/pardot/guide/dynamic-content-variation.html) (incremental parent) +- [Emails](https://developer.salesforce.com/docs/marketing/pardot/guide/email-v5.html) (incremental) +- [Email Clicks (v4 API)](https://developer.salesforce.com/docs/marketing/pardot/guide/batch-email-clicks-v4.html) (incremental) +- [Engagement Studio Programs](https://developer.salesforce.com/docs/marketing/pardot/guide/engagement-studio-program-v5.html) (incremental) +- [Files](https://developer.salesforce.com/docs/marketing/pardot/guide/export-v5.html) (full refresh) +- [Folders](https://developer.salesforce.com/docs/marketing/pardot/guide/folder-v5.html) (full refresh) +- [Folder Contents](https://developer.salesforce.com/docs/marketing/pardot/guide/folder-contents-v5.html) (incremental) +- [Forms](https://developer.salesforce.com/docs/marketing/pardot/guide/form-v5.html) (full refresh) +- [Form Fields](https://developer.salesforce.com/docs/marketing/pardot/guide/form-field-v5.html) (incremental) +- [Form Handlers](https://developer.salesforce.com/docs/marketing/pardot/guide/form-handler-v5.html) (full refresh) +- [Form Handler Fields](https://developer.salesforce.com/docs/marketing/pardot/guide/form-handler-field-v5.html) (full refresh) +- [Landing Pages](https://developer.salesforce.com/docs/marketing/pardot/guide/landing-page-v5.html) (incremental) +- [Layout Templates](https://developer.salesforce.com/docs/marketing/pardot/guide/layout-template-v5.html) (full refresh) +- [Lifecycle Stages](https://developer.salesforce.com/docs/marketing/pardot/guide/lifecycle-stage-v5.html) (incremental) +- [Lifecycle Histories](https://developer.salesforce.com/docs/marketing/pardot/guide/lifecycle-history-v5.html) (incremental) +- [Lists](https://developer.salesforce.com/docs/marketing/pardot/guide/list-v5.html) (incremental) +- [List Emails](https://developer.salesforce.com/docs/marketing/pardot/guide/list-email-v5.html) (incremental) +- [List Memberships](https://developer.salesforce.com/docs/marketing/pardot/guide/list-membership-v5.html) (incremental) +- [Opportunities](https://developer.salesforce.com/docs/marketing/pardot/guide/opportunity-v5.html) (incremental) +- [Prospects](https://developer.salesforce.com/docs/marketing/pardot/guide/prospect-v5.html) (incremental) +- [Prospect Accounts](https://developer.salesforce.com/docs/marketing/pardot/guide/prospect-account-v5.html) (full refresh) +- [Tags](https://developer.salesforce.com/docs/marketing/pardot/guide/tag-v5.html) (incremental) +- [Tracker Domains](https://developer.salesforce.com/docs/marketing/pardot/guide/tracker-domain-v5.html) (full refresh) +- [Users](https://developer.salesforce.com/docs/marketing/pardot/guide/user-v5.html) (incremental) +- [Visitors](https://developer.salesforce.com/docs/marketing/pardot/guide/visitor-v5.html) (incremental) +- [Visitor Activity](https://developer.salesforce.com/docs/marketing/pardot/guide/visitor-activity-v5.html) (incremental) +- [Visitor Page Views](https://developer.salesforce.com/docs/marketing/pardot/guide/visitor-page-view-v5.html) (incremental) +- [Visits](https://developer.salesforce.com/docs/marketing/pardot/guide/visit-v5.html) (incremental) + +If there are more endpoints you'd like Airbyte to support, please [create an issue](https://github.com/airbytehq/airbyte/issues/new/choose). ## Changelog | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :-------------------- | +| 1.0.0 | 2023-12-12 | [49424](https://github.com/airbytehq/airbyte/pull/49424) | Update streams to API V5. Fix auth flow | | 0.2.0 | 2024-10-13 | [44528](https://github.com/airbytehq/airbyte/pull/44528) | Migrate to LowCode then Manifest-only | | 0.1.22 | 2024-10-12 | [46778](https://github.com/airbytehq/airbyte/pull/46778) | Update dependencies | | 0.1.21 | 2024-10-05 | [46441](https://github.com/airbytehq/airbyte/pull/46441) | Update dependencies | diff --git a/docs/integrations/sources/pennylane.md b/docs/integrations/sources/pennylane.md index 93fcdff320c1..3aaf18b121ac 100644 --- a/docs/integrations/sources/pennylane.md +++ b/docs/integrations/sources/pennylane.md @@ -27,6 +27,7 @@ | Version | Date | Pull Request | Subject | |---------|------|--------------|---------| +| 0.1.0 | 2024-12-10 | [48892](https://github.com/airbytehq/airbyte/pull/48892) | Add missing fields to `customer_invoices` stream | | 0.0.6 | 2024-12-14 | [49659](https://github.com/airbytehq/airbyte/pull/49659) | Update dependencies | | 0.0.5 | 2024-12-12 | [49322](https://github.com/airbytehq/airbyte/pull/49322) | Update dependencies | | 0.0.4 | 2024-12-11 | [49053](https://github.com/airbytehq/airbyte/pull/49053) | Starting with this version, the Docker image is now rootless. Please note that this and future versions will not be compatible with Airbyte versions earlier than 0.64 | diff --git a/docs/integrations/sources/pipedrive.md b/docs/integrations/sources/pipedrive.md index ef81c67990a3..a64cb0d8ead8 100644 --- a/docs/integrations/sources/pipedrive.md +++ b/docs/integrations/sources/pipedrive.md @@ -110,57 +110,58 @@ The Pipedrive connector will gracefully handle rate limits. For more information ## Changelog -| Version | Date | Pull Request | Subject | -| :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------------- | -| 2.2.28 | 2024-12-14 | [49692](https://github.com/airbytehq/airbyte/pull/49692) | Update dependencies | -| 2.2.27 | 2024-12-12 | [49041](https://github.com/airbytehq/airbyte/pull/49041) | Starting with this version, the Docker image is now rootless. Please note that this and future versions will not be compatible with Airbyte versions earlier than 0.64 | -| 2.2.26 | 2024-11-04 | [48293](https://github.com/airbytehq/airbyte/pull/48293) | Update dependencies | -| 2.2.25 | 2024-10-29 | [47743](https://github.com/airbytehq/airbyte/pull/47743) | Update dependencies | -| 2.2.24 | 2024-10-28 | [47103](https://github.com/airbytehq/airbyte/pull/47103) | Update dependencies | -| 2.2.23 | 2024-10-12 | [46822](https://github.com/airbytehq/airbyte/pull/46822) | Update dependencies | -| 2.2.22 | 2024-10-05 | [46487](https://github.com/airbytehq/airbyte/pull/46487) | Update dependencies | -| 2.2.21 | 2024-09-28 | [46132](https://github.com/airbytehq/airbyte/pull/46132) | Update dependencies | -| 2.2.20 | 2024-09-21 | [45748](https://github.com/airbytehq/airbyte/pull/45748) | Update dependencies | -| 2.2.19 | 2024-09-14 | [45556](https://github.com/airbytehq/airbyte/pull/45556) | Update dependencies | -| 2.2.18 | 2024-09-07 | [45303](https://github.com/airbytehq/airbyte/pull/45303) | Update dependencies | -| 2.2.17 | 2024-08-31 | [44981](https://github.com/airbytehq/airbyte/pull/44981) | Update dependencies | -| 2.2.16 | 2024-08-24 | [44644](https://github.com/airbytehq/airbyte/pull/44644) | Update dependencies | -| 2.2.15 | 2024-08-17 | [44316](https://github.com/airbytehq/airbyte/pull/44316) | Update dependencies | -| 2.2.14 | 2024-08-12 | [43888](https://github.com/airbytehq/airbyte/pull/43888) | Update dependencies | -| 2.2.13 | 2024-08-10 | [43679](https://github.com/airbytehq/airbyte/pull/43679) | Update dependencies | -| 2.2.12 | 2024-08-03 | [43056](https://github.com/airbytehq/airbyte/pull/43056) | Update dependencies | -| 2.2.11 | 2024-07-27 | [42287](https://github.com/airbytehq/airbyte/pull/42287) | Update dependencies | -| 2.2.10 | 2024-07-13 | [41729](https://github.com/airbytehq/airbyte/pull/41729) | Update dependencies | -| 2.2.9 | 2024-07-10 | [41465](https://github.com/airbytehq/airbyte/pull/41465) | Update dependencies | -| 2.2.8 | 2024-07-09 | [41082](https://github.com/airbytehq/airbyte/pull/41082) | Update dependencies | -| 2.2.7 | 2024-07-06 | [40778](https://github.com/airbytehq/airbyte/pull/40778) | Update dependencies | -| 2.2.6 | 2024-06-25 | [40501](https://github.com/airbytehq/airbyte/pull/40501) | Update dependencies | -| 2.2.5 | 2024-06-22 | [40171](https://github.com/airbytehq/airbyte/pull/40171) | Update dependencies | -| 2.2.4 | 2024-06-04 | [39095](https://github.com/airbytehq/airbyte/pull/39095) | [autopull] Upgrade base image to v1.2.1 | -| 2.2.3 | 2024-05-20 | [38405](https://github.com/airbytehq/airbyte/pull/38405) | [autopull] base image + poetry + up_to_date | -| 2.2.2 | 2024-01-11 | [34153](https://github.com/airbytehq/airbyte/pull/34153) | prepare for airbyte-lib | -| 2.2.1 | 2023-11-06 | [31147](https://github.com/airbytehq/airbyte/pull/31147) | Bugfix: handle records with a null data field | -| 2.2.0 | 2023-10-25 | [31707](https://github.com/airbytehq/airbyte/pull/31707) | Add new stream mail | -| 2.1.0 | 2023-10-10 | [31184](https://github.com/airbytehq/airbyte/pull/31184) | Add new stream goals | -| 2.0.1 | 2023-10-13 | [31151](https://github.com/airbytehq/airbyte/pull/31151) | Add additionalProperties in schemas to read custom fields | -| 2.0.0 | 2023-08-09 | [29293](https://github.com/airbytehq/airbyte/pull/29293) | Migrated to Low-Code CDK | -| 1.0.0 | 2023-06-29 | [27832](https://github.com/airbytehq/airbyte/pull/27832) | Remove `followers_count` field from `Products` stream | -| 0.1.19 | 2023-07-05 | [27967](https://github.com/airbytehq/airbyte/pull/27967) | Update `OrganizationFields` and `ProductFields` with `display_field` field | -| 0.1.18 | 2023-06-02 | [26892](https://github.com/airbytehq/airbyte/pull/26892) | Update `DialFields` schema with `pipeline_ids` property | -| 0.1.17 | 2023-03-21 | [24282](https://github.com/airbytehq/airbyte/pull/24282) | Bugfix handle missed `cursor_field` | -| 0.1.16 | 2023-03-08 | [23789](https://github.com/airbytehq/airbyte/pull/23789) | Add 11 new streams | -| 0.1.15 | 2023-03-02 | [23705](https://github.com/airbytehq/airbyte/pull/23705) | Disable OAuth | -| 0.1.14 | 2023-03-01 | [23539](https://github.com/airbytehq/airbyte/pull/23539) | Fix schema for "activities", "check" works if empty "deals" | -| 0.1.13 | 2022-09-16 | [16799](https://github.com/airbytehq/airbyte/pull/16799) | Migrate to per-stream state | -| 0.1.12 | 2022-05-12 | [12806](https://github.com/airbytehq/airbyte/pull/12806) | Remove date-time format from schemas | -| 0.1.10 | 2022-04-26 | [11870](https://github.com/airbytehq/airbyte/pull/11870) | Add 3 streams: DealFields, OrganizationFields and PersonFields | -| 0.1.9 | 2021-12-07 | [8582](https://github.com/airbytehq/airbyte/pull/8582) | Update connector fields title/description | -| 0.1.8 | 2021-11-16 | [7875](https://github.com/airbytehq/airbyte/pull/7875) | Extend schema for "persons" stream | -| 0.1.7 | 2021-11-15 | [7968](https://github.com/airbytehq/airbyte/pull/7968) | Update oAuth flow config | -| 0.1.6 | 2021-10-05 | [6821](https://github.com/airbytehq/airbyte/pull/6821) | Add OAuth support | -| 0.1.5 | 2021-09-27 | [6441](https://github.com/airbytehq/airbyte/pull/6441) | Fix normalization error | -| 0.1.4 | 2021-08-26 | [5943](https://github.com/airbytehq/airbyte/pull/5943) | Add organizations stream | -| 0.1.3 | 2021-08-26 | [5642](https://github.com/airbytehq/airbyte/pull/5642) | Remove date-time from deals stream | -| 0.1.2 | 2021-07-23 | [4912](https://github.com/airbytehq/airbyte/pull/4912) | Update money type to support floating point | -| 0.1.1 | 2021-07-19 | [4686](https://github.com/airbytehq/airbyte/pull/4686) | Update spec.json | -| 0.1.0 | 2021-07-19 | [4686](https://github.com/airbytehq/airbyte/pull/4686) | 🎉 New source: Pipedrive connector | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:---------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 2.3.0 | 2024-12-17 | [48615](https://github.com/airbytehq/airbyte/pull/48615) | Update airbyte-cdk to use concurrency | +| 2.2.28 | 2024-12-14 | [49692](https://github.com/airbytehq/airbyte/pull/49692) | Update dependencies | +| 2.2.27 | 2024-12-12 | [49041](https://github.com/airbytehq/airbyte/pull/49041) | Starting with this version, the Docker image is now rootless. Please note that this and future versions will not be compatible with Airbyte versions earlier than 0.64 | +| 2.2.26 | 2024-11-04 | [48293](https://github.com/airbytehq/airbyte/pull/48293) | Update dependencies | +| 2.2.25 | 2024-10-29 | [47743](https://github.com/airbytehq/airbyte/pull/47743) | Update dependencies | +| 2.2.24 | 2024-10-28 | [47103](https://github.com/airbytehq/airbyte/pull/47103) | Update dependencies | +| 2.2.23 | 2024-10-12 | [46822](https://github.com/airbytehq/airbyte/pull/46822) | Update dependencies | +| 2.2.22 | 2024-10-05 | [46487](https://github.com/airbytehq/airbyte/pull/46487) | Update dependencies | +| 2.2.21 | 2024-09-28 | [46132](https://github.com/airbytehq/airbyte/pull/46132) | Update dependencies | +| 2.2.20 | 2024-09-21 | [45748](https://github.com/airbytehq/airbyte/pull/45748) | Update dependencies | +| 2.2.19 | 2024-09-14 | [45556](https://github.com/airbytehq/airbyte/pull/45556) | Update dependencies | +| 2.2.18 | 2024-09-07 | [45303](https://github.com/airbytehq/airbyte/pull/45303) | Update dependencies | +| 2.2.17 | 2024-08-31 | [44981](https://github.com/airbytehq/airbyte/pull/44981) | Update dependencies | +| 2.2.16 | 2024-08-24 | [44644](https://github.com/airbytehq/airbyte/pull/44644) | Update dependencies | +| 2.2.15 | 2024-08-17 | [44316](https://github.com/airbytehq/airbyte/pull/44316) | Update dependencies | +| 2.2.14 | 2024-08-12 | [43888](https://github.com/airbytehq/airbyte/pull/43888) | Update dependencies | +| 2.2.13 | 2024-08-10 | [43679](https://github.com/airbytehq/airbyte/pull/43679) | Update dependencies | +| 2.2.12 | 2024-08-03 | [43056](https://github.com/airbytehq/airbyte/pull/43056) | Update dependencies | +| 2.2.11 | 2024-07-27 | [42287](https://github.com/airbytehq/airbyte/pull/42287) | Update dependencies | +| 2.2.10 | 2024-07-13 | [41729](https://github.com/airbytehq/airbyte/pull/41729) | Update dependencies | +| 2.2.9 | 2024-07-10 | [41465](https://github.com/airbytehq/airbyte/pull/41465) | Update dependencies | +| 2.2.8 | 2024-07-09 | [41082](https://github.com/airbytehq/airbyte/pull/41082) | Update dependencies | +| 2.2.7 | 2024-07-06 | [40778](https://github.com/airbytehq/airbyte/pull/40778) | Update dependencies | +| 2.2.6 | 2024-06-25 | [40501](https://github.com/airbytehq/airbyte/pull/40501) | Update dependencies | +| 2.2.5 | 2024-06-22 | [40171](https://github.com/airbytehq/airbyte/pull/40171) | Update dependencies | +| 2.2.4 | 2024-06-04 | [39095](https://github.com/airbytehq/airbyte/pull/39095) | [autopull] Upgrade base image to v1.2.1 | +| 2.2.3 | 2024-05-20 | [38405](https://github.com/airbytehq/airbyte/pull/38405) | [autopull] base image + poetry + up_to_date | +| 2.2.2 | 2024-01-11 | [34153](https://github.com/airbytehq/airbyte/pull/34153) | prepare for airbyte-lib | +| 2.2.1 | 2023-11-06 | [31147](https://github.com/airbytehq/airbyte/pull/31147) | Bugfix: handle records with a null data field | +| 2.2.0 | 2023-10-25 | [31707](https://github.com/airbytehq/airbyte/pull/31707) | Add new stream mail | +| 2.1.0 | 2023-10-10 | [31184](https://github.com/airbytehq/airbyte/pull/31184) | Add new stream goals | +| 2.0.1 | 2023-10-13 | [31151](https://github.com/airbytehq/airbyte/pull/31151) | Add additionalProperties in schemas to read custom fields | +| 2.0.0 | 2023-08-09 | [29293](https://github.com/airbytehq/airbyte/pull/29293) | Migrated to Low-Code CDK | +| 1.0.0 | 2023-06-29 | [27832](https://github.com/airbytehq/airbyte/pull/27832) | Remove `followers_count` field from `Products` stream | +| 0.1.19 | 2023-07-05 | [27967](https://github.com/airbytehq/airbyte/pull/27967) | Update `OrganizationFields` and `ProductFields` with `display_field` field | +| 0.1.18 | 2023-06-02 | [26892](https://github.com/airbytehq/airbyte/pull/26892) | Update `DialFields` schema with `pipeline_ids` property | +| 0.1.17 | 2023-03-21 | [24282](https://github.com/airbytehq/airbyte/pull/24282) | Bugfix handle missed `cursor_field` | +| 0.1.16 | 2023-03-08 | [23789](https://github.com/airbytehq/airbyte/pull/23789) | Add 11 new streams | +| 0.1.15 | 2023-03-02 | [23705](https://github.com/airbytehq/airbyte/pull/23705) | Disable OAuth | +| 0.1.14 | 2023-03-01 | [23539](https://github.com/airbytehq/airbyte/pull/23539) | Fix schema for "activities", "check" works if empty "deals" | +| 0.1.13 | 2022-09-16 | [16799](https://github.com/airbytehq/airbyte/pull/16799) | Migrate to per-stream state | +| 0.1.12 | 2022-05-12 | [12806](https://github.com/airbytehq/airbyte/pull/12806) | Remove date-time format from schemas | +| 0.1.10 | 2022-04-26 | [11870](https://github.com/airbytehq/airbyte/pull/11870) | Add 3 streams: DealFields, OrganizationFields and PersonFields | +| 0.1.9 | 2021-12-07 | [8582](https://github.com/airbytehq/airbyte/pull/8582) | Update connector fields title/description | +| 0.1.8 | 2021-11-16 | [7875](https://github.com/airbytehq/airbyte/pull/7875) | Extend schema for "persons" stream | +| 0.1.7 | 2021-11-15 | [7968](https://github.com/airbytehq/airbyte/pull/7968) | Update oAuth flow config | +| 0.1.6 | 2021-10-05 | [6821](https://github.com/airbytehq/airbyte/pull/6821) | Add OAuth support | +| 0.1.5 | 2021-09-27 | [6441](https://github.com/airbytehq/airbyte/pull/6441) | Fix normalization error | +| 0.1.4 | 2021-08-26 | [5943](https://github.com/airbytehq/airbyte/pull/5943) | Add organizations stream | +| 0.1.3 | 2021-08-26 | [5642](https://github.com/airbytehq/airbyte/pull/5642) | Remove date-time from deals stream | +| 0.1.2 | 2021-07-23 | [4912](https://github.com/airbytehq/airbyte/pull/4912) | Update money type to support floating point | +| 0.1.1 | 2021-07-19 | [4686](https://github.com/airbytehq/airbyte/pull/4686) | Update spec.json | +| 0.1.0 | 2021-07-19 | [4686](https://github.com/airbytehq/airbyte/pull/4686) | 🎉 New source: Pipedrive connector | diff --git a/docs/integrations/sources/postgres.md b/docs/integrations/sources/postgres.md index a80a9c1c8cdb..ebab3c84a6e0 100644 --- a/docs/integrations/sources/postgres.md +++ b/docs/integrations/sources/postgres.md @@ -329,6 +329,7 @@ According to Postgres [documentation](https://www.postgresql.org/docs/14/datatyp | Version | Date | Pull Request | Subject | |---------|------------|----------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.6.24 | 2024-12-16 | [49469](https://github.com/airbytehq/airbyte/pull/49469) | Simplify CTID_TABLE_BLOCK_SIZE query for Postgres integration | | 3.6.23 | 2024-11-13 | [\#48482](https://github.com/airbytehq/airbyte/pull/48482) | Convert large integer typed using NUMERIC(X, 0) into a BigInteger. l | 3.6.22 | 2024-10-02 | [46900](https://github.com/airbytehq/airbyte/pull/46900) | Fixed a bug where source docs won't render on Airbyte 1.1 | | 3.6.21 | 2024-10-02 | [46322](https://github.com/airbytehq/airbyte/pull/46322) | Support CDC against a read-replica (continuation) | diff --git a/docs/integrations/sources/s3.md b/docs/integrations/sources/s3.md index 57bff58e0174..e9a683231edc 100644 --- a/docs/integrations/sources/s3.md +++ b/docs/integrations/sources/s3.md @@ -361,6 +361,7 @@ This connector utilizes the open source [Unstructured](https://unstructured-io.g | Version | Date | Pull Request | Subject | |:--------|:-----------|:----------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------| +| 4.11.0 | 2024-12-17 | [49824](https://github.com/airbytehq/airbyte/pull/49824) | Increase file size limit to 1.5GB | | 4.10.2 | 2024-11-25 | [48613](https://github.com/airbytehq/airbyte/pull/48613) | Starting with this version, the Docker image is now rootless. Please note that this and future versions will not be compatible with Airbyte versions earlier than 0.64 | | 4.10.1 | 2024-11-12 | [48346](https://github.com/airbytehq/airbyte/pull/48346) | Implement file-transfer capabilities | | 4.9.2 | 2024-11-04 | [48259](https://github.com/airbytehq/airbyte/pull/48259) | Update dependencies | diff --git a/docs/integrations/sources/sftp-bulk.md b/docs/integrations/sources/sftp-bulk.md index 232c6e112c01..a5856cd403bf 100644 --- a/docs/integrations/sources/sftp-bulk.md +++ b/docs/integrations/sources/sftp-bulk.md @@ -156,6 +156,7 @@ This source provides a single stream per file with a dynamic schema. The current | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:------------------------------------------------------------| +| 1.6.0 | 2024-12-17 | [49826](https://github.com/airbytehq/airbyte/pull/49826) | Increase individual file size limit. | | 1.5.0 | 2024-12-02 | [48434](https://github.com/airbytehq/airbyte/pull/48434) | Add get_file method for file-transfer feature. | | 1.4.0 | 2024-10-31 | [46739](https://github.com/airbytehq/airbyte/pull/46739) | make private key an airbyte secret. | | 1.3.0 | 2024-10-31 | [47703](https://github.com/airbytehq/airbyte/pull/47703) | Update dependency to CDK v6 with ability to transfer files. | diff --git a/docs/integrations/sources/stripe.md b/docs/integrations/sources/stripe.md index 2c5a44d241ab..f958aac8623d 100644 --- a/docs/integrations/sources/stripe.md +++ b/docs/integrations/sources/stripe.md @@ -110,6 +110,10 @@ The Stripe source connector supports the following streams: - [Payment Intents](https://stripe.com/docs/api/payment_intents/list) \(Incremental\) - [Payment Methods](https://docs.stripe.com/api/payment_methods/customer_list?lang=curl) \(Incremental\) - [Payouts](https://stripe.com/docs/api/payouts/list) \(Incremental\) +- [Payout Balance Transactions](https://docs.stripe.com/api/balance_transactions/list) \(Incremental\) + :::note + This stream is built with a call using payout_id from the payout stream (parent) as a parmeter to the balance transaction API to get balance transactions that comprised the actual amount of the payout. Check [the Stripe docs](https://docs.stripe.com/api/balance_transactions/list) for more details. + ::: - [Promotion Code](https://stripe.com/docs/api/promotion_codes/list) \(Incremental\) - [Persons](https://stripe.com/docs/api/persons/list) \(Incremental\) - [Plans](https://stripe.com/docs/api/plans/list) \(Incremental\) @@ -197,6 +201,7 @@ On the other hand, the following streams use the `updated` field value as a curs - `Invoices` - `Payment Intents` - `Payouts` +- `Payout Balance Transactions` - `Promotion Codes` - `Persons` - `Plans` @@ -240,7 +245,9 @@ Each record is marked with `is_deleted` flag when the appropriate event happens Expand to review | Version | Date | Pull Request | Subject | -| :------ | :--------- | :-------------------------------------------------------- | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +|:--------|:-----------|:----------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 5.8.2 | 2024-12-10 | [46499](https://github.com/airbytehq/airbyte/pull/46499) | Source-Stripe: Refactor Customer Balance Transactions | +| 5.8.1 | 2024-12-08 | [46499](https://github.com/airbytehq/airbyte/pull/46499) | Source-Stripe: Add new payout_balance_transactions incremental stream | | 5.8.0 | 2024-10-12 | [46864](https://github.com/airbytehq/airbyte/pull/46864) | Add incremental stream support to `accounts` stream | | 5.7.0 | 2024-10-01 | [45860](https://github.com/airbytehq/airbyte/pull/45860) | Add incremental stream support to `invoice_line_items` and `subscription_items` streams | | 5.6.2 | 2024-10-05 | [43881](https://github.com/airbytehq/airbyte/pull/43881) | Update dependencies | diff --git a/docs/integrations/sources/zendesk-support.md b/docs/integrations/sources/zendesk-support.md index 1346acf705d4..62fb583a5aad 100644 --- a/docs/integrations/sources/zendesk-support.md +++ b/docs/integrations/sources/zendesk-support.md @@ -56,14 +56,16 @@ If you prefer to authenticate with OAuth for **Airbyte Open Source**, you can fo ### Set up the Zendesk Support connector in Airbyte + #### For Airbyte Cloud: 1. [Log into your Airbyte Cloud](https://cloud.airbyte.com/workspaces) account. 2. Click Sources and then click + New source. 3. On the Set up the source page, select Zendesk Support from the Source type dropdown. 4. Enter a name for the Zendesk Support connector. - - + + + #### For Airbyte Open Source: 1. Navigate to the Airbyte Open Source dashboard. @@ -149,7 +151,7 @@ The Zendesk Support source connector supports the following streams: The Zendesk Support connector fetches deleted records in the following streams: | Stream | Deletion indicator field | -|:-------------------------|:-------------------------| +| :----------------------- | :----------------------- | | **Brands** | `is_deleted` | | **Groups** | `deleted` | | **Organizations** | `deleted_at` | @@ -183,7 +185,8 @@ The Zendesk connector ideally should not run into Zendesk API limitations under Expand to review | Version | Date | Pull Request | Subject | -|:--------|:-----------|:---------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| :------ | :--------- | :------------------------------------------------------- | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| 4.4.1 | 2024-12-13 | [48889](https://github.com/airbytehq/airbyte/pull/48889) | Check if `start_date` exist in check operation | | 4.4.0 | 2024-11-11 | [48379](https://github.com/airbytehq/airbyte/pull/48379) | Make DatetimeBasedCursor syncs concurrent | | 4.3.3 | 2024-10-28 | [47663](https://github.com/airbytehq/airbyte/pull/47663) | Update dependencies | | 4.3.2 | 2024-10-21 | [47202](https://github.com/airbytehq/airbyte/pull/47202) | Update dependencies and expected records | @@ -294,10 +297,10 @@ The Zendesk connector ideally should not run into Zendesk API limitations under | 0.2.3 | 2022-03-23 | [11349](https://github.com/airbytehq/airbyte/pull/11349) | Fixed the bug when Tickets stream didn't return deleted records | | 0.2.2 | 2022-03-17 | [11237](https://github.com/airbytehq/airbyte/pull/11237) | Fixed the bug when TicketComments stream didn't return all records | | 0.2.1 | 2022-03-15 | [11162](https://github.com/airbytehq/airbyte/pull/11162) | Added support of OAuth2.0 authentication method | -| 0.2.0 | 2022-03-01 | [9456](https://github.com/airbytehq/airbyte/pull/9456) | Update source to use future requests | -| 0.1.12 | 2022-01-25 | [9785](https://github.com/airbytehq/airbyte/pull/9785) | Add additional log messages | -| 0.1.11 | 2021-12-21 | [8987](https://github.com/airbytehq/airbyte/pull/8987) | Update connector fields title/description | -| 0.1.9 | 2021-12-16 | [8616](https://github.com/airbytehq/airbyte/pull/8616) | Adds Brands, CustomRoles and Schedules streams | +| 0.2.0 | 2022-03-01 | [9456](https://github.com/airbytehq/airbyte/pull/9456) | Update source to use future requests | +| 0.1.12 | 2022-01-25 | [9785](https://github.com/airbytehq/airbyte/pull/9785) | Add additional log messages | +| 0.1.11 | 2021-12-21 | [8987](https://github.com/airbytehq/airbyte/pull/8987) | Update connector fields title/description | +| 0.1.9 | 2021-12-16 | [8616](https://github.com/airbytehq/airbyte/pull/8616) | Adds Brands, CustomRoles and Schedules streams | | 0.1.8 | 2021-11-23 | [8050](https://github.com/airbytehq/airbyte/pull/8168) | Adds TicketMetricEvents stream | | 0.1.7 | 2021-11-23 | [8058](https://github.com/airbytehq/airbyte/pull/8058) | Added support of AccessToken authentication | | 0.1.6 | 2021-11-18 | [8050](https://github.com/airbytehq/airbyte/pull/8050) | Fix wrong types for schemas, add TypeTransformer | diff --git a/docusaurus/src/css/custom.css b/docusaurus/src/css/custom.css index a4e5298b3e57..663f4b360be0 100644 --- a/docusaurus/src/css/custom.css +++ b/docusaurus/src/css/custom.css @@ -261,7 +261,9 @@ table tr:last-child td:last-child { border-bottom-right-radius: 10px; } - +table th code { + color: var(--ifm-color-content); +} table td code { background-color: var(--color-blue-30);