diff --git a/.gitignore b/.gitignore index 11372931a..0da665fee 100644 --- a/.gitignore +++ b/.gitignore @@ -2,4 +2,4 @@ dist/ .mypy_cache/ __pycache__/ poetry.toml -.idea/ \ No newline at end of file +.ruff_cache/ diff --git a/.mock/definition/__package__.yml b/.mock/definition/__package__.yml index c4851dd7d..33e332202 100644 --- a/.mock/definition/__package__.yml +++ b/.mock/definition/__package__.yml @@ -2,6 +2,9 @@ errors: BadRequestError: status-code: 400 type: unknown + docs: Invalid project ID supplied + examples: + - value: string NotFoundError: status-code: 404 type: unknown @@ -10,6 +13,8 @@ errors: status-code: 500 type: string docs: Predicting error + examples: + - value: Server responded with an error. MethodNotAllowedError: status-code: 405 type: unknown @@ -28,6 +33,7 @@ types: - fixed_and_accepted - deleted_review docs: Action which was performed in the last annotation history item + inline: true source: openapi: openapi/openapi.yaml Annotation: @@ -304,12 +310,14 @@ types: - ER - TR - PR + inline: true source: openapi: openapi/openapi.yaml MlBackendAuthMethod: enum: - NONE - BASIC_AUTH + inline: true source: openapi: openapi/openapi.yaml MlBackend: @@ -516,6 +524,7 @@ types: name: UniformSampling - value: Uncertainty sampling name: UncertaintySampling + inline: true source: openapi: openapi/openapi.yaml ProjectSkipQueue: @@ -523,6 +532,7 @@ types: - REQUEUE_FOR_ME - REQUEUE_FOR_OTHERS - IGNORE_SKIPPED + inline: true source: openapi: openapi/openapi.yaml Project: @@ -556,6 +566,7 @@ types: type: optional docs: Show annotation history to annotator organization: optional + prompts: optional> color: type: optional validation: @@ -680,6 +691,7 @@ types: - in_progress - failed - completed + inline: true source: openapi: openapi/openapi.yaml ConvertedFormat: @@ -702,6 +714,7 @@ types: - in_progress - failed - completed + inline: true source: openapi: openapi/openapi.yaml Export: @@ -798,6 +811,7 @@ types: - in_progress - failed - completed + inline: true source: openapi: openapi/openapi.yaml ExportCreate: @@ -842,6 +856,7 @@ types: - in_progress - failed - completed + inline: true source: openapi: openapi/openapi.yaml ProjectImport: @@ -885,6 +900,7 @@ types: - in_progress - failed - completed + inline: true source: openapi: openapi/openapi.yaml ProjectReimport: @@ -911,6 +927,7 @@ types: - in_progress - failed - completed + inline: true source: openapi: openapi/openapi.yaml AzureBlobImportStorage: @@ -987,6 +1004,7 @@ types: - in_progress - failed - completed + inline: true source: openapi: openapi/openapi.yaml AzureBlobExportStorage: @@ -1060,6 +1078,7 @@ types: - in_progress - failed - completed + inline: true source: openapi: openapi/openapi.yaml GcsExportStorage: @@ -1133,6 +1152,7 @@ types: - in_progress - failed - completed + inline: true source: openapi: openapi/openapi.yaml LocalFilesExportStorage: @@ -1197,6 +1217,7 @@ types: - in_progress - failed - completed + inline: true source: openapi: openapi/openapi.yaml RedisExportStorage: @@ -1273,6 +1294,7 @@ types: - in_progress - failed - completed + inline: true source: openapi: openapi/openapi.yaml S3ExportStorage: @@ -1358,6 +1380,7 @@ types: - in_progress - failed - completed + inline: true source: openapi: openapi/openapi.yaml GcsImportStorage: @@ -1434,6 +1457,7 @@ types: - in_progress - failed - completed + inline: true source: openapi: openapi/openapi.yaml LocalFilesImportStorage: @@ -1495,6 +1519,7 @@ types: - in_progress - failed - completed + inline: true source: openapi: openapi/openapi.yaml RedisImportStorage: 
@@ -1568,6 +1593,7 @@ types: - in_progress - failed - completed + inline: true source: openapi: openapi/openapi.yaml S3ImportStorage: @@ -1662,6 +1688,7 @@ types: docs: Last annotators or reviewers who updated this task source: openapi: openapi/openapi.yaml + inline: true BaseTaskFileUpload: discriminated: false docs: Uploaded file used as data source for this task @@ -1672,6 +1699,7 @@ types: docs: Uploaded file ID used as data source for this task source: openapi: openapi/openapi.yaml + inline: true BaseTask: properties: id: optional @@ -1751,6 +1779,7 @@ types: - fixed_and_accepted - deleted_review docs: Action which was performed in the last annotation history item + inline: true source: openapi: openapi/openapi.yaml AnnotationsDmField: @@ -1823,24 +1852,38 @@ types: openapi: openapi/openapi.yaml DataManagerTaskSerializerPredictionsItem: properties: - result: optional>> - score: optional - model_version: optional - model: optional> - model_run: optional> - task: optional - project: optional - created_at: optional - updated_at: optional + result: + type: optional>> + score: + type: optional + model_version: + type: optional + model: + type: optional> + model_run: + type: optional> + task: + type: optional + project: + type: optional + created_at: + type: optional + updated_at: + type: optional source: openapi: openapi/openapi.yaml + inline: true DataManagerTaskSerializerDraftsItem: properties: - result: optional>> - created_at: optional - updated_at: optional + result: + type: optional>> + created_at: + type: optional + updated_at: + type: optional source: openapi: openapi/openapi.yaml + inline: true DataManagerTaskSerializerAnnotatorsItem: discriminated: false union: @@ -1848,6 +1891,7 @@ types: - map source: openapi: openapi/openapi.yaml + inline: true DataManagerTaskSerializer: properties: id: optional @@ -1935,6 +1979,7 @@ types: - LABEL_LINK_CREATED - LABEL_LINK_UPDATED - LABEL_LINK_DELETED + inline: true source: openapi: openapi/openapi.yaml Webhook: @@ -1984,6 +2029,7 @@ types: - LABEL_LINK_CREATED - LABEL_LINK_UPDATED - LABEL_LINK_DELETED + inline: true source: openapi: openapi/openapi.yaml WebhookSerializerForUpdate: @@ -2026,6 +2072,7 @@ types: - map source: openapi: openapi/openapi.yaml + inline: true TaskCommentAuthorsItem: discriminated: false union: @@ -2033,6 +2080,7 @@ types: - map source: openapi: openapi/openapi.yaml + inline: true Task: properties: id: @@ -2156,6 +2204,7 @@ types: - in_progress - failed - completed + inline: true source: openapi: openapi/openapi.yaml S3SImportStorage: @@ -2289,6 +2338,7 @@ types: - map source: openapi: openapi/openapi.yaml + inline: true PromptOrganization: discriminated: false docs: Organization ID of the prompt @@ -2297,6 +2347,7 @@ types: - map source: openapi: openapi/openapi.yaml + inline: true Prompt: properties: title: @@ -2337,6 +2388,7 @@ types: enum: - OpenAI - AzureOpenAI + inline: true source: openapi: openapi/openapi.yaml PromptVersionCreatedBy: @@ -2346,6 +2398,7 @@ types: - map source: openapi: openapi/openapi.yaml + inline: true PromptVersionOrganization: discriminated: false union: @@ -2353,6 +2406,7 @@ types: - map source: openapi: openapi/openapi.yaml + inline: true PromptVersion: properties: title: @@ -2400,6 +2454,7 @@ types: - Failed docs: Status of the refinement job default: Pending + inline: true source: openapi: openapi/openapi.yaml RefinedPromptResponse: @@ -2435,6 +2490,7 @@ types: - map source: openapi: openapi/openapi.yaml + inline: true InferenceRunCreatedBy: discriminated: false union: @@ -2442,11 
+2498,13 @@ types: - map source: openapi: openapi/openapi.yaml + inline: true InferenceRunProjectSubset: enum: - All - HasGT - Sample + inline: true source: openapi: openapi/openapi.yaml InferenceRunStatus: @@ -2456,6 +2514,7 @@ types: - Completed - Failed - Canceled + inline: true source: openapi: openapi/openapi.yaml InferenceRun: @@ -2464,7 +2523,8 @@ types: project: integer model_version: optional created_by: optional - project_subset: InferenceRunProjectSubset + project_subset: + type: InferenceRunProjectSubset status: optional job_id: optional created_at: optional @@ -2485,6 +2545,7 @@ types: docs: The label for this KPI, to be displayed to the user source: openapi: openapi/openapi.yaml + inline: true KeyIndicatorsItemExtraKpisItem: properties: key: @@ -2497,6 +2558,7 @@ types: docs: The label for this KPI, to be displayed to the user source: openapi: openapi/openapi.yaml + inline: true KeyIndicatorsItem: properties: key: @@ -2517,11 +2579,14 @@ types: docs: Extra KPIs to be displayed in the hover-tooltip for that indicator source: openapi: openapi/openapi.yaml - KeyIndicators: list + KeyIndicators: + type: list KeyIndicatorValue: properties: - title: optional - values: optional> + title: + type: optional + values: + type: optional> source: openapi: openapi/openapi.yaml ModelProviderConnectionProvider: @@ -2529,6 +2594,7 @@ types: - OpenAI - AzureOpenAI - Custom + inline: true source: openapi: openapi/openapi.yaml ModelProviderConnectionScope: @@ -2536,6 +2602,7 @@ types: - Organization - User - Model + inline: true source: openapi: openapi/openapi.yaml ModelProviderConnectionOrganization: @@ -2545,6 +2612,7 @@ types: - map source: openapi: openapi/openapi.yaml + inline: true ModelProviderConnectionCreatedBy: discriminated: false union: @@ -2552,6 +2620,7 @@ types: - map source: openapi: openapi/openapi.yaml + inline: true ModelProviderConnectionBudgetResetPeriod: enum: - Monthly @@ -2559,12 +2628,15 @@ types: - None docs: Budget reset period for the model provider connection (null if not reset) default: None + inline: true source: openapi: openapi/openapi.yaml ModelProviderConnection: properties: - provider: ModelProviderConnectionProvider - api_key: optional + provider: + type: ModelProviderConnectionProvider + api_key: + type: optional deployment_name: optional endpoint: optional scope: optional @@ -2607,6 +2679,7 @@ types: - map source: openapi: openapi/openapi.yaml + inline: true Comment: properties: id: integer @@ -2615,8 +2688,10 @@ types: task: integer annotation: integer created_by: CommentCreatedBy - created_at: datetime - updated_at: datetime + created_at: + type: datetime + updated_at: + type: datetime is_resolved: optional resolved_at: optional source: diff --git a/.mock/definition/actions.yml b/.mock/definition/actions.yml index 212d65f01..6e745bb48 100644 --- a/.mock/definition/actions.yml +++ b/.mock/definition/actions.yml @@ -9,6 +9,8 @@ service: docs: >- Retrieve all the registered actions with descriptions that the data manager can use. + source: + openapi: openapi/openapi.yaml display-name: Get actions examples: - {} @@ -23,6 +25,8 @@ service: More complex actions require additional parameters in the request body. Call `GET api/actions?project=` to explore them.
Example: `GET api/actions?id=delete_tasks&project=1` + source: + openapi: openapi/openapi.yaml display-name: Post actions request: name: ActionsCreateRequest @@ -65,6 +69,7 @@ service: List of fields to order by. Fields are similar to filters but without the `filter:` prefix. To reverse the order, add a minus sign before the field name, e.g. `-tasks:created_at`. + content-type: application/json examples: - query-parameters: id: retrieve_tasks_predictions @@ -113,6 +118,7 @@ types: "and") will be applied to all items in the filters list. It is not possible to combine "or" and "and" within one list of filters. All filters will be either combined with "or" or with "and", but not a mix of both. + inline: true source: openapi: openapi/openapi.yaml ActionsCreateRequestFiltersItemsItemFilter: @@ -208,6 +214,7 @@ types: Number of unresolved comments in a task
  • `filter:tasks:updated_at`
    (Datetime) Time the task was updated at (e.g. new annotation was created, review added, etc)
  • + inline: true source: openapi: openapi/openapi.yaml ActionsCreateRequestFiltersItemsItemOperator: @@ -239,6 +246,7 @@ types: to
  • `not_exists`
    Does not exist

  • `not_in`
    Is not between min and max values, so the filter `value` should be e.g. `{"min": 1, "max": 7}`

  • `starts_with`
    Starts with
  • + inline: true source: openapi: openapi/openapi.yaml ActionsCreateRequestFiltersItemsItemValue: @@ -257,6 +265,7 @@ types: docs: List of strings or integers source: openapi: openapi/openapi.yaml + inline: true ActionsCreateRequestFiltersItemsItem: properties: filter: @@ -340,6 +349,7 @@ types: docs: Value to filter by source: openapi: openapi/openapi.yaml + inline: true ActionsCreateRequestFilters: docs: >- Filters to apply on tasks. You can use [the helper class `Filters` from @@ -361,6 +371,7 @@ types: type: list source: openapi: openapi/openapi.yaml + inline: true ActionsCreateRequestSelectedItemsIncluded: properties: all: @@ -371,6 +382,7 @@ types: docs: List of included task IDs source: openapi: openapi/openapi.yaml + inline: true ActionsCreateRequestSelectedItemsExcluded: properties: all: @@ -381,6 +393,7 @@ types: docs: List of excluded task IDs source: openapi: openapi/openapi.yaml + inline: true ActionsCreateRequestSelectedItems: discriminated: false docs: >- @@ -389,10 +402,11 @@ types: used. If "all" is `true`, `"excluded"` must be used.
    Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` union: - - ActionsCreateRequestSelectedItemsIncluded - - ActionsCreateRequestSelectedItemsExcluded + - type: ActionsCreateRequestSelectedItemsIncluded + - type: ActionsCreateRequestSelectedItemsExcluded source: openapi: openapi/openapi.yaml + inline: true ActionsCreateRequestOrderingItem: enum: - value: tasks:agreement @@ -439,5 +453,6 @@ types: name: TasksUnresolvedCommentCount - value: tasks:updated_at name: TasksUpdatedAt + inline: true source: openapi: openapi/openapi.yaml diff --git a/.mock/definition/annotations.yml b/.mock/definition/annotations.yml index 693c43aed..6a7ff3680 100644 --- a/.mock/definition/annotations.yml +++ b/.mock/definition/annotations.yml @@ -18,6 +18,8 @@ service: annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -86,6 +88,8 @@ service: It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -115,6 +119,8 @@ service: For information about the JSON format used in the result, see [Label Studio JSON format of annotated tasks](https://labelstud.io/guide/export#Label-Studio-JSON-format-of-annotated-tasks). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -151,6 +157,7 @@ service: lead_time: type: optional docs: How much time it took to annotate the task (in seconds) + content-type: application/json response: docs: Updated annotation type: root.Annotation @@ -228,6 +235,8 @@ service: You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -320,6 +329,8 @@ service: } ``` + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -356,6 +367,7 @@ service: lead_time: type: optional docs: How much time it took to annotate the task (in seconds) + content-type: application/json response: docs: Created annotation type: root.Annotation @@ -427,6 +439,8 @@ service: auth: true docs: | Create multiple annotations for specific tasks in a bulk operation. + source: + openapi: openapi/openapi.yaml display-name: Create annotations in bulk request: name: AnnotationsCreateBulkRequest @@ -436,6 +450,7 @@ service: lead_time: optional project: optional result: optional> + content-type: application/json response: docs: Annotations created successfully type: list diff --git a/.mock/definition/comments.yml b/.mock/definition/comments.yml index 7cb0181f4..1f9ffbdba 100644 --- a/.mock/definition/comments.yml +++ b/.mock/definition/comments.yml @@ -11,6 +11,8 @@ service: docs: | Get a list of comments for a specific project. + source: + openapi: openapi/openapi.yaml display-name: List comments request: name: CommentsListRequest @@ -49,6 +51,8 @@ service: docs: | Create a new comment. 
+ source: + openapi: openapi/openapi.yaml display-name: Create comment request: name: CommentsCreateRequest @@ -58,6 +62,7 @@ service: project: optional text: optional is_resolved: optional + content-type: application/json response: docs: '' type: root.Comment @@ -84,6 +89,8 @@ service: docs: | Get a specific comment. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -116,6 +123,8 @@ service: docs: | Delete a specific comment. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -133,6 +142,8 @@ service: docs: | Update a specific comment. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -146,6 +157,7 @@ service: project: optional text: optional is_resolved: optional + content-type: application/json response: docs: '' type: root.Comment diff --git a/.mock/definition/dataManager.yml b/.mock/definition/dataManager.yml index 215350e70..d17a3a030 100644 --- a/.mock/definition/dataManager.yml +++ b/.mock/definition/dataManager.yml @@ -12,6 +12,8 @@ service: Retrieve the data manager columns available for the tasks in a specific project. For more details, see [GET api/actions](#/Data%20Manager/get_api_actions). + source: + openapi: openapi/openapi.yaml display-name: Get data manager columns request: name: ApiDmColumnsListRequest @@ -57,6 +59,8 @@ service: method: GET auth: true docs: Retrieve the project state for the data manager. + source: + openapi: openapi/openapi.yaml display-name: Get project state examples: - {} @@ -69,6 +73,8 @@ service: docs: >- Overwrite view data with updated filters and other information for a specific project. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: string @@ -84,6 +90,7 @@ service: project: type: optional docs: Project ID + content-type: application/json response: docs: '' type: ApiDmViewsUpdateResponse @@ -118,6 +125,7 @@ types: "and") will be applied to all items in the filters list. It is not possible to combine "or" and "and" within one list of filters. All filters will be either combined with "or" or with "and", but not a mix of both. + inline: true source: openapi: openapi/openapi.yaml ApiDmViewsUpdateRequestDataFiltersItemsItemFilter: @@ -213,6 +221,7 @@ types: Number of unresolved comments in a task
  • `filter:tasks:updated_at`
    (Datetime) Time the task was updated at (e.g. new annotation was created, review added, etc)
  • + inline: true source: openapi: openapi/openapi.yaml ApiDmViewsUpdateRequestDataFiltersItemsItemOperator: @@ -244,6 +253,7 @@ types: to
  • `not_exists`
    Does not exist

  • `not_in`
    Is not between min and max values, so the filter `value` should be e.g. `{"min": 1, "max": 7}`

  • `starts_with`
    Starts with
  • + inline: true source: openapi: openapi/openapi.yaml ApiDmViewsUpdateRequestDataFiltersItemsItemValue: @@ -262,6 +272,7 @@ types: docs: List of strings or integers source: openapi: openapi/openapi.yaml + inline: true ApiDmViewsUpdateRequestDataFiltersItemsItem: properties: filter: @@ -345,6 +356,7 @@ types: docs: Value to filter by source: openapi: openapi/openapi.yaml + inline: true ApiDmViewsUpdateRequestDataFilters: docs: >- Filters to apply on tasks. You can use [the helper class `Filters` from @@ -366,6 +378,7 @@ types: type: list source: openapi: openapi/openapi.yaml + inline: true ApiDmViewsUpdateRequestDataOrderingItem: enum: - value: tasks:agreement @@ -412,6 +425,7 @@ types: name: TasksUnresolvedCommentCount - value: tasks:updated_at name: TasksUpdatedAt + inline: true source: openapi: openapi/openapi.yaml ApiDmViewsUpdateRequestData: @@ -433,6 +447,7 @@ types: the field name, e.g. `-tasks:created_at`. source: openapi: openapi/openapi.yaml + inline: true ApiDmViewsUpdateResponseDataFiltersConjunction: enum: - or @@ -442,6 +457,7 @@ types: "and") will be applied to all items in the filters list. It is not possible to combine "or" and "and" within one list of filters. All filters will be either combined with "or" or with "and", but not a mix of both. + inline: true source: openapi: openapi/openapi.yaml ApiDmViewsUpdateResponseDataFiltersItemsItemFilter: @@ -537,6 +553,7 @@ types: Number of unresolved comments in a task
  • `filter:tasks:updated_at`
    (Datetime) Time the task was updated at (e.g. new annotation was created, review added, etc)
  • + inline: true source: openapi: openapi/openapi.yaml ApiDmViewsUpdateResponseDataFiltersItemsItemOperator: @@ -568,6 +585,7 @@ types: to
  • `not_exists`
    Does not exist

  • `not_in`
    Is not between min and max values, so the filter `value` should be e.g. `{"min": 1, "max": 7}`

  • `starts_with`
    Starts with
  • + inline: true source: openapi: openapi/openapi.yaml ApiDmViewsUpdateResponseDataFiltersItemsItemValue: @@ -586,6 +604,7 @@ types: docs: List of strings or integers source: openapi: openapi/openapi.yaml + inline: true ApiDmViewsUpdateResponseDataFiltersItemsItem: properties: filter: @@ -669,6 +688,7 @@ types: docs: Value to filter by source: openapi: openapi/openapi.yaml + inline: true ApiDmViewsUpdateResponseDataFilters: docs: >- Filters to apply on tasks. You can use [the helper class `Filters` from @@ -690,6 +710,7 @@ types: type: list source: openapi: openapi/openapi.yaml + inline: true ApiDmViewsUpdateResponseDataOrderingItem: enum: - value: tasks:agreement @@ -736,6 +757,7 @@ types: name: TasksUnresolvedCommentCount - value: tasks:updated_at name: TasksUpdatedAt + inline: true source: openapi: openapi/openapi.yaml ApiDmViewsUpdateResponseData: @@ -757,6 +779,7 @@ types: the field name, e.g. `-tasks:created_at`. source: openapi: openapi/openapi.yaml + inline: true ApiDmViewsUpdateResponse: properties: data: diff --git a/.mock/definition/export_storage.yml b/.mock/definition/exportStorage.yml similarity index 90% rename from .mock/definition/export_storage.yml rename to .mock/definition/exportStorage.yml index b279e4063..fbe231718 100644 --- a/.mock/definition/export_storage.yml +++ b/.mock/definition/exportStorage.yml @@ -7,6 +7,8 @@ service: method: GET auth: true docs: Retrieve a list of the export storages of all types with their IDs. + source: + openapi: openapi/openapi.yaml display-name: List all export storages from the project examples: - {} @@ -17,6 +19,8 @@ service: method: GET auth: true docs: Retrieve a list of the export storages types. + source: + openapi: openapi/openapi.yaml display-name: List all export storages types response: docs: '' diff --git a/.mock/definition/export_storage/azure.yml b/.mock/definition/exportStorage/azure.yml similarity index 96% rename from .mock/definition/export_storage/azure.yml rename to .mock/definition/exportStorage/azure.yml index 43bf70682..e7de94cd9 100644 --- a/.mock/definition/export_storage/azure.yml +++ b/.mock/definition/exportStorage/azure.yml @@ -23,6 +23,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml display-name: List export storages request: name: AzureListRequest @@ -77,6 +79,8 @@ service: After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). + source: + openapi: openapi/openapi.yaml display-name: Create new storage request: name: AzureCreateRequest @@ -107,6 +111,7 @@ service: account_key: type: optional docs: Azure Blob account key + content-type: application/json response: docs: '' type: AzureCreateResponse @@ -133,6 +138,8 @@ service: Validate a specific Azure export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + source: + openapi: openapi/openapi.yaml display-name: Validate export storage request: name: AzureValidateRequest @@ -166,6 +173,7 @@ service: account_key: type: optional docs: Azure Blob account key + content-type: application/json examples: - request: {} audiences: @@ -183,6 +191,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
+ source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -235,6 +245,8 @@ service: synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -258,6 +270,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -292,6 +306,7 @@ service: account_key: type: optional docs: Azure Blob account key + content-type: application/json response: docs: '' type: AzureUpdateResponse @@ -332,6 +347,8 @@ service: Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + source: + openapi: openapi/openapi.yaml path-parameters: id: string display-name: Sync export storage diff --git a/.mock/definition/export_storage/gcs.yml b/.mock/definition/exportStorage/gcs.yml similarity index 96% rename from .mock/definition/export_storage/gcs.yml rename to .mock/definition/exportStorage/gcs.yml index 116621875..99143be56 100644 --- a/.mock/definition/export_storage/gcs.yml +++ b/.mock/definition/exportStorage/gcs.yml @@ -22,6 +22,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml display-name: List export storages request: name: GcsListRequest @@ -75,6 +77,8 @@ service: After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). + source: + openapi: openapi/openapi.yaml display-name: Create new storage request: name: GcsCreateRequest @@ -107,6 +111,7 @@ service: google_project_id: type: optional docs: Google project ID + content-type: application/json response: docs: '' type: GcsCreateResponse @@ -133,6 +138,8 @@ service: Validate a specific GCS export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + source: + openapi: openapi/openapi.yaml display-name: Validate export storage request: name: GcsValidateRequest @@ -168,6 +175,7 @@ service: google_project_id: type: optional docs: Google project ID + content-type: application/json examples: - request: {} audiences: @@ -185,6 +193,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -237,6 +247,8 @@ service: synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -260,6 +272,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
+ source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -296,6 +310,7 @@ service: google_project_id: type: optional docs: Google project ID + content-type: application/json response: docs: '' type: GcsUpdateResponse @@ -335,6 +350,8 @@ service: Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + source: + openapi: openapi/openapi.yaml path-parameters: id: string display-name: Sync export storage diff --git a/.mock/definition/export_storage/local.yml b/.mock/definition/exportStorage/local.yml similarity index 96% rename from .mock/definition/export_storage/local.yml rename to .mock/definition/exportStorage/local.yml index d32c1789e..789d247af 100644 --- a/.mock/definition/export_storage/local.yml +++ b/.mock/definition/exportStorage/local.yml @@ -22,6 +22,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml display-name: List export storages request: name: LocalListRequest @@ -72,6 +74,8 @@ service: After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). + source: + openapi: openapi/openapi.yaml display-name: Create new storage request: name: LocalCreateRequest @@ -100,6 +104,7 @@ service: generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. default: false + content-type: application/json response: docs: '' type: LocalCreateResponse @@ -124,6 +129,8 @@ service: Validate a specific local file export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + source: + openapi: openapi/openapi.yaml display-name: Validate export storage request: name: LocalValidateRequest @@ -155,6 +162,7 @@ service: generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. default: false + content-type: application/json examples: - request: {} audiences: @@ -172,6 +180,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -221,6 +231,8 @@ service: synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -244,6 +256,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -276,6 +290,7 @@ service: generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. default: false + content-type: application/json response: docs: '' type: LocalUpdateResponse @@ -314,6 +329,8 @@ service: Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. 
+ source: + openapi: openapi/openapi.yaml path-parameters: id: string display-name: Sync export storage diff --git a/.mock/definition/export_storage/redis.yml b/.mock/definition/exportStorage/redis.yml similarity index 96% rename from .mock/definition/export_storage/redis.yml rename to .mock/definition/exportStorage/redis.yml index 4de217341..914b19342 100644 --- a/.mock/definition/export_storage/redis.yml +++ b/.mock/definition/exportStorage/redis.yml @@ -22,6 +22,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml display-name: List export storages request: name: RedisListRequest @@ -76,6 +78,8 @@ service: After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). + source: + openapi: openapi/openapi.yaml display-name: Create new storage request: name: RedisCreateRequest @@ -108,6 +112,7 @@ service: password: type: optional docs: Server Password (optional) + content-type: application/json response: docs: '' type: RedisCreateResponse @@ -135,6 +140,8 @@ service: Validate a specific Redis export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + source: + openapi: openapi/openapi.yaml display-name: Validate export storage request: name: RedisValidateRequest @@ -170,6 +177,7 @@ service: password: type: optional docs: Server Password (optional) + content-type: application/json examples: - request: {} audiences: @@ -187,6 +195,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -240,6 +250,8 @@ service: synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -263,6 +275,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -299,6 +313,7 @@ service: password: type: optional docs: Server Password (optional) + content-type: application/json response: docs: '' type: RedisUpdateResponse @@ -340,6 +355,8 @@ service: Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + source: + openapi: openapi/openapi.yaml path-parameters: id: string display-name: Sync export storage diff --git a/.mock/definition/export_storage/s3.yml b/.mock/definition/exportStorage/s3.yml similarity index 97% rename from .mock/definition/export_storage/s3.yml rename to .mock/definition/exportStorage/s3.yml index 28844250c..b9b98fdca 100644 --- a/.mock/definition/export_storage/s3.yml +++ b/.mock/definition/exportStorage/s3.yml @@ -22,6 +22,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
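The per-type `list` endpoints are the usual way to look up a storage ID before calling `get`, `update`, or `sync`. A brief sketch, again with inferred client names, assuming the list request accepts the `project` filter shown in the definitions above:

```python
from label_studio_sdk.client import LabelStudio  # assumed import path

client = LabelStudio(base_url="http://localhost:8080", api_key="<your-token>")

# Retrieve all S3 export storages attached to project 1 (see S3ListRequest).
for storage in client.export_storage.s3.list(project=1):
    print(storage)  # each entry carries the ID needed by get/update/sync
```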
+ source: + openapi: openapi/openapi.yaml display-name: List export storages request: name: S3ListRequest @@ -79,6 +81,8 @@ service: After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). + source: + openapi: openapi/openapi.yaml display-name: Create new storage request: name: S3CreateRequest @@ -120,6 +124,7 @@ service: s3_endpoint: type: optional docs: S3 Endpoint + content-type: application/json response: docs: '' type: S3CreateResponse @@ -150,6 +155,8 @@ service: Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + source: + openapi: openapi/openapi.yaml display-name: Validate export storage request: name: S3ValidateRequest @@ -194,6 +201,7 @@ service: s3_endpoint: type: optional docs: S3 Endpoint + content-type: application/json examples: - request: {} audiences: @@ -211,6 +219,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -267,6 +277,8 @@ service: synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -290,6 +302,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -335,6 +349,7 @@ service: s3_endpoint: type: optional docs: S3 Endpoint + content-type: application/json response: docs: '' type: S3UpdateResponse @@ -378,6 +393,8 @@ service: Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + source: + openapi: openapi/openapi.yaml path-parameters: id: string display-name: Sync export storage diff --git a/.mock/definition/export_storage/s3s.yml b/.mock/definition/exportStorage/s3S.yml similarity index 95% rename from .mock/definition/export_storage/s3s.yml rename to .mock/definition/exportStorage/s3S.yml index 6bcdad026..b3d5cb030 100644 --- a/.mock/definition/export_storage/s3s.yml +++ b/.mock/definition/exportStorage/s3S.yml @@ -22,6 +22,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml display-name: List export storages request: name: S3SListRequest @@ -61,6 +63,8 @@ service: For information about the required fields and prerequisites, see [Amazon S3](https://docs.humansignal.com/guide/storage#Set-up-an-S3-connection-with-IAM-role-access) in the Label Studio documentation. + source: + openapi: openapi/openapi.yaml display-name: Create export storage request: name: S3SCreateRequest @@ -96,6 +100,7 @@ service: s3_endpoint: type: optional docs: S3 Endpoint + content-type: application/json response: docs: '' type: root.S3SExportStorage @@ -125,6 +130,8 @@ service: Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). 
+ source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -160,6 +167,8 @@ service: Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -179,6 +188,8 @@ service: Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -218,6 +229,7 @@ service: s3_endpoint: type: optional docs: S3 Endpoint + content-type: application/json response: docs: '' type: root.S3SExportStorage @@ -249,6 +261,8 @@ service: Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + source: + openapi: openapi/openapi.yaml display-name: Validate export storage request: name: S3SValidateRequest @@ -284,6 +298,7 @@ service: s3_endpoint: type: optional docs: S3 Endpoint + content-type: application/json examples: - request: {} audiences: diff --git a/.mock/definition/files.yml b/.mock/definition/files.yml index 5f098b9d9..1d1c1eac6 100644 --- a/.mock/definition/files.yml +++ b/.mock/definition/files.yml @@ -11,6 +11,8 @@ service: docs: >- Retrieve details about a specific uploaded file. To get the file upload ID, use [Get files list](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -35,6 +37,8 @@ service: docs: >- Delete a specific uploaded file. To get the file upload ID, use [Get files list](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -64,6 +68,8 @@ service: ‘file=@path/to/my_file.csv’ ``` + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -71,6 +77,7 @@ service: display-name: Update file upload request: body: root.FileUpload + content-type: application/json response: docs: '' type: root.FileUpload @@ -98,6 +105,8 @@ service: You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -138,6 +147,8 @@ service: You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -155,6 +166,8 @@ service: docs: >- Download a specific uploaded file. If you aren't sure of the file name, try [Get files list](list) first. + source: + openapi: openapi/openapi.yaml path-parameters: filename: string display-name: Download file diff --git a/.mock/definition/import_storage.yml b/.mock/definition/importStorage.yml similarity index 90% rename from .mock/definition/import_storage.yml rename to .mock/definition/importStorage.yml index 5c4bea19e..6a7663a25 100644 --- a/.mock/definition/import_storage.yml +++ b/.mock/definition/importStorage.yml @@ -7,6 +7,8 @@ service: method: GET auth: true docs: Retrieve a list of the import storages of all types with their IDs. 
+ source: + openapi: openapi/openapi.yaml display-name: List all import storages from the project examples: - {} @@ -17,6 +19,8 @@ service: method: GET auth: true docs: Retrieve a list of the import storages types. + source: + openapi: openapi/openapi.yaml display-name: List all import storages types response: docs: '' diff --git a/.mock/definition/import_storage/azure.yml b/.mock/definition/importStorage/azure.yml similarity index 97% rename from .mock/definition/import_storage/azure.yml rename to .mock/definition/importStorage/azure.yml index 13515cc6b..ed30150b5 100644 --- a/.mock/definition/import_storage/azure.yml +++ b/.mock/definition/importStorage/azure.yml @@ -23,6 +23,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml display-name: List import storages request: name: AzureListRequest @@ -83,6 +85,8 @@ service: After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + source: + openapi: openapi/openapi.yaml display-name: Create new storage request: name: AzureCreateRequest @@ -130,6 +134,7 @@ service: account_key: type: optional docs: Azure Blob account key + content-type: application/json response: docs: '' type: AzureCreateResponse @@ -159,6 +164,8 @@ service: Validate a specific Azure import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + source: + openapi: openapi/openapi.yaml display-name: Validate import storage request: name: AzureValidateRequest @@ -209,6 +216,7 @@ service: account_key: type: optional docs: Azure Blob account key + content-type: application/json examples: - request: {} audiences: @@ -226,6 +234,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -285,6 +295,8 @@ service: If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -308,6 +320,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -359,6 +373,7 @@ service: account_key: type: optional docs: Azure Blob account key + content-type: application/json response: docs: '' type: AzureUpdateResponse @@ -402,6 +417,8 @@ service: Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. 
+ source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer diff --git a/.mock/definition/import_storage/gcs.yml b/.mock/definition/importStorage/gcs.yml similarity index 97% rename from .mock/definition/import_storage/gcs.yml rename to .mock/definition/importStorage/gcs.yml index b6edf9518..0db24201b 100644 --- a/.mock/definition/import_storage/gcs.yml +++ b/.mock/definition/importStorage/gcs.yml @@ -22,6 +22,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml display-name: List import storages request: name: GcsListRequest @@ -82,6 +84,8 @@ service: After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + source: + openapi: openapi/openapi.yaml display-name: Create import storage request: name: GcsCreateRequest @@ -132,6 +136,7 @@ service: google_project_id: type: optional docs: Google project ID + content-type: application/json response: docs: '' type: GcsCreateResponse @@ -161,6 +166,8 @@ service: Validate a specific GCS import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + source: + openapi: openapi/openapi.yaml display-name: Validate import storage request: name: GcsValidateRequest @@ -214,6 +221,7 @@ service: google_project_id: type: optional docs: Google project ID + content-type: application/json examples: - request: {} audiences: @@ -231,6 +239,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -290,6 +300,8 @@ service: If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -313,6 +325,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -367,6 +381,7 @@ service: google_project_id: type: optional docs: Google project ID + content-type: application/json response: docs: '' type: GcsUpdateResponse @@ -409,6 +424,8 @@ service: Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer diff --git a/.mock/definition/import_storage/local.yml b/.mock/definition/importStorage/local.yml similarity index 96% rename from .mock/definition/import_storage/local.yml rename to .mock/definition/importStorage/local.yml index 606640048..cc6d03c47 100644 --- a/.mock/definition/import_storage/local.yml +++ b/.mock/definition/importStorage/local.yml @@ -24,6 +24,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
+ source: + openapi: openapi/openapi.yaml display-name: List import storages request: name: LocalListRequest @@ -73,6 +75,8 @@ service: After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + source: + openapi: openapi/openapi.yaml display-name: Create import storage request: name: LocalCreateRequest @@ -101,6 +105,7 @@ service: generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. default: false + content-type: application/json response: docs: '' type: LocalCreateResponse @@ -125,6 +130,8 @@ service: Validate a specific local file import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + source: + openapi: openapi/openapi.yaml display-name: Validate import storage request: name: LocalValidateRequest @@ -156,6 +163,7 @@ service: generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. default: false + content-type: application/json examples: - request: {} audiences: @@ -173,6 +181,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -227,6 +237,8 @@ service: If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -250,6 +262,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -282,6 +296,7 @@ service: generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. default: false + content-type: application/json response: docs: '' type: LocalUpdateResponse @@ -320,6 +335,8 @@ service: Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer diff --git a/.mock/definition/import_storage/redis.yml b/.mock/definition/importStorage/redis.yml similarity index 97% rename from .mock/definition/import_storage/redis.yml rename to .mock/definition/importStorage/redis.yml index d607d9605..5289eb04a 100644 --- a/.mock/definition/import_storage/redis.yml +++ b/.mock/definition/importStorage/redis.yml @@ -22,6 +22,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml display-name: List import storages request: name: RedisListRequest @@ -75,6 +77,8 @@ service: After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). 
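Import storages mirror the export flow, with the caveat called out just above: creating the connection does not import anything until you sync it. A minimal Redis sketch with inferred names (see `RedisCreateRequest` for the full field list):

```python
from label_studio_sdk.client import LabelStudio  # assumed import path

client = LabelStudio(base_url="http://localhost:8080", api_key="<your-token>")

storage = client.import_storage.redis.create(
    project=1,
    host="localhost",  # Redis server host
    path="tasks",      # storage path to read tasks from
    password=None,     # server password (optional)
)

# Tasks are only pulled in once the connection is synced.
client.import_storage.redis.sync(id=storage.id)
```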
+ source: + openapi: openapi/openapi.yaml display-name: Create import storage request: name: RedisCreateRequest @@ -114,6 +118,7 @@ service: password: type: optional docs: Server Password (optional) + content-type: application/json response: docs: '' type: RedisCreateResponse @@ -141,6 +146,8 @@ service: Validate a specific Redis import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + source: + openapi: openapi/openapi.yaml display-name: Validate import storage request: name: RedisValidateRequest @@ -183,6 +190,7 @@ service: password: type: optional docs: Server Password (optional) + content-type: application/json examples: - request: {} audiences: @@ -200,6 +208,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -258,6 +268,8 @@ service: If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -281,6 +293,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -324,6 +338,7 @@ service: password: type: optional docs: Server Password (optional) + content-type: application/json response: docs: '' type: RedisUpdateResponse @@ -365,6 +380,8 @@ service: Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer diff --git a/.mock/definition/import_storage/s3.yml b/.mock/definition/importStorage/s3.yml similarity index 97% rename from .mock/definition/import_storage/s3.yml rename to .mock/definition/importStorage/s3.yml index e78c18d00..edd6ccb15 100644 --- a/.mock/definition/import_storage/s3.yml +++ b/.mock/definition/importStorage/s3.yml @@ -22,6 +22,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml display-name: List import storages request: name: S3ListRequest @@ -86,6 +88,8 @@ service: After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + source: + openapi: openapi/openapi.yaml display-name: Create import storage request: name: S3CreateRequest @@ -148,6 +152,7 @@ service: s3_endpoint: type: optional docs: S3 Endpoint + content-type: application/json response: docs: '' type: S3CreateResponse @@ -182,6 +187,8 @@ service: Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. 
+ source: + openapi: openapi/openapi.yaml display-name: Validate import storage request: name: S3ValidateRequest @@ -247,6 +254,7 @@ service: s3_endpoint: type: optional docs: S3 Endpoint + content-type: application/json examples: - request: {} audiences: @@ -264,6 +272,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -328,6 +338,8 @@ service: If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -351,6 +363,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -417,6 +431,7 @@ service: s3_endpoint: type: optional docs: S3 Endpoint + content-type: application/json response: docs: '' type: S3UpdateResponse @@ -464,6 +479,8 @@ service: Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer diff --git a/.mock/definition/import_storage/s3s.yml b/.mock/definition/importStorage/s3S.yml similarity index 97% rename from .mock/definition/import_storage/s3s.yml rename to .mock/definition/importStorage/s3S.yml index a5f52f6c0..831ab6002 100644 --- a/.mock/definition/import_storage/s3s.yml +++ b/.mock/definition/importStorage/s3S.yml @@ -22,6 +22,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + source: + openapi: openapi/openapi.yaml display-name: List import storages request: name: S3SListRequest @@ -83,6 +85,8 @@ service: After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + source: + openapi: openapi/openapi.yaml display-name: Create import storage request: name: S3SCreateRequest @@ -139,6 +143,7 @@ service: s3_endpoint: type: optional docs: S3 Endpoint + content-type: application/json response: docs: '' type: root.S3SImportStorage @@ -181,6 +186,8 @@ service: Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -242,6 +249,8 @@ service: If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -265,6 +274,8 @@ service: For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
+ source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -325,6 +336,7 @@ service: s3_endpoint: type: optional docs: S3 Endpoint + content-type: application/json response: docs: '' type: root.S3SImportStorage @@ -369,6 +381,8 @@ service: Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + source: + openapi: openapi/openapi.yaml display-name: Validate import storage request: name: S3SValidateRequest @@ -425,6 +439,7 @@ service: s3_endpoint: type: optional docs: S3 Endpoint + content-type: application/json examples: - request: {} audiences: @@ -438,6 +453,8 @@ service: Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer diff --git a/.mock/definition/labels.yml b/.mock/definition/labels.yml index 1d2a1f7c4..1154551c8 100644 --- a/.mock/definition/labels.yml +++ b/.mock/definition/labels.yml @@ -26,6 +26,8 @@ service: docs: >- List all custom labels added to your project separately from the labeling configuration. + source: + openapi: openapi/openapi.yaml display-name: List labels request: name: LabelsListRequest @@ -65,9 +67,12 @@ service: method: POST auth: true docs: Add labels to your project without updating the labeling configuration. + source: + openapi: openapi/openapi.yaml display-name: Create labels request: body: list + content-type: application/json response: docs: '' type: list @@ -104,6 +109,8 @@ service: docs: | Retrieve a specific custom label used for your project by its ID. + source: + openapi: openapi/openapi.yaml path-parameters: id: string display-name: Get label @@ -139,6 +146,8 @@ service: docs: >- Remove labels from your project without updating the labeling configuration. + source: + openapi: openapi/openapi.yaml path-parameters: id: string display-name: Remove labels @@ -154,11 +163,14 @@ service: docs: >- Update labels used for your project without updating the labeling configuration. + source: + openapi: openapi/openapi.yaml path-parameters: id: string display-name: Update labels request: body: root.Label + content-type: application/json response: docs: '' type: root.Label diff --git a/.mock/definition/ml.yml b/.mock/definition/ml.yml index f154a5c8c..c811d2329 100644 --- a/.mock/definition/ml.yml +++ b/.mock/definition/ml.yml @@ -19,6 +19,8 @@ service: You will need to provide the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + source: + openapi: openapi/openapi.yaml display-name: List ML backends request: name: MlListRequest @@ -75,6 +77,8 @@ service: environment variables to allow the ML backend to interact with those files. See [Allow the ML backend to access Label Studio](https://labelstud.io/guide/ml#Allow-the-ML-backend-to-access-Label-Studio-data). + source: + openapi: openapi/openapi.yaml display-name: Add ML backend request: name: MlCreateRequest @@ -110,6 +114,7 @@ service: timeout: type: optional docs: Response model timeout + content-type: application/json response: docs: '' type: MlCreateResponse @@ -143,6 +148,8 @@ service: For more information, see [Machine learning integration](https://labelstud.io/guide/ml). 
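The ml.yml hunk above documents adding an ML backend with a serving `url`, a target `project`, an auth method, and a response timeout. A sketch assuming `POST /api/ml` and placeholder values:

```python
import requests

BASE_URL = "http://localhost:8080"
HEADERS = {"Authorization": "Token YOUR_API_KEY"}

backend = requests.post(
    f"{BASE_URL}/api/ml",
    headers=HEADERS,
    json={
        "project": 1,                      # project the backend serves
        "url": "http://ml-backend:9090",   # hypothetical backend address
        "title": "my-model",
        "auth_method": "NONE",             # or BASIC_AUTH, per the enum in this file
        "timeout": 100,                    # response model timeout (unit assumed: seconds)
    },
)
backend.raise_for_status()
print(backend.json()["id"])
```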
+ source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -191,6 +198,8 @@ service: For more information, see [Machine learning integration](https://labelstud.io/guide/ml). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -213,6 +222,8 @@ service: For more information, see [Machine learning integration](https://labelstud.io/guide/ml). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -252,6 +263,7 @@ service: timeout: type: optional docs: Response model timeout + content-type: application/json response: docs: '' type: MlUpdateResponse @@ -301,6 +313,8 @@ service: is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](../tasks/list). The ML backend connection ID is available via [List ML backends](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -316,6 +330,7 @@ service: context: type: optional> docs: Context for ML model + content-type: application/json examples: - path-parameters: id: 1 @@ -331,6 +346,8 @@ service: After you add an ML backend, call this API with the ML backend ID to run a test prediction on specific task data + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -360,6 +377,8 @@ service: You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -372,6 +391,7 @@ service: use_ground_truth: type: optional docs: Whether to include ground truth annotations in training + content-type: application/json errors: - root.InternalServerError examples: @@ -389,6 +409,8 @@ service: Get available versions of the model. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: string display-name: Get model versions @@ -405,6 +427,7 @@ types: - NONE - BASIC_AUTH docs: Auth method + inline: true source: openapi: openapi/openapi.yaml MlCreateResponseAuthMethod: @@ -412,6 +435,7 @@ types: - NONE - BASIC_AUTH docs: Auth method + inline: true source: openapi: openapi/openapi.yaml MlCreateResponse: @@ -453,6 +477,7 @@ types: - NONE - BASIC_AUTH docs: Auth method + inline: true source: openapi: openapi/openapi.yaml MlUpdateResponseAuthMethod: @@ -460,6 +485,7 @@ types: - NONE - BASIC_AUTH docs: Auth method + inline: true source: openapi: openapi/openapi.yaml MlUpdateResponse: diff --git a/.mock/definition/model_providers.yml b/.mock/definition/modelProviders.yml similarity index 93% rename from .mock/definition/model_providers.yml rename to .mock/definition/modelProviders.yml index f82ce067f..ef8f17e2f 100644 --- a/.mock/definition/model_providers.yml +++ b/.mock/definition/modelProviders.yml @@ -11,6 +11,8 @@ service: docs: > Get all model provider connections created by the user in the current organization. + source: + openapi: openapi/openapi.yaml display-name: Get model provider connections response: docs: '' @@ -41,9 +43,12 @@ service: auth: true docs: | Create a new model provider connection. + source: + openapi: openapi/openapi.yaml display-name: Create model provider connection request: body: root.ModelProviderConnection + content-type: application/json response: docs: '' type: root.ModelProviderConnection @@ -75,6 +80,8 @@ service: auth: true docs: | Get a model provider connection by ID. 
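The interactive pre-annotation endpoint above takes a task ID plus an optional free-form `context` dict for the model. A sketch assuming the conventional `/api/ml/{id}/interactive-annotating` path, which is not visible in this hunk:

```python
import requests

BASE_URL = "http://localhost:8080"
HEADERS = {"Authorization": "Token YOUR_API_KEY"}

ml_backend_id = 1  # from List ML backends
resp = requests.post(
    f"{BASE_URL}/api/ml/{ml_backend_id}/interactive-annotating",
    headers=HEADERS,
    json={
        "task": 1,                        # task to pre-annotate
        "context": {"region": [10, 20]},  # optional model context (shape is model-defined)
    },
)
resp.raise_for_status()
```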
+ source: + openapi: openapi/openapi.yaml path-parameters: pk: type: integer @@ -111,6 +118,8 @@ service: auth: true docs: | Delete a model provider connection by ID. + source: + openapi: openapi/openapi.yaml path-parameters: pk: type: integer @@ -127,6 +136,8 @@ service: auth: true docs: | Update a model provider connection by ID. + source: + openapi: openapi/openapi.yaml path-parameters: pk: type: integer @@ -134,6 +145,7 @@ service: display-name: Update model provider connection request: body: root.ModelProviderConnection + content-type: application/json response: docs: '' type: root.ModelProviderConnection diff --git a/.mock/definition/organizations.yml b/.mock/definition/organizations.yml index 935efa55a..e106e87b6 100644 --- a/.mock/definition/organizations.yml +++ b/.mock/definition/organizations.yml @@ -11,6 +11,8 @@ service: docs: >- Get a link to use to invite a new member to an organization in Label Studio Enterprise. + source: + openapi: openapi/openapi.yaml display-name: Get organization invite link response: docs: '' @@ -27,6 +29,8 @@ service: docs: >- Reset the token used in the invitation link to invite someone to an organization. + source: + openapi: openapi/openapi.yaml display-name: Reset organization token response: docs: '' @@ -44,6 +48,8 @@ service: Return a list of the organizations you've created or that you have access to. + source: + openapi: openapi/openapi.yaml display-name: List your organizations response: docs: '' @@ -60,6 +66,8 @@ service: method: GET auth: true docs: Retrieve the settings for a specific organization by ID. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -87,6 +95,8 @@ service: method: PATCH auth: true docs: Update the settings for a specific organization by ID. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -94,6 +104,7 @@ service: display-name: Update organization settings request: body: root.Organization + content-type: application/json response: docs: '' type: root.Organization diff --git a/.mock/definition/organizations/members.yml b/.mock/definition/organizations/members.yml index 749fcc367..c0ceeaf39 100644 --- a/.mock/definition/organizations/members.yml +++ b/.mock/definition/organizations/members.yml @@ -27,6 +27,8 @@ service: pagination: offset: $request.page results: $response.results + source: + openapi: openapi/openapi.yaml path-parameters: id: string display-name: Get organization members list @@ -60,6 +62,8 @@ service: method: GET auth: true docs: Get organization member details by user ID. + source: + openapi: openapi/openapi.yaml path-parameters: id: string user_pk: @@ -87,6 +91,8 @@ service: method: DELETE auth: true docs: Soft delete a member from the organization. + source: + openapi: openapi/openapi.yaml path-parameters: id: string user_pk: diff --git a/.mock/definition/predictions.yml b/.mock/definition/predictions.yml index f665f5726..5af5dbe0c 100644 --- a/.mock/definition/predictions.yml +++ b/.mock/definition/predictions.yml @@ -28,6 +28,8 @@ service: To import predictions via the API, see [Create prediction](create). 
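For the predictions service that starts above, listing is typically filtered down to one task or project via query parameters. A sketch assuming `GET /api/predictions` accepts a `task` filter:

```python
import requests

BASE_URL = "http://localhost:8080"
HEADERS = {"Authorization": "Token YOUR_API_KEY"}

# List predictions for a single task (the `task` query parameter is an
# assumption based on the List predictions request described above).
preds = requests.get(f"{BASE_URL}/api/predictions",
                     headers=HEADERS, params={"task": 1}).json()
for p in preds:
    print(p["id"], p.get("model_version"), p.get("score"))
```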
+ source: + openapi: openapi/openapi.yaml display-name: List predictions request: name: PredictionsListRequest @@ -112,6 +114,8 @@ service: For more information, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations) + source: + openapi: openapi/openapi.yaml display-name: Create prediction request: name: PredictionsCreateRequest @@ -138,6 +142,7 @@ service: Model version - tag for predictions that can be used to filter tasks in Data Manager, as well as select specific model version for showing preannotations in the labeling interface + content-type: application/json response: docs: Created prediction type: root.Prediction @@ -209,6 +214,8 @@ service: For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -260,6 +267,8 @@ service: method: PUT auth: true docs: Overwrite prediction data by prediction ID. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -290,6 +299,7 @@ service: Model version - tag for predictions that can be used to filter tasks in Data Manager, as well as select specific model version for showing preannotations in the labeling interface + content-type: application/json response: docs: Updated prediction type: root.Prediction @@ -358,6 +368,8 @@ service: Delete a prediction. To find the prediction ID, use [List predictions](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -381,6 +393,8 @@ service: For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -411,6 +425,7 @@ service: Model version - tag for predictions that can be used to filter tasks in Data Manager, as well as select specific model version for showing preannotations in the labeling interface + content-type: application/json response: docs: Updated prediction type: root.Prediction diff --git a/.mock/definition/projects.yml b/.mock/definition/projects.yml index 5aead99bb..3587225ec 100644 --- a/.mock/definition/projects.yml +++ b/.mock/definition/projects.yml @@ -188,6 +188,8 @@ service: pagination: offset: $request.page results: $response.results + source: + openapi: openapi/openapi.yaml display-name: List all projects request: name: ProjectsListRequest @@ -227,6 +229,12 @@ service: enable_empty_annotation: true show_annotation_history: true organization: 1 + prompts: + - title: title + input_fields: + - input_fields + output_classes: + - output_classes color: color maximum_annotations: 1 is_published: true @@ -287,6 +295,8 @@ service: '{"label_config": "[...]"}' ``` + source: + openapi: openapi/openapi.yaml display-name: Create new project request: name: ProjectsCreateRequest @@ -350,6 +360,7 @@ service: model_version: type: optional docs: Model version + content-type: application/json response: docs: '' type: ProjectsCreateResponse @@ -383,9 +394,12 @@ service: method: POST auth: true docs: Validate an arbitrary labeling configuration. 
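The create-prediction request above carries `task`, `result`, `score`, and `model_version`, with `result` following the pre-annotation JSON format linked in the docs. A sketch assuming `POST /api/predictions` and a `choices`-style labeling config (the tag names are placeholders that must match your own config):

```python
import requests

BASE_URL = "http://localhost:8080"
HEADERS = {"Authorization": "Token YOUR_API_KEY"}

prediction = {
    "task": 1,
    "model_version": "v1",    # tag usable as a Data Manager filter
    "score": 0.95,
    "result": [{
        "from_name": "sentiment",  # control tag name in the labeling config
        "to_name": "text",         # object tag it labels
        "type": "choices",
        "value": {"choices": ["Positive"]},
    }],
}
requests.post(f"{BASE_URL}/api/predictions",
              headers=HEADERS, json=prediction).raise_for_status()
```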
+ source: + openapi: openapi/openapi.yaml display-name: Validate label config request: body: root.ProjectLabelConfig + content-type: application/json examples: - request: label_config: label_config @@ -399,6 +413,8 @@ service: Retrieve information about a specific project by project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -423,6 +439,20 @@ service: enable_empty_annotation: true show_annotation_history: true organization: 1 + prompts: + - title: title + description: description + created_by: 1 + created_at: '2024-01-15T09:30:00Z' + updated_at: '2024-01-15T09:30:00Z' + organization: 1 + input_fields: + - input_fields + output_classes: + - output_classes + associated_projects: + - 1 + skill_name: skill_name color: '#FF0000' maximum_annotations: 1 is_published: true @@ -478,6 +508,8 @@ service: The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -523,6 +555,8 @@ service: might have created in the Data Manager are removed. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -590,6 +624,7 @@ service: model_version: type: optional docs: Model version + content-type: application/json response: docs: '' type: ProjectsUpdateResponse @@ -723,6 +758,8 @@ service:
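Project creation above pairs naturally with the label-config validation endpoints also touched in this diff. A sketch assuming `POST /api/projects` plus a standalone validation call (`/api/projects/validate` is the conventional path; only the `root.ProjectLabelConfig` request body is pinned down here):

```python
import requests

BASE_URL = "http://localhost:8080"
HEADERS = {"Authorization": "Token YOUR_API_KEY"}

label_config = """
<View>
  <Text name="text" value="$text"/>
  <Choices name="sentiment" toName="text">
    <Choice value="Positive"/>
    <Choice value="Negative"/>
  </Choices>
</View>
"""

# Validate the config by itself first; a 4xx here means the XML was rejected.
requests.post(f"{BASE_URL}/api/projects/validate", headers=HEADERS,
              json={"label_config": label_config}).raise_for_status()

project = requests.post(f"{BASE_URL}/api/projects", headers=HEADERS,
                        json={"title": "Sentiment", "label_config": label_config}).json()
print(project["id"])
```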
    + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -735,9 +772,11 @@ service: query-parameters: commit_to_project: type: optional + default: true docs: Set to "true" to immediately commit tasks to the project. return_task_ids: type: optional + default: false docs: Set to "true" to return task IDs in the response. preannotated_from_fields: type: optional @@ -749,6 +788,7 @@ service: `text` field and a prediction with the `label` field when `preannotated_from_fields=["prediction"]`. name: ProjectsImportTasksRequest + content-type: application/json response: docs: Tasks successfully imported type: ProjectsImportTasksResponse @@ -779,6 +819,8 @@ service: method: GET auth: true docs: Return data related to async project reimport operation + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -828,6 +870,8 @@ service: 'Authorization: Token abc123' ``` + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -864,6 +908,8 @@ service: The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -871,6 +917,7 @@ service: display-name: Validate project labeling config request: body: root.ProjectLabelConfig + content-type: application/json response: docs: '' type: root.ProjectLabelConfig diff --git a/.mock/definition/projects/exports.yml b/.mock/definition/projects/exports.yml index 1f0e3adb0..c3cd38625 100644 --- a/.mock/definition/projects/exports.yml +++ b/.mock/definition/projects/exports.yml @@ -52,6 +52,8 @@ service: You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -102,6 +104,8 @@ service: You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -132,6 +136,8 @@ service: Included in the response is information about each snapshot, such as who created it and what format it is in. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -185,6 +191,8 @@ service: For more information, see the [Label Studio documentation on exporting annotations](https://labelstud.io/guide/export.html). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -192,6 +200,7 @@ service: display-name: Create new export snapshot request: body: root.ExportCreate + content-type: application/json response: docs: '' type: root.ExportCreate @@ -258,6 +267,8 @@ service: You will also need the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -308,6 +319,8 @@ service: You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list).
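The import-tasks hunk above pins down defaults for the query flags: `commit_to_project` defaults to true and `return_task_ids` to false. A sketch assuming the conventional `POST /api/projects/{id}/import` path:

```python
import requests

BASE_URL = "http://localhost:8080"
HEADERS = {"Authorization": "Token YOUR_API_KEY"}

project_id = 1
tasks = [{"text": "Great product!"}, {"text": "Not what I expected."}]

resp = requests.post(
    f"{BASE_URL}/api/projects/{project_id}/import",
    headers=HEADERS,
    params={"return_task_ids": "true"},  # opt in; the default above is false
    json=tasks,
)
resp.raise_for_status()
print(resp.json().get("task_ids"))
```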
+ source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -345,6 +358,8 @@ service: The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -355,6 +370,7 @@ service: display-name: Export conversion request: body: root.ExportConvert + content-type: application/json response: docs: '' type: root.ExportConvert @@ -389,6 +405,8 @@ service: The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer diff --git a/.mock/definition/projects/labels.yml b/.mock/definition/projects/labels.yml index 42df7cf9c..9107f8e3b 100644 --- a/.mock/definition/projects/labels.yml +++ b/.mock/definition/projects/labels.yml @@ -24,6 +24,8 @@ service: method: GET auth: true docs: List label links for a specific label and project. + source: + openapi: openapi/openapi.yaml display-name: List label links request: name: LabelsListRequest @@ -55,9 +57,12 @@ service: docs: >- Create label links to link new custom labels to your project labeling configuration. + source: + openapi: openapi/openapi.yaml display-name: Create label links request: body: root.LabelLink + content-type: application/json response: docs: '' type: root.LabelLink @@ -80,6 +85,8 @@ service: method: GET auth: true docs: 'Get label links for a specific project configuration. ' + source: + openapi: openapi/openapi.yaml path-parameters: id: string display-name: Get label link @@ -109,6 +116,8 @@ service: the label stops being available for the project it was linked to. You can add a new label link at any time. + source: + openapi: openapi/openapi.yaml path-parameters: id: string display-name: Remove label link @@ -128,11 +137,14 @@ service: toName, or name parameters for a tag in the labeling configuration change. + source: + openapi: openapi/openapi.yaml path-parameters: id: string display-name: Update label link request: body: root.LabelLink + content-type: application/json response: docs: '' type: root.LabelLink @@ -160,6 +172,8 @@ service: If you want to update the labels in saved annotations, use this endpoint. + source: + openapi: openapi/openapi.yaml display-name: Bulk update labels examples: - {} diff --git a/.mock/definition/prompts.yml b/.mock/definition/prompts.yml index 217b18677..e2a4aa6d7 100644 --- a/.mock/definition/prompts.yml +++ b/.mock/definition/prompts.yml @@ -10,6 +10,8 @@ service: auth: true docs: | Get a list of prompts. + source: + openapi: openapi/openapi.yaml display-name: List prompts response: docs: '' @@ -38,9 +40,12 @@ service: auth: true docs: | Create a new prompt. + source: + openapi: openapi/openapi.yaml display-name: Create prompt request: body: root.Prompt + content-type: application/json response: docs: '' type: root.Prompt @@ -74,6 +79,8 @@ service: auth: true docs: | Get a prompt by ID. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -108,6 +115,8 @@ service: auth: true docs: | Delete a prompt by ID. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -124,6 +133,8 @@ service: auth: true docs: | Update a prompt by ID. 
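Export snapshots, per the docs above, are created asynchronously and downloaded once finished. A sketch assuming the conventional `/api/projects/{id}/exports` family of paths and a JSON export type:

```python
import requests
import time

BASE_URL = "http://localhost:8080"
HEADERS = {"Authorization": "Token YOUR_API_KEY"}
project_id = 1

# Create a snapshot, poll its status, then download the finished file.
snap = requests.post(f"{BASE_URL}/api/projects/{project_id}/exports",
                     headers=HEADERS, json={}).json()
export_pk = snap["id"]

while True:
    info = requests.get(f"{BASE_URL}/api/projects/{project_id}/exports/{export_pk}",
                        headers=HEADERS).json()
    if info.get("status") in ("completed", "failed"):
        break
    time.sleep(2)

data = requests.get(f"{BASE_URL}/api/projects/{project_id}/exports/{export_pk}/download",
                    headers=HEADERS, params={"exportType": "JSON"})
data.raise_for_status()
with open("export.json", "wb") as f:
    f.write(data.content)
```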
+ source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -131,6 +142,7 @@ service: display-name: Update prompt request: body: root.Prompt + content-type: application/json response: docs: '' type: root.Prompt @@ -166,6 +178,8 @@ service: auth: true docs: | Create a new batch prediction. + source: + openapi: openapi/openapi.yaml display-name: Create batch predictions request: name: PromptsBatchPredictionsRequest @@ -175,6 +189,7 @@ service: type: optional docs: Model Run ID to associate the prediction with results: optional> + content-type: application/json response: docs: '' type: PromptsBatchPredictionsResponse @@ -191,6 +206,8 @@ service: auth: true docs: | Create a new batch of failed predictions. + source: + openapi: openapi/openapi.yaml display-name: Create batch of failed predictions request: name: PromptsBatchFailedPredictionsRequest @@ -201,6 +218,7 @@ service: docs: Model Run ID where the failed predictions came from failed_predictions: >- optional> + content-type: application/json response: docs: '' type: PromptsBatchFailedPredictionsResponse @@ -250,6 +268,7 @@ types: docs: Total cost of the inference (in USD) source: openapi: openapi/openapi.yaml + inline: true PromptsBatchPredictionsResponse: properties: detail: optional @@ -268,6 +287,7 @@ types: docs: Error message details source: openapi: openapi/openapi.yaml + inline: true PromptsBatchFailedPredictionsResponse: properties: detail: optional diff --git a/.mock/definition/prompts/indicators.yml b/.mock/definition/prompts/indicators.yml index 687f12b99..7c1087ab6 100644 --- a/.mock/definition/prompts/indicators.yml +++ b/.mock/definition/prompts/indicators.yml @@ -10,6 +10,8 @@ service: auth: true docs: | Get key indicators for the Prompt dashboard. + source: + openapi: openapi/openapi.yaml path-parameters: pk: type: integer @@ -39,6 +41,8 @@ service: auth: true docs: | Get a specific key indicator for the Prompt dashboard. + source: + openapi: openapi/openapi.yaml path-parameters: indicator_key: type: string diff --git a/.mock/definition/prompts/runs.yml b/.mock/definition/prompts/runs.yml index d2ce3c31f..3878bad5b 100644 --- a/.mock/definition/prompts/runs.yml +++ b/.mock/definition/prompts/runs.yml @@ -17,6 +17,8 @@ service: method: GET auth: true docs: Get information (status, metadata, etc) about an existing inference run + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -68,6 +70,8 @@ service: auth: true docs: | Run a prompt inference. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -78,6 +82,7 @@ service: display-name: Run prompt inference request: body: root.InferenceRun + content-type: application/json response: docs: '' type: root.InferenceRun diff --git a/.mock/definition/prompts/versions.yml b/.mock/definition/prompts/versions.yml index f3e2d86db..dfef9de0a 100644 --- a/.mock/definition/prompts/versions.yml +++ b/.mock/definition/prompts/versions.yml @@ -10,6 +10,8 @@ service: auth: true docs: | Get a list of prompt versions. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -41,6 +43,8 @@ service: auth: true docs: | Create a new version of a prompt. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -48,6 +52,7 @@ service: display-name: Create prompt version request: body: root.PromptVersion + content-type: application/json response: docs: '' type: root.PromptVersion @@ -75,6 +80,8 @@ service: auth: true docs: | Get a prompt version by ID.
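The batch-prediction request above defines only the body shape: a `modelrun_id` to attach the predictions to and a list of `results`. The endpoint path is not visible in this hunk, so the URL below is a placeholder; only the payload structure comes from the definition:

```python
import requests

BASE_URL = "http://localhost:8080"
HEADERS = {"Authorization": "Token YOUR_API_KEY"}

payload = {
    "modelrun_id": 1,  # inference run to associate the predictions with
    "results": [       # free-form result dicts; the schema is not pinned down here
        {"task": 1, "output": {"sentiment": "Positive"}},
    ],
}
# Placeholder path -- check the generated client or the OpenAPI spec for the real one.
requests.post(f"{BASE_URL}/api/model-run/batch-predictions",
              headers=HEADERS, json=payload)
```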
+ source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -110,6 +117,8 @@ service: auth: true docs: | Delete a prompt version by ID. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -130,6 +139,8 @@ service: auth: true docs: | Update a prompt version by ID. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -140,6 +151,7 @@ service: display-name: Update prompt version request: body: root.PromptVersion + content-type: application/json response: docs: '' type: root.PromptVersion @@ -169,6 +181,8 @@ service: docs: > Get cost estimate for running a prompt version on a particular project/subset + source: + openapi: openapi/openapi.yaml path-parameters: prompt_id: type: integer @@ -216,6 +230,8 @@ service: auth: true docs: | Get the refined prompt based on the `refinement_job_id`. + source: + openapi: openapi/openapi.yaml path-parameters: prompt_id: type: integer @@ -269,6 +285,8 @@ service: docs: > Refine a prompt version using a teacher model and save the refined prompt as a new version. + source: + openapi: openapi/openapi.yaml path-parameters: prompt_id: type: integer @@ -294,6 +312,7 @@ service: project_id: type: optional docs: Project ID to target the refined prompt for + content-type: application/json response: docs: '' type: root.RefinedPromptResponse diff --git a/.mock/definition/tasks.yml b/.mock/definition/tasks.yml index 6578fa6bb..61ac267e3 100644 --- a/.mock/definition/tasks.yml +++ b/.mock/definition/tasks.yml @@ -25,6 +25,8 @@ service: The import ID is returned as part of the response when you call [Import tasks](import-tasks). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -82,6 +84,8 @@ service: The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -113,6 +117,8 @@ service: pagination: offset: $request.page results: $response.tasks + source: + openapi: openapi/openapi.yaml display-name: Get task list request: name: TasksListRequest @@ -134,6 +140,7 @@ service: docs: Resolve task data URIs using Cloud Storage fields: type: optional + default: task_only docs: >- Set to "all" if you want to include annotations and predictions in the response @@ -260,6 +267,8 @@ service: You will also need to provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + source: + openapi: openapi/openapi.yaml display-name: Create task request: name: TasksCreateRequest @@ -271,6 +280,7 @@ service: project: type: optional docs: Project ID + content-type: application/json response: docs: Created task type: root.BaseTask @@ -320,6 +330,8 @@ service: The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: string @@ -425,6 +437,8 @@ service: This action cannot be undone. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: string @@ -447,6 +461,8 @@ service: You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). 
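The task-list hunk above makes `task_only` the default for `fields`, so annotations and predictions are omitted unless you ask for them. A paging sketch assuming `GET /api/tasks` with `project`, `page`, and `fields` parameters:

```python
import requests

BASE_URL = "http://localhost:8080"
HEADERS = {"Authorization": "Token YOUR_API_KEY"}

page = 1
while True:
    resp = requests.get(
        f"{BASE_URL}/api/tasks",
        headers=HEADERS,
        params={"project": 1, "page": page, "fields": "all"},  # "all" overrides task_only
    )
    if resp.status_code != 200:  # assume the server errors once past the last page
        break
    tasks = resp.json().get("tasks", [])
    if not tasks:
        break
    for task in tasks:
        print(task["id"], len(task.get("annotations", [])))
    page += 1
```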
+ source: + openapi: openapi/openapi.yaml path-parameters: id: type: string @@ -462,6 +478,7 @@ service: project: type: optional docs: Project ID + content-type: application/json response: docs: Updated task type: root.BaseTask diff --git a/.mock/definition/users.yml b/.mock/definition/users.yml index 34eee904c..3c0199e85 100644 --- a/.mock/definition/users.yml +++ b/.mock/definition/users.yml @@ -26,6 +26,8 @@ service: docs: >- Reset your access token or API key. When reset, any scripts or automations you have in place will need to be updated with the new key. + source: + openapi: openapi/openapi.yaml display-name: Reset user token response: docs: User token response @@ -45,6 +47,8 @@ service: find this in the Label Studio interface, click **Account & Settings** in the upper right. For more information, see [Access Token](https://labelstud.io/guide/user_account#Access-token). + source: + openapi: openapi/openapi.yaml display-name: Get user token response: docs: User token response @@ -62,6 +66,8 @@ service: docs: >- Get information about your user account, such as your username, email, and user ID. + source: + openapi: openapi/openapi.yaml display-name: Retrieve my user response: docs: '' @@ -90,6 +96,8 @@ service: docs: | List all users in your Label Studio organization. + source: + openapi: openapi/openapi.yaml display-name: List users response: docs: '' @@ -118,6 +126,8 @@ service: docs: | Create a user in Label Studio. + source: + openapi: openapi/openapi.yaml display-name: Create a user request: name: UsersCreateRequest @@ -150,6 +160,7 @@ service: allow_newsletters: type: optional docs: Whether the user allows newsletters + content-type: application/json response: docs: '' type: root.BaseUser @@ -181,6 +192,8 @@ service: You will need to provide their user ID. You can find a list of all user IDs using [List users](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -224,6 +237,8 @@ service: Use caution when deleting a user, as this can cause issues such as breaking the "Annotated by" filter or leaving orphaned records. + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -246,6 +261,8 @@ service: You will need to provide their user ID. You can find a list of all user IDs using [List users](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -283,6 +300,7 @@ service: allow_newsletters: type: optional docs: Whether the user allows newsletters + content-type: application/json response: docs: '' type: root.BaseUser @@ -306,6 +324,26 @@ service: date_joined: '2024-01-15T09:30:00Z' audiences: - public + get_product_tour: + path: /api/current-user/product-tour + method: GET + auth: true + source: + openapi: openapi/openapi.yaml + examples: + - {} + audiences: + - internal + update_product_tour: + path: /api/current-user/product-tour + method: PATCH + auth: true + source: + openapi: openapi/openapi.yaml + examples: + - {} + audiences: + - internal source: openapi: openapi/openapi.yaml imports: diff --git a/.mock/definition/views.yml b/.mock/definition/views.yml index a0c0772f0..275a465f5 100644 --- a/.mock/definition/views.yml +++ b/.mock/definition/views.yml @@ -18,6 +18,8 @@ service: You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). 
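The users service above includes the token endpoints; resetting invalidates the old key immediately, so any scripts holding it must switch to the new one. A sketch assuming the `/api/current-user/whoami` and `/api/current-user/reset-token` paths (the new product-tour paths are spelled out in the hunk itself):

```python
import requests

BASE_URL = "http://localhost:8080"
HEADERS = {"Authorization": "Token YOUR_API_KEY"}

me = requests.get(f"{BASE_URL}/api/current-user/whoami", headers=HEADERS).json()
print(me.get("email"))

# Reset the access token, then use the returned key for every later call.
new = requests.post(f"{BASE_URL}/api/current-user/reset-token", headers=HEADERS).json()
HEADERS["Authorization"] = f"Token {new['token']}"
```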
+ source: + openapi: openapi/openapi.yaml display-name: List views request: name: ViewsListRequest @@ -63,6 +65,8 @@ service: You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). + source: + openapi: openapi/openapi.yaml display-name: Create view request: name: ViewsCreateRequest @@ -74,6 +78,7 @@ service: project: type: optional docs: Project ID + content-type: application/json response: docs: '' type: root.View @@ -112,12 +117,15 @@ service: You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). + source: + openapi: openapi/openapi.yaml display-name: Delete all project views request: name: ViewReset body: properties: project: integer + content-type: application/json examples: - request: project: 1 @@ -131,6 +139,8 @@ service: Get the details about a specific Data Manager view (tab). You will need to supply the view ID. You can find this using [List views](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: string @@ -169,6 +179,8 @@ service: docs: >- Delete a specific Data Manager view (tab) by ID. You can find the view using [List views](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: string @@ -188,6 +200,8 @@ service: You can update a specific Data Manager view (tab) with additional filters and other customizations. You will need to supply the view ID. You can find this using [List views](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: string @@ -203,6 +217,7 @@ service: project: type: optional docs: Project ID + content-type: application/json response: docs: '' type: root.View @@ -242,6 +257,7 @@ types: "and") will be applied to all items in the filters list. It is not possible to combine "or" and "and" within one list of filters. All filters will be either combined with "or" or with "and", but not a mix of both. + inline: true source: openapi: openapi/openapi.yaml ViewsCreateRequestDataFiltersItemsItemFilter: @@ -337,6 +353,7 @@ types: Number of unresolved comments in a task<br>
  • `filter:tasks:updated_at`<br>
    (Datetime) Time the task was updated at (e.g. new annotation was created, review added, etc)<br>
  • + inline: true source: openapi: openapi/openapi.yaml ViewsCreateRequestDataFiltersItemsItemOperator: @@ -368,6 +385,7 @@ types: to<br>
  • `not_exists`<br>
    Does not exist<br>
<br>
  • `not_in`<br>
    Is not between min and max values, so the filter `value` should be e.g. `{"min": 1, "max": 7}`<br>
<br>
  • `starts_with`<br>
    Starts with<br>
  • + inline: true source: openapi: openapi/openapi.yaml ViewsCreateRequestDataFiltersItemsItemValue: @@ -386,6 +404,7 @@ types: docs: List of strings or integers source: openapi: openapi/openapi.yaml + inline: true ViewsCreateRequestDataFiltersItemsItem: properties: filter: @@ -469,6 +488,7 @@ types: docs: Value to filter by source: openapi: openapi/openapi.yaml + inline: true ViewsCreateRequestDataFilters: docs: >- Filters to apply on tasks. You can use [the helper class `Filters` from @@ -490,6 +510,7 @@ types: type: list source: openapi: openapi/openapi.yaml + inline: true ViewsCreateRequestDataOrderingItem: enum: - value: tasks:agreement @@ -536,6 +557,7 @@ types: name: TasksUnresolvedCommentCount - value: tasks:updated_at name: TasksUpdatedAt + inline: true source: openapi: openapi/openapi.yaml ViewsCreateRequestData: @@ -557,6 +579,7 @@ types: the field name, e.g. `-tasks:created_at`. source: openapi: openapi/openapi.yaml + inline: true ViewsUpdateRequestDataFiltersConjunction: enum: - or @@ -566,6 +589,7 @@ types: "and") will be applied to all items in the filters list. It is not possible to combine "or" and "and" within one list of filters. All filters will be either combined with "or" or with "and", but not a mix of both. + inline: true source: openapi: openapi/openapi.yaml ViewsUpdateRequestDataFiltersItemsItemFilter: @@ -661,6 +685,7 @@ types: Number of unresolved comments in a task<br>
  • `filter:tasks:updated_at`<br>
    (Datetime) Time the task was updated at (e.g. new annotation was created, review added, etc)<br>
  • + inline: true source: openapi: openapi/openapi.yaml ViewsUpdateRequestDataFiltersItemsItemOperator: @@ -692,6 +717,7 @@ types: to<br>
  • `not_exists`<br>
    Does not exist<br>
<br>
  • `not_in`<br>
    Is not between min and max values, so the filter `value` should be e.g. `{"min": 1, "max": 7}`<br>
<br>
  • `starts_with`<br>
    Starts with<br>
  • + inline: true source: openapi: openapi/openapi.yaml ViewsUpdateRequestDataFiltersItemsItemValue: @@ -710,6 +736,7 @@ types: docs: List of strings or integers source: openapi: openapi/openapi.yaml + inline: true ViewsUpdateRequestDataFiltersItemsItem: properties: filter: @@ -793,6 +820,7 @@ types: docs: Value to filter by source: openapi: openapi/openapi.yaml + inline: true ViewsUpdateRequestDataFilters: docs: >- Filters to apply on tasks. You can use [the helper class `Filters` from @@ -814,6 +842,7 @@ types: type: list source: openapi: openapi/openapi.yaml + inline: true ViewsUpdateRequestDataOrderingItem: enum: - value: tasks:agreement @@ -860,6 +889,7 @@ types: name: TasksUnresolvedCommentCount - value: tasks:updated_at name: TasksUpdatedAt + inline: true source: openapi: openapi/openapi.yaml ViewsUpdateRequestData: @@ -881,3 +911,4 @@ types: the field name, e.g. `-tasks:created_at`. source: openapi: openapi/openapi.yaml + inline: true diff --git a/.mock/definition/webhooks.yml b/.mock/definition/webhooks.yml index 7c09ba02e..a2eddd057 100644 --- a/.mock/definition/webhooks.yml +++ b/.mock/definition/webhooks.yml @@ -21,6 +21,8 @@ service: For more information, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks). + source: + openapi: openapi/openapi.yaml display-name: List all webhooks request: name: WebhooksListRequest @@ -71,9 +73,12 @@ service: with webhooks: project-level task events and organization events. If you want to use organization-level webhook events, you will need to set `LABEL_STUDIO_ALLOW_ORGANIZATION_WEBHOOKS=true`. + source: + openapi: openapi/openapi.yaml display-name: Create a webhook request: body: root.Webhook + content-type: application/json response: docs: '' type: root.Webhook @@ -106,6 +111,8 @@ service: Get descriptions of all available webhook actions to set up webhooks. For more information, see the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + source: + openapi: openapi/openapi.yaml display-name: Get all webhook actions request: name: WebhooksInfoRequest @@ -130,6 +137,8 @@ service: For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -163,6 +172,8 @@ service: method: PUT auth: true docs: '' + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -174,6 +185,10 @@ service: url: type: string docs: URL of webhook + validation: + format: uri + minLength: 1 + maxLength: 2048 send_payload: type: optional docs: If value is False send only action @@ -190,6 +205,7 @@ service: type: optional allow-multiple: true name: ApiWebhooksUpdateRequest + content-type: application/json response: docs: '' type: root.WebhookSerializerForUpdate @@ -230,6 +246,8 @@ service: For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -253,6 +271,8 @@ service: For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). 
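The view types above spell out the filter contract: a single `conjunction` ("and"/"or") applied to every item, items keyed by `filter`/`operator`/`type`/`value`, and ordering columns prefixed with `-` for descending. A sketch that builds one such view, assuming the Data Manager path `/api/dm/views` and an example column/operator pairing drawn from the enums above:

```python
import requests

BASE_URL = "http://localhost:8080"
HEADERS = {"Authorization": "Token YOUR_API_KEY"}

view = {
    "project": 1,
    "data": {
        "title": "Unresolved comments",
        "filters": {
            "conjunction": "and",  # applies to all items; "and"/"or" cannot be mixed
            "items": [{
                "filter": "filter:tasks:unresolved_comment_count",
                "operator": "greater",  # operator/type spellings assumed from the docs above
                "type": "Number",
                "value": 0,
            }],
        },
        "ordering": ["-tasks:updated_at"],  # "-" prefix = descending
    },
}
requests.post(f"{BASE_URL}/api/dm/views", headers=HEADERS, json=view).raise_for_status()
```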
+ source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -264,6 +284,10 @@ service: url: type: string docs: URL of webhook + validation: + format: uri + minLength: 1 + maxLength: 2048 send_payload: type: optional docs: If value is False send only action @@ -280,6 +304,7 @@ service: type: optional allow-multiple: true name: WebhooksUpdateRequest + content-type: application/json response: docs: '' type: root.WebhookSerializerForUpdate diff --git a/.mock/definition/workspaces.yml b/.mock/definition/workspaces.yml index 6c016308c..5dc022251 100644 --- a/.mock/definition/workspaces.yml +++ b/.mock/definition/workspaces.yml @@ -20,6 +20,8 @@ service: For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). + source: + openapi: openapi/openapi.yaml display-name: List workspaces response: docs: '' @@ -55,6 +57,8 @@ service: For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). + source: + openapi: openapi/openapi.yaml display-name: Create workspace request: name: WorkspacesCreateRequest @@ -79,6 +83,7 @@ service: is_archived: type: optional docs: Is workspace archived + content-type: application/json response: docs: '' type: root.Workspace @@ -106,6 +111,8 @@ service: Get information about a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -139,6 +146,8 @@ service: Delete a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -157,6 +166,8 @@ service: Update a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -185,6 +196,7 @@ service: is_archived: type: optional docs: Is workspace archived + content-type: application/json response: docs: '' type: root.Workspace diff --git a/.mock/definition/workspaces/members.yml b/.mock/definition/workspaces/members.yml index 713020e45..cb269579f 100644 --- a/.mock/definition/workspaces/members.yml +++ b/.mock/definition/workspaces/members.yml @@ -26,6 +26,8 @@ service: List all workspace memberships for a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -51,6 +53,8 @@ service: Create a new workspace membership. You will need to provide the workspace ID. You can find this using [List workspaces](list). + source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -63,6 +67,7 @@ service: user: type: optional docs: User ID of the workspace member + content-type: application/json response: docs: '' type: MembersCreateResponse @@ -84,6 +89,8 @@ service: Delete a specific workspace membership. You will need to provide the workspace ID and the user ID. You can find this using [List workspace memberships](list). 
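The webhook hunks above tighten the `url` field with URI-format validation (1 to 2048 characters) on both update variants; creation takes the same shape. A sketch assuming `POST /api/webhooks` and example action names:

```python
import requests

BASE_URL = "http://localhost:8080"
HEADERS = {"Authorization": "Token YOUR_API_KEY"}

webhook = {
    "url": "https://example.com/hooks/label-studio",  # must be a valid URI, <= 2048 chars
    "send_payload": True,  # False would send only the action name
    "actions": ["ANNOTATION_CREATED", "ANNOTATION_UPDATED"],  # assumed action names
}
requests.post(f"{BASE_URL}/api/webhooks",
              headers=HEADERS, json=webhook).raise_for_status()
```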
+ source: + openapi: openapi/openapi.yaml path-parameters: id: type: integer @@ -96,6 +103,7 @@ service: user: type: optional docs: User ID of the workspace member + content-type: application/json examples: - path-parameters: id: 1 diff --git a/.mock/fern.config.json b/.mock/fern.config.json index 853cd5857..c7083c6db 100644 --- a/.mock/fern.config.json +++ b/.mock/fern.config.json @@ -1,4 +1,4 @@ { "organization" : "humansignal-org", - "version" : "0.39.12" + "version" : "0.46.12" } \ No newline at end of file diff --git a/.mock/openapi/openapi.yaml b/.mock/openapi/openapi.yaml index 598f2decd..e10cad058 100644 --- a/.mock/openapi/openapi.yaml +++ b/.mock/openapi/openapi.yaml @@ -8982,6 +8982,11 @@ components: title: Organization type: integer nullable: true + prompts: + type: array + items: + $ref: "#/components/schemas/Prompt" + nullable: true color: title: Color type: string diff --git a/poetry.lock b/poetry.lock index 2455efc65..afc5b7286 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. [[package]] name = "annotated-types" @@ -13,24 +13,24 @@ files = [ [[package]] name = "anyio" -version = "4.5.2" +version = "4.7.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "anyio-4.5.2-py3-none-any.whl", hash = "sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f"}, - {file = "anyio-4.5.2.tar.gz", hash = "sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b"}, + {file = "anyio-4.7.0-py3-none-any.whl", hash = "sha256:ea60c3723ab42ba6fff7e8ccb0488c898ec538ff4df1f1d5e642c3601d07e352"}, + {file = "anyio-4.7.0.tar.gz", hash = "sha256:2f834749c602966b7d456a7567cafcb309f96482b5081d14ac93ccd457f9dd48"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] trio = ["trio (>=0.26.1)"] [[package]] @@ -46,13 +46,13 @@ files = [ [[package]] name = "argcomplete" -version = "3.5.1" +version = "3.5.2" description = "Bash tab completion for argparse" optional = false python-versions = ">=3.8" files = [ - {file = "argcomplete-3.5.1-py3-none-any.whl", hash = "sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363"}, - {file = "argcomplete-3.5.1.tar.gz", hash = "sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4"}, + {file = "argcomplete-3.5.2-py3-none-any.whl", hash = 
"sha256:036d020d79048a5d525bc63880d7a4b8d1668566b8a76daf1144c0bbe0f63472"}, + {file = "argcomplete-3.5.2.tar.gz", hash = "sha256:23146ed7ac4403b70bd6026402468942ceba34a6732255b9edf5b7354f68a6bb"}, ] [package.extras] @@ -60,52 +60,52 @@ test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] [[package]] name = "attrs" -version = "24.2.0" +version = "24.3.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, + {file = "attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308"}, + {file = "attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff"}, ] [package.extras] benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "black" -version = "24.8.0" +version = "24.10.0" description = "The uncompromising code formatter." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, - {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, - {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, - {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, - {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, - {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, - {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, - {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, - {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, - {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, - {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, - {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, - {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, - {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, - {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, - {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, - {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, - {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, - {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, - {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, - {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, - {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, + {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, + {file = 
"black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, + {file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"}, + {file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"}, + {file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"}, + {file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"}, + {file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"}, + {file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"}, + {file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"}, + {file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"}, + {file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"}, + {file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"}, + {file = "black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"}, + {file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"}, + {file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"}, + {file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"}, + {file = "black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd"}, + {file = "black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f"}, + {file = "black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800"}, + {file = "black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7"}, + {file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"}, + {file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"}, ] [package.dependencies] @@ -119,144 +119,131 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +d = ["aiohttp (>=3.10)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = 
"2024.8.30" +version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, ] [[package]] name = "charset-normalizer" -version = "3.4.0" +version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, - {file = 
"charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, - {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, - {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +python-versions = ">=3.7" +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = 
"charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash 
= "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = 
"charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] [[package]] name = "click" -version = "8.1.7" +version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] [package.dependencies] @@ -294,8 +281,8 @@ jinja2 = ">=2.10.1,<4.0" packaging = "*" pydantic = [ {version = ">=1.10.0,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.4.0 || >2.4.0,<3.0", extras = ["email"], markers = "python_version >= \"3.12\" and python_version < \"4.0\""}, - {version = ">=1.5.1,<2.4.0 || >2.4.0,<3.0", extras = ["email"], markers = "python_version < \"3.10\""}, {version = ">=1.10.0,<2.4.0 || >2.4.0,<3.0", extras = ["email"], markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, + {version = ">=1.5.1,<2.4.0 || >2.4.0,<3.0", extras = ["email"], markers = "python_version < \"3.10\""}, {version = ">=1.9.0,<2.4.0 || >2.4.0,<3.0", extras = ["email"], markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, ] pyyaml = ">=6.0.1" @@ -309,21 +296,21 @@ validation = ["openapi-spec-validator (>=0.2.8,<0.7.0)", "prance (>=0.18.2)"] [[package]] name = "dnspython" -version = "2.6.1" +version = "2.7.0" description = "DNS toolkit" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "dnspython-2.6.1-py3-none-any.whl", hash = 
"sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, - {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, + {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, + {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, ] [package.extras] -dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] -dnssec = ["cryptography (>=41)"] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=43)"] doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] -doq = ["aioquic (>=0.9.25)"] -idna = ["idna (>=3.6)"] +doq = ["aioquic (>=1.0.0)"] +idna = ["idna (>=3.7)"] trio = ["trio (>=0.23)"] wmi = ["wmi (>=1.5.1)"] @@ -358,13 +345,13 @@ test = ["pytest (>=6)"] [[package]] name = "faker" -version = "33.0.0" +version = "33.1.0" description = "Faker is a Python package that generates fake data for you." optional = false python-versions = ">=3.8" files = [ - {file = "Faker-33.0.0-py3-none-any.whl", hash = "sha256:68e5580cb6b4226710886e595eabc13127149d6e71e9d1db65506a7fbe2c7fce"}, - {file = "faker-33.0.0.tar.gz", hash = "sha256:9b01019c1ddaf2253ca2308c0472116e993f4ad8fc9905f82fa965e0c6f932e9"}, + {file = "Faker-33.1.0-py3-none-any.whl", hash = "sha256:d30c5f0e2796b8970de68978365247657486eb0311c5abe88d0b895b68dff05d"}, + {file = "faker-33.1.0.tar.gz", hash = "sha256:1c925fc0e86a51fc46648b504078c88d0cd48da1da2595c4e712841cab43a1e4"}, ] [package.dependencies] @@ -395,13 +382,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.6" +version = "1.0.7" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, - {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, + {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, + {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, ] [package.dependencies] @@ -416,13 +403,13 @@ trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httpx" -version = "0.27.2" +version = "0.28.1" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, - {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, ] [package.dependencies] @@ -430,7 +417,6 @@ anyio = "*" certifi = "*" httpcore = "==1.*" idna = "*" -sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] @@ -598,13 +584,13 @@ colors = ["colorama (>=0.4.6)"] [[package]] name = "jinja2" -version = "3.1.4" +version = "3.1.5" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, + {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, + {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, ] [package.dependencies] @@ -669,13 +655,13 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jsonschema-specifications" -version = "2023.12.1" +version = "2024.10.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, - {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, + {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, + {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, ] [package.dependencies] @@ -837,71 +823,72 @@ source = ["Cython (>=3.0.11)"] [[package]] name = "markupsafe" -version = "2.1.5" +version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = 
"MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = 
"MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] @@ -1044,70 +1031,89 @@ files = [ [[package]] name = "pandas" -version = "2.0.3" +version = "2.2.3" description = "Powerful data structures for data analysis, time series, and statistics" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, - {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"}, - {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183"}, - {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba619e410a21d8c387a1ea6e8a0e49bb42216474436245718d7f2e88a2f8d7c0"}, - {file = "pandas-2.0.3-cp310-cp310-win32.whl", hash = "sha256:3ef285093b4fe5058eefd756100a367f27029913760773c8bf1d2d8bebe5d210"}, - {file = "pandas-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:9ee1a69328d5c36c98d8e74db06f4ad518a1840e8ccb94a4ba86920986bb617e"}, - {file = "pandas-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b084b91d8d66ab19f5bb3256cbd5ea661848338301940e17f4492b2ce0801fe8"}, - {file = "pandas-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:37673e3bdf1551b95bf5d4ce372b37770f9529743d2498032439371fc7b7eb26"}, - {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9cb1e14fdb546396b7e1b923ffaeeac24e4cedd14266c3497216dd4448e4f2d"}, - {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9cd88488cceb7635aebb84809d087468eb33551097d600c6dad13602029c2df"}, - {file = "pandas-2.0.3-cp311-cp311-win32.whl", hash = "sha256:694888a81198786f0e164ee3a581df7d505024fbb1f15202fc7db88a71d84ebd"}, - {file = "pandas-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:6a21ab5c89dcbd57f78d0ae16630b090eec626360085a4148693def5452d8a6b"}, - {file = "pandas-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:9e4da0d45e7f34c069fe4d522359df7d23badf83abc1d1cef398895822d11061"}, - {file = "pandas-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32fca2ee1b0d93dd71d979726b12b61faa06aeb93cf77468776287f41ff8fdc5"}, - {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258d3624b3ae734490e4d63c430256e716f488c4fcb7c8e9bde2d3aa46c29089"}, - {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eae3dc34fa1aa7772dd3fc60270d13ced7346fcbcfee017d3132ec625e23bb0"}, - {file = "pandas-2.0.3-cp38-cp38-win32.whl", hash = "sha256:f3421a7afb1a43f7e38e82e844e2bca9a6d793d66c1a7f9f0ff39a795bbc5e02"}, - {file = "pandas-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:69d7f3884c95da3a31ef82b7618af5710dba95bb885ffab339aad925c3e8ce78"}, - {file = "pandas-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5247fb1ba347c1261cbbf0fcfba4a3121fbb4029d95d9ef4dc45406620b25c8b"}, - {file = "pandas-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81af086f4543c9d8bb128328b5d32e9986e0c84d3ee673a2ac6fb57fd14f755e"}, - {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1994c789bf12a7c5098277fb43836ce090f1073858c10f9220998ac74f37c69b"}, - {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec591c48e29226bcbb316e0c1e9423622bc7a4eaf1ef7c3c9fa1a3981f89641"}, - {file = "pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682"}, - {file = "pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc"}, - {file = "pandas-2.0.3.tar.gz", hash = "sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash 
= "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = 
"pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, ] [package.dependencies] numpy = [ - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, - {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" -tzdata = ">=2022.1" +tzdata = ">=2022.7" [package.extras] -all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] -aws = ["s3fs (>=2021.08.0)"] -clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] -compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] -computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"] -feather = ["pyarrow (>=7.0.0)"] -fss = ["fsspec (>=2021.07.0)"] -gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"] -hdf5 = ["tables (>=3.6.1)"] -html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] -mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] -parquet = ["pyarrow (>=7.0.0)"] -performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] -plot = ["matplotlib (>=3.6.1)"] -postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] -spss = ["pyreadstat (>=1.1.2)"] -sql-other = ["SQLAlchemy (>=1.4.16)"] -test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.6.3)"] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", 
"adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] [[package]] name = "pathspec" @@ -1122,95 +1128,90 @@ files = [ [[package]] name = "pillow" -version = "10.4.0" +version = "11.0.0" description = "Python Imaging Library (Fork)" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, - {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, - {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, - {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, - {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, - {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, - {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, - {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, - {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, - {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, - {file = "pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, - {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, - {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, - {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, - {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, - {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, - {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, - {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, - {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, - {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, + {file = "pillow-11.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:6619654954dc4936fcff82db8eb6401d3159ec6be81e33c6000dfd76ae189947"}, + {file = "pillow-11.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b3c5ac4bed7519088103d9450a1107f76308ecf91d6dabc8a33a2fcfb18d0fba"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a65149d8ada1055029fcb665452b2814fe7d7082fcb0c5bed6db851cb69b2086"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88a58d8ac0cc0e7f3a014509f0455248a76629ca9b604eca7dc5927cc593c5e9"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c26845094b1af3c91852745ae78e3ea47abf3dbcd1cf962f16b9a5fbe3ee8488"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:1a61b54f87ab5786b8479f81c4b11f4d61702830354520837f8cc791ebba0f5f"}, + {file = "pillow-11.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:674629ff60030d144b7bca2b8330225a9b11c482ed408813924619c6f302fdbb"}, + {file = "pillow-11.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:598b4e238f13276e0008299bd2482003f48158e2b11826862b1eb2ad7c768b97"}, + {file = "pillow-11.0.0-cp310-cp310-win32.whl", hash = "sha256:9a0f748eaa434a41fccf8e1ee7a3eed68af1b690e75328fd7a60af123c193b50"}, + {file = "pillow-11.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5629742881bcbc1f42e840af185fd4d83a5edeb96475a575f4da50d6ede337c"}, + {file = "pillow-11.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:ee217c198f2e41f184f3869f3e485557296d505b5195c513b2bfe0062dc537f1"}, + {file = "pillow-11.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1c1d72714f429a521d8d2d018badc42414c3077eb187a59579f28e4270b4b0fc"}, + {file = "pillow-11.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:499c3a1b0d6fc8213519e193796eb1a86a1be4b1877d678b30f83fd979811d1a"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8b2351c85d855293a299038e1f89db92a2f35e8d2f783489c6f0b2b5f3fe8a3"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f4dba50cfa56f910241eb7f883c20f1e7b1d8f7d91c750cd0b318bad443f4d5"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5ddbfd761ee00c12ee1be86c9c0683ecf5bb14c9772ddbd782085779a63dd55b"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:45c566eb10b8967d71bf1ab8e4a525e5a93519e29ea071459ce517f6b903d7fa"}, + {file = "pillow-11.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b4fd7bd29610a83a8c9b564d457cf5bd92b4e11e79a4ee4716a63c959699b306"}, + {file = "pillow-11.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cb929ca942d0ec4fac404cbf520ee6cac37bf35be479b970c4ffadf2b6a1cad9"}, + {file = "pillow-11.0.0-cp311-cp311-win32.whl", hash = "sha256:006bcdd307cc47ba43e924099a038cbf9591062e6c50e570819743f5607404f5"}, + {file = "pillow-11.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:52a2d8323a465f84faaba5236567d212c3668f2ab53e1c74c15583cf507a0291"}, + {file = "pillow-11.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:16095692a253047fe3ec028e951fa4221a1f3ed3d80c397e83541a3037ff67c9"}, + {file = "pillow-11.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:d2c0a187a92a1cb5ef2c8ed5412dd8d4334272617f532d4ad4de31e0495bd923"}, + {file = "pillow-11.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:084a07ef0821cfe4858fe86652fffac8e187b6ae677e9906e192aafcc1b69903"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8069c5179902dcdce0be9bfc8235347fdbac249d23bd90514b7a47a72d9fecf4"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f02541ef64077f22bf4924f225c0fd1248c168f86e4b7abdedd87d6ebaceab0f"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fcb4621042ac4b7865c179bb972ed0da0218a076dc1820ffc48b1d74c1e37fe9"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:00177a63030d612148e659b55ba99527803288cea7c75fb05766ab7981a8c1b7"}, + {file = "pillow-11.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8853a3bf12afddfdf15f57c4b02d7ded92c7a75a5d7331d19f4f9572a89c17e6"}, + {file = "pillow-11.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3107c66e43bda25359d5ef446f59c497de2b5ed4c7fdba0894f8d6cf3822dafc"}, + {file = "pillow-11.0.0-cp312-cp312-win32.whl", hash = "sha256:86510e3f5eca0ab87429dd77fafc04693195eec7fd6a137c389c3eeb4cfb77c6"}, + {file = "pillow-11.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:8ec4a89295cd6cd4d1058a5e6aec6bf51e0eaaf9714774e1bfac7cfc9051db47"}, + {file = "pillow-11.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:27a7860107500d813fcd203b4ea19b04babe79448268403172782754870dac25"}, + {file = "pillow-11.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcd1fb5bb7b07f64c15618c89efcc2cfa3e95f0e3bcdbaf4642509de1942a699"}, + {file = "pillow-11.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0e038b0745997c7dcaae350d35859c9715c71e92ffb7e0f4a8e8a16732150f38"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ae08bd8ffc41aebf578c2af2f9d8749d91f448b3bfd41d7d9ff573d74f2a6b2"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d69bfd8ec3219ae71bcde1f942b728903cad25fafe3100ba2258b973bd2bc1b2"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:61b887f9ddba63ddf62fd02a3ba7add935d053b6dd7d58998c630e6dbade8527"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:c6a660307ca9d4867caa8d9ca2c2658ab685de83792d1876274991adec7b93fa"}, + {file = "pillow-11.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:73e3a0200cdda995c7e43dd47436c1548f87a30bb27fb871f352a22ab8dcf45f"}, + {file = "pillow-11.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fba162b8872d30fea8c52b258a542c5dfd7b235fb5cb352240c8d63b414013eb"}, + {file = "pillow-11.0.0-cp313-cp313-win32.whl", hash = "sha256:f1b82c27e89fffc6da125d5eb0ca6e68017faf5efc078128cfaa42cf5cb38798"}, + {file = "pillow-11.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:8ba470552b48e5835f1d23ecb936bb7f71d206f9dfeee64245f30c3270b994de"}, + {file = "pillow-11.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:846e193e103b41e984ac921b335df59195356ce3f71dcfd155aa79c603873b84"}, + {file = "pillow-11.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4ad70c4214f67d7466bea6a08061eba35c01b1b89eaa098040a35272a8efb22b"}, + {file = "pillow-11.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:6ec0d5af64f2e3d64a165f490d96368bb5dea8b8f9ad04487f9ab60dc4bb6003"}, + {file = 
"pillow-11.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c809a70e43c7977c4a42aefd62f0131823ebf7dd73556fa5d5950f5b354087e2"}, + {file = "pillow-11.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:4b60c9520f7207aaf2e1d94de026682fc227806c6e1f55bba7606d1c94dd623a"}, + {file = "pillow-11.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1e2688958a840c822279fda0086fec1fdab2f95bf2b717b66871c4ad9859d7e8"}, + {file = "pillow-11.0.0-cp313-cp313t-win32.whl", hash = "sha256:607bbe123c74e272e381a8d1957083a9463401f7bd01287f50521ecb05a313f8"}, + {file = "pillow-11.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c39ed17edea3bc69c743a8dd3e9853b7509625c2462532e62baa0732163a904"}, + {file = "pillow-11.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:75acbbeb05b86bc53cbe7b7e6fe00fbcf82ad7c684b3ad82e3d711da9ba287d3"}, + {file = "pillow-11.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2e46773dc9f35a1dd28bd6981332fd7f27bec001a918a72a79b4133cf5291dba"}, + {file = "pillow-11.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2679d2258b7f1192b378e2893a8a0a0ca472234d4c2c0e6bdd3380e8dfa21b6a"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda2616eb2313cbb3eebbe51f19362eb434b18e3bb599466a1ffa76a033fb916"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ec184af98a121fb2da42642dea8a29ec80fc3efbaefb86d8fdd2606619045d"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:8594f42df584e5b4bb9281799698403f7af489fba84c34d53d1c4bfb71b7c4e7"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:c12b5ae868897c7338519c03049a806af85b9b8c237b7d675b8c5e089e4a618e"}, + {file = "pillow-11.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:70fbbdacd1d271b77b7721fe3cdd2d537bbbd75d29e6300c672ec6bb38d9672f"}, + {file = "pillow-11.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5178952973e588b3f1360868847334e9e3bf49d19e169bbbdfaf8398002419ae"}, + {file = "pillow-11.0.0-cp39-cp39-win32.whl", hash = "sha256:8c676b587da5673d3c75bd67dd2a8cdfeb282ca38a30f37950511766b26858c4"}, + {file = "pillow-11.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:94f3e1780abb45062287b4614a5bc0874519c86a777d4a7ad34978e86428b8dd"}, + {file = "pillow-11.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:290f2cc809f9da7d6d622550bbf4c1e57518212da51b6a30fe8e0a270a5b78bd"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1187739620f2b365de756ce086fdb3604573337cc28a0d3ac4a01ab6b2d2a6d2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fbbcb7b57dc9c794843e3d1258c0fbf0f48656d46ffe9e09b63bbd6e8cd5d0a2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d203af30149ae339ad1b4f710d9844ed8796e97fda23ffbc4cc472968a47d0b"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a0d3b115009ebb8ac3d2ebec5c2982cc693da935f4ab7bb5c8ebe2f47d36f2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:73853108f56df97baf2bb8b522f3578221e56f646ba345a372c78326710d3830"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e58876c91f97b0952eb766123bfef372792ab3f4e3e1f1a2267834c2ab131734"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:224aaa38177597bb179f3ec87eeefcce8e4f85e608025e9cfac60de237ba6316"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5bd2d3bdb846d757055910f0a59792d33b555800813c3b39ada1829c372ccb06"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:375b8dd15a1f5d2feafff536d47e22f69625c1aa92f12b339ec0b2ca40263273"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:daffdf51ee5db69a82dd127eabecce20729e21f7a3680cf7cbb23f0829189790"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7326a1787e3c7b0429659e0a944725e1b03eeaa10edd945a86dead1913383944"}, + {file = "pillow-11.0.0.tar.gz", hash = "sha256:72bacbaf24ac003fea9bff9837d1eedb6088758d41e100c1552930151f677739"}, ] [package.extras] -docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] +docs = ["furo", "olefile", "sphinx (>=8.1)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] fpx = ["olefile"] mic = ["olefile"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] @@ -1250,23 +1251,20 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pydantic" -version = "2.9.2" +version = "2.10.4" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, - {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, + {file = "pydantic-2.10.4-py3-none-any.whl", hash = "sha256:597e135ea68be3a37552fb524bc7d0d66dcf93d395acd93a00682f1efcb8ee3d"}, + {file = "pydantic-2.10.4.tar.gz", hash = "sha256:82f12e9723da6de4fe2ba888b5971157b3be7ad914267dea8f05f82b28254f06"}, ] [package.dependencies] annotated-types = ">=0.6.0" email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""} -pydantic-core = "2.23.4" -typing-extensions = [ - {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, - {version = ">=4.6.1", markers = "python_version < \"3.13\""}, -] +pydantic-core = "2.27.2" +typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] @@ -1274,100 +1272,111 @@ timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.23.4" +version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, - {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, - {file = 
"pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, - {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, - {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, - {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, - {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, - {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, - {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, - {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, - {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, - {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, - {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, - {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, - {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, - {file = 
"pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, - {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, - {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, - {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, - {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, - {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, - {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, - {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, - {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, - {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, - {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, - {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, - {file = 
"pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, - {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, - {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, - {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, - {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, - {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, - {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, - {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, - {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, - {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, - {file = 
"pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, - {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = 
"pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = 
"pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, ] [package.dependencies] @@ -1658,114 +1667,114 @@ fixture = ["fixtures"] [[package]] name = "rpds-py" -version = "0.20.1" +version = "0.22.3" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "rpds_py-0.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a649dfd735fff086e8a9d0503a9f0c7d01b7912a333c7ae77e1515c08c146dad"}, - {file = "rpds_py-0.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f16bc1334853e91ddaaa1217045dd7be166170beec337576818461268a3de67f"}, - {file = "rpds_py-0.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14511a539afee6f9ab492b543060c7491c99924314977a55c98bfa2ee29ce78c"}, - {file = "rpds_py-0.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3ccb8ac2d3c71cda472b75af42818981bdacf48d2e21c36331b50b4f16930163"}, - {file = "rpds_py-0.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c142b88039b92e7e0cb2552e8967077e3179b22359e945574f5e2764c3953dcf"}, - {file = "rpds_py-0.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f19169781dddae7478a32301b499b2858bc52fc45a112955e798ee307e294977"}, - {file = "rpds_py-0.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13c56de6518e14b9bf6edde23c4c39dac5b48dcf04160ea7bce8fca8397cdf86"}, - {file = "rpds_py-0.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:925d176a549f4832c6f69fa6026071294ab5910e82a0fe6c6228fce17b0706bd"}, - {file = "rpds_py-0.20.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:78f0b6877bfce7a3d1ff150391354a410c55d3cdce386f862926a4958ad5ab7e"}, - {file = "rpds_py-0.20.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3dd645e2b0dcb0fd05bf58e2e54c13875847687d0b71941ad2e757e5d89d4356"}, - {file = "rpds_py-0.20.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4f676e21db2f8c72ff0936f895271e7a700aa1f8d31b40e4e43442ba94973899"}, - {file = "rpds_py-0.20.1-cp310-none-win32.whl", hash = 
"sha256:648386ddd1e19b4a6abab69139b002bc49ebf065b596119f8f37c38e9ecee8ff"}, - {file = "rpds_py-0.20.1-cp310-none-win_amd64.whl", hash = "sha256:d9ecb51120de61e4604650666d1f2b68444d46ae18fd492245a08f53ad2b7711"}, - {file = "rpds_py-0.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:762703bdd2b30983c1d9e62b4c88664df4a8a4d5ec0e9253b0231171f18f6d75"}, - {file = "rpds_py-0.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0b581f47257a9fce535c4567782a8976002d6b8afa2c39ff616edf87cbeff712"}, - {file = "rpds_py-0.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:842c19a6ce894493563c3bd00d81d5100e8e57d70209e84d5491940fdb8b9e3a"}, - {file = "rpds_py-0.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42cbde7789f5c0bcd6816cb29808e36c01b960fb5d29f11e052215aa85497c93"}, - {file = "rpds_py-0.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c8e9340ce5a52f95fa7d3b552b35c7e8f3874d74a03a8a69279fd5fca5dc751"}, - {file = "rpds_py-0.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ba6f89cac95c0900d932c9efb7f0fb6ca47f6687feec41abcb1bd5e2bd45535"}, - {file = "rpds_py-0.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a916087371afd9648e1962e67403c53f9c49ca47b9680adbeef79da3a7811b0"}, - {file = "rpds_py-0.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:200a23239781f46149e6a415f1e870c5ef1e712939fe8fa63035cd053ac2638e"}, - {file = "rpds_py-0.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:58b1d5dd591973d426cbb2da5e27ba0339209832b2f3315928c9790e13f159e8"}, - {file = "rpds_py-0.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6b73c67850ca7cae0f6c56f71e356d7e9fa25958d3e18a64927c2d930859b8e4"}, - {file = "rpds_py-0.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d8761c3c891cc51e90bc9926d6d2f59b27beaf86c74622c8979380a29cc23ac3"}, - {file = "rpds_py-0.20.1-cp311-none-win32.whl", hash = "sha256:cd945871335a639275eee904caef90041568ce3b42f402c6959b460d25ae8732"}, - {file = "rpds_py-0.20.1-cp311-none-win_amd64.whl", hash = "sha256:7e21b7031e17c6b0e445f42ccc77f79a97e2687023c5746bfb7a9e45e0921b84"}, - {file = "rpds_py-0.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:36785be22066966a27348444b40389f8444671630063edfb1a2eb04318721e17"}, - {file = "rpds_py-0.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:142c0a5124d9bd0e2976089484af5c74f47bd3298f2ed651ef54ea728d2ea42c"}, - {file = "rpds_py-0.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbddc10776ca7ebf2a299c41a4dde8ea0d8e3547bfd731cb87af2e8f5bf8962d"}, - {file = "rpds_py-0.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:15a842bb369e00295392e7ce192de9dcbf136954614124a667f9f9f17d6a216f"}, - {file = "rpds_py-0.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be5ef2f1fc586a7372bfc355986226484e06d1dc4f9402539872c8bb99e34b01"}, - {file = "rpds_py-0.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbcf360c9e3399b056a238523146ea77eeb2a596ce263b8814c900263e46031a"}, - {file = "rpds_py-0.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd27a66740ffd621d20b9a2f2b5ee4129a56e27bfb9458a3bcc2e45794c96cb"}, - {file = "rpds_py-0.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0b937b2a1988f184a3e9e577adaa8aede21ec0b38320d6009e02bd026db04fa"}, - {file = 
"rpds_py-0.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6889469bfdc1eddf489729b471303739bf04555bb151fe8875931f8564309afc"}, - {file = "rpds_py-0.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:19b73643c802f4eaf13d97f7855d0fb527fbc92ab7013c4ad0e13a6ae0ed23bd"}, - {file = "rpds_py-0.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3c6afcf2338e7f374e8edc765c79fbcb4061d02b15dd5f8f314a4af2bdc7feb5"}, - {file = "rpds_py-0.20.1-cp312-none-win32.whl", hash = "sha256:dc73505153798c6f74854aba69cc75953888cf9866465196889c7cdd351e720c"}, - {file = "rpds_py-0.20.1-cp312-none-win_amd64.whl", hash = "sha256:8bbe951244a838a51289ee53a6bae3a07f26d4e179b96fc7ddd3301caf0518eb"}, - {file = "rpds_py-0.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6ca91093a4a8da4afae7fe6a222c3b53ee4eef433ebfee4d54978a103435159e"}, - {file = "rpds_py-0.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b9c2fe36d1f758b28121bef29ed1dee9b7a2453e997528e7d1ac99b94892527c"}, - {file = "rpds_py-0.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f009c69bc8c53db5dfab72ac760895dc1f2bc1b62ab7408b253c8d1ec52459fc"}, - {file = "rpds_py-0.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6740a3e8d43a32629bb9b009017ea5b9e713b7210ba48ac8d4cb6d99d86c8ee8"}, - {file = "rpds_py-0.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:32b922e13d4c0080d03e7b62991ad7f5007d9cd74e239c4b16bc85ae8b70252d"}, - {file = "rpds_py-0.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe00a9057d100e69b4ae4a094203a708d65b0f345ed546fdef86498bf5390982"}, - {file = "rpds_py-0.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fe9b04b6fa685bd39237d45fad89ba19e9163a1ccaa16611a812e682913496"}, - {file = "rpds_py-0.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aa7ac11e294304e615b43f8c441fee5d40094275ed7311f3420d805fde9b07b4"}, - {file = "rpds_py-0.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aa97af1558a9bef4025f8f5d8c60d712e0a3b13a2fe875511defc6ee77a1ab7"}, - {file = "rpds_py-0.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:483b29f6f7ffa6af845107d4efe2e3fa8fb2693de8657bc1849f674296ff6a5a"}, - {file = "rpds_py-0.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:37fe0f12aebb6a0e3e17bb4cd356b1286d2d18d2e93b2d39fe647138458b4bcb"}, - {file = "rpds_py-0.20.1-cp313-none-win32.whl", hash = "sha256:a624cc00ef2158e04188df5e3016385b9353638139a06fb77057b3498f794782"}, - {file = "rpds_py-0.20.1-cp313-none-win_amd64.whl", hash = "sha256:b71b8666eeea69d6363248822078c075bac6ed135faa9216aa85f295ff009b1e"}, - {file = "rpds_py-0.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5b48e790e0355865197ad0aca8cde3d8ede347831e1959e158369eb3493d2191"}, - {file = "rpds_py-0.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3e310838a5801795207c66c73ea903deda321e6146d6f282e85fa7e3e4854804"}, - {file = "rpds_py-0.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2249280b870e6a42c0d972339e9cc22ee98730a99cd7f2f727549af80dd5a963"}, - {file = "rpds_py-0.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e79059d67bea28b53d255c1437b25391653263f0e69cd7dec170d778fdbca95e"}, - {file = "rpds_py-0.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b431c777c9653e569986ecf69ff4a5dba281cded16043d348bf9ba505486f36"}, - {file = 
"rpds_py-0.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da584ff96ec95e97925174eb8237e32f626e7a1a97888cdd27ee2f1f24dd0ad8"}, - {file = "rpds_py-0.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a0629ec053fc013808a85178524e3cb63a61dbc35b22499870194a63578fb9"}, - {file = "rpds_py-0.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fbf15aff64a163db29a91ed0868af181d6f68ec1a3a7d5afcfe4501252840bad"}, - {file = "rpds_py-0.20.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:07924c1b938798797d60c6308fa8ad3b3f0201802f82e4a2c41bb3fafb44cc28"}, - {file = "rpds_py-0.20.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4a5a844f68776a7715ecb30843b453f07ac89bad393431efbf7accca3ef599c1"}, - {file = "rpds_py-0.20.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:518d2ca43c358929bf08f9079b617f1c2ca6e8848f83c1225c88caeac46e6cbc"}, - {file = "rpds_py-0.20.1-cp38-none-win32.whl", hash = "sha256:3aea7eed3e55119635a74bbeb80b35e776bafccb70d97e8ff838816c124539f1"}, - {file = "rpds_py-0.20.1-cp38-none-win_amd64.whl", hash = "sha256:7dca7081e9a0c3b6490a145593f6fe3173a94197f2cb9891183ef75e9d64c425"}, - {file = "rpds_py-0.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b41b6321805c472f66990c2849e152aff7bc359eb92f781e3f606609eac877ad"}, - {file = "rpds_py-0.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a90c373ea2975519b58dece25853dbcb9779b05cc46b4819cb1917e3b3215b6"}, - {file = "rpds_py-0.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16d4477bcb9fbbd7b5b0e4a5d9b493e42026c0bf1f06f723a9353f5153e75d30"}, - {file = "rpds_py-0.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:84b8382a90539910b53a6307f7c35697bc7e6ffb25d9c1d4e998a13e842a5e83"}, - {file = "rpds_py-0.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4888e117dd41b9d34194d9e31631af70d3d526efc363085e3089ab1a62c32ed1"}, - {file = "rpds_py-0.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5265505b3d61a0f56618c9b941dc54dc334dc6e660f1592d112cd103d914a6db"}, - {file = "rpds_py-0.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e75ba609dba23f2c95b776efb9dd3f0b78a76a151e96f96cc5b6b1b0004de66f"}, - {file = "rpds_py-0.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1791ff70bc975b098fe6ecf04356a10e9e2bd7dc21fa7351c1742fdeb9b4966f"}, - {file = "rpds_py-0.20.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d126b52e4a473d40232ec2052a8b232270ed1f8c9571aaf33f73a14cc298c24f"}, - {file = "rpds_py-0.20.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c14937af98c4cc362a1d4374806204dd51b1e12dded1ae30645c298e5a5c4cb1"}, - {file = "rpds_py-0.20.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3d089d0b88996df627693639d123c8158cff41c0651f646cd8fd292c7da90eaf"}, - {file = "rpds_py-0.20.1-cp39-none-win32.whl", hash = "sha256:653647b8838cf83b2e7e6a0364f49af96deec64d2a6578324db58380cff82aca"}, - {file = "rpds_py-0.20.1-cp39-none-win_amd64.whl", hash = "sha256:fa41a64ac5b08b292906e248549ab48b69c5428f3987b09689ab2441f267d04d"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a07ced2b22f0cf0b55a6a510078174c31b6d8544f3bc00c2bcee52b3d613f74"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:68cb0a499f2c4a088fd2f521453e22ed3527154136a855c62e148b7883b99f9a"}, - {file = 
"rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa3060d885657abc549b2a0f8e1b79699290e5d83845141717c6c90c2df38311"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95f3b65d2392e1c5cec27cff08fdc0080270d5a1a4b2ea1d51d5f4a2620ff08d"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2cc3712a4b0b76a1d45a9302dd2f53ff339614b1c29603a911318f2357b04dd2"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d4eea0761e37485c9b81400437adb11c40e13ef513375bbd6973e34100aeb06"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f5179583d7a6cdb981151dd349786cbc318bab54963a192692d945dd3f6435d"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fbb0ffc754490aff6dabbf28064be47f0f9ca0b9755976f945214965b3ace7e"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a94e52537a0e0a85429eda9e49f272ada715506d3b2431f64b8a3e34eb5f3e75"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:92b68b79c0da2a980b1c4197e56ac3dd0c8a149b4603747c4378914a68706979"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:93da1d3db08a827eda74356f9f58884adb254e59b6664f64cc04cdff2cc19b0d"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:754bbed1a4ca48479e9d4182a561d001bbf81543876cdded6f695ec3d465846b"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ca449520e7484534a2a44faf629362cae62b660601432d04c482283c47eaebab"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9c4cb04a16b0f199a8c9bf807269b2f63b7b5b11425e4a6bd44bd6961d28282c"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb63804105143c7e24cee7db89e37cb3f3941f8e80c4379a0b355c52a52b6780"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:55cd1fa4ecfa6d9f14fbd97ac24803e6f73e897c738f771a9fe038f2f11ff07c"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f8f741b6292c86059ed175d80eefa80997125b7c478fb8769fd9ac8943a16c0"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fc212779bf8411667234b3cdd34d53de6c2b8b8b958e1e12cb473a5f367c338"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ad56edabcdb428c2e33bbf24f255fe2b43253b7d13a2cdbf05de955217313e6"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a3a1e9ee9728b2c1734f65d6a1d376c6f2f6fdcc13bb007a08cc4b1ff576dc5"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e13de156137b7095442b288e72f33503a469aa1980ed856b43c353ac86390519"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:07f59760ef99f31422c49038964b31c4dfcfeb5d2384ebfc71058a7c9adae2d2"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:59240685e7da61fb78f65a9f07f8108e36a83317c53f7b276b4175dc44151684"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:83cba698cfb3c2c5a7c3c6bac12fe6c6a51aae69513726be6411076185a8b24a"}, - {file = "rpds_py-0.20.1.tar.gz", hash = "sha256:e1791c4aabd117653530dccd24108fa03cc6baf21f58b950d0a73c3b3b29a350"}, + {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, + {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf"}, + {file = "rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652"}, + {file = "rpds_py-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8"}, + {file = "rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f"}, + {file = "rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d"}, + {file = 
"rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a"}, + {file = "rpds_py-0.22.3-cp311-cp311-win32.whl", hash = "sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64"}, + {file = "rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c"}, + {file = "rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e"}, + {file = "rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7"}, + {file = "rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627"}, + {file = "rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4"}, + {file = "rpds_py-0.22.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ea7433ce7e4bfc3a85654aeb6747babe3f66eaf9a1d0c1e7a4435bbdf27fea84"}, + {file = "rpds_py-0.22.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6dd9412824c4ce1aca56c47b0991e65bebb7ac3f4edccfd3f156150c96a7bf25"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20070c65396f7373f5df4005862fa162db5d25d56150bddd0b3e8214e8ef45b4"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b09865a9abc0ddff4e50b5ef65467cd94176bf1e0004184eb915cbc10fc05c5"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3453e8d41fe5f17d1f8e9c383a7473cd46a63661628ec58e07777c2fff7196dc"}, + {file = 
"rpds_py-0.22.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5d36399a1b96e1a5fdc91e0522544580dbebeb1f77f27b2b0ab25559e103b8b"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1aef18820ef3e4587ebe8b3bc9ba6e55892a6d7b93bac6d29d9f631a3b4befbd"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f60bd8423be1d9d833f230fdbccf8f57af322d96bcad6599e5a771b151398eb2"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62d9cfcf4948683a18a9aff0ab7e1474d407b7bab2ca03116109f8464698ab16"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9253fc214112405f0afa7db88739294295f0e08466987f1d70e29930262b4c8f"}, + {file = "rpds_py-0.22.3-cp313-cp313-win32.whl", hash = "sha256:fb0ba113b4983beac1a2eb16faffd76cb41e176bf58c4afe3e14b9c681f702de"}, + {file = "rpds_py-0.22.3-cp313-cp313-win_amd64.whl", hash = "sha256:c58e2339def52ef6b71b8f36d13c3688ea23fa093353f3a4fee2556e62086ec9"}, + {file = "rpds_py-0.22.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f82a116a1d03628a8ace4859556fb39fd1424c933341a08ea3ed6de1edb0283b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3dfcbc95bd7992b16f3f7ba05af8a64ca694331bd24f9157b49dadeeb287493b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59259dc58e57b10e7e18ce02c311804c10c5a793e6568f8af4dead03264584d1"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5725dd9cc02068996d4438d397e255dcb1df776b7ceea3b9cb972bdb11260a83"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b37292234e61325e7a5bb9689e55e48c3f5f603af88b1642666277a81f1fbd"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b1d3b3915a99208fee9ab092b8184c420f2905b7d7feb4aeb5e4a9c509b8a1"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f612463ac081803f243ff13cccc648578e2279295048f2a8d5eb430af2bae6e3"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f73d3fef726b3243a811121de45193c0ca75f6407fe66f3f4e183c983573e130"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f21f0495edea7fdbaaa87e633a8689cd285f8f4af5c869f27bc8074638ad69c"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1e9663daaf7a63ceccbbb8e3808fe90415b0757e2abddbfc2e06c857bf8c5e2b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a76e42402542b1fae59798fab64432b2d015ab9d0c8c47ba7addddbaf7952333"}, + {file = "rpds_py-0.22.3-cp313-cp313t-win32.whl", hash = "sha256:69803198097467ee7282750acb507fba35ca22cc3b85f16cf45fb01cb9097730"}, + {file = "rpds_py-0.22.3-cp313-cp313t-win_amd64.whl", hash = "sha256:f5cf2a0c2bdadf3791b5c205d55a37a54025c6e18a71c71f82bb536cf9a454bf"}, + {file = "rpds_py-0.22.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:378753b4a4de2a7b34063d6f95ae81bfa7b15f2c1a04a9518e8644e81807ebea"}, + {file = "rpds_py-0.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3445e07bf2e8ecfeef6ef67ac83de670358abf2996916039b16a218e3d95e97e"}, + {file = 
"rpds_py-0.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2513ba235829860b13faa931f3b6846548021846ac808455301c23a101689d"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eaf16ae9ae519a0e237a0f528fd9f0197b9bb70f40263ee57ae53c2b8d48aeb3"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:583f6a1993ca3369e0f80ba99d796d8e6b1a3a2a442dd4e1a79e652116413091"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4617e1915a539a0d9a9567795023de41a87106522ff83fbfaf1f6baf8e85437e"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c150c7a61ed4a4f4955a96626574e9baf1adf772c2fb61ef6a5027e52803543"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fa4331c200c2521512595253f5bb70858b90f750d39b8cbfd67465f8d1b596d"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:214b7a953d73b5e87f0ebece4a32a5bd83c60a3ecc9d4ec8f1dca968a2d91e99"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f47ad3d5f3258bd7058d2d506852217865afefe6153a36eb4b6928758041d831"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f276b245347e6e36526cbd4a266a417796fc531ddf391e43574cf6466c492520"}, + {file = "rpds_py-0.22.3-cp39-cp39-win32.whl", hash = "sha256:bbb232860e3d03d544bc03ac57855cd82ddf19c7a07651a7c0fdb95e9efea8b9"}, + {file = "rpds_py-0.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfbc454a2880389dbb9b5b398e50d439e2e58669160f27b60e5eca11f68ae17c"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe"}, + {file = 
"rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb47271f60660803ad11f4c61b42242b8c1312a31c98c578f79ef9387bbde21c"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:70fb28128acbfd264eda9bf47015537ba3fe86e40d046eb2963d75024be4d055"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d61b4b7d0c2c9ac019c314e52d7cbda0ae31078aabd0f22e583af3e0d79723"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0e260eaf54380380ac3808aa4ebe2d8ca28b9087cf411649f96bad6900c728"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b25bc607423935079e05619d7de556c91fb6adeae9d5f80868dde3468657994b"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb6116dfb8d1925cbdb52595560584db42a7f664617a1f7d7f6e32f138cdf37d"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a63cbdd98acef6570c62b92a1e43266f9e8b21e699c363c0fef13bd530799c11"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b8f60e1b739a74bab7e01fcbe3dddd4657ec685caa04681df9d562ef15b625f"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2e8b55d8517a2fda8d95cb45d62a5a8bbf9dd0ad39c5b25c8833efea07b880ca"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2de29005e11637e7a2361fa151f780ff8eb2543a0da1413bb951e9f14b699ef3"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:666ecce376999bf619756a24ce15bb14c5bfaf04bf00abc7e663ce17c3f34fe7"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5246b14ca64a8675e0a7161f7af68fe3e910e6b90542b4bfb5439ba752191df6"}, + {file = "rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d"}, ] [[package]] @@ -1779,26 +1788,53 @@ files = [ {file = "rstr-3.2.2.tar.gz", hash = "sha256:c4a564d4dfb4472d931d145c43d1cf1ad78c24592142e7755b8866179eeac012"}, ] +[[package]] +name = "ruff" +version = "0.5.7" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"}, + {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"}, + {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"}, + {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"}, + {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"}, + {file = "ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"}, + {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"}, +] + [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] name = "smart-open" -version = "7.0.5" +version = "7.1.0" description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob 
Storage, gzip, bz2...)" optional = false python-versions = "<4.0,>=3.7" files = [ - {file = "smart_open-7.0.5-py3-none-any.whl", hash = "sha256:8523ed805c12dff3eaa50e9c903a6cb0ae78800626631c5fe7ea073439847b89"}, - {file = "smart_open-7.0.5.tar.gz", hash = "sha256:d3672003b1dbc85e2013e4983b88eb9a5ccfd389b0d4e5015f39a9ee5620ec18"}, + {file = "smart_open-7.1.0-py3-none-any.whl", hash = "sha256:4b8489bb6058196258bafe901730c7db0dcf4f083f316e97269c66f45502055b"}, + {file = "smart_open-7.1.0.tar.gz", hash = "sha256:a4f09f84f0f6d3637c6543aca7b5487438877a21360e7368ccf1f704789752ba"}, ] [package.dependencies] @@ -1840,31 +1876,61 @@ files = [ [[package]] name = "tomli" -version = "2.1.0" +version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" files = [ - {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"}, - {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = 
"tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] name = "tqdm" -version = "4.67.0" +version = "4.67.1" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.67.0-py3-none-any.whl", hash = "sha256:0cd8af9d56911acab92182e88d763100d4788bdf421d251616040cc4d44863be"}, - {file = "tqdm-4.67.0.tar.gz", hash = "sha256:fe5a6f95e6fe0b9755e9469b77b9c3cf850048224ecaa8293d7d2d31f97d869a"}, + {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, + {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [package.extras] -dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"] discord = ["requests"] notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] @@ -1872,13 +1938,13 @@ telegram = ["requests"] [[package]] name = "types-python-dateutil" -version = "2.9.0.20241003" +version = "2.9.0.20241206" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" files = [ - 
{file = "types-python-dateutil-2.9.0.20241003.tar.gz", hash = "sha256:58cb85449b2a56d6684e41aeefb4c4280631246a0da1a719bdbe6f3fb0317446"}, - {file = "types_python_dateutil-2.9.0.20241003-py3-none-any.whl", hash = "sha256:250e1d8e80e7bbc3a6c99b907762711d1a1cdd00e978ad39cb5940f6f0a87f3d"}, + {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, + {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, ] [[package]] @@ -1992,13 +2058,13 @@ files = [ [[package]] name = "urllib3" -version = "2.2.3" +version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, + {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, + {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] [package.extras] @@ -2095,4 +2161,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = ">=3.9,<4" -content-hash = "3955a2976dfe4ede2a998869d7a129d014f1a010421616bcf7b2dbce46c0459e" +content-hash = "72f273f4cf05c59fa013629059313c3ddd5211a755d9b96fe5aae1277bf4b34c" diff --git a/pyproject.toml b/pyproject.toml index 2a2742bf5..804f86473 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "label-studio-sdk" -version = "1.0.9.dev" +version = "1.0.9" description = "" readme = "README.md" authors = [] @@ -37,25 +37,27 @@ appdirs = ">=1.4.3" datamodel-code-generator = "0.26.1" httpx = ">=0.21.2" ijson = ">=3.2.3" +jsf = "^0.11.2" jsonschema = ">=4.23.0" lxml = ">=4.2.5" nltk = "^3.9.1" numpy = ">=1.26.4,<2.0.0" pandas = ">=0.24.0" pydantic = ">= 1.9.2" +pydantic-core = "^2.18.2" requests = ">=2.22.0" requests-mock = "1.12.1" typing_extensions = ">= 4.0.0" ujson = ">=5.8.0" xmljson = "0.2.1" -jsf = "^0.11.2" [tool.poetry.dev-dependencies] mypy = "1.0.1" pytest = "^7.4.0" pytest-asyncio = "^0.23.5" python-dateutil = "^2.9.0" types-python-dateutil = "^2.9.0.20240316" +ruff = "^0.5.6" [tool.pytest.ini_options] testpaths = [ "tests" ] @@ -64,6 +66,9 @@ asyncio_mode = "auto" [tool.mypy] plugins = ["pydantic.mypy"] +[tool.ruff] +line-length = 120 + [build-system] requires = ["poetry-core"] diff --git a/reference.md b/reference.md index 32485cf2d..7bb3d3036 100644 --- a/reference.md +++ b/reference.md @@ -12,9 +12,10 @@
-Tasks can have multiple annotations. Use this call to retrieve a specific annotation using its ID.
-You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs.
+Tasks can have multiple annotations. Use this call to retrieve a specific annotation using its ID.
+
+You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs.
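For orientation, a complete call with the import path used throughout this diff might look like the following minimal sketch (the annotation ID `1` is a placeholder):

```python
from label_studio_sdk import LabelStudio

# Connect with an API key from your Label Studio account settings.
client = LabelStudio(api_key="YOUR_API_KEY")

# Fetch one annotation by its ID; the returned object carries the
# labeling result among other fields.
annotation = client.annotations.get(id=1)
print(annotation.result)
```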
    @@ -29,7 +30,7 @@ You can find the ID in the Label Studio UI listed at the top of the annotation i
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
@@ -84,11 +85,12 @@ client.annotations.get(
-Delete an annotation.
+
+Delete an annotation. This action can't be undone!

-You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs.
+You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs.
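A matching sketch for deletion, assuming the same `id` keyword as the other calls (the operation is irreversible, as the description warns):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Permanently delete annotation 1; this cannot be undone.
client.annotations.delete(id=1)
```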
    @@ -103,7 +105,7 @@ You will need to supply the annotation's unique ID. You can find the ID in the L
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
@@ -158,9 +160,10 @@ client.annotations.delete(
-Update attributes for an existing annotation.
-You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs.
+Update attributes for an existing annotation.
+
+You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs.

 For information about the JSON format used in the result, see [Label Studio JSON format of annotated tasks](https://labelstud.io/guide/export#Label-Studio-JSON-format-of-annotated-tasks).
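As an illustration of the update call, a hedged sketch follows; the `result` payload assumes a hypothetical `rectanglelabels` configuration with tags named `label` and `image`, so adjust it to your own labeling config:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Overwrite the labeling result of annotation 1. The dict layout follows
# the Label Studio JSON task format; from_name/to_name must match the
# control and object tag names in your labeling configuration.
client.annotations.update(
    id=1,
    result=[
        {
            "from_name": "label",
            "to_name": "image",
            "type": "rectanglelabels",
            "value": {"x": 20, "y": 30, "width": 40, "height": 50, "rectanglelabels": ["Car"]},
        }
    ],
)
```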
    @@ -177,7 +180,7 @@ For information about the JSON format used in the result, see [Label Studio JSON
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -228,7 +231,7 @@ client.annotations.update(
    -**result:** `typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]]` — Labeling result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) +**result:** `typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]]` — Labeling result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format)
    @@ -316,9 +319,10 @@ client.annotations.update(
    + List all annotations for a task. -You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). +You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list).
    @@ -333,7 +337,7 @@ You will need to supply the task ID. You can find this in Label Studio by openin
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -388,11 +392,13 @@ client.annotations.list(
-Add annotations to a task like an annotator does. -You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). +Add annotations to a task like an annotator does. -The content of the result field depends on your labeling configuration. For example, send the following data as part of your POST +You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). + + +The content of the result field depends on your labeling configuration. For example, send the following data as part of your POST request to send an empty annotation with the ID of the user who completed the task: ```json @@ -403,7 +409,7 @@ request to send an empty annotation with the ID of the user who completed the ta "lead_time": 0, "task": 0, "completed_by": 123 -} +} ```
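For reference, a hedged SDK sketch of the same empty-annotation call. The keyword arguments are assumed to mirror the JSON fields above, and the task and user IDs are placeholders:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Placeholder IDs: task 0 and user 123 mirror the JSON example above.
client.annotations.create(
    id=0,  # ID of the task to annotate
    result=[],  # empty labeling result
    lead_time=0,
    completed_by=123,
)
```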
    @@ -419,7 +425,7 @@ request to send an empty annotation with the ID of the user who completed the ta
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -470,7 +476,7 @@ client.annotations.create(
    -**result:** `typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]]` — Labeling result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) +**result:** `typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]]` — Labeling result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format)
    @@ -573,7 +579,7 @@ Create multiple annotations for specific tasks in a bulk operation.
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -618,7 +624,7 @@ client.annotations.create_bulk()
    -**result:** `typing.Optional[typing.Dict[str, typing.Any]]` +**result:** `typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]`
    @@ -666,7 +672,7 @@ Reset your access token or API key. When reset, any scripts or automations you h
```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) @@ -726,7 +732,7 @@ Get an access token to authenticate to the API as the current user. To find this
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -786,7 +792,7 @@ Get information about your user account, such as your username, email, and user
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -831,7 +837,8 @@ client.users.whoami()
    -List all users in your Label Studio organization. + +List all users in your Label Studio organization.
    @@ -846,7 +853,7 @@ List all users in your Label Studio organization.
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -891,6 +898,7 @@ client.users.list()
    + Create a user in Label Studio.
    @@ -906,7 +914,7 @@ Create a user in Label Studio.
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -1023,8 +1031,9 @@ client.users.create()
    -Get info about a specific Label Studio user. -You will need to provide their user ID. You can find a list of all user IDs using [List users](list). + +Get info about a specific Label Studio user. +You will need to provide their user ID. You can find a list of all user IDs using [List users](list).
    @@ -1039,7 +1048,7 @@ You will need to provide their user ID. You can find a list of all user IDs usin
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -1094,6 +1103,7 @@ client.users.get(
    + Delete a specific Label Studio user. You will need to provide their user ID. You can find a list of all user IDs using [List users](list). @@ -1113,7 +1123,7 @@ You will need to provide their user ID. You can find a list of all user IDs usin
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -1168,9 +1178,10 @@ client.users.delete(
    -Update details for a specific Label Studio user, such as their name or contact information. -You will need to provide their user ID. You can find a list of all user IDs using [List users](list). +Update details for a specific Label Studio user, such as their name or contact information. + +You will need to provide their user ID. You can find a list of all user IDs using [List users](list).
    @@ -1185,7 +1196,7 @@ You will need to provide their user ID. You can find a list of all user IDs usin
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -1328,7 +1339,7 @@ Retrieve all the registered actions with descriptions that data manager can use.
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -1388,12 +1399,12 @@ Perform a Data Manager action with the selected tasks and filters. Note: More co
    ```python -from label_studio_sdk import ( +from label_studio_sdk import LabelStudio +from label_studio_sdk.actions import ( ActionsCreateRequestFilters, ActionsCreateRequestFiltersItemsItem, ActionsCreateRequestSelectedItemsExcluded, ) -from label_studio_sdk.client import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -1506,9 +1517,10 @@ client.actions.create(
-List all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize which tasks and information appears. -You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). +List all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize which tasks and information appear. + +You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list).
    @@ -1523,7 +1535,7 @@ You will need to provide the project ID. You can find this in the URL when viewi
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -1576,9 +1588,10 @@ client.views.list()
-Create a new Data Manager view for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks and information appears. -You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). +Create a new Data Manager view for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks and information appear. + +You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list).
    @@ -1593,7 +1606,7 @@ You will need to provide the project ID. You can find this in the URL when viewi
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -1654,7 +1667,8 @@ client.views.create()
    -Delete all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks appear. + +Delete all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks appear. You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list).
    @@ -1671,7 +1685,7 @@ You will need to provide the project ID. You can find this in the URL when viewi
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -1726,7 +1740,8 @@ client.views.delete_all(
    -Get the details about a specific Data Manager view (tab). You will need to supply the view ID. You can find this using [List views](list). + +Get the details about a specific Data Manager view (tab). You will need to supply the view ID. You can find this using [List views](list).
    @@ -1741,7 +1756,7 @@ Get the details about a specific Data Manager view (tab). You will need to suppl
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -1811,7 +1826,7 @@ Delete a specific Data Manager view (tab) by ID. You can find the view using [Li
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -1866,7 +1881,8 @@ client.views.delete(
    -You can update a specific Data Manager view (tab) with additional filters and other customizations. You will need to supply the view ID. You can find this using [List views](list). + +You can update a specific Data Manager view (tab) with additional filters and other customizations. You will need to supply the view ID. You can find this using [List views](list).
    @@ -1881,7 +1897,7 @@ You can update a specific Data Manager view (tab) with additional filters and ot
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -1968,7 +1984,7 @@ Retrieve details about a specific uploaded file. To get the file upload ID, use
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -2038,7 +2054,7 @@ Delete a specific uploaded file. To get the file upload ID, use [Get files list]
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -2093,10 +2109,10 @@ client.files.delete(
-Update a specific uploaded file. To get the file upload ID, use [Get files list](list). -You will need to include the file data in the request body. For example: +Update a specific uploaded file. To get the file upload ID, use [Get files list](list). +You will need to include the file data in the request body. For example: ```bash curl -H 'Authorization: Token abc123' \ -X POST 'https://localhost:8080/api/import/file-upload/245' -F 'file=@path/to/my_file.csv' ``` @@ -2114,7 +2130,7 @@ curl -H 'Authorization: Token abc123' \ -X POST 'https://localhost:8080/api/impo
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -2185,9 +2201,10 @@ client.files.update(
    -Retrieve the list of uploaded files used to create labeling tasks for a specific project. These are files that have been uploaded directly to Label Studio. -You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). +Retrieve the list of uploaded files used to create labeling tasks for a specific project. These are files that have been uploaded directly to Label Studio. + +You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list).
    @@ -2202,7 +2219,7 @@ You must provide a project ID. The project ID can be found in the URL when viewi
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -2273,9 +2290,10 @@ client.files.list(
    -Delete uploaded files for a specific project. These are files that have been uploaded directly to Label Studio. -You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). +Delete uploaded files for a specific project. These are files that have been uploaded directly to Label Studio. + +You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list).
    @@ -2290,7 +2308,7 @@ You must provide a project ID. The project ID can be found in the URL when viewi
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -2360,7 +2378,7 @@ Download a specific uploaded file. If you aren't sure of the file name, try [Get
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -2403,8 +2421,8 @@ client.files.download(
    -## Projects -
    client.projects.list(...) +## Ml +
    client.ml.list(...)
    @@ -2416,16 +2434,11 @@ client.files.download(
    -Return a list of the projects within your organization. -To perform most tasks with the Label Studio API, you must specify the project ID, sometimes referred to as the `pk`. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using this API call. +List all configured Machine Learning (ML) backends for a specific project by ID. For more information about ML backends, see [Machine learning integration](https://labelstud.io/guide/ml). -To retrieve a list of your Label Studio projects, update the following command to match your own environment. -Replace the domain name, port, and authorization token, then run the following from the command line: -```bash -curl -X GET https://localhost:8080/api/projects/ -H 'Authorization: Token abc123' -``` +You will need to provide the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
    @@ -2440,17 +2453,12 @@ curl -X GET https://localhost:8080/api/projects/ -H 'Authorization: Token abc123
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -response = client.projects.list() -for item in response: - yield item -# alternatively, you can paginate page-by-page -for page in response.iter_pages(): - yield page +client.ml.list() ```
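The usage example above calls `client.ml.list()` with no arguments; since the `project` parameter documented below is how you scope the listing, a call filtered to one project might look like this sketch (the project ID is a placeholder):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Placeholder project ID; see "List all projects" for how to look it up.
backends = client.ml.list(project=1)
```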
    @@ -2466,39 +2474,7 @@ for page in response.iter_pages():
    -**ordering:** `typing.Optional[str]` — Which field to use when ordering the results. - -
    -
    - -
    -
    - -**ids:** `typing.Optional[str]` — ids - -
    -
    - -
    -
    - -**title:** `typing.Optional[str]` — title - -
    -
    - -
    -
    - -**page:** `typing.Optional[int]` — A page number within the paginated result set. - -
    -
    - -
    -
    - -**page_size:** `typing.Optional[int]` — Number of results to return per page. +**project:** `typing.Optional[int]` — Project ID
    @@ -2518,7 +2494,7 @@ for page in response.iter_pages():
    -
    client.projects.create(...) +
    client.ml.create(...)
    @@ -2530,15 +2506,12 @@ for page in response.iter_pages():
-Create a project and set up the labeling interface. For more information about setting up projects, see the following: -- [Create and configure projects](https://labelstud.io/guide/setup_project) -- [Configure labeling interface](https://labelstud.io/guide/setup) -- [Project settings](https://labelstud.io/guide/project_settings) +Add an ML backend to a project. For more information about what you need to configure when adding an ML backend, see [Connect the model to Label Studio](https://labelstud.io/guide/ml#Connect-the-model-to-Label-Studio). + +If you are using Docker Compose, you may need to adjust your ML backend URL. See [localhost and Docker containers](https://labelstud.io/guide/ml#localhost-and-Docker-containers). -```bash -curl -H Content-Type:application/json -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects' --data '{"label_config": "[...]"}' -``` +If you are using files that are located in the cloud, local storage, or uploaded to Label Studio, you must configure your environment variables to allow the ML backend to interact with those files. See [Allow the ML backend to access Label Studio](https://labelstud.io/guide/ml#Allow-the-ML-backend-to-access-Label-Studio-data).
    @@ -2553,12 +2526,12 @@ curl -H Content-Type:application/json -H 'Authorization: Token abc123' -X POST '
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.projects.create() +client.ml.create() ```
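A fuller sketch using the parameters documented below; the backend URL and project ID are placeholders for your own setup:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Placeholder URL and project ID for an ML backend served locally.
client.ml.create(
    url="http://localhost:9090",
    project=1,
    title="My model",
    is_interactive=True,  # needed later for interactive pre-annotations
)
```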
    @@ -2574,47 +2547,7 @@ client.projects.create()
    -**title:** `typing.Optional[str]` — Project title - -
    -
    - -
    -
    - -**description:** `typing.Optional[str]` — Project description - -
    -
    - -
    -
    - -**label_config:** `typing.Optional[str]` — Label config in XML format - -
    -
    - -
    -
    - -**expert_instruction:** `typing.Optional[str]` — Labeling instructions to show to the user - -
    -
    - -
    -
    - -**show_instruction:** `typing.Optional[bool]` — Show labeling instructions - -
    -
    - -
    -
    - -**show_skip_button:** `typing.Optional[bool]` — Show skip button +**url:** `typing.Optional[str]` — ML backend URL
    @@ -2622,7 +2555,7 @@ client.projects.create()
    -**enable_empty_annotation:** `typing.Optional[bool]` — Allow empty annotations +**project:** `typing.Optional[int]` — Project ID
    @@ -2630,7 +2563,7 @@ client.projects.create()
    -**show_annotation_history:** `typing.Optional[bool]` — Show annotation history +**is_interactive:** `typing.Optional[bool]` — Is interactive
    @@ -2638,7 +2571,7 @@ client.projects.create()
    -**reveal_preannotations_interactively:** `typing.Optional[bool]` — Reveal preannotations interactively. If set to True, predictions will be shown to the user only after selecting the area of interest +**title:** `typing.Optional[str]` — Title
    @@ -2646,7 +2579,7 @@ client.projects.create()
    -**show_collab_predictions:** `typing.Optional[bool]` — Show predictions to annotators +**description:** `typing.Optional[str]` — Description
    @@ -2654,7 +2587,7 @@ client.projects.create()
    -**maximum_annotations:** `typing.Optional[int]` — Maximum annotations per task +**auth_method:** `typing.Optional[MlCreateRequestAuthMethod]` — Auth method
    @@ -2662,7 +2595,7 @@ client.projects.create()
    -**color:** `typing.Optional[str]` — Project color in HEX format +**basic_auth_user:** `typing.Optional[str]` — Basic auth user
    @@ -2670,7 +2603,7 @@ client.projects.create()
    -**control_weights:** `typing.Optional[typing.Dict[str, typing.Any]]` — Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} +**basic_auth_pass:** `typing.Optional[str]` — Basic auth password
    @@ -2678,7 +2611,7 @@ client.projects.create()
    -**workspace:** `typing.Optional[int]` — Workspace ID +**extra_params:** `typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]` — Extra parameters
    @@ -2686,7 +2619,7 @@ client.projects.create()
    -**model_version:** `typing.Optional[str]` — Model version +**timeout:** `typing.Optional[int]` — Response model timeout
    @@ -2706,7 +2639,7 @@ client.projects.create()
    -
    client.projects.get(...) +
    client.ml.get(...)
    @@ -2718,7 +2651,10 @@ client.projects.create()
    -Retrieve information about a specific project by project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). + +Get details about a specific ML backend. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + +For more information, see [Machine learning integration](https://labelstud.io/guide/ml).
    @@ -2733,12 +2669,12 @@ Retrieve information about a specific project by project ID. The project ID can
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.projects.get( +client.ml.get( id=1, ) @@ -2756,7 +2692,7 @@ client.projects.get(
    -**id:** `int` — A unique integer value identifying this project. +**id:** `int` — A unique integer value identifying this ml backend.
    @@ -2776,7 +2712,7 @@ client.projects.get(
    -
    client.projects.delete(...) +
    client.ml.delete(...)
    @@ -2788,9 +2724,10 @@ client.projects.get(
    -Delete a project by specified project ID. Deleting a project permanently removes all tasks, annotations, and project data from Label Studio. -The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). +Remove an existing ML backend connection. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + +For more information, see [Machine learning integration](https://labelstud.io/guide/ml).
    @@ -2805,12 +2742,12 @@ The project ID can be found in the URL when viewing the project in Label Studio,
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.projects.delete( +client.ml.delete( id=1, ) @@ -2828,7 +2765,7 @@ client.projects.delete(
    -**id:** `int` — A unique integer value identifying this project. +**id:** `int` — A unique integer value identifying this ml backend.
    @@ -2848,7 +2785,7 @@ client.projects.delete(
    -
    client.projects.update(...) +
    client.ml.update(...)
    @@ -2860,19 +2797,10 @@ client.projects.delete(
    -Update the project settings for a specific project. For more information, see the following: - -- [Create and configure projects](https://labelstud.io/guide/setup_project) -- [Configure labeling interface](https://labelstud.io/guide/setup) -- [Project settings](https://labelstud.io/guide/project_settings) -The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). +Update the ML backend parameters. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). - -If you are modifying the labeling config for project that has in-progress work, note the following: -* You cannot remove labels or change the type of labeling being performed unless you delete any existing annotations that are using those labels. -* If you make changes to the labeling configuration, any tabs that you might have created in the Data Manager are removed. - +For more information, see [Machine learning integration](https://labelstud.io/guide/ml).
    @@ -2887,12 +2815,12 @@ If you are modifying the labeling config for project that has in-progress work,
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.projects.update( +client.ml.update( id=1, ) @@ -2910,7 +2838,7 @@ client.projects.update(
    -**id:** `int` — A unique integer value identifying this project. +**id:** `int` — A unique integer value identifying this ml backend.
    @@ -2918,7 +2846,7 @@ client.projects.update(
    -**title:** `typing.Optional[str]` — Project title +**url:** `typing.Optional[str]` — ML backend URL
    @@ -2926,7 +2854,7 @@ client.projects.update(
    -**description:** `typing.Optional[str]` — Project description +**project:** `typing.Optional[int]` — Project ID
    @@ -2934,7 +2862,7 @@ client.projects.update(
    -**label_config:** `typing.Optional[str]` — Label config in XML format +**is_interactive:** `typing.Optional[bool]` — Is interactive
    @@ -2942,7 +2870,7 @@ client.projects.update(
    -**expert_instruction:** `typing.Optional[str]` — Labeling instructions to show to the user +**title:** `typing.Optional[str]` — Title
    @@ -2950,7 +2878,7 @@ client.projects.update(
    -**show_instruction:** `typing.Optional[bool]` — Show labeling instructions +**description:** `typing.Optional[str]` — Description
    @@ -2958,7 +2886,7 @@ client.projects.update(
    -**show_skip_button:** `typing.Optional[bool]` — Show skip button +**auth_method:** `typing.Optional[MlUpdateRequestAuthMethod]` — Auth method
    @@ -2966,7 +2894,7 @@ client.projects.update(
    -**enable_empty_annotation:** `typing.Optional[bool]` — Allow empty annotations +**basic_auth_user:** `typing.Optional[str]` — Basic auth user
    @@ -2974,7 +2902,7 @@ client.projects.update(
    -**show_annotation_history:** `typing.Optional[bool]` — Show annotation history +**basic_auth_pass:** `typing.Optional[str]` — Basic auth password
    @@ -2982,7 +2910,7 @@ client.projects.update(
    -**reveal_preannotations_interactively:** `typing.Optional[bool]` — Reveal preannotations interactively. If set to True, predictions will be shown to the user only after selecting the area of interest +**extra_params:** `typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]` — Extra parameters
    @@ -2990,7 +2918,7 @@ client.projects.update(
    -**show_collab_predictions:** `typing.Optional[bool]` — Show predictions to annotators +**timeout:** `typing.Optional[int]` — Response model timeout
    @@ -2998,63 +2926,69 @@ client.projects.update(
    -**maximum_annotations:** `typing.Optional[int]` — Maximum annotations per task +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
    - -
    -
    - -**color:** `typing.Optional[str]` — Project color in HEX format -
    -
    -
    -**control_weights:** `typing.Optional[typing.Dict[str, typing.Any]]` — Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} -
    +
    +
    client.ml.predict_interactive(...)
    -**workspace:** `typing.Optional[int]` — Workspace ID - -
    -
    +#### 📝 Description
    -**model_version:** `typing.Optional[str]` — Model version - -
    -
    -
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - + +Enable interactive pre-annotations for a specific task. + +ML-assisted labeling with interactive pre-annotations works with image segmentation and object detection tasks using rectangles, ellipses, polygons, brush masks, and keypoints, as well as with HTML and text named entity recognition tasks. Your ML backend must support the type of labeling that you’re performing, recognize the input that you create, and be able to respond with the relevant output for a prediction. For more information, see [Interactive pre-annotations](https://labelstud.io/guide/ml.html#Interactive-pre-annotations). + +Before you can use interactive pre-annotations, they must be enabled for your ML backend connection (`"is_interactive": true`). + +You will need the task ID and the ML backend connection ID. The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get tasks list](../tasks/list). The ML backend connection ID is available via [List ML backends](list).
    +#### 🔌 Usage -
    -
    - +
    +
    -
    client.projects.import_tasks(...)
    -#### 📝 Description +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.ml.predict_interactive( + id=1, + task=1, +) + +``` +
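The `context` parameter documented below carries extra input for the model; its shape depends entirely on your labeling configuration and backend, so the payload in this sketch is hypothetical:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Hypothetical context payload: a point the annotator clicked.
# The keys your backend expects will differ.
client.ml.predict_interactive(
    id=1,
    task=1,
    context={"x": 10, "y": 20},
)
```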
    +
    +
    +
    + +#### ⚙️ Parameters
    @@ -3062,112 +2996,7 @@ client.projects.update(
    -Use this API endpoint to import labeling tasks in bulk. Note that each POST request is limited at 250K tasks and 200 MB. -The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - -Imported data is verified against a project *label_config* and must include all variables that were used in the *label_config*. - -For example, if the label configuration has a _$text_ variable, then each item in a data object must include a `text` field. - - -There are three possible ways to import tasks with this endpoint: - -#### 1\. **POST with data** - -Send JSON tasks as POST data. Only JSON is supported for POSTing files directly. - -Update this example to specify your authorization token and Label Studio instance host, then run the following from -the command line: - -```bash -curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ --X POST 'https://localhost:8080/api/projects/1/import' --data '[{"text": "Some text 1"}, {"text": "Some text 2"}]' -``` - -#### 2\. **POST with files** - -Send tasks as files. You can attach multiple files with different names. - -- **JSON**: text files in JavaScript object notation format -- **CSV**: text files with tables in Comma Separated Values format -- **TSV**: text files with tables in Tab Separated Value format -- **TXT**: simple text files are similar to CSV with one column and no header, supported for projects with one source only - -Update this example to specify your authorization token, Label Studio instance host, and file name and path, -then run the following from the command line: - -```bash -curl -H 'Authorization: Token abc123' \ --X POST 'https://localhost:8080/api/projects/1/import' -F ‘file=@path/to/my_file.csv’ -``` - -#### 3\. **POST with URL** - -You can also provide a URL to a file with labeling tasks. Supported file formats are the same as in option 2. - -```bash -curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ --X POST 'https://localhost:8080/api/projects/1/import' \ ---data '[{"url": "http://example.com/test1.csv"}, {"url": "http://example.com/test2.csv"}]' -``` - -
    -
    -
    -
    -
    - -#### 🔌 Usage - -
    -
    - -
    -
    - -```python -from label_studio_sdk.client import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.projects.import_tasks( - id=1, - request=[{"key": "value"}], -) - -``` -
    -
    -
    -
    - -#### ⚙️ Parameters - -
    -
    - -
    -
    - -**id:** `int` — A unique integer value identifying this project. - -
    -
    - -
    -
    - -**request:** `typing.Sequence[typing.Dict[str, typing.Any]]` - -
    -
    - -
    -
    - -**commit_to_project:** `typing.Optional[bool]` — Set to "true" to immediately commit tasks to the project. +**id:** `int` — A unique integer value identifying this ML backend.
    @@ -3175,7 +3004,7 @@ client.projects.import_tasks(
    -**return_task_ids:** `typing.Optional[bool]` — Set to "true" to return task IDs in the response. +**task:** `int` — ID of task to annotate
    @@ -3183,7 +3012,7 @@ client.projects.import_tasks(
    -**preannotated_from_fields:** `typing.Optional[typing.Union[str, typing.Sequence[str]]]` — List of fields to preannotate from the task data. For example, if you provide a list of `{"text": "text", "prediction": "label"}` items in the request, the system will create a task with the `text` field and a prediction with the `label` field when `preannoted_from_fields=["prediction"]`. +**context:** `typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]` — Context for ML model
    @@ -3203,7 +3032,7 @@ client.projects.import_tasks(
    -
    client.projects.validate_config(...) +
    client.ml.train(...)
    @@ -3215,9 +3044,12 @@ client.projects.import_tasks(
    -Determine whether the label configuration for a specific project is valid. For more information about setting up labeling configs, see [Configure labeling interface](https://labelstud.io/guide/setup) and our [Tags reference](https://labelstud.io/tags/). -The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). +After you connect a model to Label Studio as a machine learning backend and annotate at least one task, you can start training the model. Training logs appear in stdout and the console. + +For more information, see [Model training](https://labelstud.io/guide/ml.html#Model-training). + +You will need to specify an ID for the backend connection. You can find this using [List ML backends](list).
    @@ -3232,14 +3064,13 @@ The project ID can be found in the URL when viewing the project in Label Studio,
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.projects.validate_config( +client.ml.train( id=1, - label_config="label_config", ) ``` @@ -3256,7 +3087,7 @@ client.projects.validate_config(
    -**id:** `int` — A unique integer value identifying this project. +**id:** `int` — A unique integer value identifying this ML backend.
    @@ -3264,7 +3095,7 @@ client.projects.validate_config(
    -**label_config:** `str` — Label config in XML format. See more about it in documentation +**use_ground_truth:** `typing.Optional[bool]` — Whether to include ground truth annotations in training
    @@ -3284,8 +3115,7 @@ client.projects.validate_config(
    -## Ml -
    client.ml.list(...) +
    client.ml.list_model_versions(...)
    @@ -3297,9 +3127,8 @@ client.projects.validate_config(
    -List all configured Machine Learning (ML) backends for a specific project by ID. For more information about ML backends, see [Machine learning integration](https://labelstud.io/guide/ml). -You will need to provide the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). +Get available versions of the model. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list).
    @@ -3314,12 +3143,14 @@ You will need to provide the project ID. This can be found in the URL when viewi
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.ml.list() +client.ml.list_model_versions( + id="id", +) ```
    @@ -3335,7 +3166,7 @@ client.ml.list()
    -**project:** `typing.Optional[int]` — Project ID +**id:** `str`
    @@ -3355,7 +3186,8 @@ client.ml.list()
    -
    client.ml.create(...) +## Predictions +
    client.predictions.list(...)
    @@ -3367,11 +3199,14 @@ client.ml.list()
-Add an ML backend to a project. For more information about what you need to configure when adding an ML backend, see [Connect the model to Label studio](https://labelstud.io/guide/ml#Connect-the-model-to-Label-Studio). -If you are using Docker Compose, you may need to adjust your ML backend URL. See [localhost and Docker containers](https://labelstud.io/guide/ml#localhost-and-Docker-containers). +Get a list of all predictions. You can optionally filter these by task or by project. If you want to filter, you will need the project ID and/or task ID. Both of these can be found in the Label Studio URL when viewing a task, or you can use [List all projects](../projects/list) and [Get tasks list](../tasks/list). -If you are using files that are located in the cloud, local storage, or uploaded to Label Studio, you must configure your environment variables to allow the ML backend to interact with those files. See [Allow the ML backend to access Label Studio](https://labelstud.io/guide/ml#Allow-the-ML-backend-to-access-Label-Studio-data). +The terms "predictions" and "pre-annotations" are used interchangeably. + +Predictions can be [imported directly into Label Studio](https://labelstud.io/guide/predictions) or [generated by a connected ML backend](https://labelstud.io/guide/ml.html#Pre-annotations-predictions). + +To import predictions via the API, see [Create prediction](create).
    @@ -3386,12 +3221,12 @@ Add an ML backend to a project. For more information about what you need to conf
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.ml.create() +client.predictions.list() ```
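To filter, pass the `task` and/or `project` parameters documented below, as in this sketch (the project ID is a placeholder):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Placeholder project ID; omit both filters to list all predictions.
predictions = client.predictions.list(project=1)
```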
    @@ -3407,7 +3242,7 @@ client.ml.create()
    -**url:** `typing.Optional[str]` — ML backend URL +**task:** `typing.Optional[int]` — Filter predictions by task ID
    @@ -3415,7 +3250,7 @@ client.ml.create()
    -**project:** `typing.Optional[int]` — Project ID +**project:** `typing.Optional[int]` — Filter predictions by project ID
    @@ -3423,39 +3258,98 @@ client.ml.create()
    -**is_interactive:** `typing.Optional[bool]` — Is interactive +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
    +
    +
    -
    -
    -**title:** `typing.Optional[str]` — Title -
    + +
    client.predictions.create(...)
    -**description:** `typing.Optional[str]` — Description - +#### 📝 Description + +
    +
    + +
    +
+ + +If you have predictions generated for your dataset from a model, either as pre-annotated tasks or pre-labeled tasks, you can import the predictions with your dataset into Label Studio for review and correction. + +To import predicted labels into Label Studio, you must use the [Basic Label Studio JSON format](https://labelstud.io/guide/tasks#Basic-Label-Studio-JSON-format) and set up your tasks with the predictions JSON key. The Label Studio ML backend also outputs tasks in this format. + +#### JSON format for predictions +Label Studio JSON format for pre-annotations must contain two sections: +* A data object which references the source of the data that the pre-annotations apply to. This can be a URL to an audio file, a pre-signed cloud storage link to an image, plain text, a reference to a CSV file stored in Label Studio, or something else. +* A predictions array that contains the pre-annotation results for the different types of labeling. See how to add results to the predictions array. + +For more information, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations).
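As an illustration of those two sections, a single pre-annotated task might look like the following sketch. The image URL is a placeholder, and the result entry mirrors the Python usage example below:

```json
{
  "data": {
    "image": "https://example.com/image1.jpg"
  },
  "predictions": [
    {
      "model_version": "yolo-v8",
      "score": 0.95,
      "result": [
        {
          "from_name": "bboxes",
          "to_name": "image",
          "type": "rectanglelabels",
          "value": {
            "x": 20,
            "y": 30,
            "width": 50,
            "height": 60,
            "rotation": 0,
            "values": { "rectanglelabels": ["Person"] }
          }
        }
      ]
    }
  ]
}
```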
    +
    +
    + +#### 🔌 Usage
    -**auth_method:** `typing.Optional[MlCreateRequestAuthMethod]` — Auth method - +
    +
    + +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.predictions.create( + result=[ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + score=0.95, + model_version="yolo-v8", +) + +``` +
    +
    +#### ⚙️ Parameters +
    -**basic_auth_user:** `typing.Optional[str]` — Basic auth user +
    +
    + +**task:** `typing.Optional[int]` — Task ID for which the prediction is created
    @@ -3463,7 +3357,7 @@ client.ml.create()
    -**basic_auth_pass:** `typing.Optional[str]` — Basic auth password +**result:** `typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]]` — Prediction result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions)
    @@ -3471,7 +3365,7 @@ client.ml.create()
-**extra_params:** `typing.Optional[typing.Dict[str, typing.Any]]` — Extra parameters +**score:** `typing.Optional[float]` — Prediction score. Can be used in the Data Manager to sort tasks by model confidence. Tasks with the lowest scores will be shown first.
    @@ -3479,7 +3373,7 @@ client.ml.create()
-**timeout:** `typing.Optional[int]` — Response model timeout +**model_version:** `typing.Optional[str]` — Model version - a tag for predictions that can be used to filter tasks in the Data Manager, as well as to select a specific model version for showing pre-annotations in the labeling interface
    @@ -3499,7 +3393,7 @@ client.ml.create()
    -
    client.ml.get(...) +
    client.predictions.get(...)
    @@ -3511,9 +3405,10 @@ client.ml.create()
    -Get details about a specific ML backend. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). -For more information, see [Machine learning integration](https://labelstud.io/guide/ml). +Get details about a specific prediction by its ID. To find the prediction ID, use [List predictions](list). + +For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations).
    @@ -3528,12 +3423,12 @@ For more information, see [Machine learning integration](https://labelstud.io/gu
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.ml.get( +client.predictions.get( id=1, ) @@ -3551,7 +3446,7 @@ client.ml.get(
    -**id:** `int` — A unique integer value identifying this ml backend. +**id:** `int` — Prediction ID
    @@ -3571,7 +3466,7 @@ client.ml.get(
    -
    client.ml.delete(...) +
    client.predictions.delete(...)
    @@ -3583,9 +3478,8 @@ client.ml.get(
    -Remove an existing ML backend connection. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). -For more information, see [Machine learning integration](https://labelstud.io/guide/ml). +Delete a prediction. To find the prediction ID, use [List predictions](list).
    @@ -3600,12 +3494,12 @@ For more information, see [Machine learning integration](https://labelstud.io/gu
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.ml.delete( +client.predictions.delete( id=1, ) @@ -3623,7 +3517,7 @@ client.ml.delete(
    -**id:** `int` — A unique integer value identifying this ml backend. +**id:** `int` — Prediction ID
    @@ -3643,7 +3537,7 @@ client.ml.delete(
    -
    client.ml.update(...) +
    client.predictions.update(...)
    @@ -3655,9 +3549,10 @@ client.ml.delete(
    -Update the ML backend parameters. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). -For more information, see [Machine learning integration](https://labelstud.io/guide/ml). +Update a prediction. To find the prediction ID, use [List predictions](list). + +For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations).
    @@ -3672,16 +3567,36 @@ For more information, see [Machine learning integration](https://labelstud.io/gu
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.ml.update( +client.predictions.update( id=1, -) - -``` + result=[ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + score=0.95, + model_version="yolo-v8", +) + +```
    @@ -3695,7 +3610,7 @@ client.ml.update(
    -**id:** `int` — A unique integer value identifying this ml backend. +**id:** `int` — Prediction ID
    @@ -3703,7 +3618,7 @@ client.ml.update(
    -**url:** `typing.Optional[str]` — ML backend URL +**task:** `typing.Optional[int]` — Task ID for which the prediction is created
    @@ -3711,7 +3626,7 @@ client.ml.update(
    -**project:** `typing.Optional[int]` — Project ID +**result:** `typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]]` — Prediction result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions)
    @@ -3719,7 +3634,7 @@ client.ml.update(
-**is_interactive:** `typing.Optional[bool]` — Is interactive +**score:** `typing.Optional[float]` — Prediction score. Can be used in the Data Manager to sort tasks by model confidence. Tasks with the lowest scores will be shown first.
    @@ -3727,7 +3642,7 @@ client.ml.update(
-**title:** `typing.Optional[str]` — Title +**model_version:** `typing.Optional[str]` — Model version - a tag for predictions that can be used to filter tasks in the Data Manager, as well as to select a specific model version for showing pre-annotations in the labeling interface
    @@ -3735,15 +3650,82 @@ client.ml.update(
    -**description:** `typing.Optional[str]` — Description +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
    + + + + + + + +## Projects +
    client.projects.list(...)
    -**auth_method:** `typing.Optional[MlUpdateRequestAuthMethod]` — Auth method +#### 📝 Description + +
    +
    + +
    +
    + + +Return a list of the projects within your organization. + +To perform most tasks with the Label Studio API, you must specify the project ID, sometimes referred to as the `pk`. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using this API call. + +To retrieve a list of your Label Studio projects, update the following command to match your own environment. +Replace the domain name, port, and authorization token, then run the following from the command line: +```bash +curl -X GET https://localhost:8080/api/projects/ -H 'Authorization: Token abc123' +``` +
    +
    +
    +
    + +#### 🔌 Usage + +
    +
    + +
    +
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +response = client.projects.list() +for item in response: + print(item) +# alternatively, you can paginate page-by-page +for page in response.iter_pages(): + print(page) + +``` +
    +
    +
    +
    + +#### ⚙️ Parameters + +
    +
    + +
    +
    + +**ordering:** `typing.Optional[str]` — Which field to use when ordering the results.
    @@ -3751,7 +3733,7 @@ client.ml.update(
    -**basic_auth_user:** `typing.Optional[str]` — Basic auth user +**ids:** `typing.Optional[str]` — ids
    @@ -3759,7 +3741,7 @@ client.ml.update(
    -**basic_auth_pass:** `typing.Optional[str]` — Basic auth password +**title:** `typing.Optional[str]` — title
    @@ -3767,7 +3749,7 @@ client.ml.update(
    -**extra_params:** `typing.Optional[typing.Dict[str, typing.Any]]` — Extra parameters +**page:** `typing.Optional[int]` — A page number within the paginated result set.
    @@ -3775,7 +3757,7 @@ client.ml.update(
    -**timeout:** `typing.Optional[int]` — Response model timeout +**page_size:** `typing.Optional[int]` — Number of results to return per page.
    @@ -3795,7 +3777,7 @@ client.ml.update(
    -
    client.ml.predict_interactive(...) +
    client.projects.create(...)
    @@ -3807,13 +3789,195 @@ client.ml.update(
    -Enable interactive pre-annotations for a specific task. -ML-assisted labeling with interactive pre-annotations works with image segmentation and object detection tasks using rectangles, ellipses, polygons, brush masks, and keypoints, as well as with HTML and text named entity recognition tasks. Your ML backend must support the type of labeling that you’re performing, recognize the input that you create, and be able to respond with the relevant output for a prediction. For more information, see [Interactive pre-annotations](https://labelstud.io/guide/ml.html#Interactive-pre-annotations). +Create a project and set up the labeling interface. For more information about setting up projects, see the following: +* [Create and configure projects](https://labelstud.io/guide/setup_project) +* [Configure labeling interface](https://labelstud.io/guide/setup) +* [Project settings](https://labelstud.io/guide/project_settings) -Before you can use interactive annotations, it must be enabled for you ML backend connection (`"is_interactive": true`). +```bash +curl -H Content-Type:application/json -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects' --data '{"label_config": "[...]"}' +``` +
    +
    +
    +
    -You will need the task ID and the ML backend connection ID. The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](../tasks/list). The ML backend connection ID is available via [List ML backends](list). +#### 🔌 Usage + +
    +
    + +
    +
    + +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.projects.create() + +``` +
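A sketch that also sets a title and a minimal labeling config; the XML below is a generic text-classification config used purely as an illustration:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Minimal illustrative config: classify $text into two choices.
client.projects.create(
    title="Sentiment labeling",
    label_config=(
        '<View>'
        '  <Text name="text" value="$text"/>'
        '  <Choices name="sentiment" toName="text">'
        '    <Choice value="Positive"/>'
        '    <Choice value="Negative"/>'
        '  </Choices>'
        '</View>'
    ),
)
```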
    +
    +
    +
    + +#### ⚙️ Parameters + +
    +
    + +
    +
    + +**title:** `typing.Optional[str]` — Project title + +
    +
    + +
    +
    + +**description:** `typing.Optional[str]` — Project description + +
    +
    + +
    +
    + +**label_config:** `typing.Optional[str]` — Label config in XML format + +
    +
    + +
    +
    + +**expert_instruction:** `typing.Optional[str]` — Labeling instructions to show to the user + +
    +
    + +
    +
    + +**show_instruction:** `typing.Optional[bool]` — Show labeling instructions + +
    +
    + +
    +
    + +**show_skip_button:** `typing.Optional[bool]` — Show skip button + +
    +
    + +
    +
    + +**enable_empty_annotation:** `typing.Optional[bool]` — Allow empty annotations + +
    +
    + +
    +
    + +**show_annotation_history:** `typing.Optional[bool]` — Show annotation history + +
    +
    + +
    +
    + +**reveal_preannotations_interactively:** `typing.Optional[bool]` — Reveal preannotations interactively. If set to True, predictions will be shown to the user only after selecting the area of interest + +
    +
    + +
    +
    + +**show_collab_predictions:** `typing.Optional[bool]` — Show predictions to annotators + +
    +
    + +
    +
    + +**maximum_annotations:** `typing.Optional[int]` — Maximum annotations per task + +
    +
    + +
    +
    + +**color:** `typing.Optional[str]` — Project color in HEX format + +
    +
    + +
    +
+ +**control_weights:** `typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]` — Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} + +
    +
    + +
    +
    + +**workspace:** `typing.Optional[int]` — Workspace ID + +
    +
    + +
    +
    + +**model_version:** `typing.Optional[str]` — Model version + +
    +
    + +
    +
    + +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
    +
    +
    +
    + + + + +
    + +
    client.projects.get(...) +
    +
    + +#### 📝 Description + +
    +
    + +
    +
    + +Retrieve information about a specific project by project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list).
    @@ -3828,14 +3992,13 @@ You will need the task ID and the ML backend connection ID. The task ID is avail
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.ml.predict_interactive( +client.projects.get( id=1, - task=1, ) ``` @@ -3852,7 +4015,7 @@ client.ml.predict_interactive(
    -**id:** `int` — A unique integer value identifying this ML backend. +**id:** `int` — A unique integer value identifying this project.
    @@ -3860,15 +4023,72 @@ client.ml.predict_interactive(
    -**task:** `int` — ID of task to annotate +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +
    +
    +
    +
    + + +
    +
    +
    + +
    client.projects.delete(...) +
    +
    + +#### 📝 Description + +
    +
    + +
    +
    + + +Delete a project by specified project ID. Deleting a project permanently removes all tasks, annotations, and project data from Label Studio. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). +
    +
    +#### 🔌 Usage + +
    +
    +
    -**context:** `typing.Optional[typing.Dict[str, typing.Any]]` — Context for ML model +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.projects.delete( + id=1, +) + +``` +
    +
    +
    +
    + +#### ⚙️ Parameters + +
    +
    + +
    +
    + +**id:** `int` — A unique integer value identifying this project.
    @@ -3888,7 +4108,7 @@ client.ml.predict_interactive(
    -
    client.ml.train(...) +
    client.projects.update(...)
    @@ -3900,11 +4120,19 @@ client.ml.predict_interactive(
-After you connect a model to Label Studio as a machine learning backend and annotate at least one task, you can start training the model. Training logs appear in stdout and the console. -For more information, see [Model training](https://labelstud.io/guide/ml.html#Model-training). +Update the project settings for a specific project. For more information, see the following: +* [Create and configure projects](https://labelstud.io/guide/setup_project) +* [Configure labeling interface](https://labelstud.io/guide/setup) +* [Project settings](https://labelstud.io/guide/project_settings) + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). -You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + +If you are modifying the labeling config for a project that has in-progress work, note the following: +* You cannot remove labels or change the type of labeling being performed unless you delete any existing annotations that are using those labels. +* If you make changes to the labeling configuration, any tabs that you might have created in the Data Manager are removed. +
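For example, a sketch that renames a project using the parameters documented below (the ID and strings are placeholders):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Placeholder project ID and metadata.
client.projects.update(
    id=1,
    title="Renamed project",
    description="Updated via the SDK",
)
```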
    @@ -3919,12 +4147,12 @@ You will need to specify an ID for the backend connection. You can find this usi
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.ml.train( +client.projects.update( id=1, ) @@ -3942,7 +4170,7 @@ client.ml.train(
    -**id:** `int` — A unique integer value identifying this ML backend. +**id:** `int` — A unique integer value identifying this project.
    @@ -3950,7 +4178,7 @@ client.ml.train(
    -**use_ground_truth:** `typing.Optional[bool]` — Whether to include ground truth annotations in training +**title:** `typing.Optional[str]` — Project title
    @@ -3958,69 +4186,111 @@ client.ml.train(
    -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**description:** `typing.Optional[str]` — Project description
    + +
    +
    + +**label_config:** `typing.Optional[str]` — Label config in XML format +
    +
    +
    +**expert_instruction:** `typing.Optional[str]` — Labeling instructions to show to the user +
    -
    -
    client.ml.list_model_versions(...)
    -#### 📝 Description +**show_instruction:** `typing.Optional[bool]` — Show labeling instructions + +
    +
    +**show_skip_button:** `typing.Optional[bool]` — Show skip button + +
    +
    +
    -Get available versions of the model. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). +**enable_empty_annotation:** `typing.Optional[bool]` — Allow empty annotations +
    + +
    +
    + +**show_annotation_history:** `typing.Optional[bool]` — Show annotation history +
    -#### 🔌 Usage -
    +**reveal_preannotations_interactively:** `typing.Optional[bool]` — Reveal preannotations interactively. If set to True, predictions will be shown to the user only after selecting the area of interest + +
    +
    +
    -```python -from label_studio_sdk.client import LabelStudio +**show_collab_predictions:** `typing.Optional[bool]` — Show predictions to annotators + +
    +
    -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.ml.list_model_versions( - id="id", -) +
    +
    -``` +**maximum_annotations:** `typing.Optional[int]` — Maximum annotations per task +
    + +
    +
    + +**color:** `typing.Optional[str]` — Project color in HEX format +
    -#### ⚙️ Parameters +
    +
+ +**control_weights:** `typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]` — Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with a 0.33 weight in the agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}}. A sketch that passes a control weights dict appears after this parameter list. + +
    +
    +**workspace:** `typing.Optional[int]` — Workspace ID + +
    +
    +
    -**id:** `str` +**model_version:** `typing.Optional[str]` — Model version
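+A minimal sketch tying several of these parameters together; the XML config, tag names, and weights are hypothetical and only follow the structure described above:
+
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+# Hypothetical labeling config: classify $text into two choices
+label_config = """
+<View>
+  <Text name="text" value="$text"/>
+  <Choices name="sentiment" toName="text">
+    <Choice value="Positive"/>
+    <Choice value="Negative"/>
+  </Choices>
+</View>
+"""
+client.projects.update(
+    id=1,
+    title="Sentiment labeling",
+    label_config=label_config,
+    # Weight the "sentiment" control tag, following the structure shown above
+    control_weights={
+        "sentiment": {
+            "type": "Choices",
+            "labels": {"Positive": 1.0, "Negative": 1.0},
+            "overall": 1.0,
+        }
+    },
+)
+```
+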
    @@ -4040,8 +4310,7 @@ client.ml.list_model_versions(
    -## Predictions -
    client.predictions.list(...) +
    client.projects.import_tasks(...)
    @@ -4053,13 +4322,55 @@ client.ml.list_model_versions(
-Get a list of all predictions. You can optionally filter these by task or by project. If you want to filter, you will need the project ID and/or task ID. Both of these can be found in the Label Studio URL when viewing a task, or you can use [List all projects](../projects/list) and [Get tasks list](../tasks/list). -The terms "predictions" and pre-annotations" are used interchangeably. +Use this API endpoint to import labeling tasks in bulk. Note that each POST request is limited to 250K tasks and 200 MB. +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + +Imported data is verified against a project *label_config* and must include all variables that were used in the *label_config*. -Predictions can be [imported directly into Label Studio](https://labelstud.io/guide/predictions) or [generated by a connected ML backend](https://labelstud.io/guide/ml.html#Pre-annotations-predictions). +For example, if the label configuration has a *$text* variable, then each item in a data object must include a `text` field. + -To import predictions via the API, see [Create prediction](create). +There are three possible ways to import tasks with this endpoint: + +#### 1\. **POST with data** +Send JSON tasks as POST data. Only JSON is supported for POSTing files directly. + +Update this example to specify your authorization token and Label Studio instance host, then run the following from +the command line: + +```bash +curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ +-X POST 'https://localhost:8080/api/projects/1/import' --data '[{"text": "Some text 1"}, {"text": "Some text 2"}]' +``` + +#### 2\. **POST with files** +Send tasks as files. You can attach multiple files with different names. + +- **JSON**: text files in JavaScript object notation format +- **CSV**: text files with tables in Comma Separated Values format +- **TSV**: text files with tables in Tab Separated Value format +- **TXT**: simple text files are similar to CSV with one column and no header, supported for projects with one source only + +Update this example to specify your authorization token, Label Studio instance host, and file name and path, +then run the following from the command line: + +```bash +curl -H 'Authorization: Token abc123' \ +-X POST 'https://localhost:8080/api/projects/1/import' -F 'file=@path/to/my_file.csv' +``` + +#### 3\. **POST with URL** +You can also provide a URL to a file with labeling tasks. Supported file formats are the same as in option 2. + +```bash +curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ +-X POST 'https://localhost:8080/api/projects/1/import' \ +--data '[{"url": "http://example.com/test1.csv"}, {"url": "http://example.com/test2.csv"}]' +``` + +
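+For comparison, a rough SDK equivalent of option 1 above, reusing the two JSON tasks from the curl example:
+
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+# Same two tasks as the option 1 curl call above
+client.projects.import_tasks(
+    id=1,
+    request=[{"text": "Some text 1"}, {"text": "Some text 2"}],
+)
+```
+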
    @@ -4074,12 +4385,15 @@ To import predictions via the API, see [Create prediction](create).
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.predictions.list() +client.projects.import_tasks( + id=1, + request=[{"key": "value"}], +) ```
    @@ -4095,7 +4409,7 @@ client.predictions.list()
    -**task:** `typing.Optional[int]` — Filter predictions by task ID +**id:** `int` — A unique integer value identifying this project.
    @@ -4103,7 +4417,31 @@ client.predictions.list()
    -**project:** `typing.Optional[int]` — Filter predictions by project ID +**request:** `typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]` + +
    +
    + +
    +
    + +**commit_to_project:** `typing.Optional[bool]` — Set to "true" to immediately commit tasks to the project. + +
    +
    + +
    +
    + +**return_task_ids:** `typing.Optional[bool]` — Set to "true" to return task IDs in the response. + +
    +
    + +
    +
+ +**preannotated_from_fields:** `typing.Optional[typing.Union[str, typing.Sequence[str]]]` — List of fields to preannotate from the task data. For example, if you provide a list of `{"text": "text", "prediction": "label"}` items in the request, the system will create a task with the `text` field and a prediction with the `label` field when `preannotated_from_fields=["prediction"]`.
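+A hedged sketch of pre-annotated import; the task data and label values are hypothetical:
+
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+# Each task carries a "text" field plus a "prediction" field that is
+# turned into a pre-annotation via preannotated_from_fields.
+client.projects.import_tasks(
+    id=1,
+    request=[
+        {"text": "Great product!", "prediction": "Positive"},
+        {"text": "Does not work.", "prediction": "Negative"},
+    ],
+    preannotated_from_fields=["prediction"],
+)
+```
+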
    @@ -4123,7 +4461,7 @@ client.predictions.list()
    -
    client.predictions.create(...) +
    client.projects.validate_config(...)
    @@ -4135,18 +4473,10 @@ client.predictions.list()
    -If you have predictions generated for your dataset from a model, either as pre-annotated tasks or pre-labeled tasks, you can import the predictions with your dataset into Label Studio for review and correction. - -To import predicted labels into Label Studio, you must use the [Basic Label Studio JSON format](https://labelstud.io/guide/tasks#Basic-Label-Studio-JSON-format) and set up your tasks with the predictions JSON key. The Label Studio ML backend also outputs tasks in this format. - -#### JSON format for predictions - -Label Studio JSON format for pre-annotations must contain two sections: -- A data object which references the source of the data that the pre-annotations apply to. This can be a URL to an audio file, a pre-signed cloud storage link to an image, plain text, a reference to a CSV file stored in Label Studio, or something else. -- A predictions array that contains the pre-annotation results for the different types of labeling. See how to add results to the predictions array. +Determine whether the label configuration for a specific project is valid. For more information about setting up labeling configs, see [Configure labeling interface](https://labelstud.io/guide/setup) and our [Tags reference](https://labelstud.io/tags/). -For more information, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations) +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list).
    @@ -4161,32 +4491,14 @@ For more information, see [the JSON format reference in the Label Studio documen
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.predictions.create( - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - score=0.95, - model_version="yolo-v8", +client.projects.validate_config( + id=1, + label_config="label_config", ) ``` @@ -4203,23 +4515,7 @@ client.predictions.create(
    -**task:** `typing.Optional[int]` — Task ID for which the prediction is created - -
    -
    - -
    -
    - -**result:** `typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]]` — Prediction result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) - -
    -
    - -
    -
    - -**score:** `typing.Optional[float]` — Prediction score. Can be used in Data Manager to sort task by model confidence. Task with the lowest score will be shown first. +**id:** `int` — A unique integer value identifying this project.
    @@ -4227,7 +4523,7 @@ client.predictions.create(
-**model_version:** `typing.Optional[str]` — Model version - tag for predictions that can be used to filter tasks in Data Manager, as well as select specific model version for showing preannotations in the labeling interface +**label_config:** `str` — Label config in XML format. See more about it in the documentation.
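+As an illustrative sketch, validating a hypothetical single-choice config could look like the following:
+
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+client.projects.validate_config(
+    id=1,
+    label_config=(
+        '<View>'
+        '<Text name="text" value="$text"/>'
+        '<Choices name="label" toName="text">'
+        '<Choice value="Positive"/>'
+        '<Choice value="Negative"/>'
+        '</Choices>'
+        '</View>'
+    ),
+)
+```
+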
    @@ -4247,7 +4543,8 @@ client.predictions.create(
    -
    client.predictions.get(...) +## Tasks +
    client.tasks.create_many_status(...)
    @@ -4259,9 +4556,14 @@ client.predictions.create(
    -Get details about a specific prediction by its ID. To find the prediction ID, use [List predictions](list). -For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). +Get information about an async project import operation. This can be especially useful to monitor status, as large import jobs can take time. + +You will need the project ID and the unique ID of the import operation. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + +The import ID is returned as part of the response when you call [Import tasks](import-tasks).
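+A hypothetical polling loop built on this endpoint; it assumes the returned import object exposes a `status` field whose terminal values include "completed" and "failed":
+
+```python
+import time
+
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+while True:
+    project_import = client.tasks.create_many_status(
+        id=1,
+        import_pk="123",  # import ID returned by import_tasks
+    )
+    if project_import.status in ("completed", "failed"):
+        break
+    time.sleep(5)  # wait before polling again
+```
+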
    @@ -4276,13 +4578,14 @@ For information about the prediction format, see [the JSON format reference in t
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.predictions.get( +client.tasks.create_many_status( id=1, + import_pk="import_pk", ) ``` @@ -4299,7 +4602,15 @@ client.predictions.get(
    -**id:** `int` — Prediction ID +**id:** `int` — The project ID. + +
    +
    + +
    +
    + +**import_pk:** `str`
    @@ -4319,7 +4630,7 @@ client.predictions.get(
    -
    client.predictions.delete(...) +
    client.tasks.delete_all_tasks(...)
    @@ -4331,7 +4642,10 @@ client.predictions.get(
    -Delete a prediction. To find the prediction ID, use [List predictions](list). + +Delete all tasks from a specific project. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
    @@ -4346,12 +4660,12 @@ Delete a prediction. To find the prediction ID, use [List predictions](list).
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.predictions.delete( +client.tasks.delete_all_tasks( id=1, ) @@ -4369,7 +4683,7 @@ client.predictions.delete(
    -**id:** `int` — Prediction ID +**id:** `int` — A unique integer value identifying this project.
    @@ -4389,7 +4703,7 @@ client.predictions.delete(
    -
    client.predictions.update(...) +
    client.tasks.list(...)
    @@ -4401,9 +4715,12 @@ client.predictions.delete(
    -Update a prediction. To find the prediction ID, use [List predictions](list). -For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). +Retrieve a list of tasks. + +You can use the query parameters to filter the list by project and/or view (a tab within the Data Manager). You can also optionally add pagination to make the response easier to parse. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). The view ID can be found using [List views](../views/list).
    @@ -4418,34 +4735,17 @@ For information about the prediction format, see [the JSON format reference in t
```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.predictions.update( - id=1, - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - score=0.95, - model_version="yolo-v8", -) +response = client.tasks.list() +for item in response: + print(item) +# alternatively, you can paginate page-by-page +for page in response.iter_pages(): + print(page) ```
    @@ -4461,7 +4761,7 @@ client.predictions.update(
    -**id:** `int` — Prediction ID +**page:** `typing.Optional[int]` — A page number within the paginated result set.
    @@ -4469,7 +4769,7 @@ client.predictions.update(
    -**task:** `typing.Optional[int]` — Task ID for which the prediction is created +**page_size:** `typing.Optional[int]` — Number of results to return per page.
    @@ -4477,7 +4777,7 @@ client.predictions.update(
    -**result:** `typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]]` — Prediction result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) +**view:** `typing.Optional[int]` — View ID
    @@ -4485,7 +4785,7 @@ client.predictions.update(
    -**score:** `typing.Optional[float]` — Prediction score. Can be used in Data Manager to sort task by model confidence. Task with the lowest score will be shown first. +**project:** `typing.Optional[int]` — Project ID
    @@ -4493,7 +4793,7 @@ client.predictions.update(
    -**model_version:** `typing.Optional[str]` — Model version - tag for predictions that can be used to filter tasks in Data Manager, as well as select specific model version for showing preannotations in the labeling interface +**resolve_uri:** `typing.Optional[bool]` — Resolve task data URIs using Cloud Storage
    @@ -4501,53 +4801,75 @@ client.predictions.update(
    -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**fields:** `typing.Optional[TasksListRequestFields]` — Set to "all" if you want to include annotations and predictions in the response
    + +
    +
    + +**review:** `typing.Optional[bool]` — Get tasks for review +
    +
    +
    +**include:** `typing.Optional[str]` — Specify which fields to include in the response +
    -
    -## Projects Exports -
    client.projects.exports.create_export(...)
-#### 📝 Description +**query:** `typing.Optional[str]` + +Additional query to filter tasks. It must be a JSON-encoded string of a dict containing one of the following parameters: `{"filters": ..., "selectedItems": ..., "ordering": ...}`. Check [Data Manager > Create View > see `data` field](#tag/Data-Manager/operation/api_dm_views_create) for more details about filters, selectedItems and ordering. A worked example follows the parameter list. + +* **filters**: dict with `"conjunction"` string (`"or"` or `"and"`) and list of filters in `"items"` array. Each filter is a dictionary with keys: `"filter"`, `"operator"`, `"type"`, `"value"`. [Read more about available filters](https://labelstud.io/sdk/data_manager.html)
    Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` +* **selectedItems**: dictionary with keys: `"all"`, `"included"`, `"excluded"`. If "all" is `false`, `"included"` must be used. If "all" is `true`, `"excluded"` must be used.
    Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` +* **ordering**: list of fields to order by. Currently, ordering is supported by only one parameter.
    + Example: `["completed_at"]` + +
    +
    +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
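+The worked example referenced under the `query` parameter; it JSON-encodes the same completed_at filter shown above:
+
+```python
+import json
+
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+# Tasks completed after 2021-01-01, as in the filters example above
+query = json.dumps(
+    {
+        "filters": {
+            "conjunction": "or",
+            "items": [
+                {
+                    "filter": "filter:tasks:completed_at",
+                    "operator": "greater",
+                    "type": "Datetime",
+                    "value": "2021-01-01T00:00:00.000Z",
+                }
+            ],
+        }
+    }
+)
+response = client.tasks.list(project=1, query=query)
+for item in response:
+    print(item)
+```
+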
    +
    + + + + + + +
    + +
    client.tasks.create(...)
    -If you have a large project it's recommended to use export snapshots, this easy export endpoint might have timeouts. -Export annotated tasks as a file in a specific format. -For example, to export JSON annotations for a project to a file called `annotations.json`, -run the following from the command line: +#### 📝 Description -```bash -curl -X GET https://localhost:8080/api/projects/{id}/export?exportType=JSON -H 'Authorization: Token abc123' --output 'annotations.json' -``` +
    +
    -To export all tasks, including skipped tasks and others without annotations, run the following from the command line: +
    +
    -```bash -curl -X GET https://localhost:8080/api/projects/{id}/export?exportType=JSON&download_all_tasks=true -H 'Authorization: Token abc123' --output 'annotations.json' -``` -To export specific tasks with IDs of 123 and 345, run the following from the command line: +Create a new labeling task in Label Studio. -```bash -curl -X GET https://localhost:8080/api/projects/{id}/export?ids[]=123\&ids[]=345 -H 'Authorization: Token abc123' --output 'annotations.json' -``` +The data you provide depends on your labeling config and data type. -You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). +You will also need to provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
    @@ -4562,17 +4884,14 @@ You must provide a project ID. The project ID can be found in the URL when viewi
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.projects.exports.create_export( - id=1, - export_type="string", - download_all_tasks="string", - download_resources=True, - ids=1, +client.tasks.create( + data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, + project=1, ) ``` @@ -4589,31 +4908,7 @@ client.projects.exports.create_export(
    -**id:** `int` — A unique integer value identifying this project. - -
    -
    - -
    -
    - -**export_type:** `typing.Optional[str]` — Selected export format (JSON by default) - -
    -
    - -
    -
    - -**download_all_tasks:** `typing.Optional[str]` — If true, download all tasks regardless of status. If false, download only annotated tasks. - -
    -
    - -
    -
    - -**download_resources:** `typing.Optional[bool]` — If true, download all resource files such as images, audio, and others relevant to the tasks. +**data:** `typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]` — Task data dictionary with arbitrary keys and values
    @@ -4621,7 +4916,7 @@ client.projects.exports.create_export(
    -**ids:** `typing.Optional[typing.Union[int, typing.Sequence[int]]]` — Specify a list of task IDs to retrieve only the details for those tasks. +**project:** `typing.Optional[int]` — Project ID
    @@ -4641,7 +4936,7 @@ client.projects.exports.create_export(
    -
    client.projects.exports.list_formats(...) +
    client.tasks.get(...)
    @@ -4653,9 +4948,9 @@ client.projects.exports.create_export(
    -Before exporting annotations, you can check with formats are supported by the specified project. For more information about export formats, see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). -You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). +Get task data, metadata, annotations and other attributes for a specific labeling task by task ID. +The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list).
    @@ -4670,13 +4965,13 @@ You must provide a project ID. The project ID can be found in the URL when viewi
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.projects.exports.list_formats( - id=1, +client.tasks.get( + id="id", ) ``` @@ -4693,7 +4988,7 @@ client.projects.exports.list_formats(
    -**id:** `int` — A unique integer value identifying this project. +**id:** `str` — Task ID
    @@ -4713,7 +5008,7 @@ client.projects.exports.list_formats(
    -
    client.projects.exports.list(...) +
    client.tasks.delete(...)
    @@ -4725,9 +5020,12 @@ client.projects.exports.list_formats(
    -Returns a list of export file (snapshots) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). -Included in the response is information about each snapshot, such as who created it and what format it is in. +Delete a task in Label Studio. + +You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). + +This action cannot be undone.
    @@ -4742,13 +5040,13 @@ Included in the response is information about each snapshot, such as who created
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.projects.exports.list( - id=1, +client.tasks.delete( + id="id", ) ``` @@ -4765,7 +5063,7 @@ client.projects.exports.list(
    -**id:** `int` — A unique integer value identifying this project. +**id:** `str` — Task ID
    @@ -4785,7 +5083,7 @@ client.projects.exports.list(
    -
    client.projects.exports.create(...) +
    client.tasks.update(...)
    @@ -4797,11 +5095,10 @@ client.projects.exports.list(
    -Create a new export request to start a background task and generate an export file (snapshot) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). -A snapshot is a static export of your project's data and annotations at a specific point in time. It captures the current state of your tasks, annotations, and other relevant data, allowing you to download and review them later. Snapshots are particularly useful for large projects as they help avoid timeouts during export operations by processing the data asynchronously. +Update the attributes of an existing labeling task. -For more information, see the [Label Studio documentation on exporting annotations](https://labelstud.io/guide/export.html). +You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list).
    @@ -4816,13 +5113,15 @@ For more information, see the [Label Studio documentation on exporting annotatio
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.projects.exports.create( - id_=1, +client.tasks.update( + id="id", + data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, + project=1, ) ``` @@ -4839,7 +5138,7 @@ client.projects.exports.create(
    -**id_:** `int` — A unique integer value identifying this project. +**id:** `str` — Task ID
    @@ -4847,7 +5146,7 @@ client.projects.exports.create(
    -**title:** `typing.Optional[str]` +**data:** `typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]` — Task data dictionary with arbitrary keys and values
    @@ -4855,7 +5154,7 @@ client.projects.exports.create(
    -**id:** `typing.Optional[int]` +**project:** `typing.Optional[int]` — Project ID
    @@ -4863,83 +5162,125 @@ client.projects.exports.create(
    -**created_by:** `typing.Optional[UserSimple]` +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
    +
    +
    -
    -
    -**created_at:** `typing.Optional[dt.datetime]` — Creation time -
    +
    +## ImportStorage +
    client.import_storage.list_types()
    -**finished_at:** `typing.Optional[dt.datetime]` — Complete or fail time - -
    -
    +#### 📝 Description
    -**status:** `typing.Optional[ExportCreateStatus]` - +
    +
+ +Retrieve a list of the import storage types. +
    +
    +#### 🔌 Usage +
    -**md5:** `typing.Optional[str]` - +
    +
    + +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.list_types() + +``` +
    +
    +#### ⚙️ Parameters + +
    +
    +
    -**counters:** `typing.Optional[typing.Dict[str, typing.Any]]` +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
    +
    +
    -
    -
    -**converted_formats:** `typing.Optional[typing.Sequence[ConvertedFormat]]` -
    +
    +## ExportStorage +
    client.export_storage.list_types()
    -**task_filter_options:** `typing.Optional[TaskFilterOptions]` - -
    -
    +#### 📝 Description
    -**annotation_filter_options:** `typing.Optional[AnnotationFilterOptions]` - +
    +
+ +Retrieve a list of the export storage types. +
    +
    +#### 🔌 Usage +
    -**serialization_options:** `typing.Optional[SerializationOptions]` - +
    +
    + +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.list_types() + +``` +
    +
    +#### ⚙️ Parameters + +
    +
    +
    @@ -4955,7 +5296,8 @@ client.projects.exports.create(
    -
    client.projects.exports.get(...) +## Webhooks +
    client.webhooks.list(...)
    @@ -4967,11 +5309,12 @@ client.projects.exports.create(
    -Retrieve information about a specific export file (snapshot). -You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). +List all webhooks set up for your organization. + +Webhooks in Label Studio let you set up integrations that subscribe to certain events that occur inside Label Studio. When an event is triggered, Label Studio sends an HTTP POST request to the configured webhook URL. -You will also need the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). +For more information, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks).
    @@ -4986,15 +5329,12 @@ You will also need the project ID. This can be found in the URL when viewing the
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.projects.exports.get( - id=1, - export_pk="export_pk", -) +client.webhooks.list() ```
    @@ -5002,23 +5342,15 @@ client.projects.exports.get(
    -#### ⚙️ Parameters - -
    -
    - +#### ⚙️ Parameters +
    -**id:** `int` — A unique integer value identifying this project. - -
    -
    -
    -**export_pk:** `str` — Primary key identifying the export file. +**project:** `typing.Optional[str]` — Project ID
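+A short sketch filtering the list by project; note that the generated signature types `project` as a string:
+
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+# Only webhooks attached to project 1
+for webhook in client.webhooks.list(project="1"):
+    print(webhook)
+```
+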
    @@ -5038,7 +5370,7 @@ client.projects.exports.get(
    -
    client.projects.exports.delete(...) +
    client.webhooks.create(...)
    @@ -5050,9 +5382,13 @@ client.projects.exports.get(
    -Delete an export file by specified export ID. -You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). +Create a webhook. +Label Studio provides several out-of-the box webhook events, which you can find listed here: [Available Label Studio webhooks](https://labelstud.io/guide/webhooks#Available-Label-Studio-webhooks). + +If you want to create your own custom webhook, refer to [Create custom events for webhooks in Label Studio](https://labelstud.io/guide/webhook_create). + +Label Studio makes two main types of events available to integrate with webhooks: project-level task events and organization events. If you want to use organization-level webhook events, you will need to set `LABEL_STUDIO_ALLOW_ORGANIZATION_WEBHOOKS=true`.
    @@ -5067,14 +5403,13 @@ You will need the export ID. You can find this in the response when you [create
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.projects.exports.delete( - id=1, - export_pk="export_pk", +client.webhooks.create( + url="url", ) ``` @@ -5091,7 +5426,7 @@ client.projects.exports.delete(
    -**id:** `int` — A unique integer value identifying this project. +**url:** `str` — URL of webhook
    @@ -5099,7 +5434,7 @@ client.projects.exports.delete(
    -**export_pk:** `str` — Primary key identifying the export file. +**id:** `typing.Optional[int]`
    @@ -5107,77 +5442,55 @@ client.projects.exports.delete(
    -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**organization:** `typing.Optional[int]`
    -
    -
    +
    +
    +**project:** `typing.Optional[int]` +
    -
    -
    client.projects.exports.convert(...)
    -#### 📝 Description - -
    -
+**send_payload:** `typing.Optional[bool]` — If value is False, only the action is sent + +
    +
    -You can use this to convert an export snapshot into the selected format. - -To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). - -You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). - -The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). -
    -
+**send_for_all_actions:** `typing.Optional[bool]` — If value is False, the webhook is used only for the actions specified in WebhookAction +
    -#### 🔌 Usage - -
    -
    -
    -```python -from label_studio_sdk.client import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.projects.exports.convert( - id=1, - export_pk="export_pk", - export_type="export_type", -) - -``` -
    -
+**headers:** `typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]` — Key-value JSON of headers +
    -#### ⚙️ Parameters -
+**is_active:** `typing.Optional[bool]` — If value is False, the webhook is disabled + +
    +
    +
    -**id:** `int` — A unique integer value identifying this project. +**actions:** `typing.Optional[typing.Sequence[WebhookActionsItem]]`
    @@ -5185,7 +5498,7 @@ client.projects.exports.convert(
    -**export_pk:** `str` — Primary key identifying the export file. +**created_at:** `typing.Optional[dt.datetime]` — Creation time
    @@ -5193,7 +5506,7 @@ client.projects.exports.convert(
    -**export_type:** `str` — Export file format. +**updated_at:** `typing.Optional[dt.datetime]` — Last update time
    @@ -5213,7 +5526,7 @@ client.projects.exports.convert(
    -
    client.projects.exports.download(...) +
    client.webhooks.info(...)
    @@ -5225,11 +5538,8 @@ client.projects.exports.convert(
    -Download an export snapshot as a file in a specified format. To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). - -You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). -The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). +Get descriptions of all available webhook actions to set up webhooks. For more information, see the [Webhook event reference](https://labelstud.io/guide/webhook_reference).
    @@ -5244,15 +5554,12 @@ The project ID can be found in the URL when viewing the project in Label Studio,
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.projects.exports.download( - id=1, - export_pk="export_pk", -) +client.webhooks.info() ```
    @@ -5268,23 +5575,7 @@ client.projects.exports.download(
    -**id:** `int` — A unique integer value identifying this project. - -
    -
    - -
    -
    - -**export_pk:** `str` — Primary key identifying the export file. - -
    -
    - -
    -
-**export_type:** `typing.Optional[str]` — Selected export format +**organization_only:** `typing.Optional[bool]` — Whether to return only organization-level webhook actions
    @@ -5304,8 +5595,7 @@ client.projects.exports.download(
    -## Tasks -
    client.tasks.create_many_status(...) +
    client.webhooks.get(...)
    @@ -5317,13 +5607,10 @@ client.projects.exports.download(
    -Get information about an async project import operation. This can be especially useful to monitor status, as large import jobs can take time. - -You will need the project ID and the unique ID of the import operation. -The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). +Get information about a specific webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). -The import ID is returned as part of the response when you call [Import tasks](import-tasks). +For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference).
    @@ -5338,14 +5625,13 @@ The import ID is returned as part of the response when you call [Import tasks](i
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.tasks.create_many_status( +client.webhooks.get( id=1, - import_pk="import_pk", ) ``` @@ -5362,15 +5648,7 @@ client.tasks.create_many_status(
    -**id:** `int` — The project ID. - -
    -
    - -
    -
    - -**import_pk:** `str` +**id:** `int` — A unique integer value identifying this webhook.
    @@ -5390,7 +5668,7 @@ client.tasks.create_many_status(
    -
    client.tasks.delete_all_tasks(...) +
    client.webhooks.delete(...)
    @@ -5402,9 +5680,10 @@ client.tasks.create_many_status(
    -Delete all tasks from a specific project. -The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). +Delete a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). + +For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference).
    @@ -5419,12 +5698,12 @@ The project ID can be found in the URL when viewing the project in Label Studio,
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.tasks.delete_all_tasks( +client.webhooks.delete( id=1, ) @@ -5442,7 +5721,7 @@ client.tasks.delete_all_tasks(
    -**id:** `int` — A unique integer value identifying this project. +**id:** `int` — A unique integer value identifying this webhook.
    @@ -5462,7 +5741,7 @@ client.tasks.delete_all_tasks(
    -
    client.tasks.list(...) +
    client.webhooks.update(...)
    @@ -5474,11 +5753,10 @@ client.tasks.delete_all_tasks(
    -Retrieve a list of tasks. -You can use the query parameters to filter the list by project and/or view (a tab within the Data Manager). You can also optionally add pagination to make the response easier to parse. +Update a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). -The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). The view ID can be found using [List views](../views/list). +For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference).
    @@ -5493,17 +5771,16 @@ The project ID can be found in the URL when viewing the project in Label Studio,
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -response = client.tasks.list() -for item in response: - yield item -# alternatively, you can paginate page-by-page -for page in response.iter_pages(): - yield page +client.webhooks.update( + id_=1, + url="url", + webhook_serializer_for_update_url="url", +) ```
    @@ -5519,7 +5796,7 @@ for page in response.iter_pages():
    -**page:** `typing.Optional[int]` — A page number within the paginated result set. +**id_:** `int` — A unique integer value identifying this webhook.
    @@ -5527,7 +5804,7 @@ for page in response.iter_pages():
    -**page_size:** `typing.Optional[int]` — Number of results to return per page. +**url:** `str` — URL of webhook
    @@ -5535,7 +5812,7 @@ for page in response.iter_pages():
    -**view:** `typing.Optional[int]` — View ID +**webhook_serializer_for_update_url:** `str` — URL of webhook
    @@ -5543,7 +5820,7 @@ for page in response.iter_pages():
-**project:** `typing.Optional[int]` — Project ID +**send_payload:** `typing.Optional[bool]` — If value is False, only the action is sent
    @@ -5551,7 +5828,7 @@ for page in response.iter_pages():
-**resolve_uri:** `typing.Optional[bool]` — Resolve task data URIs using Cloud Storage +**send_for_all_actions:** `typing.Optional[bool]` — If value is False, the webhook is used only for the actions specified in WebhookAction
    @@ -5559,7 +5836,7 @@ for page in response.iter_pages():
-**fields:** `typing.Optional[TasksListRequestFields]` — Set to "all" if you want to include annotations and predictions in the response +**headers:** `typing.Optional[str]` — Key-value JSON of headers
    @@ -5567,7 +5844,7 @@ for page in response.iter_pages():
-**review:** `typing.Optional[bool]` — Get tasks for review +**is_active:** `typing.Optional[bool]` — If value is False, the webhook is disabled
    @@ -5575,7 +5852,12 @@ for page in response.iter_pages():
    -**include:** `typing.Optional[str]` — Specify which fields to include in the response +**actions:** `typing.Optional[ + typing.Union[ + WebhooksUpdateRequestActionsItem, + typing.Sequence[WebhooksUpdateRequestActionsItem], + ] +]`
    @@ -5583,14 +5865,7 @@ for page in response.iter_pages():
    -**query:** `typing.Optional[str]` - -Additional query to filter tasks. It must be JSON encoded string of dict containing one of the following parameters: `{"filters": ..., "selectedItems": ..., "ordering": ...}`. Check [Data Manager > Create View > see `data` field](#tag/Data-Manager/operation/api_dm_views_create) for more details about filters, selectedItems and ordering. - -- **filters**: dict with `"conjunction"` string (`"or"` or `"and"`) and list of filters in `"items"` array. Each filter is a dictionary with keys: `"filter"`, `"operator"`, `"type"`, `"value"`. [Read more about available filters](https://labelstud.io/sdk/data_manager.html)
    Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` -- **selectedItems**: dictionary with keys: `"all"`, `"included"`, `"excluded"`. If "all" is `false`, `"included"` must be used. If "all" is `true`, `"excluded"` must be used.
    Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` -- **ordering**: list of fields to order by. Currently, ordering is supported by only one parameter.
    - Example: `["completed_at"]` +**id:** `typing.Optional[int]`
    @@ -5598,74 +5873,63 @@ Additional query to filter tasks. It must be JSON encoded string of dict contain
    -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**organization:** `typing.Optional[int]`
    - -
    +
    +
    +**project:** `typing.Optional[int]` +
    -
    -
    client.tasks.create(...)
    -#### 📝 Description - -
    -
+**webhook_serializer_for_update_send_payload:** `typing.Optional[bool]` — If value is False, only the action is sent + +
    +
    -Create a new labeling task in Label Studio. - -The data you provide depends on your labeling config and data type. - -You will also need to provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). -
    -
+**webhook_serializer_for_update_send_for_all_actions:** `typing.Optional[bool]` — If value is False, the webhook is used only for the actions specified in WebhookAction +
    -#### 🔌 Usage -
+**webhook_serializer_for_update_headers:** `typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]` — Key-value JSON of headers + +
    +
    +
    -```python -from label_studio_sdk.client import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.tasks.create( - data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, - project=1, -) - -``` -
    -
+**webhook_serializer_for_update_is_active:** `typing.Optional[bool]` — If value is False, the webhook is disabled + -#### ⚙️ Parameters -
    +**webhook_serializer_for_update_actions:** `typing.Optional[typing.Sequence[WebhookSerializerForUpdateActionsItem]]` + +
    +
    +
    -**data:** `typing.Optional[typing.Dict[str, typing.Any]]` — Task data dictionary with arbitrary keys and values +**created_at:** `typing.Optional[dt.datetime]` — Creation time
    @@ -5673,7 +5937,7 @@ client.tasks.create(
    -**project:** `typing.Optional[int]` — Project ID +**updated_at:** `typing.Optional[dt.datetime]` — Last update time
    @@ -5693,7 +5957,8 @@ client.tasks.create(
    -
    client.tasks.get(...) +## Prompts +
    client.prompts.list()
    @@ -5705,8 +5970,7 @@ client.tasks.create(
    -Get task data, metadata, annotations and other attributes for a specific labeling task by task ID. -The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). +Get a list of prompts.
    @@ -5721,14 +5985,12 @@ The task ID is available from the Label Studio URL when viewing the task, or you
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.tasks.get( - id="id", -) +client.prompts.list() ```
    @@ -5744,14 +6006,6 @@ client.tasks.get(
    -**id:** `str` — Task ID - -
    -
    - -
    -
    - **request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
    @@ -5764,7 +6018,7 @@ client.tasks.get(
    -
    client.tasks.delete(...) +
    client.prompts.create(...)
    @@ -5776,11 +6030,7 @@ client.tasks.get(
    -Delete a task in Label Studio. - -You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). - -This action cannot be undone. +Create a new prompt.
    @@ -5795,13 +6045,15 @@ You will need the task ID. This is available from the Label Studio URL when view
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.tasks.delete( - id="id", +client.prompts.create( + title="title", + input_fields=["input_fields"], + output_classes=["output_classes"], ) ``` @@ -5818,7 +6070,7 @@ client.tasks.delete(
    -**id:** `str` — Task ID +**title:** `str` — Title of the prompt
    @@ -5826,73 +6078,55 @@ client.tasks.delete(
    -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**input_fields:** `typing.Sequence[str]` — List of input fields
    -
    -
    +
    +
    +**output_classes:** `typing.Sequence[str]` — List of output classes +
    -
    -
    client.tasks.update(...)
    -#### 📝 Description - -
    -
    +**description:** `typing.Optional[str]` — Description of the prompt + +
    +
    -Update the attributes of an existing labeling task. - -You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). -
    -
    +**created_by:** `typing.Optional[PromptCreatedBy]` — User ID of the creator of the prompt +
    -#### 🔌 Usage -
    -
    -
    - -```python -from label_studio_sdk.client import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.tasks.update( - id="id", - data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, - project=1, -) - -``` -
    -
    +**created_at:** `typing.Optional[dt.datetime]` — Date and time the prompt was created +
    -#### ⚙️ Parameters -
    +**updated_at:** `typing.Optional[dt.datetime]` — Date and time the prompt was last updated + +
    +
    +
    -**id:** `str` — Task ID +**organization:** `typing.Optional[PromptOrganization]` — Organization ID of the prompt
    @@ -5900,7 +6134,7 @@ client.tasks.update(
-**data:** `typing.Optional[typing.Dict[str, typing.Any]]` — Task data dictionary with arbitrary keys and values +**associated_projects:** `typing.Optional[typing.Sequence[int]]` — List of associated project IDs
    @@ -5908,7 +6142,7 @@ client.tasks.update(
    -**project:** `typing.Optional[int]` — Project ID +**skill_name:** `typing.Optional[str]` — Name of the skill
    @@ -5928,8 +6162,7 @@ client.tasks.update(
    -## ImportStorage -
    client.import_storage.list_types() +
    client.prompts.get(...)
    @@ -5941,7 +6174,7 @@ client.tasks.update(
    -Retrieve a list of the import storages types. +Get a prompt by ID.
    @@ -5956,12 +6189,14 @@ Retrieve a list of the import storages types.
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.list_types() +client.prompts.get( + id=1, +) ```
    @@ -5977,6 +6212,14 @@ client.import_storage.list_types()
    +**id:** `int` — Prompt ID + +
    +
    + +
    +
    + **request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
    @@ -5989,8 +6232,7 @@ client.import_storage.list_types()
    -## ImportStorage Azure -
    client.import_storage.azure.list(...) +
    client.prompts.delete(...)
    @@ -6002,11 +6244,7 @@ client.import_storage.list_types()
    -You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. Use this API request to get a list of all Azure import (source) storage connections for a specific project. - -The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - -For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +Delete a prompt by ID.
    @@ -6021,12 +6259,14 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.azure.list() +client.prompts.delete( + id=1, +) ```
    @@ -6042,7 +6282,7 @@ client.import_storage.azure.list()
    -**project:** `typing.Optional[int]` — Project ID +**id:** `int` — Prompt ID
    @@ -6062,7 +6302,7 @@ client.import_storage.azure.list()
    -
    client.import_storage.azure.create(...) +
    client.prompts.update(...)
    @@ -6074,13 +6314,7 @@ client.import_storage.azure.list()
    -Create a new source storage connection to Microsoft Azure Blob storage. - -For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. - -Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. - -After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). +Update a prompt by ID.
    @@ -6095,12 +6329,17 @@ For information about the required fields and prerequisites, see [Microsoft Azur
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.azure.create() +client.prompts.update( + id=1, + title="title", + input_fields=["input_fields"], + output_classes=["output_classes"], +) ```
    @@ -6116,7 +6355,7 @@ client.import_storage.azure.create()
    -**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. +**id:** `int` — Prompt ID
    @@ -6124,7 +6363,7 @@ client.import_storage.azure.create()
    -**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. +**title:** `str` — Title of the prompt
    @@ -6132,7 +6371,7 @@ client.import_storage.azure.create()
    -**presign:** `typing.Optional[bool]` — Presign URLs for direct download +**input_fields:** `typing.Sequence[str]` — List of input fields
    @@ -6140,7 +6379,7 @@ client.import_storage.azure.create()
    -**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes +**output_classes:** `typing.Sequence[str]` — List of output classes
    @@ -6148,7 +6387,7 @@ client.import_storage.azure.create()
    -**title:** `typing.Optional[str]` — Storage title +**description:** `typing.Optional[str]` — Description of the prompt
    @@ -6156,7 +6395,7 @@ client.import_storage.azure.create()
    -**description:** `typing.Optional[str]` — Storage description +**created_by:** `typing.Optional[PromptCreatedBy]` — User ID of the creator of the prompt
    @@ -6164,7 +6403,7 @@ client.import_storage.azure.create()
    -**project:** `typing.Optional[int]` — Project ID +**created_at:** `typing.Optional[dt.datetime]` — Date and time the prompt was created
    @@ -6172,7 +6411,7 @@ client.import_storage.azure.create()
    -**container:** `typing.Optional[str]` — Azure blob container +**updated_at:** `typing.Optional[dt.datetime]` — Date and time the prompt was last updated
    @@ -6180,7 +6419,7 @@ client.import_storage.azure.create()
    -**prefix:** `typing.Optional[str]` — Azure blob prefix name +**organization:** `typing.Optional[PromptOrganization]` — Organization ID of the prompt
    @@ -6188,7 +6427,7 @@ client.import_storage.azure.create()
-**account_name:** `typing.Optional[str]` — Azure Blob account name +**associated_projects:** `typing.Optional[typing.Sequence[int]]` — List of associated project IDs
    @@ -6196,7 +6435,7 @@ client.import_storage.azure.create()
    -**account_key:** `typing.Optional[str]` — Azure Blob account key +**skill_name:** `typing.Optional[str]` — Name of the skill
    @@ -6216,7 +6455,7 @@ client.import_storage.azure.create()
    -
    client.import_storage.azure.validate(...) +
    client.prompts.batch_predictions(...)
    @@ -6228,7 +6467,7 @@ client.import_storage.azure.create()
    -Validate a specific Azure import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. +Create a new batch prediction.
    @@ -6243,12 +6482,12 @@ Validate a specific Azure import storage connection. This is useful to ensure th
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.azure.validate() +client.prompts.batch_predictions() ```
    @@ -6264,87 +6503,7 @@ client.import_storage.azure.validate()
    -**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated - -
    -
    - -
    -
    - -**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - -
    -
    - -
    -
    - -**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - -
    -
    - -
    -
    - -**presign:** `typing.Optional[bool]` — Presign URLs for direct download - -
    -
    - -
    -
    - -**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes - -
    -
    - -
    -
    - -**title:** `typing.Optional[str]` — Storage title - -
    -
    - -
    -
    - -**description:** `typing.Optional[str]` — Storage description - -
    -
    - -
    -
    - -**project:** `typing.Optional[int]` — Project ID - -
    -
    - -
    -
    - -**container:** `typing.Optional[str]` — Azure blob container - -
    -
    - -
    -
    - -**prefix:** `typing.Optional[str]` — Azure blob prefix name - -
    -
    - -
    -
    - -**account_name:** `typing.Optional[str]` — Azure Blob account name +**modelrun_id:** `typing.Optional[int]` — Model Run ID to associate the prediction with
    @@ -6352,7 +6511,7 @@ client.import_storage.azure.validate()
    -**account_key:** `typing.Optional[str]` — Azure Blob account key +**results:** `typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]]`
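As a quick orientation for the new endpoint, here is a minimal sketch built only from the parameters documented above; the `modelrun_id` value is a placeholder, and the `results` items must follow `PromptsBatchPredictionsRequestResultsItem`, whose shape this diff does not spell out:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Attach a batch of predictions to an existing model run.
# The empty results list is a stand-in for real
# PromptsBatchPredictionsRequestResultsItem payloads.
client.prompts.batch_predictions(
    modelrun_id=1,
    results=[],
)
```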
    @@ -6372,7 +6531,7 @@ client.import_storage.azure.validate()
-client.import_storage.azure.get(...)
+client.prompts.batch_failed_predictions(...)
    @@ -6384,9 +6543,7 @@ client.import_storage.azure.validate()
    -Get a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - -For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +Create a new batch of failed predictions.
    @@ -6401,14 +6558,12 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.azure.get( - id=1, -) +client.prompts.batch_failed_predictions() ```
    @@ -6424,7 +6579,17 @@ client.import_storage.azure.get(
-**id:** `int` — A unique integer value identifying this azure blob import storage.
+**modelrun_id:** `typing.Optional[int]` — Model Run ID where the failed predictions came from
+**failed_predictions:** `typing.Optional[typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem]]`
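The failed-predictions counterpart follows the same pattern; again the ID is illustrative, and the item schema (`PromptsBatchFailedPredictionsRequestFailedPredictionsItem`) is not spelled out in this diff:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Record failed predictions against the model run they came from.
# The empty list is a stand-in for real failed-prediction payloads.
client.prompts.batch_failed_predictions(
    modelrun_id=1,
    failed_predictions=[],
)
```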
    @@ -6444,7 +6609,8 @@ client.import_storage.azure.get(
-client.import_storage.azure.delete(...)
+## ModelProviders
+client.model_providers.list()
    @@ -6456,11 +6622,7 @@ client.import_storage.azure.get(
    -Delete a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - -Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. - -If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. +Get all model provider connections created by the user in the current organization.
    @@ -6475,14 +6637,12 @@ If you want to remove the tasks that were synced from the external storage, you
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.azure.delete( - id=1, -) +client.model_providers.list() ```
    @@ -6498,14 +6658,6 @@ client.import_storage.azure.delete(
-**id:** `int` — A unique integer value identifying this azure blob import storage.
 **request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
    @@ -6518,7 +6670,7 @@ client.import_storage.azure.delete(
-client.import_storage.azure.update(...)
+client.model_providers.create(...)
    @@ -6530,9 +6682,7 @@ client.import_storage.azure.delete(
    -Update a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - -For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +Create a new model provider connection.
    @@ -6547,13 +6697,13 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.azure.update( - id=1, +client.model_providers.create( + provider="OpenAI", ) ``` @@ -6570,7 +6720,7 @@ client.import_storage.azure.update(
    -**id:** `int` — A unique integer value identifying this azure blob import storage. +**provider:** `ModelProviderConnectionProvider`
    @@ -6578,7 +6728,7 @@ client.import_storage.azure.update(
    -**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. +**api_key:** `typing.Optional[str]`
    @@ -6586,7 +6736,7 @@ client.import_storage.azure.update(
    -**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. +**deployment_name:** `typing.Optional[str]`
    @@ -6594,7 +6744,7 @@ client.import_storage.azure.update(
    -**presign:** `typing.Optional[bool]` — Presign URLs for direct download +**endpoint:** `typing.Optional[str]`
    @@ -6602,7 +6752,7 @@ client.import_storage.azure.update(
    -**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes +**scope:** `typing.Optional[ModelProviderConnectionScope]`
    @@ -6610,7 +6760,7 @@ client.import_storage.azure.update(
    -**title:** `typing.Optional[str]` — Storage title +**organization:** `typing.Optional[ModelProviderConnectionOrganization]`
    @@ -6618,7 +6768,7 @@ client.import_storage.azure.update(
    -**description:** `typing.Optional[str]` — Storage description +**created_by:** `typing.Optional[ModelProviderConnectionCreatedBy]`
    @@ -6626,7 +6776,7 @@ client.import_storage.azure.update(
    -**project:** `typing.Optional[int]` — Project ID +**created_at:** `typing.Optional[dt.datetime]`
    @@ -6634,7 +6784,7 @@ client.import_storage.azure.update(
    -**container:** `typing.Optional[str]` — Azure blob container +**updated_at:** `typing.Optional[dt.datetime]`
    @@ -6642,7 +6792,7 @@ client.import_storage.azure.update(
    -**prefix:** `typing.Optional[str]` — Azure blob prefix name +**is_internal:** `typing.Optional[bool]` — Whether the model provider connection is internal, not visible to the user.
    @@ -6650,7 +6800,7 @@ client.import_storage.azure.update(
    -**account_name:** `typing.Optional[str]` — Azure Blob account name +**budget_limit:** `typing.Optional[float]` — Budget limit for the model provider connection (null if unlimited)
    @@ -6658,7 +6808,31 @@ client.import_storage.azure.update(
-**account_key:** `typing.Optional[str]` — Azure Blob account key
+**budget_last_reset_date:** `typing.Optional[dt.datetime]` — Date and time the budget was last reset
+**budget_reset_period:** `typing.Optional[ModelProviderConnectionBudgetResetPeriod]` — Budget reset period for the model provider connection (null if not reset)
+**budget_total_spent:** `typing.Optional[float]` — Tracked total budget spent for the given provider connection within the current budget period
+**budget_alert_threshold:** `typing.Optional[float]` — Budget alert threshold for the given provider connection
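Putting the connection and budget fields together, a create call might look like the sketch below. The API key and budget numbers are placeholders, and fields not documented above are omitted rather than guessed:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Create an OpenAI connection with a spending cap and an alert threshold.
# The provider API key and budget values are illustrative placeholders.
client.model_providers.create(
    provider="OpenAI",
    api_key="YOUR_PROVIDER_API_KEY",
    budget_limit=100.0,
    budget_alert_threshold=0.8,
)
```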
    @@ -6678,7 +6852,7 @@ client.import_storage.azure.update(
-client.import_storage.azure.sync(...)
+client.model_providers.get(...)
    @@ -6690,11 +6864,7 @@ client.import_storage.azure.update(
    -Sync tasks from an Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - -Sync operations with external containers only go one way. They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. - -Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. +Get a model provider connection by ID.
    @@ -6709,13 +6879,13 @@ Sync operations with external containers only go one way. They either create tas
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.azure.sync( - id=1, +client.model_providers.get( + pk=1, ) ``` @@ -6732,7 +6902,7 @@ client.import_storage.azure.sync(
    -**id:** `int` — Storage ID +**pk:** `int` — Model Provider Connection ID
    @@ -6752,8 +6922,7 @@ client.import_storage.azure.sync(
-## ExportStorage
-client.export_storage.list_types()
+client.model_providers.delete(...)
    @@ -6765,7 +6934,7 @@ client.import_storage.azure.sync(
    -Retrieve a list of the export storages types. +Delete a model provider connection by ID.
    @@ -6780,12 +6949,14 @@ Retrieve a list of the export storages types.
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.list_types() +client.model_providers.delete( + pk=1, +) ```
    @@ -6801,6 +6972,14 @@ client.export_storage.list_types()
+**pk:** `int` — Model Provider Connection ID
 **request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
    @@ -6813,8 +6992,7 @@ client.export_storage.list_types()
-## ExportStorage Azure
-client.export_storage.azure.list(...)
+client.model_providers.update(...)
    @@ -6826,11 +7004,7 @@ client.export_storage.list_types()
    -You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. Use this API request to get a list of all Azure export (target) storage connections for a specific project. - -The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - -For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +Update a model provider connection by ID.
    @@ -6845,12 +7019,15 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.azure.list() +client.model_providers.update( + pk=1, + provider="OpenAI", +) ```
    @@ -6866,7 +7043,7 @@ client.export_storage.azure.list()
    -**project:** `typing.Optional[int]` — Project ID +**pk:** `int` — Model Provider Connection ID
    @@ -6874,71 +7051,63 @@ client.export_storage.azure.list()
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+**provider:** `ModelProviderConnectionProvider`
+**api_key:** `typing.Optional[str]`
-client.export_storage.azure.create(...)
-#### 📝 Description
+**deployment_name:** `typing.Optional[str]`
-Create a new target storage connection to Microsoft Azure Blob storage.
-For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation.
-After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync).
+**endpoint:** `typing.Optional[str]`
-#### 🔌 Usage
+**scope:** `typing.Optional[ModelProviderConnectionScope]`
-```python
-from label_studio_sdk.client import LabelStudio
-
-client = LabelStudio(
-    api_key="YOUR_API_KEY",
-)
-client.export_storage.azure.create()
-
-```
+**organization:** `typing.Optional[ModelProviderConnectionOrganization]`
-#### ⚙️ Parameters
+**created_by:** `typing.Optional[ModelProviderConnectionCreatedBy]`
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled
+**created_at:** `typing.Optional[dt.datetime]`
    @@ -6946,7 +7115,7 @@ client.export_storage.azure.create()
    -**title:** `typing.Optional[str]` — Storage title +**updated_at:** `typing.Optional[dt.datetime]`
    @@ -6954,7 +7123,7 @@ client.export_storage.azure.create()
    -**description:** `typing.Optional[str]` — Storage description +**is_internal:** `typing.Optional[bool]` — Whether the model provider connection is internal, not visible to the user.
    @@ -6962,7 +7131,7 @@ client.export_storage.azure.create()
    -**project:** `typing.Optional[int]` — Project ID +**budget_limit:** `typing.Optional[float]` — Budget limit for the model provider connection (null if unlimited)
    @@ -6970,7 +7139,7 @@ client.export_storage.azure.create()
    -**container:** `typing.Optional[str]` — Azure blob container +**budget_last_reset_date:** `typing.Optional[dt.datetime]` — Date and time the budget was last reset
    @@ -6978,7 +7147,7 @@ client.export_storage.azure.create()
    -**prefix:** `typing.Optional[str]` — Azure blob prefix name +**budget_reset_period:** `typing.Optional[ModelProviderConnectionBudgetResetPeriod]` — Budget reset period for the model provider connection (null if not reset)
    @@ -6986,7 +7155,7 @@ client.export_storage.azure.create()
    -**account_name:** `typing.Optional[str]` — Azure Blob account name +**budget_total_spent:** `typing.Optional[float]` — Tracked total budget spent for the given provider connection within the current budget period
    @@ -6994,7 +7163,7 @@ client.export_storage.azure.create()
    -**account_key:** `typing.Optional[str]` — Azure Blob account key +**budget_alert_threshold:** `typing.Optional[float]` — Budget alert threshold for the given provider connection
    @@ -7014,7 +7183,8 @@ client.export_storage.azure.create()
-client.export_storage.azure.validate(...)
+## Comments
+client.comments.list(...)
    @@ -7026,7 +7196,8 @@ client.export_storage.azure.create()
    -Validate a specific Azure export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + +Get a list of comments for a specific project.
    @@ -7040,21 +7211,84 @@ Validate a specific Azure export storage connection. This is useful to ensure th
-```python
-from label_studio_sdk.client import LabelStudio
-
-client = LabelStudio(
-    api_key="YOUR_API_KEY",
-)
-client.export_storage.azure.validate()
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+client.comments.list()
+
+```
+#### ⚙️ Parameters
+**project:** `typing.Optional[int]` — Project ID
+**expand_created_by:** `typing.Optional[bool]` — Expand the created_by field with object instead of ID
+**annotation:** `typing.Optional[int]` — Annotation ID
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
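A filtered listing, assuming only the parameters documented above (the project ID is a placeholder):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# List comments for one project, expanding created_by into user objects
# instead of bare IDs.
comments = client.comments.list(
    project=1,
    expand_created_by=True,
)
```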
+client.comments.create(...)
+#### 📝 Description
-```
+Create a new comment.
-#### ⚙️ Parameters
+#### 🔌 Usage
    @@ -7062,47 +7296,29 @@ client.export_storage.azure.validate()
-**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+client.comments.create()
+
+```
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled
-**title:** `typing.Optional[str]` — Storage title
-**description:** `typing.Optional[str]` — Storage description
+#### ⚙️ Parameters
-**project:** `typing.Optional[int]` — Project ID
-**container:** `typing.Optional[str]` — Azure blob container
+**annotation:** `typing.Optional[int]`
    @@ -7110,7 +7326,7 @@ client.export_storage.azure.validate()
    -**prefix:** `typing.Optional[str]` — Azure blob prefix name +**project:** `typing.Optional[int]`
    @@ -7118,7 +7334,7 @@ client.export_storage.azure.validate()
    -**account_name:** `typing.Optional[str]` — Azure Blob account name +**text:** `typing.Optional[str]`
    @@ -7126,7 +7342,7 @@ client.export_storage.azure.validate()
    -**account_key:** `typing.Optional[str]` — Azure Blob account key +**is_resolved:** `typing.Optional[bool]`
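A sketch of a populated create call, using only the optional fields listed above; both IDs are placeholders:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Leave a comment on a specific annotation within a project.
client.comments.create(
    annotation=1,
    project=1,
    text="Please double-check this label.",
)
```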
    @@ -7146,7 +7362,7 @@ client.export_storage.azure.validate()
-client.export_storage.azure.get(...)
+client.comments.get(...)
    @@ -7158,9 +7374,8 @@ client.export_storage.azure.validate()
    -Get a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). -For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +Get a specific comment.
    @@ -7175,12 +7390,12 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.azure.get( +client.comments.get( id=1, ) @@ -7198,7 +7413,7 @@ client.export_storage.azure.get(
    -**id:** `int` — A unique integer value identifying this azure blob export storage. +**id:** `int` — Comment ID
    @@ -7218,7 +7433,7 @@ client.export_storage.azure.get(
-client.export_storage.azure.delete(...)
+client.comments.delete(...)
    @@ -7230,9 +7445,8 @@ client.export_storage.azure.get(
    -Delete a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). -Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. +Delete a specific comment.
    @@ -7247,12 +7461,12 @@ Deleting an export/target storage connection does not affect tasks with synced d
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.azure.delete( +client.comments.delete( id=1, ) @@ -7270,7 +7484,7 @@ client.export_storage.azure.delete(
    -**id:** `int` — A unique integer value identifying this azure blob export storage. +**id:** `int` — Comment ID
    @@ -7290,7 +7504,7 @@ client.export_storage.azure.delete(
-client.export_storage.azure.update(...)
+client.comments.update(...)
    @@ -7302,9 +7516,8 @@ client.export_storage.azure.delete(
    -Update a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). -For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +Update a specific comment.
    @@ -7319,12 +7532,12 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.azure.update( +client.comments.update( id=1, ) @@ -7342,39 +7555,7 @@ client.export_storage.azure.update(
-**id:** `int` — A unique integer value identifying this azure blob export storage.
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled
-**title:** `typing.Optional[str]` — Storage title
-**description:** `typing.Optional[str]` — Storage description
-**project:** `typing.Optional[int]` — Project ID
+**id:** `int` — Comment ID
    @@ -7382,7 +7563,7 @@ client.export_storage.azure.update(
    -**container:** `typing.Optional[str]` — Azure blob container +**annotation:** `typing.Optional[int]`
    @@ -7390,7 +7571,7 @@ client.export_storage.azure.update(
    -**prefix:** `typing.Optional[str]` — Azure blob prefix name +**project:** `typing.Optional[int]`
    @@ -7398,7 +7579,7 @@ client.export_storage.azure.update(
    -**account_name:** `typing.Optional[str]` — Azure Blob account name +**text:** `typing.Optional[str]`
    @@ -7406,7 +7587,7 @@ client.export_storage.azure.update(
    -**account_key:** `typing.Optional[str]` — Azure Blob account key +**is_resolved:** `typing.Optional[bool]`
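For example, resolving a comment is just an update with `is_resolved=True` (the comment ID is a placeholder):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Mark an existing comment as resolved.
client.comments.update(
    id=1,
    is_resolved=True,
)
```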
    @@ -7426,7 +7607,8 @@ client.export_storage.azure.update(
-client.export_storage.azure.sync(...)
+## Workspaces
+client.workspaces.list()
    @@ -7438,11 +7620,12 @@ client.export_storage.azure.update(
    -Sync tasks to an Azure export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). -Sync operations with external containers only go one way. They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. +List all workspaces for your organization. -Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. +Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. + +For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces).
    @@ -7457,14 +7640,12 @@ Sync operations with external containers only go one way. They either create tas
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.azure.sync( - id="id", -) +client.workspaces.list() ```
    @@ -7480,14 +7661,6 @@ client.export_storage.azure.sync(
-**id:** `str`
 **request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
    @@ -7500,8 +7673,7 @@ client.export_storage.azure.sync(
-## ExportStorage Gcs
-client.export_storage.gcs.list(...)
+client.workspaces.create(...)
    @@ -7513,11 +7685,12 @@ client.export_storage.azure.sync(
    -You can connect your Google Cloud Storage bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all GCS export (target) storage connections for a specific project. -The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). +Create a new workspace. -For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. + +For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces).
    @@ -7532,12 +7705,12 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.gcs.list() +client.workspaces.create() ```
    @@ -7553,7 +7726,47 @@ client.export_storage.gcs.list()
-**project:** `typing.Optional[int]` — Project ID
+**title:** `typing.Optional[str]` — Workspace title
+**description:** `typing.Optional[str]` — Workspace description
+**is_public:** `typing.Optional[bool]` — Is workspace public
+**is_personal:** `typing.Optional[bool]` — Is workspace personal
+**color:** `typing.Optional[str]` — Workspace color in HEX format
+**is_archived:** `typing.Optional[bool]` — Is workspace archived
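A sketch using the documented fields; the title, description, and HEX color are illustrative values:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Create a private workspace for a review team.
client.workspaces.create(
    title="Quality Review",
    description="Projects for the review team",
    is_public=False,
    color="#FF5733",
)
```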
    @@ -7573,7 +7786,7 @@ client.export_storage.gcs.list()
-client.export_storage.gcs.create(...)
+client.workspaces.get(...)
    @@ -7585,11 +7798,8 @@ client.export_storage.gcs.list()
    -Create a new target storage connection to Google Cloud Storage. - -For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. -After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). +Get information about a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list).
    @@ -7604,12 +7814,14 @@ For information about the required fields and prerequisites, see [Google Cloud S
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.gcs.create() +client.workspaces.get( + id=1, +) ```
    @@ -7625,7 +7837,7 @@ client.export_storage.gcs.create()
    -**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. +**id:** `int` — Workspace ID
    @@ -7633,55 +7845,70 @@ client.export_storage.gcs.create()
    -**title:** `typing.Optional[str]` — Storage title +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-**description:** `typing.Optional[str]` — Storage description
+client.workspaces.delete(...)
-**project:** `typing.Optional[int]` — Project ID
+#### 📝 Description
-**bucket:** `typing.Optional[str]` — GCS bucket name
-**prefix:** `typing.Optional[str]` — GCS bucket prefix
+Delete a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list).
+#### 🔌 Usage
-**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details.
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+client.workspaces.delete(
+    id=1,
+)
+
+```
+#### ⚙️ Parameters
-**google_project_id:** `typing.Optional[str]` — Google project ID
+**id:** `int` — Workspace ID
    @@ -7701,7 +7928,7 @@ client.export_storage.gcs.create()
-client.export_storage.gcs.validate(...)
+client.workspaces.update(...)
    @@ -7713,7 +7940,8 @@ client.export_storage.gcs.create()
    -Validate a specific GCS export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + +Update a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list).
    @@ -7728,12 +7956,14 @@ Validate a specific GCS export storage connection. This is useful to ensure that
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.gcs.validate() +client.workspaces.update( + id=1, +) ```
    @@ -7749,23 +7979,7 @@ client.export_storage.gcs.validate()
-**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
-**title:** `typing.Optional[str]` — Storage title
+**id:** `int` — Workspace ID
    @@ -7773,7 +7987,7 @@ client.export_storage.gcs.validate()
    -**description:** `typing.Optional[str]` — Storage description +**title:** `typing.Optional[str]` — Workspace title
    @@ -7781,7 +7995,7 @@ client.export_storage.gcs.validate()
    -**project:** `typing.Optional[int]` — Project ID +**description:** `typing.Optional[str]` — Workspace description
    @@ -7789,7 +8003,7 @@ client.export_storage.gcs.validate()
    -**bucket:** `typing.Optional[str]` — GCS bucket name +**is_public:** `typing.Optional[bool]` — Is workspace public
    @@ -7797,7 +8011,7 @@ client.export_storage.gcs.validate()
    -**prefix:** `typing.Optional[str]` — GCS bucket prefix +**is_personal:** `typing.Optional[bool]` — Is workspace personal
    @@ -7805,7 +8019,7 @@ client.export_storage.gcs.validate()
    -**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. +**color:** `typing.Optional[str]` — Workspace color in HEX format
    @@ -7813,7 +8027,7 @@ client.export_storage.gcs.validate()
    -**google_project_id:** `typing.Optional[str]` — Google project ID +**is_archived:** `typing.Optional[bool]` — Is workspace archived
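For instance, archiving a workspace without deleting it (the ID is a placeholder):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Archive the workspace; it can be unarchived later with is_archived=False.
client.workspaces.update(
    id=1,
    is_archived=True,
)
```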
    @@ -7833,7 +8047,8 @@ client.export_storage.gcs.validate()
-client.export_storage.gcs.get(...)
+## ExportStorage Azure
+client.export_storage.azure.list(...)
    @@ -7845,7 +8060,10 @@ client.export_storage.gcs.validate()
    -Get a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. Use this API request to get a list of all Azure export (target) storage connections for a specific project. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -7862,14 +8080,12 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.gcs.get( - id=1, -) +client.export_storage.azure.list() ```
    @@ -7885,7 +8101,7 @@ client.export_storage.gcs.get(
    -**id:** `int` — A unique integer value identifying this gcs export storage. +**project:** `typing.Optional[int]` — Project ID
    @@ -7905,7 +8121,7 @@ client.export_storage.gcs.get(
-client.export_storage.gcs.delete(...)
+client.export_storage.azure.create(...)
    @@ -7917,9 +8133,12 @@ client.export_storage.gcs.get(
    -Delete a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). -Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. +Create a new target storage connection to Microsoft Azure Blob storage. + +For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. + +After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync).
    @@ -7934,14 +8153,12 @@ Deleting an export/target storage connection does not affect tasks with synced d
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.gcs.delete( - id=1, -) +client.export_storage.azure.create() ```
    @@ -7957,7 +8174,63 @@ client.export_storage.gcs.delete(
-**id:** `int` — A unique integer value identifying this gcs export storage.
+**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled
+**title:** `typing.Optional[str]` — Storage title
+**description:** `typing.Optional[str]` — Storage description
+**project:** `typing.Optional[int]` — Project ID
+**container:** `typing.Optional[str]` — Azure blob container
+**prefix:** `typing.Optional[str]` — Azure blob prefix name
+**account_name:** `typing.Optional[str]` — Azure Blob account name
+**account_key:** `typing.Optional[str]` — Azure Blob account key
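Combining the fields above into one sketch; the container, prefix, and credentials are placeholders for your own Azure Blob storage values:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Create an Azure target storage for a project; validate and sync afterwards.
client.export_storage.azure.create(
    project=1,
    container="my-container",
    prefix="annotations/",
    account_name="my-account",
    account_key="AZURE_ACCOUNT_KEY",
    title="Azure target storage",
)
```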
    @@ -7977,7 +8250,7 @@ client.export_storage.gcs.delete(
-client.export_storage.gcs.update(...)
+client.export_storage.azure.validate(...)
    @@ -7989,9 +8262,8 @@ client.export_storage.gcs.delete(
    -Update a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). -For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +Validate a specific Azure export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data.
    @@ -8006,14 +8278,12 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.gcs.update( - id=1, -) +client.export_storage.azure.validate() ```
    @@ -8029,7 +8299,7 @@ client.export_storage.gcs.update(
    -**id:** `int` — A unique integer value identifying this gcs export storage. +**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated
    @@ -8037,7 +8307,7 @@ client.export_storage.gcs.update(
    -**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled
    @@ -8069,7 +8339,7 @@ client.export_storage.gcs.update(
    -**bucket:** `typing.Optional[str]` — GCS bucket name +**container:** `typing.Optional[str]` — Azure blob container
    @@ -8077,7 +8347,7 @@ client.export_storage.gcs.update(
    -**prefix:** `typing.Optional[str]` — GCS bucket prefix +**prefix:** `typing.Optional[str]` — Azure blob prefix name
    @@ -8085,7 +8355,7 @@ client.export_storage.gcs.update(
    -**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. +**account_name:** `typing.Optional[str]` — Azure Blob account name
    @@ -8093,7 +8363,7 @@ client.export_storage.gcs.update(
    -**google_project_id:** `typing.Optional[str]` — Google project ID +**account_key:** `typing.Optional[str]` — Azure Blob account key
    @@ -8113,7 +8383,7 @@ client.export_storage.gcs.update(
-client.export_storage.gcs.sync(...)
+client.export_storage.azure.get(...)
    @@ -8125,11 +8395,10 @@ client.export_storage.gcs.update(
    -Sync tasks to a GCS export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). -Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. +Get a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). -Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -8144,13 +8413,13 @@ Sync operations with external buckets only go one way. They either create tasks
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.gcs.sync( - id="id", +client.export_storage.azure.get( + id=1, ) ``` @@ -8167,7 +8436,7 @@ client.export_storage.gcs.sync(
    -**id:** `str` +**id:** `int` — A unique integer value identifying this azure blob export storage.
    @@ -8187,8 +8456,7 @@ client.export_storage.gcs.sync(
-## ExportStorage Local
-client.export_storage.local.list(...)
+client.export_storage.azure.delete(...)
    @@ -8200,11 +8468,10 @@ client.export_storage.gcs.sync(
    -You can connect a local file directory to Label Studio as a source storage or target storage. Use this API request to get a list of all local file export (target) storage connections for a specific project. -The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). +Delete a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). -For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API.
    @@ -8219,12 +8486,14 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.local.list() +client.export_storage.azure.delete( + id=1, +) ```
    @@ -8240,7 +8509,7 @@ client.export_storage.local.list()
    -**project:** `typing.Optional[int]` — Project ID +**id:** `int` — A unique integer value identifying this azure blob export storage.
    @@ -8260,7 +8529,7 @@ client.export_storage.local.list()
-client.export_storage.local.create(...)
+client.export_storage.azure.update(...)
    @@ -8272,11 +8541,10 @@ client.export_storage.local.list()
    -Create a new target storage connection to a local file directory. -For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. +Update a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). -After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -8291,12 +8559,14 @@ For information about the required fields and prerequisites, see [Local storage]
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.local.create() +client.export_storage.azure.update( + id=1, +) ```
    @@ -8312,6 +8582,22 @@ client.export_storage.local.create()
+**id:** `int` — A unique integer value identifying this azure blob export storage.
+**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled
 **title:** `typing.Optional[str]` — Storage title
    @@ -8336,7 +8622,7 @@ client.export_storage.local.create()
    -**path:** `typing.Optional[str]` — Path to local directory +**container:** `typing.Optional[str]` — Azure blob container
    @@ -8344,7 +8630,7 @@ client.export_storage.local.create()
    -**regex_filter:** `typing.Optional[str]` — Regex for filtering objects +**prefix:** `typing.Optional[str]` — Azure blob prefix name
    @@ -8352,7 +8638,15 @@ client.export_storage.local.create()
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
+**account_name:** `typing.Optional[str]` — Azure Blob account name
+**account_key:** `typing.Optional[str]` — Azure Blob account key
    @@ -8372,7 +8666,7 @@ client.export_storage.local.create()
-client.export_storage.local.validate(...)
+client.export_storage.azure.sync(...)
    @@ -8384,7 +8678,12 @@ client.export_storage.local.create()
    -Validate a specific local file export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + +Sync tasks to an Azure export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +Sync operations with external containers only go one way. They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. + +Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private.
    @@ -8399,12 +8698,14 @@ Validate a specific local file export storage connection. This is useful to ensu
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.local.validate() +client.export_storage.azure.sync( + id="id", +) ```
    @@ -8420,7 +8721,7 @@ client.export_storage.local.validate()
    -**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated +**id:** `str`
    @@ -8428,47 +8729,73 @@ client.export_storage.local.validate()
    -**title:** `typing.Optional[str]` — Storage title +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-**description:** `typing.Optional[str]` — Storage description
+## ExportStorage Gcs
+client.export_storage.gcs.list(...)
-**project:** `typing.Optional[int]` — Project ID
+#### 📝 Description
-**path:** `typing.Optional[str]` — Path to local directory
+You can connect your Google Cloud Storage bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all GCS export (target) storage connections for a specific project.
+The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
+For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
+#### 🔌 Usage
-**regex_filter:** `typing.Optional[str]` — Regex for filtering objects
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+client.export_storage.gcs.list()
+
+```
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
+#### ⚙️ Parameters
+**project:** `typing.Optional[int]` — Project ID
    @@ -8488,7 +8815,7 @@ client.export_storage.local.validate()
-client.export_storage.local.get(...)
+client.export_storage.gcs.create(...)
    @@ -8500,9 +8827,12 @@ client.export_storage.local.validate()
    -Get a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). -For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +Create a new target storage connection to Google Cloud Storage. + +For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. + +After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync).
    @@ -8517,14 +8847,12 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.local.get( - id=1, -) +client.export_storage.gcs.create() ```
    @@ -8540,7 +8868,7 @@ client.export_storage.local.get(
    -**id:** `int` — A unique integer value identifying this local files export storage. +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
    @@ -8548,71 +8876,55 @@ client.export_storage.local.get(
    -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**title:** `typing.Optional[str]` — Storage title
+**description:** `typing.Optional[str]` — Storage description
-client.export_storage.local.delete(...)
-#### 📝 Description
+**project:** `typing.Optional[int]` — Project ID
-Delete a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
-Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API.
+**bucket:** `typing.Optional[str]` — GCS bucket name
-#### 🔌 Usage
-```python
-from label_studio_sdk.client import LabelStudio
-
-client = LabelStudio(
-    api_key="YOUR_API_KEY",
-)
-client.export_storage.local.delete(
-    id=1,
-)
-
-```
+**prefix:** `typing.Optional[str]` — GCS bucket prefix
-#### ⚙️ Parameters
+**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details.
-**id:** `int` — A unique integer value identifying this local files export storage.
+**google_project_id:** `typing.Optional[str]` — Google project ID
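A sketch of a GCS target storage create call; note that `google_application_credentials` takes the JSON content of the service-account file, not a path. The bucket name and file path below are placeholders:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Read the service-account JSON and pass its content, not the path.
with open("service-account.json") as f:
    credentials_json = f.read()

client.export_storage.gcs.create(
    project=1,
    bucket="my-bucket",
    prefix="annotations/",
    google_application_credentials=credentials_json,
    title="GCS target storage",
)
```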
    @@ -8632,7 +8944,7 @@ client.export_storage.local.delete(
-client.export_storage.local.update(...)
+client.export_storage.gcs.validate(...)
    @@ -8644,9 +8956,8 @@ client.export_storage.local.delete(
    -Update a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). -For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +Validate a specific GCS export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data.
    @@ -8661,14 +8972,12 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.local.update( - id=1, -) +client.export_storage.gcs.validate() ```
    @@ -8684,7 +8993,15 @@ client.export_storage.local.update(
-**id:** `int` — A unique integer value identifying this local files export storage.
+**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated
+**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
    @@ -8716,7 +9033,7 @@ client.export_storage.local.update(
    -**path:** `typing.Optional[str]` — Path to local directory +**bucket:** `typing.Optional[str]` — GCS bucket name
    @@ -8724,7 +9041,7 @@ client.export_storage.local.update(
    -**regex_filter:** `typing.Optional[str]` — Regex for filtering objects +**prefix:** `typing.Optional[str]` — GCS bucket prefix
    @@ -8732,7 +9049,15 @@ client.export_storage.local.update(
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
+**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details.
+**google_project_id:** `typing.Optional[str]` — Google project ID
    @@ -8752,7 +9077,7 @@ client.export_storage.local.update(
-client.export_storage.local.sync(...)
+client.export_storage.gcs.get(...)
    @@ -8764,11 +9089,10 @@ client.export_storage.local.update(
    -Sync tasks to an local file export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). -Sync operations with external local file directories only go one way. They either create tasks from objects in the directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results. +Get a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). -Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -8783,13 +9107,13 @@ Sync operations with external local file directories only go one way. They eithe
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.local.sync( - id="id", +client.export_storage.gcs.get( + id=1, ) ``` @@ -8806,7 +9130,7 @@ client.export_storage.local.sync(
    -**id:** `str` +**id:** `int` — A unique integer value identifying this gcs export storage.
    @@ -8826,8 +9150,7 @@ client.export_storage.local.sync(
-## ExportStorage Redis
-client.export_storage.redis.list(...)
+client.export_storage.gcs.delete(...)
    @@ -8839,11 +9162,10 @@ client.export_storage.local.sync(
    -You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis export (target) storage connections for a specific project. -The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). +Delete a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). -For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API.
    @@ -8858,12 +9180,14 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.redis.list() +client.export_storage.gcs.delete( + id=1, +) ```
    @@ -8879,7 +9203,7 @@ client.export_storage.redis.list()
    -**project:** `typing.Optional[int]` — Project ID +**id:** `int` — A unique integer value identifying this gcs export storage.
    @@ -8899,7 +9223,7 @@ client.export_storage.redis.list()
-client.export_storage.redis.create(...)
+client.export_storage.gcs.update(...)
    @@ -8911,11 +9235,10 @@ client.export_storage.redis.list()
    -Create a new target storage connection to Redis. -For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. +Update a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). -After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -8930,12 +9253,14 @@ For information about the required fields and prerequisites, see [Redis database
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.redis.create() +client.export_storage.gcs.update( + id=1, +) ```
    @@ -8951,7 +9276,7 @@ client.export_storage.redis.create()
    -**db:** `typing.Optional[int]` — Database ID of database to use +**id:** `int` — A unique integer value identifying this gcs export storage.
    @@ -8991,7 +9316,7 @@ client.export_storage.redis.create()
    -**path:** `typing.Optional[str]` — Storage prefix (optional) +**bucket:** `typing.Optional[str]` — GCS bucket name
    @@ -8999,7 +9324,7 @@ client.export_storage.redis.create()
    -**host:** `typing.Optional[str]` — Server Host IP (optional) +**prefix:** `typing.Optional[str]` — GCS bucket prefix
    @@ -9007,7 +9332,7 @@ client.export_storage.redis.create()
    -**port:** `typing.Optional[str]` — Server Port (optional) +**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details.
    @@ -9015,7 +9340,7 @@ client.export_storage.redis.create()
    -**password:** `typing.Optional[str]` — Server Password (optional) +**google_project_id:** `typing.Optional[str]` — Google project ID
    @@ -9035,7 +9360,7 @@ client.export_storage.redis.create()
-client.export_storage.redis.validate(...)
+client.export_storage.gcs.sync(...)
    @@ -9047,7 +9372,12 @@ client.export_storage.redis.create()
    -Validate a specific Redis export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + +Sync tasks to a GCS export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. + +Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private.
    @@ -9062,12 +9392,14 @@ Validate a specific Redis export storage connection. This is useful to ensure th
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.export_storage.redis.validate()
+client.export_storage.gcs.sync(
+    id="id",
+)

```
@@ -9083,79 +9415,7 @@ client.export_storage.redis.validate()
-**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated
-
-**db:** `typing.Optional[int]` — Database ID of database to use
-
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
-
-**title:** `typing.Optional[str]` — Storage title
-
-**description:** `typing.Optional[str]` — Storage description
-
-**project:** `typing.Optional[int]` — Project ID
-
-**path:** `typing.Optional[str]` — Storage prefix (optional)
-
-**host:** `typing.Optional[str]` — Server Host IP (optional)
-
-**port:** `typing.Optional[str]` — Server Port (optional)
-
-**password:** `typing.Optional[str]` — Server Password (optional)
+**id:** `str`
@@ -9175,7 +9435,8 @@ client.export_storage.redis.validate()
-client.export_storage.redis.get(...)
+## ExportStorage Local
+client.export_storage.local.list(...)
    @@ -9187,7 +9448,10 @@ client.export_storage.redis.validate()
-Get a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
+You can connect a local file directory to Label Studio as a source storage or target storage. Use this API request to get a list of all local file export (target) storage connections for a specific project.
+
+The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).

 For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
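Since every get/update/delete/sync call below needs a storage ID, a quick way to discover IDs is to iterate the list results. A minimal sketch, assuming the returned records expose `id` and `title` attributes (the project ID is a placeholder):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Print the ID and title of each local file export storage in project 1.
for storage in client.export_storage.local.list(project=1):
    print(storage.id, storage.title)
```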
    @@ -9204,14 +9468,12 @@ For more information about working with external storage, see [Sync data from ex
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.export_storage.redis.get(
-    id=1,
-)
+client.export_storage.local.list()

```
@@ -9227,7 +9489,7 @@ client.export_storage.redis.get(
-**id:** `int` — A unique integer value identifying this redis export storage.
+**project:** `typing.Optional[int]` — Project ID
@@ -9247,7 +9509,7 @@ client.export_storage.redis.get(
-client.export_storage.redis.delete(...)
+client.export_storage.local.create(...)
    @@ -9259,9 +9521,12 @@ client.export_storage.redis.get(
-Delete a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
-
-Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API.
+Create a new target storage connection to a local file directory.
+
+For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation.
+
+After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync).
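A minimal sketch of the create-then-validate flow this description recommends. The path and project ID are placeholders; for local storage the directory must be one the Label Studio server is allowed to serve (configured via LABEL_STUDIO_LOCAL_FILES_DOCUMENT_ROOT on the server side):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Create the local file target storage; parameters follow the list documented below.
client.export_storage.local.create(
    project=1,
    title="Local annotation export",
    path="/data/label-studio/export",  # placeholder directory
)

# Validate the settings before syncing; data is not exported until you sync.
client.export_storage.local.validate(
    project=1,
    path="/data/label-studio/export",
)
```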
    @@ -9276,30 +9541,68 @@ Deleting an export/target storage connection does not affect tasks with synced d
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.export_storage.redis.delete(
-    id=1,
-)
+client.export_storage.local.create()

```

#### ⚙️ Parameters

+**title:** `typing.Optional[str]` — Storage title
+
+**description:** `typing.Optional[str]` — Storage description
+
+**project:** `typing.Optional[int]` — Project ID
+
+**path:** `typing.Optional[str]` — Path to local directory
+
+**regex_filter:** `typing.Optional[str]` — Regex for filtering objects
+
-**id:** `int` — A unique integer value identifying this redis export storage.
+**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
    @@ -9319,7 +9622,7 @@ client.export_storage.redis.delete(
-client.export_storage.redis.update(...)
+client.export_storage.local.validate(...)
@@ -9331,9 +9634,8 @@ client.export_storage.redis.delete(
-Update a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
-
-For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
+Validate a specific local file export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data.
@@ -9348,14 +9650,12 @@ For more information about working with external storage, see [Sync data from ex
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.export_storage.redis.update(
-    id=1,
-)
+client.export_storage.local.validate()

```
    @@ -9371,23 +9671,7 @@ client.export_storage.redis.update(
-**id:** `int` — A unique integer value identifying this redis export storage.
-
-**db:** `typing.Optional[int]` — Database ID of database to use
-
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
+**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated
    @@ -9419,15 +9703,7 @@ client.export_storage.redis.update(
-**path:** `typing.Optional[str]` — Storage prefix (optional)
-
-**host:** `typing.Optional[str]` — Server Host IP (optional)
+**path:** `typing.Optional[str]` — Path to local directory
@@ -9435,7 +9711,7 @@ client.export_storage.redis.update(
-**port:** `typing.Optional[str]` — Server Port (optional)
+**regex_filter:** `typing.Optional[str]` — Regex for filtering objects
@@ -9443,7 +9719,7 @@ client.export_storage.redis.update(
-**password:** `typing.Optional[str]` — Server Password (optional)
+**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
    @@ -9463,7 +9739,7 @@ client.export_storage.redis.update(
-client.export_storage.redis.sync(...)
+client.export_storage.local.get(...)
@@ -9475,11 +9751,10 @@ client.export_storage.redis.update(
-Sync tasks to an Redis export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
-
-Sync operations with external databases only go one way. They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results.
+Get a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
-
-Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private.
+
+For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
@@ -9494,13 +9769,13 @@ Sync operations with external databases only go one way. They either create task
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.export_storage.redis.sync(
-    id="id",
+client.export_storage.local.get(
+    id=1,
)

```
@@ -9517,7 +9792,7 @@ client.export_storage.redis.sync(
-**id:** `str`
+**id:** `int` — A unique integer value identifying this local files export storage.
    @@ -9537,8 +9812,7 @@ client.export_storage.redis.sync(
-## ExportStorage S3
-client.export_storage.s3.list(...)
+client.export_storage.local.delete(...)
@@ -9550,11 +9824,10 @@ client.export_storage.redis.sync(
-You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project.
-
-The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
+Delete a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
-
-For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
+
+Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API.
@@ -9569,12 +9842,14 @@ For more information about working with external storage, see [Sync data from ex
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.export_storage.s3.list()
+client.export_storage.local.delete(
+    id=1,
+)

```
@@ -9590,7 +9865,7 @@ client.export_storage.s3.list()
-**project:** `typing.Optional[int]` — Project ID
+**id:** `int` — A unique integer value identifying this local files export storage.
    @@ -9610,7 +9885,7 @@ client.export_storage.s3.list()
-client.export_storage.s3.create(...)
+client.export_storage.local.update(...)
@@ -9622,11 +9897,10 @@ client.export_storage.s3.list()
-Create a new target storage connection to S3 storage.
-
-For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation.
+Update a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
-
-After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync).
+
+For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
@@ -9641,12 +9915,14 @@ For information about the required fields and prerequisites, see [Amazon S3](htt
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.export_storage.s3.create()
+client.export_storage.local.update(
+    id=1,
+)

```
@@ -9662,7 +9938,7 @@ client.export_storage.s3.create()
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
+**id:** `int` — A unique integer value identifying this local files export storage.
    @@ -9694,47 +9970,7 @@ client.export_storage.s3.create()
-**bucket:** `typing.Optional[str]` — S3 bucket name
-
-**prefix:** `typing.Optional[str]` — S3 bucket prefix
-
-**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID
-
-**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY
-
-**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN
-
-**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID
+**path:** `typing.Optional[str]` — Path to local directory
@@ -9742,7 +9978,7 @@ client.export_storage.s3.create()
-**region_name:** `typing.Optional[str]` — AWS Region
+**regex_filter:** `typing.Optional[str]` — Regex for filtering objects
@@ -9750,7 +9986,7 @@ client.export_storage.s3.create()
-**s3endpoint:** `typing.Optional[str]` — S3 Endpoint
+**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
    @@ -9770,7 +10006,7 @@ client.export_storage.s3.create()
-client.export_storage.s3.validate(...)
+client.export_storage.local.sync(...)
@@ -9782,7 +10018,12 @@ client.export_storage.s3.create()
-Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data.
+Sync tasks to a local file export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
+
+Sync operations with external local file directories only go one way. They either create tasks from objects in the directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results.
+
+Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private.
    @@ -9797,12 +10038,14 @@ Validate a specific S3 export storage connection. This is useful to ensure that
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.export_storage.s3.validate()
+client.export_storage.local.sync(
+    id="id",
+)

```
@@ -9818,7 +10061,7 @@ client.export_storage.s3.validate()
-**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated
+**id:** `str`
    @@ -9826,95 +10069,73 @@ client.export_storage.s3.validate()
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
-
-**title:** `typing.Optional[str]` — Storage title
-
-**description:** `typing.Optional[str]` — Storage description
-
-**project:** `typing.Optional[int]` — Project ID
-
-**bucket:** `typing.Optional[str]` — S3 bucket name
-
-**prefix:** `typing.Optional[str]` — S3 bucket prefix
-
-**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID
-
-**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY
-
-**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN
-
-**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID
-
-**region_name:** `typing.Optional[str]` — AWS Region
-
-**s3endpoint:** `typing.Optional[str]` — S3 Endpoint
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+## ExportStorage Redis
+client.export_storage.redis.list(...)
+
+#### 📝 Description
+
+You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis export (target) storage connections for a specific project.
+
+The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
+
+For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
+
+#### 🔌 Usage
+
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+client.export_storage.redis.list()
+
+```
+
+#### ⚙️ Parameters
+
+**project:** `typing.Optional[int]` — Project ID
    @@ -9934,7 +10155,7 @@ client.export_storage.s3.validate()
-client.export_storage.s3.get(...)
+client.export_storage.redis.create(...)
@@ -9946,9 +10167,12 @@ client.export_storage.s3.validate()
-Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
-
-For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
+Create a new target storage connection to Redis.
+
+For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation.
+
+After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync).
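As a hedged illustration of the fields involved, a Redis target storage connection might be created like this (all connection details are placeholders; note the docs type `port` as a string):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Field names follow the parameter list documented below.
client.export_storage.redis.create(
    project=1,
    title="Redis target",
    host="localhost",   # placeholder connection details
    port="6379",
    db=1,
    password="secret",
)
```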
    @@ -9963,14 +10187,12 @@ For more information about working with external storage, see [Sync data from ex
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.export_storage.s3.get(
-    id=1,
-)
+client.export_storage.redis.create()

```
@@ -9986,7 +10208,7 @@ client.export_storage.s3.get(
-**id:** `int` — A unique integer value identifying this s3 export storage.
+**db:** `typing.Optional[int]` — Database ID of database to use
    @@ -9994,71 +10216,63 @@ client.export_storage.s3.get(
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-client.export_storage.s3.delete(...)
-
-#### 📝 Description
-
-Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
-
-Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API.
-
-#### 🔌 Usage
-
-```python
-from label_studio_sdk.client import LabelStudio
-
-client = LabelStudio(
-    api_key="YOUR_API_KEY",
-)
-client.export_storage.s3.delete(
-    id=1,
-)
-
-```
-
-#### ⚙️ Parameters
-
-**id:** `int` — A unique integer value identifying this s3 export storage.
+**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
+
+**title:** `typing.Optional[str]` — Storage title
+
+**description:** `typing.Optional[str]` — Storage description
+
+**project:** `typing.Optional[int]` — Project ID
+
+**path:** `typing.Optional[str]` — Storage prefix (optional)
+
+**host:** `typing.Optional[str]` — Server Host IP (optional)
+
+**port:** `typing.Optional[str]` — Server Port (optional)
+
+**password:** `typing.Optional[str]` — Server Password (optional)
    @@ -10078,7 +10292,7 @@ client.export_storage.s3.delete(
-client.export_storage.s3.update(...)
+client.export_storage.redis.validate(...)
@@ -10090,9 +10304,8 @@ client.export_storage.s3.delete(
-Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
-
-For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
+Validate a specific Redis export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data.
@@ -10107,14 +10320,12 @@ For more information about working with external storage, see [Sync data from ex
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.export_storage.s3.update(
-    id=1,
-)
+client.export_storage.redis.validate()

```
    @@ -10130,31 +10341,7 @@ client.export_storage.s3.update(
-**id:** `int` — A unique integer value identifying this s3 export storage.
-
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
-
-**title:** `typing.Optional[str]` — Storage title
-
-**description:** `typing.Optional[str]` — Storage description
+**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated
    @@ -10162,7 +10349,7 @@ client.export_storage.s3.update(
-**project:** `typing.Optional[int]` — Project ID
+**db:** `typing.Optional[int]` — Database ID of database to use
@@ -10170,7 +10357,7 @@ client.export_storage.s3.update(
-**bucket:** `typing.Optional[str]` — S3 bucket name
+**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
@@ -10178,7 +10365,7 @@ client.export_storage.s3.update(
-**prefix:** `typing.Optional[str]` — S3 bucket prefix
+**title:** `typing.Optional[str]` — Storage title
@@ -10186,7 +10373,7 @@ client.export_storage.s3.update(
-**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID
+**description:** `typing.Optional[str]` — Storage description
@@ -10194,7 +10381,7 @@ client.export_storage.s3.update(
-**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY
+**project:** `typing.Optional[int]` — Project ID
@@ -10202,7 +10389,7 @@ client.export_storage.s3.update(
-**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN
+**path:** `typing.Optional[str]` — Storage prefix (optional)
@@ -10210,7 +10397,7 @@ client.export_storage.s3.update(
-**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID
+**host:** `typing.Optional[str]` — Server Host IP (optional)
@@ -10218,7 +10405,7 @@ client.export_storage.s3.update(
-**region_name:** `typing.Optional[str]` — AWS Region
+**port:** `typing.Optional[str]` — Server Port (optional)
@@ -10226,7 +10413,7 @@ client.export_storage.s3.update(
-**s3endpoint:** `typing.Optional[str]` — S3 Endpoint
+**password:** `typing.Optional[str]` — Server Password (optional)
    @@ -10246,7 +10433,7 @@ client.export_storage.s3.update(
-client.export_storage.s3.sync(...)
+client.export_storage.redis.get(...)
@@ -10258,11 +10445,10 @@ client.export_storage.s3.update(
-Sync tasks to an S3 export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
-
-Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results.
+Get a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
-
-Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private.
+
+For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
@@ -10277,13 +10463,13 @@ Sync operations with external buckets only go one way. They either create tasks
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.export_storage.s3.sync(
-    id="id",
+client.export_storage.redis.get(
+    id=1,
)

```
@@ -10300,7 +10486,7 @@ client.export_storage.s3.sync(
-**id:** `str`
+**id:** `int` — A unique integer value identifying this redis export storage.
    @@ -10320,8 +10506,7 @@ client.export_storage.s3.sync(
-## ImportStorage Gcs
-client.import_storage.gcs.list(...)
+client.export_storage.redis.delete(...)
@@ -10333,11 +10518,10 @@ client.export_storage.s3.sync(
-You can connect your Google Cloud Storage bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project.
-
-The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
+Delete a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
-
-For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
+
+Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API.
@@ -10352,12 +10536,14 @@ For more information about working with external storage, see [Sync data from ex
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.import_storage.gcs.list()
+client.export_storage.redis.delete(
+    id=1,
+)

```
@@ -10373,7 +10559,7 @@ client.import_storage.gcs.list()
-**project:** `typing.Optional[int]` — Project ID
+**id:** `int` — A unique integer value identifying this redis export storage.
    @@ -10393,7 +10579,7 @@ client.import_storage.gcs.list()
-client.import_storage.gcs.create(...)
+client.export_storage.redis.update(...)
@@ -10405,13 +10591,10 @@ client.import_storage.gcs.list()
-Create a new source storage connection to a Google Cloud Storage bucket.
-
-For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation.
+Update a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
-
-Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link.
-
-After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync).
+
+For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
@@ -10426,12 +10609,14 @@ For information about the required fields and prerequisites, see [Google Cloud S
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.import_storage.gcs.create()
+client.export_storage.redis.update(
+    id=1,
+)

```
    @@ -10447,15 +10632,7 @@ client.import_storage.gcs.create()
-**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.
-
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
+**id:** `int` — A unique integer value identifying this redis export storage.
@@ -10463,7 +10640,7 @@ client.import_storage.gcs.create()
-**presign:** `typing.Optional[bool]` — Presign URLs for direct download
+**db:** `typing.Optional[int]` — Database ID of database to use
@@ -10471,7 +10648,7 @@ client.import_storage.gcs.create()
-**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes
+**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
@@ -10503,7 +10680,7 @@ client.import_storage.gcs.create()
-**bucket:** `typing.Optional[str]` — GCS bucket name
+**path:** `typing.Optional[str]` — Storage prefix (optional)
@@ -10511,7 +10688,7 @@ client.import_storage.gcs.create()
-**prefix:** `typing.Optional[str]` — GCS bucket prefix
+**host:** `typing.Optional[str]` — Server Host IP (optional)
@@ -10519,7 +10696,7 @@ client.import_storage.gcs.create()
-**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details.
+**port:** `typing.Optional[str]` — Server Port (optional)
@@ -10527,7 +10704,7 @@ client.import_storage.gcs.create()
-**google_project_id:** `typing.Optional[str]` — Google project ID
+**password:** `typing.Optional[str]` — Server Password (optional)
    @@ -10547,7 +10724,7 @@ client.import_storage.gcs.create()
-client.import_storage.gcs.validate(...)
+client.export_storage.redis.sync(...)
@@ -10559,7 +10736,12 @@ client.import_storage.gcs.create()
-Validate a specific GCS import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data.
+Sync tasks to a Redis export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
+
+Sync operations with external databases only go one way. They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results.
+
+Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private.
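Putting the list and sync endpoints together, one way to trigger the one-way sync described above is sketched below. It assumes `list()` returns records with an `id` attribute; the docs type sync's `id` parameter as a string, hence the `str()` conversion:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Sync every Redis export storage attached to project 1 (placeholder ID).
for storage in client.export_storage.redis.list(project=1):
    client.export_storage.redis.sync(id=str(storage.id))
```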
    @@ -10574,12 +10756,14 @@ Validate a specific GCS import storage connection. This is useful to ensure that
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.import_storage.gcs.validate()
+client.export_storage.redis.sync(
+    id="id",
+)

```
@@ -10595,7 +10779,7 @@ client.import_storage.gcs.validate()
-**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated
+**id:** `str`
    @@ -10603,87 +10787,73 @@ client.import_storage.gcs.validate()
-**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.
-
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
-
-**presign:** `typing.Optional[bool]` — Presign URLs for direct download
-
-**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes
-
-**title:** `typing.Optional[str]` — Storage title
-
-**description:** `typing.Optional[str]` — Storage description
-
-**project:** `typing.Optional[int]` — Project ID
-
-**bucket:** `typing.Optional[str]` — GCS bucket name
-
-**prefix:** `typing.Optional[str]` — GCS bucket prefix
-
-**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details.
-
-**google_project_id:** `typing.Optional[str]` — Google project ID
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+## ExportStorage S3
+client.export_storage.s3.list(...)
+
+#### 📝 Description
+
+You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project.
+
+The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
+
+For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
+
+#### 🔌 Usage
+
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+client.export_storage.s3.list()
+
+```
+
+#### ⚙️ Parameters
+
+**project:** `typing.Optional[int]` — Project ID
    @@ -10703,7 +10873,7 @@ client.import_storage.gcs.validate()
-client.import_storage.gcs.get(...)
+client.export_storage.s3.create(...)
@@ -10715,9 +10885,12 @@ client.import_storage.gcs.validate()
-Get a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list).
-
-For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
+Create a new target storage connection to S3 storage.
+
+For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation.
+
+After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync).
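For orientation, a key-based S3 target storage might be created as sketched below; every field name follows the parameter list documented under this endpoint, and all values are placeholders:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Create the S3 target storage, then validate before syncing.
client.export_storage.s3.create(
    project=1,
    bucket="my-bucket",
    prefix="annotations/",
    aws_access_key_id="YOUR_ACCESS_KEY_ID",
    aws_secret_access_key="YOUR_SECRET_ACCESS_KEY",
    region_name="us-east-1",
)
client.export_storage.s3.validate(project=1, bucket="my-bucket")
```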
    @@ -10732,14 +10905,12 @@ For more information about working with external storage, see [Sync data from ex
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.import_storage.gcs.get(
-    id=1,
-)
+client.export_storage.s3.create()

```
@@ -10755,7 +10926,7 @@ client.import_storage.gcs.get(
-**id:** `int` — A unique integer value identifying this gcs import storage.
+**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
    @@ -10763,73 +10934,87 @@ client.import_storage.gcs.get(
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-client.import_storage.gcs.delete(...)
-
-#### 📝 Description
-
-Delete a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list).
-
-Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project.
-
-If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API.
-
-#### 🔌 Usage
-
-```python
-from label_studio_sdk.client import LabelStudio
-
-client = LabelStudio(
-    api_key="YOUR_API_KEY",
-)
-client.import_storage.gcs.delete(
-    id=1,
-)
-
-```
-
-#### ⚙️ Parameters
-
-**id:** `int` — A unique integer value identifying this gcs import storage.
+**title:** `typing.Optional[str]` — Storage title
+
+**description:** `typing.Optional[str]` — Storage description
+
+**project:** `typing.Optional[int]` — Project ID
+
+**bucket:** `typing.Optional[str]` — S3 bucket name
+
+**prefix:** `typing.Optional[str]` — S3 bucket prefix
+
+**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID
+
+**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY
+
+**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN
+
+**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID
+
+**region_name:** `typing.Optional[str]` — AWS Region
+
+**s3endpoint:** `typing.Optional[str]` — S3 Endpoint
    @@ -10849,7 +11034,7 @@ client.import_storage.gcs.delete(
-client.import_storage.gcs.update(...)
+client.export_storage.s3.validate(...)
@@ -10861,9 +11046,8 @@ client.import_storage.gcs.delete(
-Update a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list).
-
-For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
+Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data.
@@ -10878,14 +11062,12 @@ For more information about working with external storage, see [Sync data from ex
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.import_storage.gcs.update(
-    id=1,
-)
+client.export_storage.s3.validate()

```
    @@ -10901,7 +11083,7 @@ client.import_storage.gcs.update(
-**id:** `int` — A unique integer value identifying this gcs import storage.
+**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated
@@ -10909,7 +11091,7 @@ client.import_storage.gcs.update(
-**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.
+**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
@@ -10917,7 +11099,7 @@ client.import_storage.gcs.update(
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
+**title:** `typing.Optional[str]` — Storage title
@@ -10925,7 +11107,7 @@ client.import_storage.gcs.update(
-**presign:** `typing.Optional[bool]` — Presign URLs for direct download
+**description:** `typing.Optional[str]` — Storage description
@@ -10933,7 +11115,7 @@ client.import_storage.gcs.update(
-**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes
+**project:** `typing.Optional[int]` — Project ID
@@ -10941,7 +11123,7 @@ client.import_storage.gcs.update(
-**title:** `typing.Optional[str]` — Storage title
+**bucket:** `typing.Optional[str]` — S3 bucket name
@@ -10949,7 +11131,7 @@ client.import_storage.gcs.update(
-**description:** `typing.Optional[str]` — Storage description
+**prefix:** `typing.Optional[str]` — S3 bucket prefix
@@ -10957,7 +11139,7 @@ client.import_storage.gcs.update(
-**project:** `typing.Optional[int]` — Project ID
+**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID
@@ -10965,7 +11147,7 @@ client.import_storage.gcs.update(
-**bucket:** `typing.Optional[str]` — GCS bucket name
+**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY
@@ -10973,7 +11155,7 @@ client.import_storage.gcs.update(
-**prefix:** `typing.Optional[str]` — GCS bucket prefix
+**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN
@@ -10981,7 +11163,7 @@ client.import_storage.gcs.update(
-**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details.
+**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID
@@ -10989,7 +11171,15 @@ client.import_storage.gcs.update(
-**google_project_id:** `typing.Optional[str]` — Google project ID
+**region_name:** `typing.Optional[str]` — AWS Region
+
+**s3endpoint:** `typing.Optional[str]` — S3 Endpoint
    @@ -11009,7 +11199,7 @@ client.import_storage.gcs.update(
-client.import_storage.gcs.sync(...)
+client.export_storage.s3.get(...)
@@ -11021,11 +11211,10 @@ client.import_storage.gcs.update(
-Sync tasks from a GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list).
-
-Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results.
+Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
-
-Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private.
+
+For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
@@ -11040,12 +11229,12 @@ Sync operations with external buckets only go one way. They either create tasks
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.import_storage.gcs.sync(
+client.export_storage.s3.get(
    id=1,
)

```
@@ -11063,7 +11252,7 @@ client.import_storage.gcs.sync(
-**id:** `int` — Storage ID
+**id:** `int` — A unique integer value identifying this s3 export storage.
    @@ -11083,8 +11272,7 @@ client.import_storage.gcs.sync(
-## ImportStorage Local
-client.import_storage.local.list(...)
+client.export_storage.s3.delete(...)
@@ -11096,11 +11284,10 @@ client.import_storage.gcs.sync(
-If you have local files that you want to add to Label Studio from a specific directory, you can set up a specific local directory on the machine where LS is running as source or target storage. Use this API request to get a list of all local file import (source) storage connections for a specific project.
-
-The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
+Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
-
-For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
+
+Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API.
@@ -11115,12 +11302,14 @@ For more information about working with external storage, see [Sync data from ex
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.import_storage.local.list()
+client.export_storage.s3.delete(
+    id=1,
+)

```
@@ -11136,7 +11325,7 @@ client.import_storage.local.list()
-**project:** `typing.Optional[int]` — Project ID
+**id:** `int` — A unique integer value identifying this s3 export storage.
    @@ -11156,7 +11345,7 @@ client.import_storage.local.list()
-client.import_storage.local.create(...)
+client.export_storage.s3.update(...)
@@ -11168,11 +11357,10 @@ client.import_storage.local.list()
-Create a new source storage connection to a local file directory.
-
-For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation.
+Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
-
-After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync).
+
+For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
@@ -11187,12 +11375,14 @@ For information about the required fields and prerequisites, see [Local storage]
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.import_storage.local.create()
+client.export_storage.s3.update(
+    id=1,
+)

```
    @@ -11208,15 +11398,7 @@ client.import_storage.local.create()
-**title:** `typing.Optional[str]` — Storage title
-
-**description:** `typing.Optional[str]` — Storage description
+**id:** `int` — A unique integer value identifying this s3 export storage.
@@ -11224,7 +11406,7 @@ client.import_storage.local.create()
-**project:** `typing.Optional[int]` — Project ID
+**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
@@ -11232,7 +11414,7 @@ client.import_storage.local.create()
-**path:** `typing.Optional[str]` — Path to local directory
+**title:** `typing.Optional[str]` — Storage title
@@ -11240,7 +11422,7 @@ client.import_storage.local.create()
-**regex_filter:** `typing.Optional[str]` — Regex for filtering objects
+**description:** `typing.Optional[str]` — Storage description
@@ -11248,7 +11430,7 @@ client.import_storage.local.create()
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
+**project:** `typing.Optional[int]` — Project ID
    @@ -11256,67 +11438,15 @@ client.import_storage.local.create()
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-client.import_storage.local.validate(...)
-
-#### 📝 Description
-
-Validate a specific local file import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data.
-
-#### 🔌 Usage
-
-```python
-from label_studio_sdk.client import LabelStudio
-
-client = LabelStudio(
-    api_key="YOUR_API_KEY",
-)
-client.import_storage.local.validate()
-
-```
-
-#### ⚙️ Parameters
-
-**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated
+**bucket:** `typing.Optional[str]` — S3 bucket name
+
+**prefix:** `typing.Optional[str]` — S3 bucket prefix
    @@ -11324,7 +11454,7 @@ client.import_storage.local.validate()
-**title:** `typing.Optional[str]` — Storage title
+**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID
@@ -11332,7 +11462,7 @@ client.import_storage.local.validate()
-**description:** `typing.Optional[str]` — Storage description
+**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY
@@ -11340,7 +11470,7 @@ client.import_storage.local.validate()
-**project:** `typing.Optional[int]` — Project ID
+**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN
@@ -11348,7 +11478,7 @@ client.import_storage.local.validate()
-**path:** `typing.Optional[str]` — Path to local directory
+**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID
@@ -11356,7 +11486,7 @@ client.import_storage.local.validate()
-**regex_filter:** `typing.Optional[str]` — Regex for filtering objects
+**region_name:** `typing.Optional[str]` — AWS Region
@@ -11364,7 +11494,7 @@ client.import_storage.local.validate()
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
+**s3endpoint:** `typing.Optional[str]` — S3 Endpoint
    @@ -11384,7 +11514,7 @@ client.import_storage.local.validate()
-client.import_storage.local.get(...)
+client.export_storage.s3.sync(...)
@@ -11396,9 +11526,12 @@ client.import_storage.local.validate()
-Get a specific local file import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list).
-
-For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
+Sync tasks to an S3 export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
+
+Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results.
+
+Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private.
@@ -11413,13 +11546,13 @@ For more information about working with external storage, see [Sync data from ex
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.import_storage.local.get(
-    id=1,
+client.export_storage.s3.sync(
+    id="id",
)

```
@@ -11436,7 +11569,7 @@ client.import_storage.local.get(
-**id:** `int` — A unique integer value identifying this local files import storage.
+**id:** `str`
    @@ -11456,7 +11589,8 @@ client.import_storage.local.get(
-client.import_storage.local.delete(...)
+## ExportStorage S3S
+client.export_storage.s3s.list(...)
@@ -11468,11 +11602,12 @@ client.import_storage.local.get(
-Delete a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list).
-
-Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project.
+You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project.
-
-If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API.
+
+The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
+
+For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
@@ -11487,14 +11622,12 @@ If you want to remove the tasks that were synced from the external storage, you
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.import_storage.local.delete(
-    id=1,
-)
+client.export_storage.s3s.list()

```
@@ -11510,7 +11643,7 @@ client.import_storage.local.delete(
-**id:** `int` — A unique integer value identifying this local files import storage.
+**project:** `typing.Optional[int]` — Project ID
    @@ -11530,7 +11663,7 @@ client.import_storage.local.delete(
-client.import_storage.local.update(...)
+client.export_storage.s3s.create(...)
@@ -11542,9 +11675,10 @@ client.import_storage.local.delete(
-Update a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list).
-
-For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
+Create a new target storage connection to an S3 bucket with IAM role access.
+
+For information about the required fields and prerequisites, see [Amazon S3](https://docs.humansignal.com/guide/storage#Set-up-an-S3-connection-with-IAM-role-access) in the Label Studio documentation.
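Compared with the key-based S3 endpoint above, IAM-role access swaps the access keys for a role ARN and external ID. A minimal sketch with placeholder values, using only the parameters documented below:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Create an S3 target storage that authenticates via an assumed IAM role.
client.export_storage.s3s.create(
    project=1,
    bucket="my-bucket",
    prefix="annotations/",
    role_arn="arn:aws:iam::123456789012:role/LabelStudioExport",  # placeholder ARN
    external_id="YOUR_EXTERNAL_ID",
    region_name="us-east-1",
)
```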
    @@ -11559,14 +11693,12 @@ For more information about working with external storage, see [Sync data from ex
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.import_storage.local.update(
-    id=1,
-)
+client.export_storage.s3s.create()

```
@@ -11582,7 +11714,31 @@ client.import_storage.local.update(
-**id:** `int` — A unique integer value identifying this local files import storage.
+**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
+
+**title:** `typing.Optional[str]` — Storage title
+
+**description:** `typing.Optional[str]` — Storage description
+
+**project:** `typing.Optional[int]` — Project ID
    @@ -11590,7 +11746,7 @@ client.import_storage.local.update(
-**title:** `typing.Optional[str]` — Storage title
+**bucket:** `typing.Optional[str]` — S3 bucket name
@@ -11598,7 +11754,7 @@ client.import_storage.local.update(
-**description:** `typing.Optional[str]` — Storage description
+**prefix:** `typing.Optional[str]` — S3 bucket prefix
@@ -11606,7 +11762,7 @@ client.import_storage.local.update(
-**project:** `typing.Optional[int]` — Project ID
+**external_id:** `typing.Optional[str]` — AWS External ID
@@ -11614,7 +11770,7 @@ client.import_storage.local.update(
-**path:** `typing.Optional[str]` — Path to local directory
+**role_arn:** `typing.Optional[str]` — AWS Role ARN
@@ -11622,7 +11778,7 @@ client.import_storage.local.update(
-**regex_filter:** `typing.Optional[str]` — Regex for filtering objects
+**region_name:** `typing.Optional[str]` — AWS Region
@@ -11630,7 +11786,7 @@ client.import_storage.local.update(
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
+**s3endpoint:** `typing.Optional[str]` — S3 Endpoint
    @@ -11650,7 +11806,7 @@ client.import_storage.local.update(
-client.import_storage.local.sync(...)
+client.export_storage.s3s.get(...)
@@ -11662,11 +11818,8 @@ client.import_storage.local.update(
-Sync tasks from a local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list).
-
-Sync operations with external sources only go one way. They either create tasks from objects in the source directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results.
-
-Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private.
+Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
@@ -11681,12 +11834,12 @@ Sync operations with external sources only go one way. They either create tasks
```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
-client.import_storage.local.sync(
+client.export_storage.s3s.get(
    id=1,
)

```
@@ -11704,7 +11857,7 @@ client.import_storage.local.sync(
-**id:** `int` — Storage ID
+**id:** `int` — Export storage ID
    @@ -11724,8 +11877,7 @@ client.import_storage.local.sync(
    -## ImportStorage Redis -
    client.import_storage.redis.list(...) +
    client.export_storage.s3s.delete(...)
    @@ -11737,11 +11889,8 @@ client.import_storage.local.sync(
    -You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis import (source) storage connections for a specific project. -The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - -For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
    @@ -11756,12 +11905,14 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.redis.list() +client.export_storage.s3s.delete( + id=1, +) ```
    @@ -11777,7 +11928,7 @@ client.import_storage.redis.list()
    -**project:** `typing.Optional[int]` — Project ID +**id:** `int` — Export storage ID
    @@ -11797,7 +11948,7 @@ client.import_storage.redis.list()
    -
    client.import_storage.redis.create(...) +
    client.export_storage.s3s.update(...)
    @@ -11809,11 +11960,8 @@ client.import_storage.redis.list()
    -Create a new source storage connection to a Redis database. - -For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. -After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). +Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
    @@ -11828,12 +11976,14 @@ For information about the required fields and prerequisites, see [Redis database
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.redis.create() +client.export_storage.s3s.update( + id=1, +) ```
    @@ -11849,7 +11999,7 @@ client.import_storage.redis.create()
    -**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. +**id:** `int` — Export storage ID
    @@ -11857,7 +12007,7 @@ client.import_storage.redis.create()
    -**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
    @@ -11889,7 +12039,7 @@ client.import_storage.redis.create()
    -**path:** `typing.Optional[str]` — Storage prefix (optional) +**bucket:** `typing.Optional[str]` — S3 bucket name
    @@ -11897,7 +12047,7 @@ client.import_storage.redis.create()
    -**host:** `typing.Optional[str]` — Server Host IP (optional) +**prefix:** `typing.Optional[str]` — S3 bucket prefix
    @@ -11905,7 +12055,7 @@ client.import_storage.redis.create()
    -**port:** `typing.Optional[str]` — Server Port (optional) +**external_id:** `typing.Optional[str]` — AWS External ID
    @@ -11913,7 +12063,23 @@ client.import_storage.redis.create()
    -**password:** `typing.Optional[str]` — Server Password (optional) +**role_arn:** `typing.Optional[str]` — AWS Role ARN + +
    +
    + +
    +
    + +**region_name:** `typing.Optional[str]` — AWS Region + +
    +
    + +
    +
    + +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint
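+
+For example (a sketch; the ID and field values are placeholders), you might update only the prefix and region of an existing connection:
+
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+client.export_storage.s3s.update(
+    id=1,                        # placeholder export storage ID
+    prefix="annotations/2024/",  # placeholder bucket prefix
+    region_name="eu-west-1",     # placeholder region
+)
+```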
    @@ -11933,7 +12099,7 @@ client.import_storage.redis.create()
    -
    client.import_storage.redis.validate(...) +
    client.export_storage.s3s.validate(...)
    @@ -11945,7 +12111,8 @@ client.import_storage.redis.create()
    -Validate a specific Redis import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + +Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data.
    @@ -11960,12 +12127,12 @@ Validate a specific Redis import storage connection. This is useful to ensure th
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.redis.validate() +client.export_storage.s3s.validate() ```
    @@ -11981,7 +12148,7 @@ client.import_storage.redis.validate()
    -**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
    @@ -11989,7 +12156,7 @@ client.import_storage.redis.validate()
    -**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. +**title:** `typing.Optional[str]` — Storage title
    @@ -11997,7 +12164,7 @@ client.import_storage.redis.validate()
    -**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. +**description:** `typing.Optional[str]` — Storage description
    @@ -12005,7 +12172,7 @@ client.import_storage.redis.validate()
    -**title:** `typing.Optional[str]` — Storage title +**project:** `typing.Optional[int]` — Project ID
    @@ -12013,7 +12180,7 @@ client.import_storage.redis.validate()
    -**description:** `typing.Optional[str]` — Storage description +**bucket:** `typing.Optional[str]` — S3 bucket name
    @@ -12021,7 +12188,7 @@ client.import_storage.redis.validate()
    -**project:** `typing.Optional[int]` — Project ID +**prefix:** `typing.Optional[str]` — S3 bucket prefix
    @@ -12029,7 +12196,7 @@ client.import_storage.redis.validate()
    -**path:** `typing.Optional[str]` — Storage prefix (optional) +**external_id:** `typing.Optional[str]` — AWS External ID
    @@ -12037,7 +12204,7 @@ client.import_storage.redis.validate()
    -**host:** `typing.Optional[str]` — Server Host IP (optional) +**role_arn:** `typing.Optional[str]` — AWS Role ARN
    @@ -12045,7 +12212,7 @@ client.import_storage.redis.validate()
    -**port:** `typing.Optional[str]` — Server Port (optional) +**region_name:** `typing.Optional[str]` — AWS Region
    @@ -12053,7 +12220,7 @@ client.import_storage.redis.validate()
    -**password:** `typing.Optional[str]` — Server Password (optional) +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint
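+
+Because validate accepts the same connection fields as create, one possible pattern (sketched with placeholder values; it assumes a failed validation raises an error) is to validate the settings first and only create the storage once they pass:
+
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+settings = dict(
+    project=1,  # placeholder project ID
+    bucket="my-bucket",
+    role_arn="arn:aws:iam::123456789012:role/LabelStudioRole",
+    region_name="us-east-1",
+)
+client.export_storage.s3s.validate(**settings)  # assumed to raise if misconfigured
+client.export_storage.s3s.create(**settings)
+```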
    @@ -12073,7 +12240,8 @@ client.import_storage.redis.validate()
    -
    client.import_storage.redis.get(...) +## ImportStorage Azure +
    client.import_storage.azure.list(...)
    @@ -12085,7 +12253,10 @@ client.import_storage.redis.validate()
    -Get a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. Use this API request to get a list of all Azure import (source) storage connections for a specific project. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -12102,14 +12273,12 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.redis.get( - id=1, -) +client.import_storage.azure.list() ```
    @@ -12125,7 +12294,7 @@ client.import_storage.redis.get(
    -**id:** `int` — A unique integer value identifying this redis import storage. +**project:** `typing.Optional[int]` — Project ID
    @@ -12145,7 +12314,7 @@ client.import_storage.redis.get(
    -
    client.import_storage.redis.delete(...) +
    client.import_storage.azure.create(...)
    @@ -12157,11 +12326,14 @@ client.import_storage.redis.get(
    -Delete a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). -Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. +Create a new source storage connection to Microsoft Azure Blob storage. -If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. +For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. + +Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. + +After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync).
    @@ -12176,14 +12348,12 @@ If you want to remove the tasks that were synced from the external storage, you
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.redis.delete( - id=1, -) +client.import_storage.azure.create() ```
    @@ -12199,7 +12369,87 @@ client.import_storage.redis.delete(
-**id:** `int` — A unique integer value identifying this redis import storage. +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported. + +
    +
    + +
    +
    + +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
    +
    + +
    +
    + +**presign:** `typing.Optional[bool]` — Presign URLs for direct download + +
    +
    + +
    +
    + +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes + +
    +
    + +
    +
    + +**title:** `typing.Optional[str]` — Storage title + +
    +
    + +
    +
    + +**description:** `typing.Optional[str]` — Storage description + +
    +
    + +
    +
    + +**project:** `typing.Optional[int]` — Project ID + +
    +
    + +
    +
    + +**container:** `typing.Optional[str]` — Azure blob container + +
    +
    + +
    +
    + +**prefix:** `typing.Optional[str]` — Azure blob prefix name + +
    +
    + +
    +
    + +**account_name:** `typing.Optional[str]` — Azure Blob account name + +
    +
    + +
    +
    + +**account_key:** `typing.Optional[str]` — Azure Blob account key
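+
+A minimal sketch combining the fields above and then syncing the new connection. The container, account, and key are placeholders, and it assumes the returned storage object exposes its `id`:
+
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+storage = client.import_storage.azure.create(
+    project=1,                        # placeholder project ID
+    container="my-container",         # placeholder container name
+    account_name="mystorageaccount",  # placeholder account name
+    account_key="...",                # placeholder account key
+    use_blob_urls=True,
+)
+# Assumes the response model exposes the new storage's ID
+client.import_storage.azure.sync(id=storage.id)
+```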
    @@ -12219,7 +12469,7 @@ client.import_storage.redis.delete(
    -
    client.import_storage.redis.update(...) +
    client.import_storage.azure.validate(...)
    @@ -12231,9 +12481,8 @@ client.import_storage.redis.delete(
    -Update a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). -For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +Validate a specific Azure import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data.
    @@ -12248,14 +12497,12 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.redis.update( - id=1, -) +client.import_storage.azure.validate() ```
    @@ -12271,7 +12518,7 @@ client.import_storage.redis.update(
    -**id:** `int` — A unique integer value identifying this redis import storage. +**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated
    @@ -12295,6 +12542,22 @@ client.import_storage.redis.update(
    +**presign:** `typing.Optional[bool]` — Presign URLs for direct download + +
    +
    + +
    +
    + +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes + +
    +
    + +
    +
    + **title:** `typing.Optional[str]` — Storage title
    @@ -12319,7 +12582,7 @@ client.import_storage.redis.update(
    -**path:** `typing.Optional[str]` — Storage prefix (optional) +**container:** `typing.Optional[str]` — Azure blob container
    @@ -12327,7 +12590,7 @@ client.import_storage.redis.update(
    -**host:** `typing.Optional[str]` — Server Host IP (optional) +**prefix:** `typing.Optional[str]` — Azure blob prefix name
    @@ -12335,7 +12598,7 @@ client.import_storage.redis.update(
    -**port:** `typing.Optional[str]` — Server Port (optional) +**account_name:** `typing.Optional[str]` — Azure Blob account name
    @@ -12343,7 +12606,7 @@ client.import_storage.redis.update(
    -**password:** `typing.Optional[str]` — Server Password (optional) +**account_key:** `typing.Optional[str]` — Azure Blob account key
    @@ -12363,7 +12626,7 @@ client.import_storage.redis.update(
    -
    client.import_storage.redis.sync(...) +
    client.import_storage.azure.get(...)
    @@ -12375,11 +12638,10 @@ client.import_storage.redis.update(
    -Sync tasks from a Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). -Sync operations with external databases only go one way. They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results. +Get a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). -Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -12394,12 +12656,12 @@ Sync operations with external databases only go one way. They either create task
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.redis.sync( +client.import_storage.azure.get( id=1, ) @@ -12417,7 +12679,7 @@ client.import_storage.redis.sync(
    -**id:** `int` — Storage ID +**id:** `int` — A unique integer value identifying this azure blob import storage.
    @@ -12437,8 +12699,7 @@ client.import_storage.redis.sync(
    -## ImportStorage S3 -
    client.import_storage.s3.list(...) +
    client.import_storage.azure.delete(...)
    @@ -12450,11 +12711,12 @@ client.import_storage.redis.sync(
    -You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project. -The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). +Delete a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). -For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + +If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API.
    @@ -12469,12 +12731,14 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.s3.list() +client.import_storage.azure.delete( + id=1, +) ```
    @@ -12490,7 +12754,7 @@ client.import_storage.s3.list()
    -**project:** `typing.Optional[int]` — Project ID +**id:** `int` — A unique integer value identifying this azure blob import storage.
    @@ -12510,7 +12774,7 @@ client.import_storage.s3.list()
    -
    client.import_storage.s3.create(...) +
    client.import_storage.azure.update(...)
    @@ -12522,13 +12786,10 @@ client.import_storage.s3.list()
    -Create a new source storage connection to a S3 bucket. - -For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. -Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. +Update a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). -After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -12543,12 +12804,14 @@ For information about the required fields and prerequisites, see [Amazon S3](htt
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.s3.create() +client.import_storage.azure.update( + id=1, +) ```
    @@ -12564,7 +12827,7 @@ client.import_storage.s3.create()
    -**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. +**id:** `int` — A unique integer value identifying this azure blob import storage.
    @@ -12572,7 +12835,7 @@ client.import_storage.s3.create()
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported.
    @@ -12580,7 +12843,7 @@ client.import_storage.s3.create()
    -**presign:** `typing.Optional[bool]` — Presign URLs for download +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
    @@ -12588,7 +12851,7 @@ client.import_storage.s3.create()
    -**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes +**presign:** `typing.Optional[bool]` — Presign URLs for direct download
    @@ -12596,7 +12859,7 @@ client.import_storage.s3.create()
    -**recursive_scan:** `typing.Optional[bool]` — Scan recursively +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes
    @@ -12628,39 +12891,7 @@ client.import_storage.s3.create()
    -**bucket:** `typing.Optional[str]` — S3 bucket name - -
    -
    - -
    -
    - -**prefix:** `typing.Optional[str]` — S3 bucket prefix - -
    -
    - -
    -
    - -**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID - -
    -
    - -
    -
    - -**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY - -
    -
    - -
    -
    - -**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN +**container:** `typing.Optional[str]` — Azure blob container
    @@ -12668,7 +12899,7 @@ client.import_storage.s3.create()
    -**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID +**prefix:** `typing.Optional[str]` — Azure blob prefix name
    @@ -12676,7 +12907,7 @@ client.import_storage.s3.create()
    -**region_name:** `typing.Optional[str]` — AWS Region +**account_name:** `typing.Optional[str]` — Azure Blob account name
    @@ -12684,7 +12915,7 @@ client.import_storage.s3.create()
    -**s3endpoint:** `typing.Optional[str]` — S3 Endpoint +**account_key:** `typing.Optional[str]` — Azure Blob account key
    @@ -12704,7 +12935,7 @@ client.import_storage.s3.create()
    -
    client.import_storage.s3.validate(...) +
    client.import_storage.azure.sync(...)
    @@ -12716,7 +12947,12 @@ client.import_storage.s3.create()
    -Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + +Sync tasks from an Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Sync operations with external containers only go one way. They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. + +Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private.
    @@ -12731,12 +12967,14 @@ Validate a specific S3 import storage connection. This is useful to ensure that
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.s3.validate() +client.import_storage.azure.sync( + id=1, +) ```
    @@ -12752,7 +12990,7 @@ client.import_storage.s3.validate()
    -**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated +**id:** `int` — Storage ID
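+
+For instance, here is a sketch that syncs every Azure source storage attached to a project (it assumes project ID 1 and that list returns storage objects exposing `id`):
+
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+for storage in client.import_storage.azure.list(project=1):
+    client.import_storage.azure.sync(id=storage.id)
+```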
    @@ -12760,127 +12998,73 @@ client.import_storage.s3.validate()
    -**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
    - -
    -
    - -**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. -
    -
    -
    -**presign:** `typing.Optional[bool]` — Presign URLs for download -
    +
    +## ImportStorage Gcs +
    client.import_storage.gcs.list(...)
    -**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes - -
    -
    +#### 📝 Description
    -**recursive_scan:** `typing.Optional[bool]` — Scan recursively - -
    -
    -
    -**title:** `typing.Optional[str]` — Storage title - -
    -
    - -
    -
    -**description:** `typing.Optional[str]` — Storage description - -
    -
    +You can connect your Google Cloud Storage bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project. -
    -
    +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). -**project:** `typing.Optional[int]` — Project ID - +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    - -
    -
    - -**bucket:** `typing.Optional[str]` — S3 bucket name -
    -
    -
    - -**prefix:** `typing.Optional[str]` — S3 bucket prefix - -
    -
    +#### 🔌 Usage
    -**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID - -
    -
    -
    -**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY - -
    -
    +```python +from label_studio_sdk import LabelStudio -
    -
    +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.gcs.list() -**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN - +```
    - -
    -
    - -**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID -
    +#### ⚙️ Parameters +
    -**region_name:** `typing.Optional[str]` — AWS Region - -
    -
    -
    -**s3endpoint:** `typing.Optional[str]` — S3 Endpoint +**project:** `typing.Optional[int]` — Project ID
    @@ -12900,7 +13084,7 @@ client.import_storage.s3.validate()
    -
    client.import_storage.s3.get(...) +
    client.import_storage.gcs.create(...)
    @@ -12912,9 +13096,14 @@ client.import_storage.s3.validate()
    -Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). -For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +Create a new source storage connection to a Google Cloud Storage bucket. + +For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. + +Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. + +After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync).
    @@ -12929,14 +13118,12 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.s3.get( - id=1, -) +client.import_storage.gcs.create() ```
    @@ -12952,7 +13139,7 @@ client.import_storage.s3.get(
-**id:** `int` — A unique integer value identifying this s3 import storage. +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported.
    @@ -12960,73 +13147,79 @@ client.import_storage.s3.get(
    -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
    - -
    +
    +
    +**presign:** `typing.Optional[bool]` — Presign URLs for direct download +
    -
    -
    client.import_storage.s3.delete(...)
    -#### 📝 Description +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes + +
    +
    +**title:** `typing.Optional[str]` — Storage title + +
    +
    +
    -Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - -Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. - -If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. -
    -
    +**description:** `typing.Optional[str]` — Storage description + -#### 🔌 Usage -
    +**project:** `typing.Optional[int]` — Project ID + +
    +
    +
    -```python -from label_studio_sdk.client import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.import_storage.s3.delete( - id=1, -) - -``` +**bucket:** `typing.Optional[str]` — GCS bucket name +
    + +
    +
    + +**prefix:** `typing.Optional[str]` — GCS bucket prefix +
    -#### ⚙️ Parameters -
    +**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + +
    +
    +
    -**id:** `int` — A unique integer value identifying this s3 import storage. +**google_project_id:** `typing.Optional[str]` — Google project ID
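+
+Note that `google_application_credentials` takes the *content* of the service account JSON file, not a path. A sketch with placeholder values:
+
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+# Read the service account key file and pass its content as a string
+with open("service-account.json") as f:  # placeholder path
+    credentials = f.read()
+client.import_storage.gcs.create(
+    project=1,               # placeholder project ID
+    bucket="my-gcs-bucket",  # placeholder bucket name
+    google_application_credentials=credentials,
+    use_blob_urls=True,
+)
+```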
    @@ -13046,7 +13239,7 @@ client.import_storage.s3.delete(
    -
    client.import_storage.s3.update(...) +
    client.import_storage.gcs.validate(...)
    @@ -13058,9 +13251,8 @@ client.import_storage.s3.delete(
    -Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). -For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +Validate a specific GCS import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data.
    @@ -13075,14 +13267,12 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.s3.update( - id=1, -) +client.import_storage.gcs.validate() ```
    @@ -13098,7 +13288,7 @@ client.import_storage.s3.update(
    -**id:** `int` — A unique integer value identifying this s3 import storage. +**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated
    @@ -13122,7 +13312,7 @@ client.import_storage.s3.update(
    -**presign:** `typing.Optional[bool]` — Presign URLs for download +**presign:** `typing.Optional[bool]` — Presign URLs for direct download
    @@ -13138,14 +13328,6 @@ client.import_storage.s3.update(
    -**recursive_scan:** `typing.Optional[bool]` — Scan recursively - -
    -
    - -
    -
    - **title:** `typing.Optional[str]` — Storage title
    @@ -13170,39 +13352,7 @@ client.import_storage.s3.update(
    -**bucket:** `typing.Optional[str]` — S3 bucket name - -
    -
    - -
    -
    - -**prefix:** `typing.Optional[str]` — S3 bucket prefix - -
    -
    - -
    -
    - -**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID - -
    -
    - -
    -
    - -**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY - -
    -
    - -
    -
    - -**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN +**bucket:** `typing.Optional[str]` — GCS bucket name
    @@ -13210,7 +13360,7 @@ client.import_storage.s3.update(
    -**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID +**prefix:** `typing.Optional[str]` — GCS bucket prefix
    @@ -13218,7 +13368,7 @@ client.import_storage.s3.update(
    -**region_name:** `typing.Optional[str]` — AWS Region +**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details.
    @@ -13226,7 +13376,7 @@ client.import_storage.s3.update(
    -**s3endpoint:** `typing.Optional[str]` — S3 Endpoint +**google_project_id:** `typing.Optional[str]` — Google project ID
    @@ -13246,7 +13396,7 @@ client.import_storage.s3.update(
    -
    client.import_storage.s3.sync(...) +
    client.import_storage.gcs.get(...)
    @@ -13258,11 +13408,10 @@ client.import_storage.s3.update(
    -Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). -Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. +Get a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). -Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -13277,12 +13426,12 @@ Sync operations with external buckets only go one way. They either create tasks
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.s3.sync( +client.import_storage.gcs.get( id=1, ) @@ -13300,7 +13449,7 @@ client.import_storage.s3.sync(
    -**id:** `int` — Storage ID +**id:** `int` — A unique integer value identifying this gcs import storage.
    @@ -13320,8 +13469,7 @@ client.import_storage.s3.sync(
    -## Webhooks -
    client.webhooks.list(...) +
    client.import_storage.gcs.delete(...)
    @@ -13333,11 +13481,12 @@ client.import_storage.s3.sync(
    -List all webhooks set up for your organization. -Webhooks in Label Studio let you set up integrations that subscribe to certain events that occur inside Label Studio. When an event is triggered, Label Studio sends an HTTP POST request to the configured webhook URL. +Delete a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. -For more information, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks). +If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API.
    @@ -13352,12 +13501,14 @@ For more information, see [Set up webhooks in Label Studio](https://labelstud.io
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.webhooks.list() +client.import_storage.gcs.delete( + id=1, +) ```
    @@ -13373,7 +13524,7 @@ client.webhooks.list()
    -**project:** `typing.Optional[str]` — Project ID +**id:** `int` — A unique integer value identifying this gcs import storage.
    @@ -13393,7 +13544,7 @@ client.webhooks.list()
    -
    client.webhooks.create(...) +
    client.import_storage.gcs.update(...)
    @@ -13405,12 +13556,10 @@ client.webhooks.list()
    -Create a webhook. -Label Studio provides several out-of-the box webhook events, which you can find listed here: [Available Label Studio webhooks](https://labelstud.io/guide/webhooks#Available-Label-Studio-webhooks). -If you want to create your own custom webhook, refer to [Create custom events for webhooks in Label Studio](https://labelstud.io/guide/webhook_create). +Update a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). -Label Studio makes two main types of events available to integrate with webhooks: project-level task events and organization events. If you want to use organization-level webhook events, you will need to set `LABEL_STUDIO_ALLOW_ORGANIZATION_WEBHOOKS=true`. +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -13425,13 +13574,13 @@ If you want to create your own custom webhook, refer to [Create custom events fo
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.webhooks.create( - url="url", +client.import_storage.gcs.update( + id=1, ) ``` @@ -13448,7 +13597,7 @@ client.webhooks.create(
    -**url:** `str` — URL of webhook +**id:** `int` — A unique integer value identifying this gcs import storage.
    @@ -13456,7 +13605,7 @@ client.webhooks.create(
-**id:** `typing.Optional[int]` +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported.
    @@ -13464,7 +13613,7 @@ client.webhooks.create(
    -**organization:** `typing.Optional[int]` +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
    @@ -13472,7 +13621,7 @@ client.webhooks.create(
    -**project:** `typing.Optional[int]` +**presign:** `typing.Optional[bool]` — Presign URLs for direct download
    @@ -13480,7 +13629,7 @@ client.webhooks.create(
    -**send_payload:** `typing.Optional[bool]` — If value is False send only action +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes
    @@ -13488,7 +13637,7 @@ client.webhooks.create(
    -**send_for_all_actions:** `typing.Optional[bool]` — If value is False - used only for actions from WebhookAction +**title:** `typing.Optional[str]` — Storage title
    @@ -13496,7 +13645,7 @@ client.webhooks.create(
    -**headers:** `typing.Optional[typing.Dict[str, typing.Any]]` — Key Value Json of headers +**description:** `typing.Optional[str]` — Storage description
    @@ -13504,7 +13653,7 @@ client.webhooks.create(
    -**is_active:** `typing.Optional[bool]` — If value is False the webhook is disabled +**project:** `typing.Optional[int]` — Project ID
    @@ -13512,7 +13661,7 @@ client.webhooks.create(
    -**actions:** `typing.Optional[typing.Sequence[WebhookActionsItem]]` +**bucket:** `typing.Optional[str]` — GCS bucket name
    @@ -13520,7 +13669,7 @@ client.webhooks.create(
    -**created_at:** `typing.Optional[dt.datetime]` — Creation time +**prefix:** `typing.Optional[str]` — GCS bucket prefix
    @@ -13528,7 +13677,15 @@ client.webhooks.create(
    -**updated_at:** `typing.Optional[dt.datetime]` — Last update time +**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + +
    +
    + +
    +
    + +**google_project_id:** `typing.Optional[str]` — Google project ID
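+
+For example (placeholder values throughout), you might narrow the connection to image files and shorten the presigned URL lifetime:
+
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+client.import_storage.gcs.update(
+    id=1,                             # placeholder import storage ID
+    regex_filter=r".*\.(jpe?g|png)",  # import only image objects
+    presign_ttl=15,                   # presign TTL in minutes
+)
+```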
    @@ -13548,7 +13705,7 @@ client.webhooks.create(
    -
    client.webhooks.info(...) +
    client.import_storage.gcs.sync(...)
    @@ -13560,7 +13717,12 @@ client.webhooks.create(
    -Get descriptions of all available webhook actions to set up webhooks. For more information, see the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + +Sync tasks from a GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. + +Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private.
    @@ -13575,12 +13737,14 @@ Get descriptions of all available webhook actions to set up webhooks. For more i
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.webhooks.info() +client.import_storage.gcs.sync( + id=1, +) ```
    @@ -13596,7 +13760,7 @@ client.webhooks.info()
    -**organization_only:** `typing.Optional[bool]` — organization-only or not +**id:** `int` — Storage ID
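+
+A sketch of checking the result (placeholder ID; assumes the response model exposes the storage `status` field):
+
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+result = client.import_storage.gcs.sync(id=1)
+# Status values include in_progress, failed, and completed
+print(result.status)
+```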
    @@ -13616,7 +13780,8 @@ client.webhooks.info()
    -
    client.webhooks.get(...) +## ImportStorage Local +
    client.import_storage.local.list(...)
    @@ -13628,9 +13793,12 @@ client.webhooks.info()
-Get information about a specific webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). -For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). +If you have local files that you want to add to Label Studio from a specific directory, you can set up a specific local directory on the machine where Label Studio is running as source or target storage. Use this API request to get a list of all local file import (source) storage connections for a specific project. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -13645,14 +13813,12 @@ For more information about webhooks, see [Set up webhooks in Label Studio](https
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.webhooks.get( - id=1, -) +client.import_storage.local.list() ```
    @@ -13668,7 +13834,7 @@ client.webhooks.get(
    -**id:** `int` — A unique integer value identifying this webhook. +**project:** `typing.Optional[int]` — Project ID
    @@ -13688,7 +13854,7 @@ client.webhooks.get(
    -
    client.webhooks.delete(...) +
    client.import_storage.local.create(...)
    @@ -13700,9 +13866,12 @@ client.webhooks.get(
    -Delete a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). -For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). +Create a new source storage connection to a local file directory. + +For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. + +After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync).
    @@ -13717,14 +13886,12 @@ For more information about webhooks, see [Set up webhooks in Label Studio](https
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.webhooks.delete( - id=1, -) +client.import_storage.local.create() ```
    @@ -13740,7 +13907,47 @@ client.webhooks.delete(
    -**id:** `int` — A unique integer value identifying this webhook. +**title:** `typing.Optional[str]` — Storage title + +
    +
    + +
    +
    + +**description:** `typing.Optional[str]` — Storage description + +
    +
    + +
    +
    + +**project:** `typing.Optional[int]` — Project ID + +
    +
    + +
    +
    + +**path:** `typing.Optional[str]` — Path to local directory + +
    +
    + +
    +
    + +**regex_filter:** `typing.Optional[str]` — Regex for filtering objects + +
    +
    + +
    +
    + +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
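+
+A minimal sketch with placeholder values; the path typically must sit under the document root that Label Studio is configured to serve local files from:
+
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+client.import_storage.local.create(
+    project=1,                         # placeholder project ID
+    path="/label-studio/data/images",  # placeholder directory
+    regex_filter=r".*\.png",           # import only PNG files
+    use_blob_urls=True,
+)
+```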
    @@ -13760,7 +13967,7 @@ client.webhooks.delete(
    -
    client.webhooks.update(...) +
    client.import_storage.local.validate(...)
    @@ -13772,9 +13979,8 @@ client.webhooks.delete(
    -Update a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). -For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). +Validate a specific local file import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data.
    @@ -13789,16 +13995,12 @@ For more information about webhooks, see [Set up webhooks in Label Studio](https
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.webhooks.update( - id_=1, - url="url", - webhook_serializer_for_update_url="url", -) +client.import_storage.local.validate() ```
    @@ -13814,7 +14016,7 @@ client.webhooks.update(
    -**id_:** `int` — A unique integer value identifying this webhook. +**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated
    @@ -13822,7 +14024,7 @@ client.webhooks.update(
    -**url:** `str` — URL of webhook +**title:** `typing.Optional[str]` — Storage title
    @@ -13830,7 +14032,7 @@ client.webhooks.update(
    -**webhook_serializer_for_update_url:** `str` — URL of webhook +**description:** `typing.Optional[str]` — Storage description
    @@ -13838,7 +14040,7 @@ client.webhooks.update(
    -**send_payload:** `typing.Optional[bool]` — If value is False send only action +**project:** `typing.Optional[int]` — Project ID
    @@ -13846,7 +14048,7 @@ client.webhooks.update(
    -**send_for_all_actions:** `typing.Optional[bool]` — If value is False - used only for actions from WebhookAction +**path:** `typing.Optional[str]` — Path to local directory
    @@ -13854,7 +14056,7 @@ client.webhooks.update(
    -**headers:** `typing.Optional[str]` — Key Value Json of headers +**regex_filter:** `typing.Optional[str]` — Regex for filtering objects
    @@ -13862,7 +14064,7 @@ client.webhooks.update(
    -**is_active:** `typing.Optional[bool]` — If value is False the webhook is disabled +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
    @@ -13870,92 +14072,72 @@ client.webhooks.update(
    -**actions:** `typing.Optional[ - typing.Union[ - WebhooksUpdateRequestActionsItem, - typing.Sequence[WebhooksUpdateRequestActionsItem], - ] -]` +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
    - -
    -
    - -**id:** `typing.Optional[int]` -
    -
    -
    -**organization:** `typing.Optional[int]` -
    +
    +
    client.import_storage.local.get(...)
    -**project:** `typing.Optional[int]` - -
    -
    +#### 📝 Description
    -**webhook_serializer_for_update_send_payload:** `typing.Optional[bool]` — If value is False send only action - -
    -
    -
    -**webhook_serializer_for_update_send_for_all_actions:** `typing.Optional[bool]` — If value is False - used only for actions from WebhookAction - -
    -
    -
    -
    +Get a specific local file import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). -**webhook_serializer_for_update_headers:** `typing.Optional[typing.Dict[str, typing.Any]]` — Key Value Json of headers - +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
    +
    +#### 🔌 Usage +
    -**webhook_serializer_for_update_is_active:** `typing.Optional[bool]` — If value is False the webhook is disabled - -
    -
    -
    -**webhook_serializer_for_update_actions:** `typing.Optional[typing.Sequence[WebhookSerializerForUpdateActionsItem]]` - +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.local.get( + id=1, +) + +``` +
    +
    +#### ⚙️ Parameters +
    -**created_at:** `typing.Optional[dt.datetime]` — Creation time - -
    -
    -
    -**updated_at:** `typing.Optional[dt.datetime]` — Last update time +**id:** `int` — A unique integer value identifying this local files import storage.
    @@ -13975,8 +14157,7 @@ client.webhooks.update(
    -## Prompts -
    client.prompts.list() +
    client.import_storage.local.delete(...)
    @@ -13988,7 +14169,12 @@ client.webhooks.update(
    -Get a list of prompts. + +Delete a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + +If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API.
    @@ -14003,12 +14189,14 @@ Get a list of prompts.
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.prompts.list() +client.import_storage.local.delete( + id=1, +) ```
    @@ -14024,6 +14212,14 @@ client.prompts.list()
    +**id:** `int` — A unique integer value identifying this local files import storage. + +
    +
    + +
    +
    + **request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
    @@ -14036,7 +14232,7 @@ client.prompts.list()
    -
    client.prompts.create(...) +
    client.import_storage.local.update(...)
    @@ -14048,7 +14244,10 @@ client.prompts.list()
    -Create a new prompt. + +Update a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -14063,15 +14262,13 @@ Create a new prompt.
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.prompts.create( - title="title", - input_fields=["input_fields"], - output_classes=["output_classes"], +client.import_storage.local.update( + id=1, ) ``` @@ -14088,31 +14285,7 @@ client.prompts.create(
    -**title:** `str` — Title of the prompt - -
    -
    - -
    -
    - -**input_fields:** `typing.Sequence[str]` — List of input fields - -
    -
    - -
    -
    - -**output_classes:** `typing.Sequence[str]` — List of output classes - -
    -
    - -
    -
    - -**description:** `typing.Optional[str]` — Description of the prompt +**id:** `int` — A unique integer value identifying this local files import storage.
    @@ -14120,7 +14293,7 @@ client.prompts.create(
    -**created_by:** `typing.Optional[PromptCreatedBy]` — User ID of the creator of the prompt +**title:** `typing.Optional[str]` — Storage title
    @@ -14128,7 +14301,7 @@ client.prompts.create(
    -**created_at:** `typing.Optional[dt.datetime]` — Date and time the prompt was created +**description:** `typing.Optional[str]` — Storage description
    @@ -14136,7 +14309,7 @@ client.prompts.create(
    -**updated_at:** `typing.Optional[dt.datetime]` — Date and time the prompt was last updated +**project:** `typing.Optional[int]` — Project ID
    @@ -14144,7 +14317,7 @@ client.prompts.create(
    -**organization:** `typing.Optional[PromptOrganization]` — Organization ID of the prompt +**path:** `typing.Optional[str]` — Path to local directory
    @@ -14152,7 +14325,7 @@ client.prompts.create(
    -**associated_projects:** `typing.Optional[typing.Sequence[int]]` — List of associated projects IDs +**regex_filter:** `typing.Optional[str]` — Regex for filtering objects
    @@ -14160,7 +14333,7 @@ client.prompts.create(
    -**skill_name:** `typing.Optional[str]` — Name of the skill +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
    @@ -14180,7 +14353,7 @@ client.prompts.create(
    -
    client.prompts.get(...) +
    client.import_storage.local.sync(...)
    @@ -14192,7 +14365,12 @@ client.prompts.create(
    -Get a prompt by ID. + +Sync tasks from a local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Sync operations with external sources only go one way. They either create tasks from objects in the source directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results. + +Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private.
    @@ -14207,12 +14385,12 @@ Get a prompt by ID.
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.prompts.get( +client.import_storage.local.sync( id=1, ) @@ -14230,7 +14408,7 @@ client.prompts.get(
    -**id:** `int` — Prompt ID +**id:** `int` — Storage ID
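+
+For example (placeholder ID), syncing can be repeated safely after new files appear in the directory, since sync imports new or updated objects as tasks:
+
+```python
+from label_studio_sdk import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+client.import_storage.local.sync(id=1)
+```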
    @@ -14250,7 +14428,8 @@ client.prompts.get(
    -
    client.prompts.delete(...) +## ImportStorage Redis +
    client.import_storage.redis.list(...)
    @@ -14262,7 +14441,12 @@ client.prompts.get(
    -Delete a prompt by ID. + +You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis import (source) storage connections for a specific project. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -14277,14 +14461,12 @@ Delete a prompt by ID.
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.prompts.delete(
-    id=1,
-)
+client.import_storage.redis.list()
 ```
    @@ -14300,7 +14482,7 @@ client.prompts.delete(
    -**id:** `int` — Prompt ID +**project:** `typing.Optional[int]` — Project ID
    @@ -14320,7 +14502,7 @@ client.prompts.delete(
-client.prompts.update(...)
+client.import_storage.redis.create(...)
    @@ -14332,7 +14514,12 @@ client.prompts.delete(
-Update a prompt by ID.
+
+Create a new source storage connection to a Redis database.
+
+For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation.
+
+After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync).
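
A minimal sketch of that create-validate-sync flow, using only the parameters documented below; the host, port, password, and project ID are placeholder values, and it assumes the created storage exposes its ID as `id`:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Create the connection (all values below are placeholders).
storage = client.import_storage.redis.create(
    project=1,
    title="My Redis source",
    host="localhost",
    port="6379",
    password="REDIS_PASSWORD",
    path="tasks",
)

# Check the saved settings before importing anything.
client.import_storage.redis.validate(id=storage.id)

# Nothing is imported until the connection is synced.
client.import_storage.redis.sync(id=storage.id)
```
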
    @@ -14347,17 +14534,12 @@ Update a prompt by ID.
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.prompts.update(
-    id=1,
-    title="title",
-    input_fields=["input_fields"],
-    output_classes=["output_classes"],
-)
+client.import_storage.redis.create()
 ```
    @@ -14373,23 +14555,7 @@ client.prompts.update(
-**id:** `int` — Prompt ID
-
-**title:** `str` — Title of the prompt
-
-**input_fields:** `typing.Sequence[str]` — List of input fields
+**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.
    @@ -14397,7 +14563,7 @@ client.prompts.update(
    -**output_classes:** `typing.Sequence[str]` — List of output classes +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
    @@ -14405,7 +14571,7 @@ client.prompts.update(
    -**description:** `typing.Optional[str]` — Description of the prompt +**title:** `typing.Optional[str]` — Storage title
    @@ -14413,7 +14579,7 @@ client.prompts.update(
    -**created_by:** `typing.Optional[PromptCreatedBy]` — User ID of the creator of the prompt +**description:** `typing.Optional[str]` — Storage description
    @@ -14421,7 +14587,7 @@ client.prompts.update(
    -**created_at:** `typing.Optional[dt.datetime]` — Date and time the prompt was created +**project:** `typing.Optional[int]` — Project ID
    @@ -14429,7 +14595,7 @@ client.prompts.update(
    -**updated_at:** `typing.Optional[dt.datetime]` — Date and time the prompt was last updated +**path:** `typing.Optional[str]` — Storage prefix (optional)
    @@ -14437,7 +14603,7 @@ client.prompts.update(
    -**organization:** `typing.Optional[PromptOrganization]` — Organization ID of the prompt +**host:** `typing.Optional[str]` — Server Host IP (optional)
    @@ -14445,7 +14611,7 @@ client.prompts.update(
    -**associated_projects:** `typing.Optional[typing.Sequence[int]]` — List of associated projects IDs +**port:** `typing.Optional[str]` — Server Port (optional)
    @@ -14453,7 +14619,7 @@ client.prompts.update(
    -**skill_name:** `typing.Optional[str]` — Name of the skill +**password:** `typing.Optional[str]` — Server Password (optional)
    @@ -14473,7 +14639,7 @@ client.prompts.update(
-client.prompts.batch_predictions(...)
+client.import_storage.redis.validate(...)
    @@ -14485,7 +14651,8 @@ client.prompts.update(
-Create a new batch prediction.
+
+Validate a specific Redis import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data.
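
For example, you can dry-run candidate settings before saving them; the connection details below are placeholders, and per the `id` parameter documented further down, passing a storage ID would update that existing connection instead:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Expect an error response if Label Studio cannot reach the database
# with these (placeholder) settings.
client.import_storage.redis.validate(
    project=1,
    host="localhost",
    port="6379",
    password="REDIS_PASSWORD",
)
```
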
    @@ -14500,12 +14667,12 @@ Create a new batch prediction.
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.prompts.batch_predictions()
+client.import_storage.redis.validate()
 ```
    @@ -14521,7 +14688,7 @@ client.prompts.batch_predictions()
    -**modelrun_id:** `typing.Optional[int]` — Model Run ID to associate the prediction with +**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated
    @@ -14529,7 +14696,71 @@ client.prompts.batch_predictions()
-**results:** `typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]]`
+**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.
+
+**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
+
+**title:** `typing.Optional[str]` — Storage title
+
+**description:** `typing.Optional[str]` — Storage description
+
+**project:** `typing.Optional[int]` — Project ID
+
+**path:** `typing.Optional[str]` — Storage prefix (optional)
+
+**host:** `typing.Optional[str]` — Server Host IP (optional)
+
+**port:** `typing.Optional[str]` — Server Port (optional)
+
+**password:** `typing.Optional[str]` — Server Password (optional)
    @@ -14549,7 +14780,7 @@ client.prompts.batch_predictions()
-client.prompts.batch_failed_predictions(...)
+client.import_storage.redis.get(...)
    @@ -14561,7 +14792,10 @@ client.prompts.batch_predictions()
    -Create a new batch of failed predictions. + +Get a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -14576,12 +14810,14 @@ Create a new batch of failed predictions.
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.prompts.batch_failed_predictions()
+client.import_storage.redis.get(
+    id=1,
+)
 ```
    @@ -14597,17 +14833,7 @@ client.prompts.batch_failed_predictions()
-**modelrun_id:** `typing.Optional[int]` — Model Run ID where the failed predictions came from
-
-**failed_predictions:** `typing.Optional[
-    typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem]
-]`
+**id:** `int` — A unique integer value identifying this redis import storage.
    @@ -14627,8 +14853,7 @@ client.prompts.batch_failed_predictions()
-## Prompts Versions
-client.prompts.versions.list(...)
+client.import_storage.redis.delete(...)
    @@ -14640,7 +14865,12 @@ client.prompts.batch_failed_predictions()
    -Get a list of prompt versions. + +Delete a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + +If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API.
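
A short sketch of the behavior described above (the storage ID is a placeholder):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Removing the connection stops future syncs from this database...
client.import_storage.redis.delete(id=1)
# ...but tasks already created from it remain in the project and must
# be deleted separately if you no longer want them.
```
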
    @@ -14655,12 +14885,12 @@ Get a list of prompt versions.
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.prompts.versions.list(
+client.import_storage.redis.delete(
     id=1,
 )
 ```

@@ -14678,7 +14908,7 @@ client.prompts.versions.list(
    -**id:** `int` — Prompt ID +**id:** `int` — A unique integer value identifying this redis import storage.
    @@ -14698,7 +14928,7 @@ client.prompts.versions.list(
-client.prompts.versions.create(...)
+client.import_storage.redis.update(...)
    @@ -14710,7 +14940,10 @@ client.prompts.versions.list(
    -Create a new version of a prompt. + +Update a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -14725,12 +14958,12 @@ Create a new version of a prompt.
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.prompts.versions.create(
+client.import_storage.redis.update(
     id=1,
 )
 ```

@@ -14748,15 +14981,7 @@ client.prompts.versions.create(
-**id:** `int` — Prompt ID
-
-**title:** `typing.Optional[str]`
+**id:** `int` — A unique integer value identifying this redis import storage.
    @@ -14764,7 +14989,7 @@ client.prompts.versions.create(
    -**parent_model:** `typing.Optional[int]` +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.
    @@ -14772,7 +14997,7 @@ client.prompts.versions.create(
    -**model_provider_connection:** `typing.Optional[int]` +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
    @@ -14780,7 +15005,7 @@ client.prompts.versions.create(
    -**prompt:** `typing.Optional[str]` +**title:** `typing.Optional[str]` — Storage title
    @@ -14788,7 +15013,7 @@ client.prompts.versions.create(
    -**provider:** `typing.Optional[PromptVersionProvider]` +**description:** `typing.Optional[str]` — Storage description
    @@ -14796,7 +15021,7 @@ client.prompts.versions.create(
    -**provider_model_id:** `typing.Optional[str]` +**project:** `typing.Optional[int]` — Project ID
    @@ -14804,7 +15029,7 @@ client.prompts.versions.create(
    -**created_by:** `typing.Optional[PromptVersionCreatedBy]` +**path:** `typing.Optional[str]` — Storage prefix (optional)
    @@ -14812,7 +15037,7 @@ client.prompts.versions.create(
    -**created_at:** `typing.Optional[dt.datetime]` +**host:** `typing.Optional[str]` — Server Host IP (optional)
    @@ -14820,7 +15045,7 @@ client.prompts.versions.create(
    -**updated_at:** `typing.Optional[dt.datetime]` +**port:** `typing.Optional[str]` — Server Port (optional)
    @@ -14828,7 +15053,7 @@ client.prompts.versions.create(
    -**organization:** `typing.Optional[PromptVersionOrganization]` +**password:** `typing.Optional[str]` — Server Password (optional)
    @@ -14848,7 +15073,7 @@ client.prompts.versions.create(
-client.prompts.versions.get(...)
+client.import_storage.redis.sync(...)
    @@ -14860,7 +15085,12 @@ client.prompts.versions.create(
    -Get a prompt version by ID. + +Sync tasks from a Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Sync operations with external databases only go one way. They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results. + +Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private.
    @@ -14875,14 +15105,13 @@ Get a prompt version by ID.
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.prompts.versions.get(
+client.import_storage.redis.sync(
     id=1,
-    version_id=1,
 )
 ```

@@ -14899,15 +15128,7 @@ client.prompts.versions.get(
-**id:** `int` — Prompt ID
-
-**version_id:** `int` — Prompt Version ID
+**id:** `int` — Storage ID
    @@ -14927,7 +15148,8 @@ client.prompts.versions.get(
-client.prompts.versions.delete(...)
+## ImportStorage S3
+client.import_storage.s3.list(...)
    @@ -14939,7 +15161,12 @@ client.prompts.versions.get(
-Delete a prompt version by ID.
+
+You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 import (source) storage connections for a specific project.
+
+The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
+
+For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -14954,15 +15181,12 @@ Delete a prompt version by ID.
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.prompts.versions.delete(
-    id=1,
-    version_id=1,
-)
+client.import_storage.s3.list()
 ```
    @@ -14978,15 +15202,7 @@ client.prompts.versions.delete(
-**id:** `int` — Prompt ID
-
-**version_id:** `int` — Prompt Version ID
+**project:** `typing.Optional[int]` — Project ID
    @@ -15006,7 +15222,7 @@ client.prompts.versions.delete(
-client.prompts.versions.update(...)
+client.import_storage.s3.create(...)
    @@ -15018,7 +15234,14 @@ client.prompts.versions.delete(
-Update a prompt version by ID.
+
+Create a new source storage connection to an S3 bucket.
+
+For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation.
+
+Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link.
+
+After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync).
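
A minimal sketch of creating, validating, and syncing an S3 source using the parameters documented below; the bucket, prefix, region, and credentials are placeholders, and it assumes the created storage exposes its ID as `id`:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

storage = client.import_storage.s3.create(
    project=1,
    bucket="my-bucket",               # placeholder bucket name
    prefix="tasks/",                  # only scan this prefix
    regex_filter=r".*\.json",         # required, otherwise nothing is imported
    use_blob_urls=False,              # read file contents as task JSON
    aws_access_key_id="AWS_ACCESS_KEY_ID",
    aws_secret_access_key="AWS_SECRET_ACCESS_KEY",
    region_name="us-east-1",
)

# Validate the saved connection, then sync to actually import data.
client.import_storage.s3.validate(id=storage.id)
client.import_storage.s3.sync(id=storage.id)
```
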
    @@ -15033,15 +15256,12 @@ Update a prompt version by ID.
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.prompts.versions.update(
-    id=1,
-    version_id=1,
-)
+client.import_storage.s3.create()
 ```
    @@ -15057,7 +15277,7 @@ client.prompts.versions.update(
    -**id:** `int` — Prompt ID +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.
    @@ -15065,7 +15285,7 @@ client.prompts.versions.update(
    -**version_id:** `int` — Prompt Version ID +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
    @@ -15073,7 +15293,7 @@ client.prompts.versions.update(
    -**title:** `typing.Optional[str]` +**presign:** `typing.Optional[bool]` — Presign URLs for download
    @@ -15081,7 +15301,7 @@ client.prompts.versions.update(
    -**parent_model:** `typing.Optional[int]` +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes
    @@ -15089,7 +15309,7 @@ client.prompts.versions.update(
    -**model_provider_connection:** `typing.Optional[int]` +**recursive_scan:** `typing.Optional[bool]` — Scan recursively
    @@ -15097,7 +15317,7 @@ client.prompts.versions.update(
    -**prompt:** `typing.Optional[str]` +**title:** `typing.Optional[str]` — Storage title
    @@ -15105,7 +15325,7 @@ client.prompts.versions.update(
    -**provider:** `typing.Optional[PromptVersionProvider]` +**description:** `typing.Optional[str]` — Storage description
    @@ -15113,7 +15333,7 @@ client.prompts.versions.update(
    -**provider_model_id:** `typing.Optional[str]` +**project:** `typing.Optional[int]` — Project ID
    @@ -15121,7 +15341,7 @@ client.prompts.versions.update(
    -**created_by:** `typing.Optional[PromptVersionCreatedBy]` +**bucket:** `typing.Optional[str]` — S3 bucket name
    @@ -15129,7 +15349,7 @@ client.prompts.versions.update(
    -**created_at:** `typing.Optional[dt.datetime]` +**prefix:** `typing.Optional[str]` — S3 bucket prefix
    @@ -15137,7 +15357,7 @@ client.prompts.versions.update(
    -**updated_at:** `typing.Optional[dt.datetime]` +**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID
    @@ -15145,7 +15365,39 @@ client.prompts.versions.update(
-**organization:** `typing.Optional[PromptVersionOrganization]`
+**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY
+
+**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN
+
+**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID
+
+**region_name:** `typing.Optional[str]` — AWS Region
+
+**s3endpoint:** `typing.Optional[str]` — S3 Endpoint
    @@ -15165,7 +15417,7 @@ client.prompts.versions.update(
-client.prompts.versions.cost_estimate(...)
+client.import_storage.s3.validate(...)
    @@ -15177,7 +15429,8 @@ client.prompts.versions.update(
-Get cost estimate for running a prompt version on a particular project/subset
+
+Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data.
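
For instance, to re-check an existing connection after editing its settings (the storage ID and bucket are placeholders; per the `id` parameter below, passing an ID updates that stored connection):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Re-validate (and update) storage 1 with the new settings.
client.import_storage.s3.validate(
    id=1,
    bucket="my-bucket",
    regex_filter=r".*\.json",
)
```
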
    @@ -15192,17 +15445,12 @@ Get cost estimate for running a prompt version on a particular project/subset
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.prompts.versions.cost_estimate(
-    prompt_id=1,
-    version_id=1,
-    project_id=1,
-    project_subset=1,
-)
+client.import_storage.s3.validate()
 ```
    @@ -15218,7 +15466,7 @@ client.prompts.versions.cost_estimate(
    -**prompt_id:** `int` — Prompt ID +**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated
    @@ -15226,7 +15474,7 @@ client.prompts.versions.cost_estimate(
    -**version_id:** `int` — Prompt Version ID +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.
    @@ -15234,7 +15482,7 @@ client.prompts.versions.cost_estimate(
    -**project_id:** `int` — ID of the project to get an estimate for running on +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
    @@ -15242,7 +15490,7 @@ client.prompts.versions.cost_estimate(
    -**project_subset:** `int` — Subset of the project to get an estimate for running on (e.g. 'All', 'Sample', or 'HasGT') +**presign:** `typing.Optional[bool]` — Presign URLs for download
    @@ -15250,71 +15498,87 @@ client.prompts.versions.cost_estimate(
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-client.prompts.versions.get_refined_prompt(...)
-
-#### 📝 Description
-
-Get the refined prompt based on the `refinement_job_id`.
-
-#### 🔌 Usage
-
-```python
-from label_studio_sdk.client import LabelStudio
-
-client = LabelStudio(
-    api_key="YOUR_API_KEY",
-)
-client.prompts.versions.get_refined_prompt(
-    prompt_id=1,
-    version_id=1,
-    refinement_job_id="refinement_job_id",
-)
-
-```
-
-#### ⚙️ Parameters
-
-**prompt_id:** `int` — Prompt ID
+**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes
+
+**recursive_scan:** `typing.Optional[bool]` — Scan recursively
+
+**title:** `typing.Optional[str]` — Storage title
+
+**description:** `typing.Optional[str]` — Storage description
+
+**project:** `typing.Optional[int]` — Project ID
+
+**bucket:** `typing.Optional[str]` — S3 bucket name
+
+**prefix:** `typing.Optional[str]` — S3 bucket prefix
+
+**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID
+
+**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY
+
+**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN
+
+**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID
    @@ -15322,7 +15586,7 @@ client.prompts.versions.get_refined_prompt(
    -**version_id:** `int` — Prompt Version ID +**region_name:** `typing.Optional[str]` — AWS Region
    @@ -15330,7 +15594,7 @@ client.prompts.versions.get_refined_prompt(
    -**refinement_job_id:** `str` — Refinement Job ID acquired from the `POST /api/prompts/{prompt_id}/versions/{version_id}/refine` endpoint +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint
    @@ -15350,7 +15614,7 @@ client.prompts.versions.get_refined_prompt(
-client.prompts.versions.refine_prompt(...)
+client.import_storage.s3.get(...)
    @@ -15362,7 +15626,10 @@ client.prompts.versions.get_refined_prompt(
    -Refine a prompt version using a teacher model and save the refined prompt as a new version. + +Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -15377,14 +15644,13 @@ Refine a prompt version using a teacher model and save the refined prompt as a n
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.prompts.versions.refine_prompt(
-    prompt_id=1,
-    version_id=1,
+client.import_storage.s3.get(
+    id=1,
 )
 ```

@@ -15401,47 +15667,7 @@ client.prompts.versions.refine_prompt(
-**prompt_id:** `int` — Prompt ID
-
-**version_id:** `int` — Base Prompt Version ID
-
-**async_:** `typing.Optional[bool]` — Run the refinement job asynchronously
-
-**teacher_model_provider_connection_id:** `typing.Optional[int]` — Model Provider Connection ID to use to refine the prompt
-
-**teacher_model_name:** `typing.Optional[str]` — Name of the model to use to refine the prompt
-
-**project_id:** `typing.Optional[int]` — Project ID to target the refined prompt for
+**id:** `int` — A unique integer value identifying this s3 import storage.
    @@ -15461,8 +15687,7 @@ client.prompts.versions.refine_prompt(
-## Prompts Runs
-client.prompts.runs.list(...)
+client.import_storage.s3.delete(...)
    @@ -15474,7 +15699,12 @@ client.prompts.versions.refine_prompt(
    -Get information (status, etadata, etc) about an existing inference run + +Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + +If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API.
    @@ -15489,16 +15719,13 @@ Get information (status, etadata, etc) about an existing inference run
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.prompts.runs.list(
+client.import_storage.s3.delete(
     id=1,
-    version_id=1,
-    project=1,
-    project_subset="All",
 )
 ```

@@ -15515,31 +15742,7 @@ client.prompts.runs.list(
-**id:** `int` — Prompt ID
-
-**version_id:** `int` — Prompt Version ID
-
-**project:** `int` — The ID of the project that this Interence Run makes predictions on
-
-**project_subset:** `RunsListRequestProjectSubset` — Defines which tasks are operated on (e.g. HasGT will only operate on tasks with a ground truth annotation, but All will operate on all records)
+**id:** `int` — A unique integer value identifying this s3 import storage.
    @@ -15559,7 +15762,7 @@ client.prompts.runs.list(
-client.prompts.runs.create(...)
+client.import_storage.s3.update(...)
    @@ -15571,7 +15774,10 @@ client.prompts.runs.list(
    -Run a prompt inference. + +Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -15586,16 +15792,13 @@ Run a prompt inference.
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.prompts.runs.create(
+client.import_storage.s3.update(
     id=1,
-    version_id=1,
-    project=1,
-    project_subset="All",
 )
 ```

@@ -15612,7 +15815,7 @@ client.prompts.runs.create(
    -**id:** `int` — Prompt ID +**id:** `int` — A unique integer value identifying this s3 import storage.
    @@ -15620,7 +15823,7 @@ client.prompts.runs.create(
    -**version_id:** `int` — Prompt Version ID +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.
    @@ -15628,7 +15831,7 @@ client.prompts.runs.create(
    -**project:** `int` +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
    @@ -15636,7 +15839,7 @@ client.prompts.runs.create(
    -**project_subset:** `InferenceRunProjectSubset` +**presign:** `typing.Optional[bool]` — Presign URLs for download
    @@ -15644,7 +15847,7 @@ client.prompts.runs.create(
    -**organization:** `typing.Optional[InferenceRunOrganization]` +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes
    @@ -15652,7 +15855,7 @@ client.prompts.runs.create(
    -**model_version:** `typing.Optional[str]` +**recursive_scan:** `typing.Optional[bool]` — Scan recursively
    @@ -15660,7 +15863,7 @@ client.prompts.runs.create(
    -**created_by:** `typing.Optional[InferenceRunCreatedBy]` +**title:** `typing.Optional[str]` — Storage title
    @@ -15668,7 +15871,7 @@ client.prompts.runs.create(
    -**status:** `typing.Optional[InferenceRunStatus]` +**description:** `typing.Optional[str]` — Storage description
    @@ -15676,7 +15879,7 @@ client.prompts.runs.create(
    -**job_id:** `typing.Optional[str]` +**project:** `typing.Optional[int]` — Project ID
    @@ -15684,7 +15887,7 @@ client.prompts.runs.create(
    -**created_at:** `typing.Optional[dt.datetime]` +**bucket:** `typing.Optional[str]` — S3 bucket name
    @@ -15692,7 +15895,7 @@ client.prompts.runs.create(
    -**triggered_at:** `typing.Optional[dt.datetime]` +**prefix:** `typing.Optional[str]` — S3 bucket prefix
    @@ -15700,7 +15903,7 @@ client.prompts.runs.create(
    -**predictions_updated_at:** `typing.Optional[dt.datetime]` +**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID
    @@ -15708,7 +15911,7 @@ client.prompts.runs.create(
    -**completed_at:** `typing.Optional[dt.datetime]` +**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY
    @@ -15716,70 +15919,31 @@ client.prompts.runs.create(
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-## Prompts Indicators
-
-client.prompts.indicators.list(...)
-
-#### 📝 Description
-
-Get key indicators for the Prompt dashboard.
-
-#### 🔌 Usage
-
-```python
-from label_studio_sdk.client import LabelStudio
-
-client = LabelStudio(
-    api_key="YOUR_API_KEY",
-)
-client.prompts.indicators.list(
-    pk=1,
-)
-
-```
-
-#### ⚙️ Parameters
-
-**pk:** `int` — Inference run ID
+**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN
+
+**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID
+
+**region_name:** `typing.Optional[str]` — AWS Region
+
+**s3endpoint:** `typing.Optional[str]` — S3 Endpoint
    @@ -15799,7 +15963,7 @@ client.prompts.indicators.list(
-client.prompts.indicators.get(...)
+client.import_storage.s3.sync(...)
    @@ -15811,7 +15975,12 @@ client.prompts.indicators.list(
-Get a specific key indicator for the Prompt dashboard.
+
+Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list).
+
+Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results.
+
+Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private.
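
Because the sync is one-way, a typical pattern is simply to re-run it whenever new objects land in the bucket (the storage ID is a placeholder):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Re-running sync imports new or updated objects; it does not remove
# tasks whose source files were deleted from the bucket.
client.import_storage.s3.sync(id=1)
```
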
    @@ -15826,14 +15995,13 @@ Get a specific key indicator for the Prompt dashboard.
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.prompts.indicators.get(
-    indicator_key="indicator_key",
-    pk=1,
+client.import_storage.s3.sync(
+    id=1,
 )
 ```

@@ -15850,15 +16018,7 @@ client.prompts.indicators.get(
-**indicator_key:** `str` — Key of the indicator
-
-**pk:** `int` — Inference run ID
+**id:** `int` — Storage ID
    @@ -15878,8 +16038,8 @@ client.prompts.indicators.get(
-## ModelProviders
-client.model_providers.list()
+## ImportStorage S3S
+client.import_storage.s3s.list(...)
    @@ -15891,7 +16051,12 @@ client.prompts.indicators.get(
-Get all model provider connections created by the user in the current organization.
+
+You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 import (source) storage connections for a specific project.
+
+The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
+
+For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -15906,12 +16071,12 @@ Get all model provider connections created by the user in the current organizati
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.model_providers.list()
+client.import_storage.s3s.list()
 ```
    @@ -15927,6 +16092,14 @@ client.model_providers.list()
+**project:** `typing.Optional[int]` — Project ID
+
 **request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
    @@ -15939,7 +16112,7 @@ client.model_providers.list()
-client.model_providers.create(...)
+client.import_storage.s3s.create(...)
    @@ -15951,7 +16124,14 @@ client.model_providers.list()
-Create a new model provider connection.
+
+Create a new source storage connection to an S3 bucket.
+
+For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation.
+
+Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link.
+
+After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync).
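
Unlike `import_storage.s3`, this variant takes `role_arn`/`external_id` instead of raw access keys. A sketch with placeholder values (the role ARN and external ID are illustrative, and the created storage is assumed to expose its ID as `id`):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# The IAM role must be assumable by your Label Studio deployment;
# see the Amazon S3 guide linked above.
storage = client.import_storage.s3s.create(
    project=1,
    bucket="my-bucket",
    prefix="tasks/",
    regex_filter=r".*\.json",
    role_arn="arn:aws:iam::123456789012:role/LabelStudioRole",
    external_id="EXTERNAL_ID",
    region_name="us-east-1",
)

# Nothing is imported until the connection is synced.
client.import_storage.s3s.sync(id=storage.id)
```
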
    @@ -15966,14 +16146,12 @@ Create a new model provider connection.
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.model_providers.create(
-    provider="OpenAI",
-)
+client.import_storage.s3s.create()
 ```
    @@ -15989,15 +16167,7 @@ client.model_providers.create(
-**provider:** `ModelProviderConnectionProvider`
-
-**api_key:** `typing.Optional[str]`
+**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.
    @@ -16005,7 +16175,7 @@ client.model_providers.create(
    -**deployment_name:** `typing.Optional[str]` +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
    @@ -16013,7 +16183,7 @@ client.model_providers.create(
    -**endpoint:** `typing.Optional[str]` +**presign:** `typing.Optional[bool]` — Presign URLs for download
    @@ -16021,7 +16191,7 @@ client.model_providers.create(
    -**scope:** `typing.Optional[ModelProviderConnectionScope]` +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes
    @@ -16029,7 +16199,7 @@ client.model_providers.create(
    -**organization:** `typing.Optional[ModelProviderConnectionOrganization]` +**recursive_scan:** `typing.Optional[bool]` — Scan recursively
    @@ -16037,7 +16207,7 @@ client.model_providers.create(
    -**created_by:** `typing.Optional[ModelProviderConnectionCreatedBy]` +**title:** `typing.Optional[str]` — Storage title
    @@ -16045,7 +16215,7 @@ client.model_providers.create(
    -**created_at:** `typing.Optional[dt.datetime]` +**description:** `typing.Optional[str]` — Storage description
    @@ -16053,7 +16223,7 @@ client.model_providers.create(
    -**updated_at:** `typing.Optional[dt.datetime]` +**project:** `typing.Optional[int]` — Project ID
    @@ -16061,7 +16231,7 @@ client.model_providers.create(
    -**is_internal:** `typing.Optional[bool]` — Whether the model provider connection is internal, not visible to the user. +**bucket:** `typing.Optional[str]` — S3 bucket name
    @@ -16069,7 +16239,7 @@ client.model_providers.create(
    -**budget_limit:** `typing.Optional[float]` — Budget limit for the model provider connection (null if unlimited) +**prefix:** `typing.Optional[str]` — S3 bucket prefix
    @@ -16077,7 +16247,7 @@ client.model_providers.create(
    -**budget_last_reset_date:** `typing.Optional[dt.datetime]` — Date and time the budget was last reset +**external_id:** `typing.Optional[str]` — AWS External ID
    @@ -16085,7 +16255,7 @@ client.model_providers.create(
    -**budget_reset_period:** `typing.Optional[ModelProviderConnectionBudgetResetPeriod]` — Budget reset period for the model provider connection (null if not reset) +**role_arn:** `typing.Optional[str]` — AWS Role ARN
    @@ -16093,7 +16263,7 @@ client.model_providers.create(
    -**budget_total_spent:** `typing.Optional[float]` — Tracked total budget spent for the given provider connection within the current budget period +**region_name:** `typing.Optional[str]` — AWS Region
    @@ -16101,7 +16271,7 @@ client.model_providers.create(
    -**budget_alert_threshold:** `typing.Optional[float]` — Budget alert threshold for the given provider connection +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint
    @@ -16121,7 +16291,7 @@ client.model_providers.create(
-client.model_providers.get(...)
+client.import_storage.s3s.get(...)
    @@ -16133,7 +16303,8 @@ client.model_providers.create(
    -Get a model provider connection by ID. + +Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list).
    @@ -16148,13 +16319,13 @@ Get a model provider connection by ID.
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.model_providers.get(
-    pk=1,
+client.import_storage.s3s.get(
+    id=1,
 )
 ```

@@ -16171,7 +16342,7 @@ client.model_providers.get(
    -**pk:** `int` — Model Provider Connection ID +**id:** `int` — Import storage ID
    @@ -16191,7 +16362,7 @@ client.model_providers.get(
-client.model_providers.delete(...)
+client.import_storage.s3s.delete(...)
    @@ -16203,7 +16374,12 @@ client.model_providers.get(
    -Delete a model provider connection by ID. + +Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + +If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API.
    @@ -16218,13 +16394,13 @@ Delete a model provider connection by ID.
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.model_providers.delete(
-    pk=1,
+client.import_storage.s3s.delete(
+    id=1,
 )
 ```

@@ -16241,7 +16417,7 @@ client.model_providers.delete(
    -**pk:** `int` — Model Provider Connection ID +**id:** `int` — Import storage ID
    @@ -16261,7 +16437,7 @@ client.model_providers.delete(
-client.model_providers.update(...)
+client.import_storage.s3s.update(...)
    @@ -16273,7 +16449,10 @@ client.model_providers.delete(
    -Update a model provider connection by ID. + +Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
    @@ -16288,14 +16467,13 @@ Update a model provider connection by ID.
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.model_providers.update(
-    pk=1,
-    provider="OpenAI",
+client.import_storage.s3s.update(
+    id=1,
 )
 ```

@@ -16312,15 +16490,7 @@ client.model_providers.update(
-**pk:** `int` — Model Provider Connection ID
-
-**provider:** `ModelProviderConnectionProvider`
+**id:** `int` — Import storage ID
    @@ -16328,7 +16498,7 @@ client.model_providers.update(
    -**api_key:** `typing.Optional[str]` +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.
    @@ -16336,7 +16506,7 @@ client.model_providers.update(
    -**deployment_name:** `typing.Optional[str]` +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
    @@ -16344,7 +16514,7 @@ client.model_providers.update(
    -**endpoint:** `typing.Optional[str]` +**presign:** `typing.Optional[bool]` — Presign URLs for download
    @@ -16352,7 +16522,7 @@ client.model_providers.update(
    -**scope:** `typing.Optional[ModelProviderConnectionScope]` +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes
    @@ -16360,7 +16530,7 @@ client.model_providers.update(
    -**organization:** `typing.Optional[ModelProviderConnectionOrganization]` +**recursive_scan:** `typing.Optional[bool]` — Scan recursively
    @@ -16368,7 +16538,7 @@ client.model_providers.update(
    -**created_by:** `typing.Optional[ModelProviderConnectionCreatedBy]` +**title:** `typing.Optional[str]` — Storage title
    @@ -16376,7 +16546,7 @@ client.model_providers.update(
    -**created_at:** `typing.Optional[dt.datetime]` +**description:** `typing.Optional[str]` — Storage description
    @@ -16384,7 +16554,7 @@ client.model_providers.update(
    -**updated_at:** `typing.Optional[dt.datetime]` +**project:** `typing.Optional[int]` — Project ID
    @@ -16392,7 +16562,7 @@ client.model_providers.update(
    -**is_internal:** `typing.Optional[bool]` — Whether the model provider connection is internal, not visible to the user. +**bucket:** `typing.Optional[str]` — S3 bucket name
    @@ -16400,7 +16570,7 @@ client.model_providers.update(
    -**budget_limit:** `typing.Optional[float]` — Budget limit for the model provider connection (null if unlimited) +**prefix:** `typing.Optional[str]` — S3 bucket prefix
    @@ -16408,7 +16578,7 @@ client.model_providers.update(
    -**budget_last_reset_date:** `typing.Optional[dt.datetime]` — Date and time the budget was last reset +**external_id:** `typing.Optional[str]` — AWS External ID
    @@ -16416,7 +16586,7 @@ client.model_providers.update(
    -**budget_reset_period:** `typing.Optional[ModelProviderConnectionBudgetResetPeriod]` — Budget reset period for the model provider connection (null if not reset) +**role_arn:** `typing.Optional[str]` — AWS Role ARN
    @@ -16424,7 +16594,7 @@ client.model_providers.update(
    -**budget_total_spent:** `typing.Optional[float]` — Tracked total budget spent for the given provider connection within the current budget period +**region_name:** `typing.Optional[str]` — AWS Region
    @@ -16432,7 +16602,7 @@ client.model_providers.update(
    -**budget_alert_threshold:** `typing.Optional[float]` — Budget alert threshold for the given provider connection +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint
    @@ -16452,8 +16622,7 @@ client.model_providers.update(
-## Comments
-client.comments.list(...)
+client.import_storage.s3s.validate(...)
    @@ -16465,7 +16634,8 @@ client.model_providers.update(
    -Get a list of comments for a specific project. + +Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data.
    @@ -16480,12 +16650,12 @@ Get a list of comments for a specific project.
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.comments.list()
+client.import_storage.s3s.validate()
 ```
    @@ -16501,7 +16671,7 @@ client.comments.list()
    -**project:** `typing.Optional[int]` — Project ID +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.
    @@ -16509,7 +16679,7 @@ client.comments.list()
    -**expand_created_by:** `typing.Optional[bool]` — Expand the created_by field with object instead of ID +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
    @@ -16517,7 +16687,7 @@ client.comments.list()
    -**annotation:** `typing.Optional[int]` — Annotation ID +**presign:** `typing.Optional[bool]` — Presign URLs for download
    @@ -16525,67 +16695,31 @@ client.comments.list()
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-client.comments.create(...)
-
-#### 📝 Description
-
-Create a new comment.
-
-#### 🔌 Usage
-
-```python
-from label_studio_sdk.client import LabelStudio
-
-client = LabelStudio(
-    api_key="YOUR_API_KEY",
-)
-client.comments.create()
-
-```
-
-#### ⚙️ Parameters
-
-**annotation:** `typing.Optional[int]`
+**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes
+
+**recursive_scan:** `typing.Optional[bool]` — Scan recursively
+
+**title:** `typing.Optional[str]` — Storage title
+
+**description:** `typing.Optional[str]` — Storage description
    @@ -16593,7 +16727,7 @@ client.comments.create()
    -**project:** `typing.Optional[int]` +**project:** `typing.Optional[int]` — Project ID
    @@ -16601,7 +16735,7 @@ client.comments.create()
    -**text:** `typing.Optional[str]` +**bucket:** `typing.Optional[str]` — S3 bucket name
    @@ -16609,7 +16743,7 @@ client.comments.create()
    -**is_resolved:** `typing.Optional[bool]` +**prefix:** `typing.Optional[str]` — S3 bucket prefix
    @@ -16617,69 +16751,31 @@ client.comments.create()
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-client.comments.get(...)
-
-#### 📝 Description
-
-Get a specific comment.
-
-#### 🔌 Usage
-
-```python
-from label_studio_sdk.client import LabelStudio
-
-client = LabelStudio(
-    api_key="YOUR_API_KEY",
-)
-client.comments.get(
-    id=1,
-)
-
-```
-
-#### ⚙️ Parameters
-
-**id:** `int` — Comment ID
+**external_id:** `typing.Optional[str]` — AWS External ID
+
+**role_arn:** `typing.Optional[str]` — AWS Role ARN
+
+**region_name:** `typing.Optional[str]` — AWS Region
+
+**s3endpoint:** `typing.Optional[str]` — S3 Endpoint
    @@ -16699,7 +16795,7 @@ client.comments.get(
-client.comments.delete(...)
+client.import_storage.s3s.sync(...)
    @@ -16711,7 +16807,8 @@ client.comments.get(
    -Delete a specific comment. + +Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list).
    @@ -16726,12 +16823,12 @@ Delete a specific comment.
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.comments.delete(
+client.import_storage.s3s.sync(
     id=1,
 )
 ```

@@ -16749,7 +16846,7 @@ client.comments.delete(
    -**id:** `int` — Comment ID +**id:** `int` — Storage ID
    @@ -16769,7 +16866,8 @@ client.comments.delete(
-client.comments.update(...)
+## Projects Exports
+client.projects.exports.list_formats(...)
    @@ -16781,7 +16879,10 @@ client.comments.delete(
-Update a specific comment.
+
+Before exporting annotations, you can check which formats are supported by the specified project. For more information about export formats, see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio).
+
+You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list).
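
For example, a quick check before requesting an export (the project ID is a placeholder; the exact shape of the returned formats list depends on the project's labeling configuration):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Inspect which export formats this project supports.
formats = client.projects.exports.list_formats(id=1)
print(formats)
```
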
    @@ -16796,62 +16897,30 @@ Update a specific comment.
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.comments.update(
-    id=1,
-)
-
-```
-
-#### ⚙️ Parameters
-
-**id:** `int` — Comment ID
-
-**annotation:** `typing.Optional[int]`
-
-**project:** `typing.Optional[int]`
-
-**text:** `typing.Optional[str]`
-
-**is_resolved:** `typing.Optional[bool]`
+client.projects.exports.list_formats(
+    id=1,
+)
+
+```
+
+#### ⚙️ Parameters
+
+**id:** `int` — A unique integer value identifying this project.
    @@ -16871,8 +16940,7 @@ client.comments.update(
-## ImportStorage S3S
-client.import_storage.s3s.list(...)
+client.projects.exports.list(...)
    @@ -16884,11 +16952,10 @@ client.comments.update(
-You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project.
-
-The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
+Returns a list of export files (snapshots) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list).
-For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
+
+Included in the response is information about each snapshot, such as who created it and what format it is in.
    @@ -16903,12 +16970,14 @@ For more information about working with external storage, see [Sync data from ex
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.import_storage.s3s.list()
+client.projects.exports.list(
+    id=1,
+)
 ```
    @@ -16924,7 +16993,7 @@ client.import_storage.s3s.list()
    -**project:** `typing.Optional[int]` — Project ID +**id:** `int` — A unique integer value identifying this project.
    @@ -16944,7 +17013,7 @@ client.import_storage.s3s.list()
-client.import_storage.s3s.create(...)
+client.projects.exports.create(...)
    @@ -16956,13 +17025,12 @@ client.import_storage.s3s.list()
-Create a new source storage connection to a S3 bucket.
-
-For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation.
+Create a new export request to start a background task and generate an export file (snapshot) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list).
-Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link.
+
+A snapshot is a static export of your project's data and annotations at a specific point in time. It captures the current state of your tasks, annotations, and other relevant data, allowing you to download and review them later. Snapshots are particularly useful for large projects as they help avoid timeouts during export operations by processing the data asynchronously.
-After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync).
+
+For more information, see the [Label Studio documentation on exporting annotations](https://labelstud.io/guide/export.html).
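
A minimal sketch of starting the snapshot workflow; the project ID and title are placeholders, and it assumes the created snapshot exposes its export ID as `id` (per the optional `id` field documented below), which later `get`, `convert`, and `delete` calls take as `export_pk`:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Kick off the background export task for project 1 (note: `id_`).
snapshot = client.projects.exports.create(id_=1, title="Nightly snapshot")

# Keep the export ID for follow-up calls.
export_pk = str(snapshot.id)
```
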
    @@ -16977,12 +17045,14 @@ For information about the required fields and prerequisites, see [Amazon S3](htt
 ```python
-from label_studio_sdk.client import LabelStudio
+from label_studio_sdk import LabelStudio

 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.import_storage.s3s.create()
+client.projects.exports.create(
+    id_=1,
+)
 ```
    @@ -16998,15 +17068,7 @@ client.import_storage.s3s.create()
-**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.
-
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
+**id_:** `int` — A unique integer value identifying this project.
    @@ -17014,7 +17076,7 @@ client.import_storage.s3s.create()
    -**presign:** `typing.Optional[bool]` — Presign URLs for download +**title:** `typing.Optional[str]`
    @@ -17022,7 +17084,7 @@ client.import_storage.s3s.create()
    -**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes +**id:** `typing.Optional[int]`
    @@ -17030,7 +17092,7 @@ client.import_storage.s3s.create()
    -**recursive_scan:** `typing.Optional[bool]` — Scan recursively +**created_by:** `typing.Optional[UserSimple]`
    @@ -17038,7 +17100,7 @@ client.import_storage.s3s.create()
    -**title:** `typing.Optional[str]` — Storage title +**created_at:** `typing.Optional[dt.datetime]` — Creation time
    @@ -17046,7 +17108,7 @@ client.import_storage.s3s.create()
    -**description:** `typing.Optional[str]` — Storage description +**finished_at:** `typing.Optional[dt.datetime]` — Complete or fail time
    @@ -17054,7 +17116,7 @@ client.import_storage.s3s.create()
    -**project:** `typing.Optional[int]` — Project ID +**status:** `typing.Optional[ExportCreateStatus]`
    @@ -17062,7 +17124,7 @@ client.import_storage.s3s.create()
    -**bucket:** `typing.Optional[str]` — S3 bucket name +**md5:** `typing.Optional[str]`
    @@ -17070,7 +17132,7 @@ client.import_storage.s3s.create()
    -**prefix:** `typing.Optional[str]` — S3 bucket prefix +**counters:** `typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]`
    @@ -17078,7 +17140,7 @@ client.import_storage.s3s.create()
    -**external_id:** `typing.Optional[str]` — AWS External ID +**converted_formats:** `typing.Optional[typing.Sequence[ConvertedFormat]]`
    @@ -17086,7 +17148,7 @@ client.import_storage.s3s.create()
    -**role_arn:** `typing.Optional[str]` — AWS Role ARN +**task_filter_options:** `typing.Optional[TaskFilterOptions]`
    @@ -17094,7 +17156,7 @@ client.import_storage.s3s.create()
    -**region_name:** `typing.Optional[str]` — AWS Region +**annotation_filter_options:** `typing.Optional[AnnotationFilterOptions]`
    @@ -17102,7 +17164,7 @@ client.import_storage.s3s.create()
    -**s3endpoint:** `typing.Optional[str]` — S3 Endpoint +**serialization_options:** `typing.Optional[SerializationOptions]`
    @@ -17122,7 +17184,7 @@ client.import_storage.s3s.create()
-client.import_storage.s3s.get(...)
+client.projects.exports.get(...)
    @@ -17134,7 +17196,12 @@ client.import_storage.s3s.create()
-Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list).
+
+Retrieve information about a specific export file (snapshot).
+
+You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list).
+
+You will also need the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list).
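
Because snapshots are generated asynchronously, one common pattern is to poll until the snapshot reports a terminal status. A sketch, assuming the snapshot's `status` field compares as a string and that `"completed"`/`"failed"` are the terminal values (IDs are placeholders):

```python
import time

from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Poll project 1's snapshot "export_pk" until the background task ends.
while True:
    snapshot = client.projects.exports.get(id=1, export_pk="export_pk")
    if snapshot.status in ("completed", "failed"):
        break
    time.sleep(5)
```
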
    @@ -17149,13 +17216,14 @@ Get a specific S3 import storage connection. You will need to provide the import
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.s3s.get( +client.projects.exports.get( id=1, + export_pk="export_pk", ) ``` @@ -17172,7 +17240,15 @@ client.import_storage.s3s.get(
    -**id:** `int` — Import storage ID +**id:** `int` — A unique integer value identifying this project. + +
    +
    + +
    +
    + +**export_pk:** `str` — Primary key identifying the export file.
    @@ -17192,7 +17268,7 @@ client.import_storage.s3s.get(
    -
    client.import_storage.s3s.delete(...) +
    client.projects.exports.delete(...)
    @@ -17204,11 +17280,10 @@ client.import_storage.s3s.get(
    -Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). -Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. +Delete an export file by specified export ID. -If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. +You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list).
    @@ -17223,13 +17298,14 @@ If you want to remove the tasks that were synced from the external storage, you
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.s3s.delete( +client.projects.exports.delete( id=1, + export_pk="export_pk", ) ``` @@ -17246,7 +17322,15 @@ client.import_storage.s3s.delete(
    -**id:** `int` — Import storage ID +**id:** `int` — A unique integer value identifying this project. + +
    +
    + +
    +
    + +**export_pk:** `str` — Primary key identifying the export file.
    @@ -17266,7 +17350,7 @@ client.import_storage.s3s.delete(
    -
    client.import_storage.s3s.update(...) +
    client.projects.exports.convert(...)
    @@ -17278,9 +17362,14 @@ client.import_storage.s3s.delete(
    -Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). -For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +You can use this to convert an export snapshot into the selected format. + +To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). + +You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list).
    @@ -17295,13 +17384,15 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.s3s.update( +client.projects.exports.convert( id=1, + export_pk="export_pk", + export_type="export_type", ) ``` @@ -17318,7 +17409,7 @@ client.import_storage.s3s.update(
    -**id:** `int` — Import storage ID +**id:** `int` — A unique integer value identifying this project.
    @@ -17326,7 +17417,7 @@ client.import_storage.s3s.update(
    -**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. +**export_pk:** `str` — Primary key identifying the export file.
    @@ -17334,7 +17425,7 @@ client.import_storage.s3s.update(
    -**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. +**export_type:** `str` — Export file format.
    @@ -17342,79 +17433,75 @@ client.import_storage.s3s.update(
    -**presign:** `typing.Optional[bool]` — Presign URLs for download +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
    - -
    -
    - -**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes -
    -
    -
    -**recursive_scan:** `typing.Optional[bool]` — Scan recursively -
    +
    +
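To make the conversion flow above concrete, a minimal sketch. The `"JSON"` format name is an assumed example value; confirm the exact identifiers for your project with [Get export formats](list-formats) first.

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Convert snapshot "export_pk" of project 1 into another format.
# "JSON" is an assumed format name; check list-formats for the
# identifiers actually supported by your project.
client.projects.exports.convert(
    id=1,
    export_pk="export_pk",
    export_type="JSON",
)
```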
    client.projects.exports.download(...)
    -**title:** `typing.Optional[str]` — Storage title - -
    -
    +#### 📝 Description
    -**description:** `typing.Optional[str]` — Storage description - -
    -
    -
    -**project:** `typing.Optional[int]` — Project ID - + +Download an export snapshot as a file in a specified format. To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). + +You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). +
    +
    +#### 🔌 Usage +
    -**bucket:** `typing.Optional[str]` — S3 bucket name - -
    -
    -
    -**prefix:** `typing.Optional[str]` — S3 bucket prefix - +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.projects.exports.download( + id=1, + export_pk="export_pk", +) + +``` +
    +
    +#### ⚙️ Parameters +
    -**external_id:** `typing.Optional[str]` — AWS External ID - -
    -
    -
    -**role_arn:** `typing.Optional[str]` — AWS Role ARN +**id:** `int` — A unique integer value identifying this project.
    @@ -17422,7 +17509,7 @@ client.import_storage.s3s.update(
    -**region_name:** `typing.Optional[str]` — AWS Region +**export_pk:** `str` — Primary key identifying the export file.
    @@ -17430,7 +17517,7 @@ client.import_storage.s3s.update(
    -**s3endpoint:** `typing.Optional[str]` — S3 Endpoint +**export_type:** `typing.Optional[str]` — Selected export format
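A minimal sketch of saving a snapshot to disk, assuming the generated `download` method streams the file as an iterable of byte chunks (a common convention for binary endpoints in generated SDKs); if your SDK version returns raw bytes instead, write them in a single call.

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Stream the export file to disk. Assumes download() yields byte
# chunks; if it returns bytes directly, call f.write() once instead.
with open("export.json", "wb") as f:
    for chunk in client.projects.exports.download(
        id=1,
        export_pk="export_pk",
        export_type="JSON",  # assumed format name, see list-formats
    ):
        f.write(chunk)
```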
    @@ -17450,7 +17537,8 @@ client.import_storage.s3s.update(
    -
    client.import_storage.s3s.validate(...) +## Prompts Versions +
    client.prompts.versions.list(...)
    @@ -17462,7 +17550,7 @@ client.import_storage.s3s.update(
    -Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. +Get a list of prompt versions.
    @@ -17477,12 +17565,14 @@ Validate a specific S3 import storage connection. This is useful to ensure that
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.s3s.validate() +client.prompts.versions.list( + id=1, +) ```
    @@ -17498,31 +17588,77 @@ client.import_storage.s3s.validate()
    -**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. +**id:** `int` — Prompt ID + +
    +
    + +
    +
    + +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
    + +
    + + + + +
    + +
    client.prompts.versions.create(...) +
    +
    + +#### 📝 Description + +
    +
    + +
    +
    + +Create a new version of a prompt. +
    +
    +
    +
    + +#### 🔌 Usage
    -**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - +
    +
    + +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.prompts.versions.create( + id=1, +) + +``` +
    +
    +#### ⚙️ Parameters +
    -**presign:** `typing.Optional[bool]` — Presign URLs for download - -
    -
    -
    -**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes +**id:** `int` — Prompt ID
    @@ -17530,7 +17666,7 @@ client.import_storage.s3s.validate()
    -**recursive_scan:** `typing.Optional[bool]` — Scan recursively +**title:** `typing.Optional[str]`
    @@ -17538,7 +17674,7 @@ client.import_storage.s3s.validate()
    -**title:** `typing.Optional[str]` — Storage title +**parent_model:** `typing.Optional[int]`
    @@ -17546,7 +17682,7 @@ client.import_storage.s3s.validate()
    -**description:** `typing.Optional[str]` — Storage description +**model_provider_connection:** `typing.Optional[int]`
    @@ -17554,7 +17690,7 @@ client.import_storage.s3s.validate()
    -**project:** `typing.Optional[int]` — Project ID +**prompt:** `typing.Optional[str]`
    @@ -17562,7 +17698,7 @@ client.import_storage.s3s.validate()
    -**bucket:** `typing.Optional[str]` — S3 bucket name +**provider:** `typing.Optional[PromptVersionProvider]`
    @@ -17570,7 +17706,7 @@ client.import_storage.s3s.validate()
    -**prefix:** `typing.Optional[str]` — S3 bucket prefix +**provider_model_id:** `typing.Optional[str]`
    @@ -17578,7 +17714,7 @@ client.import_storage.s3s.validate()
    -**external_id:** `typing.Optional[str]` — AWS External ID +**created_by:** `typing.Optional[PromptVersionCreatedBy]`
    @@ -17586,7 +17722,7 @@ client.import_storage.s3s.validate()
    -**role_arn:** `typing.Optional[str]` — AWS Role ARN +**created_at:** `typing.Optional[dt.datetime]`
    @@ -17594,7 +17730,7 @@ client.import_storage.s3s.validate()
    -**region_name:** `typing.Optional[str]` — AWS Region +**updated_at:** `typing.Optional[dt.datetime]`
    @@ -17602,7 +17738,7 @@ client.import_storage.s3s.validate()
    -**s3endpoint:** `typing.Optional[str]` — S3 Endpoint +**organization:** `typing.Optional[PromptVersionOrganization]`
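A minimal sketch of creating a version with actual content, using only the optional fields documented above (`title`, `prompt`); the instruction text itself is illustrative.

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Create a new version of prompt 1 with a title and instruction text.
client.prompts.versions.create(
    id=1,
    title="v2-stricter-labels",
    prompt="Classify the sentiment of the text as positive or negative.",
)
```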
    @@ -17622,7 +17758,7 @@ client.import_storage.s3s.validate()
    -
    client.import_storage.s3s.sync(...) +
    client.prompts.versions.get(...)
    @@ -17634,7 +17770,7 @@ client.import_storage.s3s.validate()
    -Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). +Get a prompt version by ID.
    @@ -17649,13 +17785,14 @@ Sync tasks from an S3 import storage connection. You will need to provide the im
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.s3s.sync( +client.prompts.versions.get( id=1, + version_id=1, ) ``` @@ -17672,7 +17809,15 @@ client.import_storage.s3s.sync(
    -**id:** `int` — Storage ID +**id:** `int` — Prompt ID + +
    +
    + +
    +
    + +**version_id:** `int` — Prompt Version ID
    @@ -17692,8 +17837,7 @@ client.import_storage.s3s.sync(
    -## ExportStorage S3S -
    client.export_storage.s3s.list(...) +
    client.prompts.versions.delete(...)
    @@ -17705,11 +17849,7 @@ client.import_storage.s3s.sync(
    -You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project. - -The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - -For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +Delete a prompt version by ID.
    @@ -17724,12 +17864,15 @@ For more information about working with external storage, see [Sync data from ex
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.s3s.list() +client.prompts.versions.delete( + id=1, + version_id=1, +) ```
    @@ -17745,7 +17888,15 @@ client.export_storage.s3s.list()
    -**project:** `typing.Optional[int]` — Project ID +**id:** `int` — Prompt ID + +
    +
    + +
    +
    + +**version_id:** `int` — Prompt Version ID
    @@ -17765,7 +17916,7 @@ client.export_storage.s3s.list()
    -
    client.export_storage.s3s.create(...) +
    client.prompts.versions.update(...)
    @@ -17777,9 +17928,7 @@ client.export_storage.s3s.list()
    -Create a new target storage connection to a S3 bucket with IAM role access. - -For information about the required fields and prerequisites, see [Amazon S3](https://docs.humansignal.com/guide/storage#Set-up-an-S3-connection-with-IAM-role-access) in the Label Studio documentation. +Update a prompt version by ID.
    @@ -17794,12 +17943,15 @@ For information about the required fields and prerequisites, see [Amazon S3](htt
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.s3s.create() +client.prompts.versions.update( + id=1, + version_id=1, +) ```
    @@ -17815,7 +17967,7 @@ client.export_storage.s3s.create()
    -**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. +**id:** `int` — Prompt ID
    @@ -17823,7 +17975,7 @@ client.export_storage.s3s.create()
    -**title:** `typing.Optional[str]` — Storage title +**version_id:** `int` — Prompt Version ID
    @@ -17831,7 +17983,7 @@ client.export_storage.s3s.create()
    -**description:** `typing.Optional[str]` — Storage description +**title:** `typing.Optional[str]`
    @@ -17839,7 +17991,7 @@ client.export_storage.s3s.create()
    -**project:** `typing.Optional[int]` — Project ID +**parent_model:** `typing.Optional[int]`
    @@ -17847,7 +17999,7 @@ client.export_storage.s3s.create()
    -**bucket:** `typing.Optional[str]` — S3 bucket name +**model_provider_connection:** `typing.Optional[int]`
    @@ -17855,7 +18007,7 @@ client.export_storage.s3s.create()
    -**prefix:** `typing.Optional[str]` — S3 bucket prefix +**prompt:** `typing.Optional[str]`
    @@ -17863,7 +18015,7 @@ client.export_storage.s3s.create()
    -**external_id:** `typing.Optional[str]` — AWS External ID +**provider:** `typing.Optional[PromptVersionProvider]`
    @@ -17871,7 +18023,7 @@ client.export_storage.s3s.create()
    -**role_arn:** `typing.Optional[str]` — AWS Role ARN +**provider_model_id:** `typing.Optional[str]`
    @@ -17879,7 +18031,7 @@ client.export_storage.s3s.create()
    -**region_name:** `typing.Optional[str]` — AWS Region +**created_by:** `typing.Optional[PromptVersionCreatedBy]`
    @@ -17887,7 +18039,23 @@ client.export_storage.s3s.create()
    -**s3endpoint:** `typing.Optional[str]` — S3 Endpoint +**created_at:** `typing.Optional[dt.datetime]` + +
    +
    + +
    +
    + +**updated_at:** `typing.Optional[dt.datetime]` + +
    +
    + +
    +
    + +**organization:** `typing.Optional[PromptVersionOrganization]`
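And a matching sketch for an in-place edit, again using only documented parameters; here the version's instruction text is replaced.

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Replace the instruction text of version 1 of prompt 1.
client.prompts.versions.update(
    id=1,
    version_id=1,
    prompt="Classify the sentiment as positive, negative, or neutral.",
)
```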
    @@ -17907,7 +18075,7 @@ client.export_storage.s3s.create()
    -
    client.export_storage.s3s.get(...) +
    client.prompts.versions.cost_estimate(...)
    @@ -17919,7 +18087,7 @@ client.export_storage.s3s.create()
-Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). +Get a cost estimate for running a prompt version on a particular project/subset
    @@ -17934,13 +18102,16 @@ Get a specific S3 export storage connection. You will need to provide the export
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.s3s.get( - id=1, +client.prompts.versions.cost_estimate( + prompt_id=1, + version_id=1, + project_id=1, + project_subset=1, ) ``` @@ -17957,7 +18128,7 @@ client.export_storage.s3s.get(
    -**id:** `int` — Export storage ID +**prompt_id:** `int` — Prompt ID
    @@ -17965,69 +18136,23 @@ client.export_storage.s3s.get(
    -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**version_id:** `int` — Prompt Version ID -
    -
    -
    -
    - - - - -
    - -
    client.export_storage.s3s.delete(...) -
    -
    - -#### 📝 Description - -
    -
    - -
    -
    - -Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). -
    -
    -#### 🔌 Usage - -
    -
    -
    -```python -from label_studio_sdk.client import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.export_storage.s3s.delete( - id=1, -) - -``` -
    -
    +**project_id:** `int` — ID of the project to get an estimate for running on +
    -#### ⚙️ Parameters - -
    -
    -
    -**id:** `int` — Export storage ID +**project_subset:** `int` — Subset of the project to get an estimate for running on (e.g. 'All', 'Sample', or 'HasGT')
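A short sketch of requesting an estimate before launching a run. The shape of the returned object is not documented in this section, so it is printed rather than unpacked.

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Estimate the cost of running version 1 of prompt 1 on project 1.
# Note that project_subset is an integer here, per the parameter list.
estimate = client.prompts.versions.cost_estimate(
    prompt_id=1,
    version_id=1,
    project_id=1,
    project_subset=1,
)
print(estimate)  # response model not documented in this section
```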
    @@ -18047,7 +18172,7 @@ client.export_storage.s3s.delete(
    -
    client.export_storage.s3s.update(...) +
    client.prompts.versions.get_refined_prompt(...)
    @@ -18059,7 +18184,7 @@ client.export_storage.s3s.delete(
    -Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). +Get the refined prompt based on the `refinement_job_id`.
    @@ -18074,13 +18199,15 @@ Update a specific S3 export storage connection. You will need to provide the exp
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.s3s.update( - id=1, +client.prompts.versions.get_refined_prompt( + prompt_id=1, + version_id=1, + refinement_job_id="refinement_job_id", ) ``` @@ -18089,79 +18216,15 @@ client.export_storage.s3s.update(
    -#### ⚙️ Parameters - -
    -
    - -
    -
    - -**id:** `int` — Export storage ID - -
    -
    - -
    -
    - -**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. - -
    -
    - -
    -
    - -**title:** `typing.Optional[str]` — Storage title - -
    -
    - -
    -
    - -**description:** `typing.Optional[str]` — Storage description - -
    -
    - -
    -
    - -**project:** `typing.Optional[int]` — Project ID - -
    -
    - -
    -
    - -**bucket:** `typing.Optional[str]` — S3 bucket name - -
    -
    - -
    -
    - -**prefix:** `typing.Optional[str]` — S3 bucket prefix - -
    -
    +#### ⚙️ Parameters
    -**external_id:** `typing.Optional[str]` — AWS External ID - -
    -
    -
    -**role_arn:** `typing.Optional[str]` — AWS Role ARN +**prompt_id:** `int` — Prompt ID
    @@ -18169,7 +18232,7 @@ client.export_storage.s3s.update(
    -**region_name:** `typing.Optional[str]` — AWS Region +**version_id:** `int` — Prompt Version ID
    @@ -18177,7 +18240,7 @@ client.export_storage.s3s.update(
    -**s3endpoint:** `typing.Optional[str]` — S3 Endpoint +**refinement_job_id:** `str` — Refinement Job ID acquired from the `POST /api/prompts/{prompt_id}/versions/{version_id}/refine` endpoint
    @@ -18197,7 +18260,7 @@ client.export_storage.s3s.update(
    -
    client.export_storage.s3s.validate(...) +
    client.prompts.versions.refine_prompt(...)
    @@ -18209,7 +18272,7 @@ client.export_storage.s3s.update(
    -Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. +Refine a prompt version using a teacher model and save the refined prompt as a new version.
    @@ -18224,12 +18287,15 @@ Validate a specific S3 export storage connection. This is useful to ensure that
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.s3s.validate() +client.prompts.versions.refine_prompt( + prompt_id=1, + version_id=1, +) ```
    @@ -18245,7 +18311,7 @@ client.export_storage.s3s.validate()
    -**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. +**prompt_id:** `int` — Prompt ID
    @@ -18253,7 +18319,7 @@ client.export_storage.s3s.validate()
    -**title:** `typing.Optional[str]` — Storage title +**version_id:** `int` — Base Prompt Version ID
    @@ -18261,7 +18327,7 @@ client.export_storage.s3s.validate()
    -**description:** `typing.Optional[str]` — Storage description +**async_:** `typing.Optional[bool]` — Run the refinement job asynchronously
    @@ -18269,7 +18335,7 @@ client.export_storage.s3s.validate()
    -**project:** `typing.Optional[int]` — Project ID +**teacher_model_provider_connection_id:** `typing.Optional[int]` — Model Provider Connection ID to use to refine the prompt
    @@ -18277,7 +18343,7 @@ client.export_storage.s3s.validate()
    -**bucket:** `typing.Optional[str]` — S3 bucket name +**teacher_model_name:** `typing.Optional[str]` — Name of the model to use to refine the prompt
    @@ -18285,7 +18351,7 @@ client.export_storage.s3s.validate()
    -**prefix:** `typing.Optional[str]` — S3 bucket prefix +**project_id:** `typing.Optional[int]` — Project ID to target the refined prompt for
    @@ -18293,56 +18359,38 @@ client.export_storage.s3s.validate()
    -**external_id:** `typing.Optional[str]` — AWS External ID +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
    + +
    -
    -
    -**role_arn:** `typing.Optional[str]` — AWS Role ARN -
    +
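Putting the two refinement endpoints together: start an asynchronous refinement, then fetch the result with the job ID. The `refinement_job_id` attribute on the response object is an assumption inferred from the parameter documentation above; check the actual response model in your SDK version.

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Start an asynchronous refinement of version 1 of prompt 1.
job = client.prompts.versions.refine_prompt(
    prompt_id=1,
    version_id=1,
    async_=True,
)

# Fetch the refined prompt once the job has finished. The attribute
# name refinement_job_id is assumed; inspect your response model.
refined = client.prompts.versions.get_refined_prompt(
    prompt_id=1,
    version_id=1,
    refinement_job_id=job.refinement_job_id,
)
```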
    +## Prompts Runs +
    client.prompts.runs.list(...)
    -**region_name:** `typing.Optional[str]` — AWS Region - -
    -
    +#### 📝 Description
    -**s3endpoint:** `typing.Optional[str]` — S3 Endpoint - -
    -
    -
    -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
    -
+Get information (status, metadata, etc) about an existing inference run - - -
- -## Workspaces -
- -
    client.workspaces.list() -
    -
    -#### 📝 Description +#### 🔌 Usage
    @@ -18350,17 +18398,26 @@ client.export_storage.s3s.validate()
    -List all workspaces for your organization. +```python +from label_studio_sdk import LabelStudio -Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.prompts.runs.list( + id=1, + version_id=1, + project=1, + project_subset="All", +) -For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). +```
    -#### 🔌 Usage +#### ⚙️ Parameters
    @@ -18368,25 +18425,35 @@ For more information, see [Workspaces in Label Studio](https://docs.humansignal.
    -```python -from label_studio_sdk.client import LabelStudio +**id:** `int` — Prompt ID + +
    +
    -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.workspaces.list() +
    +
    -``` +**version_id:** `int` — Prompt Version ID +
    + +
    +
+ +**project:** `int` — The ID of the project that this Inference Run makes predictions on +
    -#### ⚙️ Parameters -
    +**project_subset:** `RunsListRequestProjectSubset` — Defines which tasks are operated on (e.g. HasGT will only operate on tasks with a ground truth annotation, but All will operate on all records) + +
    +
    +
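Beyond the usage snippet above, `project_subset` is the interesting knob: per its description, `"HasGT"` restricts the listing to tasks that have a ground-truth annotation. A minimal sketch:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# List inference runs for version 1 of prompt 1, restricted to tasks
# that carry a ground-truth annotation.
runs = client.prompts.runs.list(
    id=1,
    version_id=1,
    project=1,
    project_subset="HasGT",
)
```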
    @@ -18402,7 +18469,7 @@ client.workspaces.list()
    -
    client.workspaces.create(...) +
    client.prompts.runs.create(...)
    @@ -18414,11 +18481,7 @@ client.workspaces.list()
    -Create a new workspace. - -Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. - -For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). +Run a prompt inference.
    @@ -18433,12 +18496,17 @@ For more information, see [Workspaces in Label Studio](https://docs.humansignal.
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.workspaces.create() +client.prompts.runs.create( + id=1, + version_id=1, + project=1, + project_subset="All", +) ```
    @@ -18454,7 +18522,7 @@ client.workspaces.create()
    -**title:** `typing.Optional[str]` — Workspace title +**id:** `int` — Prompt ID
    @@ -18462,7 +18530,7 @@ client.workspaces.create()
    -**description:** `typing.Optional[str]` — Workspace description +**version_id:** `int` — Prompt Version ID
    @@ -18470,7 +18538,7 @@ client.workspaces.create()
    -**is_public:** `typing.Optional[bool]` — Is workspace public +**project:** `int`
    @@ -18478,7 +18546,7 @@ client.workspaces.create()
    -**is_personal:** `typing.Optional[bool]` — Is workspace personal +**project_subset:** `InferenceRunProjectSubset`
    @@ -18486,7 +18554,7 @@ client.workspaces.create()
    -**color:** `typing.Optional[str]` — Workspace color in HEX format +**organization:** `typing.Optional[InferenceRunOrganization]`
    @@ -18494,7 +18562,7 @@ client.workspaces.create()
    -**is_archived:** `typing.Optional[bool]` — Is workspace archived +**model_version:** `typing.Optional[str]`
    @@ -18502,69 +18570,55 @@ client.workspaces.create()
    -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**created_by:** `typing.Optional[InferenceRunCreatedBy]`
    - -
    +
    +
    +**status:** `typing.Optional[InferenceRunStatus]` +
    -
    -
    client.workspaces.get(...)
    -#### 📝 Description - -
    -
    +**job_id:** `typing.Optional[str]` + +
    +
    -Get information about a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). -
    -
    +**created_at:** `typing.Optional[dt.datetime]` +
    -#### 🔌 Usage - -
    -
    -
    -```python -from label_studio_sdk.client import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.workspaces.get( - id=1, -) - -``` -
    -
    +**triggered_at:** `typing.Optional[dt.datetime]` +
    -#### ⚙️ Parameters -
    +**predictions_updated_at:** `typing.Optional[dt.datetime]` + +
    +
    +
    -**id:** `int` — Workspace ID +**completed_at:** `typing.Optional[dt.datetime]`
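A hedged end-to-end sketch tying `create` to the listing endpoint above: launch a run, then poll the run list and read the documented `status` field. It assumes the list call returns an iterable of the `InferenceRun`-style objects whose fields appear in this section.

```python
import time

from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Launch an inference run over the whole project.
client.prompts.runs.create(
    id=1,
    version_id=1,
    project=1,
    project_subset="All",
)

# Poll the run list; status is a documented field of the run objects.
for _ in range(10):
    runs = client.prompts.runs.list(
        id=1, version_id=1, project=1, project_subset="All"
    )
    print([run.status for run in runs])  # assumes an iterable of runs
    time.sleep(30)
```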
    @@ -18584,7 +18638,8 @@ client.workspaces.get(
    -
    client.workspaces.delete(...) +## Prompts Indicators +
    client.prompts.indicators.list(...)
    @@ -18596,7 +18651,7 @@ client.workspaces.get(
    -Delete a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). +Get key indicators for the Prompt dashboard.
    @@ -18611,13 +18666,13 @@ Delete a specific workspace. You will need to provide the workspace ID. You can
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.workspaces.delete( - id=1, +client.prompts.indicators.list( + pk=1, ) ``` @@ -18634,7 +18689,7 @@ client.workspaces.delete(
    -**id:** `int` — Workspace ID +**pk:** `int` — Inference run ID
    @@ -18654,7 +18709,7 @@ client.workspaces.delete(
    -
    client.workspaces.update(...) +
    client.prompts.indicators.get(...)
    @@ -18666,7 +18721,7 @@ client.workspaces.delete(
    -Update a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). +Get a specific key indicator for the Prompt dashboard.
    @@ -18681,13 +18736,14 @@ Update a specific workspace. You will need to provide the workspace ID. You can
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.workspaces.update( - id=1, +client.prompts.indicators.get( + indicator_key="indicator_key", + pk=1, ) ``` @@ -18704,47 +18760,7 @@ client.workspaces.update(
    -**id:** `int` — Workspace ID - -
    -
    - -
    -
    - -**title:** `typing.Optional[str]` — Workspace title - -
    -
    - -
    -
    - -**description:** `typing.Optional[str]` — Workspace description - -
    -
    - -
    -
    - -**is_public:** `typing.Optional[bool]` — Is workspace public - -
    -
    - -
    -
    - -**is_personal:** `typing.Optional[bool]` — Is workspace personal - -
    -
    - -
    -
    - -**color:** `typing.Optional[str]` — Workspace color in HEX format +**indicator_key:** `str` — Key of the indicator
    @@ -18752,7 +18768,7 @@ client.workspaces.update(
    -**is_archived:** `typing.Optional[bool]` — Is workspace archived +**pk:** `int` — Inference run ID
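A sketch combining the two indicator endpoints: list every key indicator for an inference run, then fetch one by key. The `"total_cost"` key is purely illustrative; use a key reported by the list call.

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# All key indicators for inference run 1.
indicators = client.prompts.indicators.list(pk=1)

# One indicator by key. "total_cost" is an illustrative key; use a
# key that the list call actually reports for your run.
indicator = client.prompts.indicators.get(
    indicator_key="total_cost",
    pk=1,
)
```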
    @@ -18785,6 +18801,7 @@ client.workspaces.update(
    + List all workspace memberships for a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list).
    @@ -18800,7 +18817,7 @@ List all workspace memberships for a specific workspace. You will need to provid
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -18855,6 +18872,7 @@ client.workspaces.members.list(
    + Create a new workspace membership. You will need to provide the workspace ID. You can find this using [List workspaces](list).
    @@ -18870,7 +18888,7 @@ Create a new workspace membership. You will need to provide the workspace ID. Yo
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -18933,6 +18951,7 @@ client.workspaces.members.create(
    + Delete a specific workspace membership. You will need to provide the workspace ID and the user ID. You can find this using [List workspace memberships](list).
    @@ -18948,7 +18967,7 @@ Delete a specific workspace membership. You will need to provide the workspace I
    ```python -from label_studio_sdk.client import LabelStudio +from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", diff --git a/src/label_studio_sdk/__init__.py b/src/label_studio_sdk/__init__.py index de943d26e..eaf385777 100644 --- a/src/label_studio_sdk/__init__.py +++ b/src/label_studio_sdk/__init__.py @@ -133,6 +133,7 @@ ActionsCreateRequestSelectedItemsIncluded, ) from .annotations import AnnotationsCreateBulkResponseItem +from .client import AsyncLabelStudio, LabelStudio from .environment import LabelStudioEnvironment from .export_storage import ExportStorageListTypesResponseItem from .import_storage import ImportStorageListTypesResponseItem @@ -192,6 +193,7 @@ "AnnotationsCreateBulkResponseItem", "AnnotationsDmField", "AnnotationsDmFieldLastAction", + "AsyncLabelStudio", "AzureBlobExportStorage", "AzureBlobExportStorageStatus", "AzureBlobImportStorage", @@ -236,6 +238,7 @@ "KeyIndicatorsItem", "KeyIndicatorsItemAdditionalKpisItem", "KeyIndicatorsItemExtraKpisItem", + "LabelStudio", "LabelStudioEnvironment", "LocalFilesExportStorage", "LocalFilesExportStorageStatus", diff --git a/src/label_studio_sdk/actions/client.py b/src/label_studio_sdk/actions/client.py index f674a5db2..0d609bebd 100644 --- a/src/label_studio_sdk/actions/client.py +++ b/src/label_studio_sdk/actions/client.py @@ -1,15 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing +from ..core.client_wrapper import SyncClientWrapper +from ..core.request_options import RequestOptions from json.decoder import JSONDecodeError - from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.request_options import RequestOptions -from .types.actions_create_request_filters import ActionsCreateRequestFilters from .types.actions_create_request_id import ActionsCreateRequestId -from .types.actions_create_request_ordering_item import ActionsCreateRequestOrderingItem +from .types.actions_create_request_filters import ActionsCreateRequestFilters from .types.actions_create_request_selected_items import ActionsCreateRequestSelectedItems +from .types.actions_create_request_ordering_item import ActionsCreateRequestOrderingItem +from ..core.serialization import convert_and_respect_annotation_metadata +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -34,7 +35,7 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> No Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -42,7 +43,9 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> No client.actions.list() """ _response = self._client_wrapper.httpx_client.request( - "api/dm/actions/", method="GET", request_options=request_options + "api/dm/actions/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -61,7 +64,7 @@ def create( filters: typing.Optional[ActionsCreateRequestFilters] = OMIT, selected_items: typing.Optional[ActionsCreateRequestSelectedItems] = OMIT, ordering: typing.Optional[typing.Sequence[ActionsCreateRequestOrderingItem]] = OMIT, - request_options: typing.Optional[RequestOptions] = None + request_options: typing.Optional[RequestOptions] = None, ) -> None: """ Perform a Data Manager action with the selected tasks and filters. 
Note: More complex actions require additional parameters in the request body. Call `GET api/actions?project=` to explore them.
    Example: `GET api/actions?id=delete_tasks&project=1` @@ -95,12 +98,12 @@ def create( Examples -------- - from label_studio_sdk import ( + from label_studio_sdk import LabelStudio + from label_studio_sdk.actions import ( ActionsCreateRequestFilters, ActionsCreateRequestFiltersItemsItem, ActionsCreateRequestSelectedItemsExcluded, ) - from label_studio_sdk.client import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -129,8 +132,23 @@ def create( _response = self._client_wrapper.httpx_client.request( "api/dm/actions/", method="POST", - params={"id": id, "project": project, "view": view}, - json={"filters": filters, "selectedItems": selected_items, "ordering": ordering}, + params={ + "id": id, + "project": project, + "view": view, + }, + json={ + "filters": convert_and_respect_annotation_metadata( + object_=filters, annotation=ActionsCreateRequestFilters, direction="write" + ), + "selectedItems": convert_and_respect_annotation_metadata( + object_=selected_items, annotation=ActionsCreateRequestSelectedItems, direction="write" + ), + "ordering": ordering, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -162,15 +180,25 @@ async def list(self, *, request_options: typing.Optional[RequestOptions] = None) Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.actions.list() + + + async def main() -> None: + await client.actions.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/dm/actions/", method="GET", request_options=request_options + "api/dm/actions/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -189,7 +217,7 @@ async def create( filters: typing.Optional[ActionsCreateRequestFilters] = OMIT, selected_items: typing.Optional[ActionsCreateRequestSelectedItems] = OMIT, ordering: typing.Optional[typing.Sequence[ActionsCreateRequestOrderingItem]] = OMIT, - request_options: typing.Optional[RequestOptions] = None + request_options: typing.Optional[RequestOptions] = None, ) -> None: """ Perform a Data Manager action with the selected tasks and filters. Note: More complex actions require additional parameters in the request body. Call `GET api/actions?project=` to explore them.
    Example: `GET api/actions?id=delete_tasks&project=1` @@ -223,42 +251,65 @@ async def create( Examples -------- - from label_studio_sdk import ( + import asyncio + + from label_studio_sdk import AsyncLabelStudio + from label_studio_sdk.actions import ( ActionsCreateRequestFilters, ActionsCreateRequestFiltersItemsItem, ActionsCreateRequestSelectedItemsExcluded, ) - from label_studio_sdk.client import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.actions.create( - id="retrieve_tasks_predictions", - project=1, - filters=ActionsCreateRequestFilters( - conjunction="or", - items=[ - ActionsCreateRequestFiltersItemsItem( - filter="filter:tasks:id", - operator="greater", - type="Number", - value=123, - ) - ], - ), - selected_items=ActionsCreateRequestSelectedItemsExcluded( - all_=True, - excluded=[124, 125, 126], - ), - ordering=["tasks:total_annotations"], - ) + + + async def main() -> None: + await client.actions.create( + id="retrieve_tasks_predictions", + project=1, + filters=ActionsCreateRequestFilters( + conjunction="or", + items=[ + ActionsCreateRequestFiltersItemsItem( + filter="filter:tasks:id", + operator="greater", + type="Number", + value=123, + ) + ], + ), + selected_items=ActionsCreateRequestSelectedItemsExcluded( + all_=True, + excluded=[124, 125, 126], + ), + ordering=["tasks:total_annotations"], + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/dm/actions/", method="POST", - params={"id": id, "project": project, "view": view}, - json={"filters": filters, "selectedItems": selected_items, "ordering": ordering}, + params={ + "id": id, + "project": project, + "view": view, + }, + json={ + "filters": convert_and_respect_annotation_metadata( + object_=filters, annotation=ActionsCreateRequestFilters, direction="write" + ), + "selectedItems": convert_and_respect_annotation_metadata( + object_=selected_items, annotation=ActionsCreateRequestSelectedItems, direction="write" + ), + "ordering": ordering, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) diff --git a/src/label_studio_sdk/actions/types/actions_create_request_filters.py b/src/label_studio_sdk/actions/types/actions_create_request_filters.py index 5d2194a77..170a4acd6 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_filters.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_filters.py @@ -1,43 +1,33 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt -import typing - -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from ...core.pydantic_utilities import UniversalBaseModel from .actions_create_request_filters_conjunction import ActionsCreateRequestFiltersConjunction +import pydantic +import typing from .actions_create_request_filters_items_item import ActionsCreateRequestFiltersItemsItem +from ...core.pydantic_utilities import IS_PYDANTIC_V2 -class ActionsCreateRequestFilters(pydantic_v1.BaseModel): +class ActionsCreateRequestFilters(UniversalBaseModel): """ Filters to apply on tasks. You can use [the helper class `Filters` from this page](https://labelstud.io/sdk/data_manager.html) to create Data Manager Filters.
    Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` """ - conjunction: ActionsCreateRequestFiltersConjunction = pydantic_v1.Field() + conjunction: ActionsCreateRequestFiltersConjunction = pydantic.Field() """ Logical conjunction for the filters. This conjunction (either "or" or "and") will be applied to all items in the filters list. It is not possible to combine "or" and "and" within one list of filters. All filters will be either combined with "or" or with "and", but not a mix of both. """ - items: typing.List[ActionsCreateRequestFiltersItemsItem] = pydantic_v1.Field() + items: typing.List[ActionsCreateRequestFiltersItemsItem] = pydantic.Field() """ List of filter items """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item.py b/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item.py index 70d93aa6a..f70a875f2 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item.py @@ -1,50 +1,40 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt -import typing - -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from ...core.pydantic_utilities import UniversalBaseModel from .actions_create_request_filters_items_item_filter import ActionsCreateRequestFiltersItemsItemFilter +import pydantic from .actions_create_request_filters_items_item_operator import ActionsCreateRequestFiltersItemsItemOperator from .actions_create_request_filters_items_item_value import ActionsCreateRequestFiltersItemsItemValue +from ...core.pydantic_utilities import IS_PYDANTIC_V2 +import typing -class ActionsCreateRequestFiltersItemsItem(pydantic_v1.BaseModel): - filter: ActionsCreateRequestFiltersItemsItemFilter = pydantic_v1.Field() +class ActionsCreateRequestFiltersItemsItem(UniversalBaseModel): + filter: ActionsCreateRequestFiltersItemsItemFilter = pydantic.Field() """ Filter identifier, it should start with `filter:tasks:` prefix, e.g. `filter:tasks:agreement`. For `task.data` fields it may look like `filter:tasks:data.field_name`. If you need more info about columns, check the [Get data manager columns](#tag/Data-Manager/operation/api_dm_columns_list) API endpoint. Possible values:
  • `filter:tasks:agreement`
    (Number) Agreement for annotation results for a specific task (Enterprise only)

  • `filter:tasks:annotations_results`
    (String) Annotation results for the tasks

  • `filter:tasks:annotators`
    (List) Annotators that completed the task (Community). Can include assigned annotators (Enterprise only). Important note: the filter `type` should be List, but the filter `value` is integer

  • `filter:tasks:cancelled_annotations`
    (Number) Number of cancelled or skipped annotations for the task

  • `filter:tasks:comments`
    (Number) Number of comments in a task

  • `filter:tasks:completed_at`
    (Datetime) Time when a task was fully annotated

  • `filter:tasks:created_at`
    (Datetime) Time the task was created at

  • `filter:tasks:file_upload`
    (String) Name of the file uploaded to create the tasks

  • `filter:tasks:ground_truth`
    (Boolean) Ground truth status of the tasks

  • `filter:tasks:id`
    (Number) Task ID

  • `filter:tasks:inner_id`
    (Number) Task Inner ID, it starts from 1 for all projects

  • `filter:tasks:predictions_model_versions`
    (String) Model version used for the predictions

  • `filter:tasks:predictions_results`
    (String) Prediction results for the tasks

  • `filter:tasks:predictions_score`
    (Number) Prediction score for the task

  • `filter:tasks:reviewed`
    (Boolean) Whether the tasks have been reviewed (Enterprise only)

  • `filter:tasks:reviewers`
    (String) Reviewers that reviewed the task, or assigned reviewers (Enterprise only). Important note: the filter `type` should be List, but the filter `value` is integer

  • `filter:tasks:reviews_accepted`
    (Number) Number of annotations accepted for a task in review (Enterprise only)

  • `filter:tasks:reviews_rejected`
    (Number) Number of annotations rejected for a task in review (Enterprise only)

  • `filter:tasks:total_annotations`
    (Number) Total number of annotations on a task

  • `filter:tasks:total_predictions`
    (Number) Total number of predictions for the task

  • `filter:tasks:unresolved_comment_count`
    (Number) Number of unresolved comments in a task

  • `filter:tasks:updated_at`
    (Datetime) Time the task was updated at (e.g. new annotation was created, review added, etc)
  • """ - operator: ActionsCreateRequestFiltersItemsItemOperator = pydantic_v1.Field() + operator: ActionsCreateRequestFiltersItemsItemOperator = pydantic.Field() """ Filter operator. Possible values:
  • `contains`
    Contains

  • `ends_with`
    Ends with

  • `equal`
    Equal to

  • `exists`
    Exists

  • `greater`
    Greater than

  • `greater_or_equal`
    Greater than or equal to

  • `in`
    Is between min and max values, so the filter `value` should be e.g. `{"min": 1, "max": 7}`

  • `less`
    Less than

  • `less_or_equal`
    Less than or equal to

  • `not_contains`
    Does not contain

  • `not_equal`
    Not equal to

  • `not_exists`
    Does not exist

  • `not_in`
    Is not between min and max values, so the filter `value` should be e.g. `{"min": 1, "max": 7}`

  • `starts_with`
    Starts with
  • """ - type: str = pydantic_v1.Field() + type: str = pydantic.Field() """ Type of the filter value. Possible values:
  • `Boolean`
    Boolean

  • `Datetime`
    Datetime string in `strftime('%Y-%m-%dT%H:%M:%S.%fZ')` format

  • `List`
    List of items

  • `Number`
    Float or Integer

  • `String`
    String

  • `Unknown`
    Unknown is explicitly converted to string format
  • """ - value: ActionsCreateRequestFiltersItemsItemValue = pydantic_v1.Field() + value: ActionsCreateRequestFiltersItemsItemValue = pydantic.Field() """ Value to filter by """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item_value.py b/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item_value.py index 77f95fff5..73112921c 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item_value.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item_value.py @@ -2,4 +2,6 @@ import typing -ActionsCreateRequestFiltersItemsItemValue = typing.Union[str, int, float, bool, typing.Dict[str, typing.Any]] +ActionsCreateRequestFiltersItemsItemValue = typing.Union[ + str, int, float, bool, typing.Dict[str, typing.Optional[typing.Any]] +] diff --git a/src/label_studio_sdk/actions/types/actions_create_request_selected_items.py b/src/label_studio_sdk/actions/types/actions_create_request_selected_items.py index 2ceac5aac..64eb7cee4 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_selected_items.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_selected_items.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. import typing - -from .actions_create_request_selected_items_excluded import ActionsCreateRequestSelectedItemsExcluded from .actions_create_request_selected_items_included import ActionsCreateRequestSelectedItemsIncluded +from .actions_create_request_selected_items_excluded import ActionsCreateRequestSelectedItemsExcluded ActionsCreateRequestSelectedItems = typing.Union[ ActionsCreateRequestSelectedItemsIncluded, ActionsCreateRequestSelectedItemsExcluded diff --git a/src/label_studio_sdk/actions/types/actions_create_request_selected_items_excluded.py b/src/label_studio_sdk/actions/types/actions_create_request_selected_items_excluded.py index 90ddd0140..e1d15e854 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_selected_items_excluded.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_selected_items_excluded.py @@ -1,39 +1,29 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel +import typing_extensions +from ...core.serialization import FieldMetadata +import pydantic import typing +from ...core.pydantic_utilities import IS_PYDANTIC_V2 -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class ActionsCreateRequestSelectedItemsExcluded(pydantic_v1.BaseModel): - all_: bool = pydantic_v1.Field(alias="all") +class ActionsCreateRequestSelectedItemsExcluded(UniversalBaseModel): + all_: typing_extensions.Annotated[bool, FieldMetadata(alias="all")] = pydantic.Field() """ All tasks are selected """ - excluded: typing.Optional[typing.List[int]] = pydantic_v1.Field(default=None) + excluded: typing.Optional[typing.List[int]] = pydantic.Field(default=None) """ List of excluded task IDs """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/actions/types/actions_create_request_selected_items_included.py b/src/label_studio_sdk/actions/types/actions_create_request_selected_items_included.py index 12695b434..90f2ec326 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_selected_items_included.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_selected_items_included.py @@ -1,39 +1,29 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel +import typing_extensions +from ...core.serialization import FieldMetadata +import pydantic import typing +from ...core.pydantic_utilities import IS_PYDANTIC_V2 -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class ActionsCreateRequestSelectedItemsIncluded(pydantic_v1.BaseModel): - all_: bool = pydantic_v1.Field(alias="all") +class ActionsCreateRequestSelectedItemsIncluded(UniversalBaseModel): + all_: typing_extensions.Annotated[bool, FieldMetadata(alias="all")] = pydantic.Field() """ No tasks are selected """ - included: typing.Optional[typing.List[int]] = pydantic_v1.Field(default=None) + included: typing.Optional[typing.List[int]] = pydantic.Field(default=None) """ List of included task IDs """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/annotations/client.py b/src/label_studio_sdk/annotations/client.py index d6b56155f..44512e3e6 100644 --- a/src/label_studio_sdk/annotations/client.py +++ b/src/label_studio_sdk/annotations/client.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import pydantic_v1 +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions from ..types.annotation import Annotation +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError from .types.annotations_create_bulk_response_item import AnnotationsCreateBulkResponseItem +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -21,6 +21,7 @@ def __init__(self, *, client_wrapper: SyncClientWrapper): def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Annotation: """ + Tasks can have multiple annotations. Use this call to retrieve a specific annotation using its ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. 
Or you can use [Get all task annotations](list) to find all annotation IDs. @@ -40,7 +41,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -50,11 +51,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", method="GET", request_options=request_options + f"api/annotations/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Annotation, _response.json()) # type: ignore + return typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -62,6 +71,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete an annotation. This action can't be undone! @@ -82,7 +92,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -92,7 +102,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options + f"api/annotations/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -106,7 +118,7 @@ def update( self, id: int, *, - result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] = OMIT, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, task: typing.Optional[int] = OMIT, project: typing.Optional[int] = OMIT, completed_by: typing.Optional[int] = OMIT, @@ -117,6 +129,7 @@ def update( request_options: typing.Optional[RequestOptions] = None, ) -> Annotation: """ + Update attributes for an existing annotation. You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. @@ -128,7 +141,7 @@ def update( id : int A unique integer value identifying this annotation. - result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] Labeling result in JSON format. 
Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) task : typing.Optional[int] @@ -162,7 +175,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -204,12 +217,21 @@ def update( "ground_truth": ground_truth, "lead_time": lead_time, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Annotation, _response.json()) # type: ignore + return typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -217,6 +239,7 @@ def update( def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Annotation]: """ + List all annotations for a task. You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). @@ -236,7 +259,7 @@ def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = No Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -246,11 +269,19 @@ def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = No ) """ _response = self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/annotations/", method="GET", request_options=request_options + f"api/tasks/{jsonable_encoder(id)}/annotations/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Annotation], _response.json()) # type: ignore + return typing.cast( + typing.List[Annotation], + parse_obj_as( + type_=typing.List[Annotation], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -260,7 +291,7 @@ def create( self, id: int, *, - result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] = OMIT, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, task: typing.Optional[int] = OMIT, project: typing.Optional[int] = OMIT, completed_by: typing.Optional[int] = OMIT, @@ -271,10 +302,12 @@ def create( request_options: typing.Optional[RequestOptions] = None, ) -> Annotation: """ + Add annotations to a task like an annotator does. You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). + The content of the result field depends on your labeling configuration. For example, send the following data as part of your POST request to send an empty annotation with the ID of the user who completed the task: @@ -294,7 +327,7 @@ def create( id : int Task ID - result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] Labeling result in JSON format. 
Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) task : typing.Optional[int] @@ -328,7 +361,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -370,12 +403,21 @@ def create( "ground_truth": ground_truth, "lead_time": lead_time, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Annotation, _response.json()) # type: ignore + return typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -387,7 +429,7 @@ def create_bulk( tasks: typing.Optional[typing.Sequence[int]] = OMIT, lead_time: typing.Optional[float] = OMIT, project: typing.Optional[int] = OMIT, - result: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + result: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[AnnotationsCreateBulkResponseItem]: """ @@ -401,7 +443,7 @@ def create_bulk( project : typing.Optional[int] - result : typing.Optional[typing.Dict[str, typing.Any]] + result : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -413,7 +455,7 @@ def create_bulk( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -423,13 +465,27 @@ def create_bulk( _response = self._client_wrapper.httpx_client.request( "api/annotations/bulk", method="POST", - json={"tasks": tasks, "lead_time": lead_time, "project": project, "result": result}, + json={ + "tasks": tasks, + "lead_time": lead_time, + "project": project, + "result": result, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[AnnotationsCreateBulkResponseItem], _response.json()) # type: ignore + return typing.cast( + typing.List[AnnotationsCreateBulkResponseItem], + parse_obj_as( + type_=typing.List[AnnotationsCreateBulkResponseItem], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -442,6 +498,7 @@ def __init__(self, *, client_wrapper: AsyncClientWrapper): async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Annotation: """ + Tasks can have multiple annotations. Use this call to retrieve a specific annotation using its ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. 
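
The hunks above and below all make the same substitution: the old one-liner `pydantic_v1.parse_obj_as(Annotation, _response.json())` becomes the new `parse_obj_as(type_=..., object_=...)` helper wrapped in `typing.cast`. A minimal sketch of that pattern, assuming Pydantic v2 and using an invented `Record` model standing in for the generated ones:

```python
import typing

import pydantic  # sketch assumes Pydantic v2


# Invented stand-in for a generated model such as Annotation.
class Record(pydantic.BaseModel):
    id: typing.Optional[int] = None


def parse_obj_as(type_: typing.Type[typing.Any], object_: typing.Any) -> typing.Any:
    # Simplified version of core.pydantic_utilities.parse_obj_as; the real
    # helper also handles Pydantic v1 and alias metadata.
    return pydantic.TypeAdapter(type_).validate_python(object_)


raw = {"id": 1}
# The generated methods wrap the call in typing.cast so each endpoint keeps
# its precise return type while the shared helper stays generic.
record = typing.cast(Record, parse_obj_as(type_=Record, object_=raw))
print(record.id)  # 1
```
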
@@ -461,21 +518,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.annotations.get( - id=1, - ) + + + async def main() -> None: + await client.annotations.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", method="GET", request_options=request_options + f"api/annotations/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Annotation, _response.json()) # type: ignore + return typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -483,6 +556,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete an annotation. This action can't be undone! @@ -503,17 +577,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.annotations.delete( - id=1, - ) + + + async def main() -> None: + await client.annotations.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options + f"api/annotations/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -527,7 +611,7 @@ async def update( self, id: int, *, - result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] = OMIT, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, task: typing.Optional[int] = OMIT, project: typing.Optional[int] = OMIT, completed_by: typing.Optional[int] = OMIT, @@ -538,6 +622,7 @@ async def update( request_options: typing.Optional[RequestOptions] = None, ) -> Annotation: """ + Update attributes for an existing annotation. You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. @@ -549,7 +634,7 @@ async def update( id : int A unique integer value identifying this annotation. - result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] Labeling result in JSON format. 
Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) task : typing.Optional[int] @@ -583,34 +668,42 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.annotations.update( - id=1, - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - was_cancelled=False, - ground_truth=True, - ) + + + async def main() -> None: + await client.annotations.update( + id=1, + result=[ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + was_cancelled=False, + ground_truth=True, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/annotations/{jsonable_encoder(id)}/", @@ -625,12 +718,21 @@ async def update( "ground_truth": ground_truth, "lead_time": lead_time, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Annotation, _response.json()) # type: ignore + return typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -640,6 +742,7 @@ async def list( self, id: int, *, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[Annotation]: """ + List all annotations for a task. You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). 
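
One detail these signatures rely on: every optional body field defaults to the `OMIT` sentinel (`typing.cast(typing.Any, ...)`, defined at the top of this module) rather than `None`, so "not provided" stays distinguishable from an explicit null. A rough sketch of the stripping step, modeled on `remove_omit_from_dict` from `core/http_client.py` later in this diff:

```python
import typing

OMIT = typing.cast(typing.Any, ...)  # Ellipsis sentinel, as in the generated clients


def remove_omit_from_dict(
    original: typing.Dict[str, typing.Optional[typing.Any]],
    omit: typing.Optional[typing.Any],
) -> typing.Dict[str, typing.Any]:
    # Drop entries whose value is the sentinel object itself; keep real Nones.
    if omit is None:
        return original
    return {key: value for key, value in original.items() if value is not omit}


body = {"task": 42, "ground_truth": None, "lead_time": OMIT}
print(remove_omit_from_dict(body, OMIT))
# {'task': 42, 'ground_truth': None}
```
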
@@ -659,21 +762,37 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.annotations.list( - id=1, - ) + + + async def main() -> None: + await client.annotations.list( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/annotations/", method="GET", request_options=request_options + f"api/tasks/{jsonable_encoder(id)}/annotations/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Annotation], _response.json()) # type: ignore + return typing.cast( + typing.List[Annotation], + parse_obj_as( + type_=typing.List[Annotation], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -683,7 +802,7 @@ async def create( self, id: int, *, - result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] = OMIT, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, task: typing.Optional[int] = OMIT, project: typing.Optional[int] = OMIT, completed_by: typing.Optional[int] = OMIT, @@ -694,10 +813,12 @@ async def create( request_options: typing.Optional[RequestOptions] = None, ) -> Annotation: """ + Add annotations to a task like an annotator does. You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). + The content of the result field depends on your labeling configuration. For example, send the following data as part of your POST request to send an empty annotation with the ID of the user who completed the task: @@ -717,7 +838,7 @@ async def create( id : int Task ID - result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] Labeling result in JSON format. 
Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) task : typing.Optional[int] @@ -751,34 +872,42 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.annotations.create( - id=1, - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - was_cancelled=False, - ground_truth=True, - ) + + + async def main() -> None: + await client.annotations.create( + id=1, + result=[ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + was_cancelled=False, + ground_truth=True, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/tasks/{jsonable_encoder(id)}/annotations/", @@ -793,12 +922,21 @@ async def create( "ground_truth": ground_truth, "lead_time": lead_time, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Annotation, _response.json()) # type: ignore + return typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -810,7 +948,7 @@ async def create_bulk( tasks: typing.Optional[typing.Sequence[int]] = OMIT, lead_time: typing.Optional[float] = OMIT, project: typing.Optional[int] = OMIT, - result: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + result: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[AnnotationsCreateBulkResponseItem]: """ @@ -824,7 +962,7 @@ async def create_bulk( project : typing.Optional[int] - result : typing.Optional[typing.Dict[str, typing.Any]] + result : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] request_options : typing.Optional[RequestOptions] Request-specific configuration. 
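
For contrast with the generated no-argument example in the next hunk, a hedged sketch of a fuller `create_bulk` call (shown with the sync client for brevity); the task IDs and the `result` payload here are invented, and the widened `Dict[str, Optional[Any]]` annotation is what lets `result` carry explicit nulls:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Hypothetical payload: one shared result applied to several tasks.
items = client.annotations.create_bulk(
    tasks=[101, 102, 103],
    lead_time=12.5,
    project=1,
    result={"sentiment": "positive", "comment": None},  # None is now allowed
)
print([item.id for item in items])
```
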
@@ -836,23 +974,45 @@ async def create_bulk( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.annotations.create_bulk() + + + async def main() -> None: + await client.annotations.create_bulk() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/annotations/bulk", method="POST", - json={"tasks": tasks, "lead_time": lead_time, "project": project, "result": result}, + json={ + "tasks": tasks, + "lead_time": lead_time, + "project": project, + "result": result, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[AnnotationsCreateBulkResponseItem], _response.json()) # type: ignore + return typing.cast( + typing.List[AnnotationsCreateBulkResponseItem], + parse_obj_as( + type_=typing.List[AnnotationsCreateBulkResponseItem], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py b/src/label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py index 62f8d8365..a023f4f3d 100644 --- a/src/label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py +++ b/src/label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py @@ -1,29 +1,19 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel import typing +from ...core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class AnnotationsCreateBulkResponseItem(pydantic_v1.BaseModel): +class AnnotationsCreateBulkResponseItem(UniversalBaseModel): id: typing.Optional[int] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/base_client.py b/src/label_studio_sdk/base_client.py index 175150891..30cf6ec0a 100644 --- a/src/label_studio_sdk/base_client.py +++ b/src/label_studio_sdk/base_client.py @@ -1,29 +1,44 @@ # This file was auto-generated by Fern from our API Definition. 
-import os import typing - +from .environment import LabelStudioEnvironment +import os import httpx - -from .actions.client import ActionsClient, AsyncActionsClient -from .annotations.client import AnnotationsClient, AsyncAnnotationsClient -from .comments.client import AsyncCommentsClient, CommentsClient from .core.api_error import ApiError -from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .environment import LabelStudioEnvironment -from .export_storage.client import AsyncExportStorageClient, ExportStorageClient -from .files.client import AsyncFilesClient, FilesClient -from .import_storage.client import AsyncImportStorageClient, ImportStorageClient -from .ml.client import AsyncMlClient, MlClient -from .model_providers.client import AsyncModelProvidersClient, ModelProvidersClient -from .predictions.client import AsyncPredictionsClient, PredictionsClient -from .projects.client import AsyncProjectsClient, ProjectsClient -from .prompts.client import AsyncPromptsClient, PromptsClient -from .tasks.client import AsyncTasksClient, TasksClient -from .users.client import AsyncUsersClient, UsersClient -from .views.client import AsyncViewsClient, ViewsClient -from .webhooks.client import AsyncWebhooksClient, WebhooksClient -from .workspaces.client import AsyncWorkspacesClient, WorkspacesClient +from .core.client_wrapper import SyncClientWrapper +from .annotations.client import AnnotationsClient +from .users.client import UsersClient +from .actions.client import ActionsClient +from .views.client import ViewsClient +from .files.client import FilesClient +from .ml.client import MlClient +from .predictions.client import PredictionsClient +from .projects.client import ProjectsClient +from .tasks.client import TasksClient +from .import_storage.client import ImportStorageClient +from .export_storage.client import ExportStorageClient +from .webhooks.client import WebhooksClient +from .prompts.client import PromptsClient +from .model_providers.client import ModelProvidersClient +from .comments.client import CommentsClient +from .workspaces.client import WorkspacesClient +from .core.client_wrapper import AsyncClientWrapper +from .annotations.client import AsyncAnnotationsClient +from .users.client import AsyncUsersClient +from .actions.client import AsyncActionsClient +from .views.client import AsyncViewsClient +from .files.client import AsyncFilesClient +from .ml.client import AsyncMlClient +from .predictions.client import AsyncPredictionsClient +from .projects.client import AsyncProjectsClient +from .tasks.client import AsyncTasksClient +from .import_storage.client import AsyncImportStorageClient +from .export_storage.client import AsyncExportStorageClient +from .webhooks.client import AsyncWebhooksClient +from .prompts.client import AsyncPromptsClient +from .model_providers.client import AsyncModelProvidersClient +from .comments.client import AsyncCommentsClient +from .workspaces.client import AsyncWorkspacesClient class LabelStudioBase: @@ -56,7 +71,7 @@ class LabelStudioBase: Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -71,7 +86,7 @@ def __init__( api_key: typing.Optional[str] = os.getenv("LABEL_STUDIO_API_KEY"), timeout: typing.Optional[float] = None, follow_redirects: typing.Optional[bool] = True, - httpx_client: typing.Optional[httpx.Client] = None + httpx_client: typing.Optional[httpx.Client] = None, ): _defaulted_timeout = timeout if timeout is not None else 
60 if httpx_client is None else None if api_key is None: @@ -93,9 +108,9 @@ def __init__( self.actions = ActionsClient(client_wrapper=self._client_wrapper) self.views = ViewsClient(client_wrapper=self._client_wrapper) self.files = FilesClient(client_wrapper=self._client_wrapper) - self.projects = ProjectsClient(client_wrapper=self._client_wrapper) self.ml = MlClient(client_wrapper=self._client_wrapper) self.predictions = PredictionsClient(client_wrapper=self._client_wrapper) + self.projects = ProjectsClient(client_wrapper=self._client_wrapper) self.tasks = TasksClient(client_wrapper=self._client_wrapper) self.import_storage = ImportStorageClient(client_wrapper=self._client_wrapper) self.export_storage = ExportStorageClient(client_wrapper=self._client_wrapper) @@ -136,7 +151,7 @@ class AsyncLabelStudioBase: Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", @@ -151,7 +166,7 @@ def __init__( api_key: typing.Optional[str] = os.getenv("LABEL_STUDIO_API_KEY"), timeout: typing.Optional[float] = None, follow_redirects: typing.Optional[bool] = True, - httpx_client: typing.Optional[httpx.AsyncClient] = None + httpx_client: typing.Optional[httpx.AsyncClient] = None, ): _defaulted_timeout = timeout if timeout is not None else 60 if httpx_client is None else None if api_key is None: @@ -173,9 +188,9 @@ def __init__( self.actions = AsyncActionsClient(client_wrapper=self._client_wrapper) self.views = AsyncViewsClient(client_wrapper=self._client_wrapper) self.files = AsyncFilesClient(client_wrapper=self._client_wrapper) - self.projects = AsyncProjectsClient(client_wrapper=self._client_wrapper) self.ml = AsyncMlClient(client_wrapper=self._client_wrapper) self.predictions = AsyncPredictionsClient(client_wrapper=self._client_wrapper) + self.projects = AsyncProjectsClient(client_wrapper=self._client_wrapper) self.tasks = AsyncTasksClient(client_wrapper=self._client_wrapper) self.import_storage = AsyncImportStorageClient(client_wrapper=self._client_wrapper) self.export_storage = AsyncExportStorageClient(client_wrapper=self._client_wrapper) diff --git a/src/label_studio_sdk/comments/client.py b/src/label_studio_sdk/comments/client.py index fe86f4410..dec7e7a72 100644 --- a/src/label_studio_sdk/comments/client.py +++ b/src/label_studio_sdk/comments/client.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. import typing +from ..core.client_wrapper import SyncClientWrapper +from ..core.request_options import RequestOptions +from ..types.comment import Comment +from ..core.pydantic_utilities import parse_obj_as from json.decoder import JSONDecodeError - from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import pydantic_v1 -from ..core.request_options import RequestOptions -from ..types.comment import Comment +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -27,6 +27,7 @@ def list( request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[Comment]: """ + Get a list of comments for a specific project. 
Parameters @@ -50,7 +51,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -60,12 +61,22 @@ def list( _response = self._client_wrapper.httpx_client.request( "api/comments/", method="GET", - params={"project": project, "expand_created_by": expand_created_by, "annotation": annotation}, + params={ + "project": project, + "expand_created_by": expand_created_by, + "annotation": annotation, + }, request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Comment], _response.json()) # type: ignore + return typing.cast( + typing.List[Comment], + parse_obj_as( + type_=typing.List[Comment], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -81,6 +92,7 @@ def create( request_options: typing.Optional[RequestOptions] = None, ) -> Comment: """ + Create a new comment. Parameters @@ -103,7 +115,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -113,13 +125,27 @@ def create( _response = self._client_wrapper.httpx_client.request( "api/comments/", method="POST", - json={"annotation": annotation, "project": project, "text": text, "is_resolved": is_resolved}, + json={ + "annotation": annotation, + "project": project, + "text": text, + "is_resolved": is_resolved, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Comment, _response.json()) # type: ignore + return typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -127,6 +153,7 @@ def create( def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Comment: """ + Get a specific comment. Parameters @@ -144,7 +171,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -154,11 +181,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/comments/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Comment, _response.json()) # type: ignore + return typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -166,6 +201,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific comment. 
Parameters @@ -182,7 +218,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -192,7 +228,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/comments/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -213,6 +251,7 @@ def update( request_options: typing.Optional[RequestOptions] = None, ) -> Comment: """ + Update a specific comment. Parameters @@ -238,7 +277,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -250,13 +289,27 @@ def update( _response = self._client_wrapper.httpx_client.request( f"api/comments/{jsonable_encoder(id)}", method="PATCH", - json={"annotation": annotation, "project": project, "text": text, "is_resolved": is_resolved}, + json={ + "annotation": annotation, + "project": project, + "text": text, + "is_resolved": is_resolved, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Comment, _response.json()) # type: ignore + return typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -276,6 +329,7 @@ async def list( request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[Comment]: """ + Get a list of comments for a specific project. Parameters @@ -299,22 +353,40 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.comments.list() + + + async def main() -> None: + await client.comments.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/comments/", method="GET", - params={"project": project, "expand_created_by": expand_created_by, "annotation": annotation}, + params={ + "project": project, + "expand_created_by": expand_created_by, + "annotation": annotation, + }, request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Comment], _response.json()) # type: ignore + return typing.cast( + typing.List[Comment], + parse_obj_as( + type_=typing.List[Comment], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -330,6 +402,7 @@ async def create( request_options: typing.Optional[RequestOptions] = None, ) -> Comment: """ + Create a new comment. 
Parameters @@ -352,23 +425,45 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.comments.create() + + + async def main() -> None: + await client.comments.create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/comments/", method="POST", - json={"annotation": annotation, "project": project, "text": text, "is_resolved": is_resolved}, + json={ + "annotation": annotation, + "project": project, + "text": text, + "is_resolved": is_resolved, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Comment, _response.json()) # type: ignore + return typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -376,6 +471,7 @@ async def create( async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Comment: """ + Get a specific comment. Parameters @@ -393,21 +489,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.comments.get( - id=1, - ) + + + async def main() -> None: + await client.comments.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/comments/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Comment, _response.json()) # type: ignore + return typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -415,6 +527,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific comment. Parameters @@ -431,17 +544,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.comments.delete( - id=1, - ) + + + async def main() -> None: + await client.comments.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/comments/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -462,6 +585,7 @@ async def update( request_options: typing.Optional[RequestOptions] = None, ) -> Comment: """ + Update a specific comment. 
Parameters @@ -487,25 +611,47 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.comments.update( - id=1, - ) + + + async def main() -> None: + await client.comments.update( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/comments/{jsonable_encoder(id)}", method="PATCH", - json={"annotation": annotation, "project": project, "text": text, "is_resolved": is_resolved}, + json={ + "annotation": annotation, + "project": project, + "text": text, + "is_resolved": is_resolved, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Comment, _response.json()) # type: ignore + return typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/core/__init__.py b/src/label_studio_sdk/core/__init__.py index 0f239041e..42031ad0b 100644 --- a/src/label_studio_sdk/core/__init__.py +++ b/src/label_studio_sdk/core/__init__.py @@ -3,14 +3,23 @@ from .api_error import ApiError from .client_wrapper import AsyncClientWrapper, BaseClientWrapper, SyncClientWrapper from .datetime_utils import serialize_datetime -from .file import File, convert_file_dict_to_httpx_tuples +from .file import File, convert_file_dict_to_httpx_tuples, with_content_type from .http_client import AsyncHttpClient, HttpClient from .jsonable_encoder import jsonable_encoder from .pagination import AsyncPager, SyncPager -from .pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .pydantic_utilities import ( + IS_PYDANTIC_V2, + UniversalBaseModel, + UniversalRootModel, + parse_obj_as, + universal_field_validator, + universal_root_validator, + update_forward_refs, +) from .query_encoder import encode_query from .remove_none_from_dict import remove_none_from_dict from .request_options import RequestOptions +from .serialization import FieldMetadata, convert_and_respect_annotation_metadata __all__ = [ "ApiError", @@ -18,16 +27,24 @@ "AsyncHttpClient", "AsyncPager", "BaseClientWrapper", + "FieldMetadata", "File", "HttpClient", + "IS_PYDANTIC_V2", "RequestOptions", "SyncClientWrapper", "SyncPager", + "UniversalBaseModel", + "UniversalRootModel", + "convert_and_respect_annotation_metadata", "convert_file_dict_to_httpx_tuples", - "deep_union_pydantic_dicts", "encode_query", "jsonable_encoder", - "pydantic_v1", + "parse_obj_as", "remove_none_from_dict", "serialize_datetime", + "universal_field_validator", + "universal_root_validator", + "update_forward_refs", + "with_content_type", ] diff --git a/src/label_studio_sdk/core/client_wrapper.py b/src/label_studio_sdk/core/client_wrapper.py index 89b53e4f7..69b387d94 100644 --- a/src/label_studio_sdk/core/client_wrapper.py +++ b/src/label_studio_sdk/core/client_wrapper.py @@ -1,10 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
import typing - import httpx - -from .http_client import AsyncHttpClient, HttpClient +from .http_client import HttpClient +from .http_client import AsyncHttpClient class BaseClientWrapper: @@ -36,9 +35,9 @@ def __init__( super().__init__(api_key=api_key, base_url=base_url, timeout=timeout) self.httpx_client = HttpClient( httpx_client=httpx_client, - base_headers=self.get_headers(), - base_timeout=self.get_timeout(), - base_url=self.get_base_url(), + base_headers=self.get_headers, + base_timeout=self.get_timeout, + base_url=self.get_base_url, ) @@ -49,7 +48,7 @@ def __init__( super().__init__(api_key=api_key, base_url=base_url, timeout=timeout) self.httpx_client = AsyncHttpClient( httpx_client=httpx_client, - base_headers=self.get_headers(), - base_timeout=self.get_timeout(), - base_url=self.get_base_url(), + base_headers=self.get_headers, + base_timeout=self.get_timeout, + base_url=self.get_base_url, ) diff --git a/src/label_studio_sdk/core/file.py b/src/label_studio_sdk/core/file.py index cb0d40bbb..44b0d27c0 100644 --- a/src/label_studio_sdk/core/file.py +++ b/src/label_studio_sdk/core/file.py @@ -1,25 +1,30 @@ # This file was auto-generated by Fern from our API Definition. -import typing +from typing import IO, Dict, List, Mapping, Optional, Tuple, Union, cast # File typing inspired by the flexibility of types within the httpx library # https://github.com/encode/httpx/blob/master/httpx/_types.py -FileContent = typing.Union[typing.IO[bytes], bytes, str] -File = typing.Union[ +FileContent = Union[IO[bytes], bytes, str] +File = Union[ # file (or bytes) FileContent, # (filename, file (or bytes)) - typing.Tuple[typing.Optional[str], FileContent], + Tuple[Optional[str], FileContent], # (filename, file (or bytes), content_type) - typing.Tuple[typing.Optional[str], FileContent, typing.Optional[str]], + Tuple[Optional[str], FileContent, Optional[str]], # (filename, file (or bytes), content_type, headers) - typing.Tuple[typing.Optional[str], FileContent, typing.Optional[str], typing.Mapping[str, str]], + Tuple[ + Optional[str], + FileContent, + Optional[str], + Mapping[str, str], + ], ] def convert_file_dict_to_httpx_tuples( - d: typing.Dict[str, typing.Union[File, typing.List[File]]] -) -> typing.List[typing.Tuple[str, File]]: + d: Dict[str, Union[File, List[File]]], +) -> List[Tuple[str, File]]: """ The format we use is a list of tuples, where the first element is the name of the file and the second is the file object. Typically HTTPX wants @@ -36,3 +41,27 @@ def convert_file_dict_to_httpx_tuples( else: httpx_tuples.append((key, file_like)) return httpx_tuples + + +def with_content_type(*, file: File, default_content_type: str) -> File: + """ + This function resolves to the file's content type, if provided, and defaults + to the default_content_type value if not. 
+ """ + if isinstance(file, tuple): + if len(file) == 2: + filename, content = cast(Tuple[Optional[str], FileContent], file) # type: ignore + return (filename, content, default_content_type) + elif len(file) == 3: + filename, content, file_content_type = cast(Tuple[Optional[str], FileContent, Optional[str]], file) # type: ignore + out_content_type = file_content_type or default_content_type + return (filename, content, out_content_type) + elif len(file) == 4: + filename, content, file_content_type, headers = cast( # type: ignore + Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], file + ) + out_content_type = file_content_type or default_content_type + return (filename, content, out_content_type, headers) + else: + raise ValueError(f"Unexpected tuple length: {len(file)}") + return (None, file, default_content_type) diff --git a/src/label_studio_sdk/core/http_client.py b/src/label_studio_sdk/core/http_client.py index 09c68368b..1a1a1311a 100644 --- a/src/label_studio_sdk/core/http_client.py +++ b/src/label_studio_sdk/core/http_client.py @@ -90,7 +90,8 @@ def _should_retry(response: httpx.Response) -> bool: def remove_omit_from_dict( - original: typing.Dict[str, typing.Optional[typing.Any]], omit: typing.Optional[typing.Any] + original: typing.Dict[str, typing.Optional[typing.Any]], + omit: typing.Optional[typing.Any], ) -> typing.Dict[str, typing.Any]: if omit is None: return original @@ -108,7 +109,7 @@ def maybe_filter_request_body( ) -> typing.Optional[typing.Any]: if data is None: return ( - jsonable_encoder(request_options.get("additional_body_parameters", {})) + jsonable_encoder(request_options.get("additional_body_parameters", {})) or {} if request_options is not None else None ) @@ -118,7 +119,7 @@ def maybe_filter_request_body( data_content = { **(jsonable_encoder(remove_omit_from_dict(data, omit))), # type: ignore **( - jsonable_encoder(request_options.get("additional_body_parameters", {})) + jsonable_encoder(request_options.get("additional_body_parameters", {})) or {} if request_options is not None else {} ), @@ -142,7 +143,8 @@ def get_request_body( # If both data and json are None, we send json data in the event extra properties are specified json_body = maybe_filter_request_body(json, request_options, omit) - return json_body, data_body + # If you have an empty JSON body, you should just send None + return (json_body if json_body != {} else None), data_body if data_body != {} else None class HttpClient: @@ -150,9 +152,9 @@ def __init__( self, *, httpx_client: httpx.Client, - base_timeout: typing.Optional[float], - base_headers: typing.Dict[str, str], - base_url: typing.Optional[str] = None, + base_timeout: typing.Callable[[], typing.Optional[float]], + base_headers: typing.Callable[[], typing.Dict[str, str]], + base_url: typing.Optional[typing.Callable[[], str]] = None, ): self.base_url = base_url self.base_timeout = base_timeout @@ -160,7 +162,10 @@ def __init__( self.httpx_client = httpx_client def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str: - base_url = self.base_url if maybe_base_url is None else maybe_base_url + base_url = maybe_base_url + if self.base_url is not None and base_url is None: + base_url = self.base_url() + if base_url is None: raise ValueError("A base_url is required to make this request, please provide one and try again.") return base_url @@ -185,7 +190,7 @@ def request( timeout = ( request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 
self.base_timeout + else self.base_timeout() ) json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) @@ -196,9 +201,9 @@ def request( headers=jsonable_encoder( remove_none_from_dict( { - **self.base_headers, + **self.base_headers(), **(headers if headers is not None else {}), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), + **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}), } ) ), @@ -209,7 +214,7 @@ def request( { **(params if params is not None else {}), **( - request_options.get("additional_query_parameters", {}) + request_options.get("additional_query_parameters", {}) or {} if request_options is not None else {} ), @@ -222,7 +227,11 @@ def request( json=json_body, data=data_body, content=content, - files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None, + files=( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit) + else None + ), timeout=timeout, ) @@ -267,7 +276,7 @@ def stream( timeout = ( request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self.base_timeout + else self.base_timeout() ) json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) @@ -278,7 +287,7 @@ def stream( headers=jsonable_encoder( remove_none_from_dict( { - **self.base_headers, + **self.base_headers(), **(headers if headers is not None else {}), **(request_options.get("additional_headers", {}) if request_options is not None else {}), } @@ -304,7 +313,11 @@ def stream( json=json_body, data=data_body, content=content, - files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None, + files=( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit) + else None + ), timeout=timeout, ) as stream: yield stream @@ -315,9 +328,9 @@ def __init__( self, *, httpx_client: httpx.AsyncClient, - base_timeout: typing.Optional[float], - base_headers: typing.Dict[str, str], - base_url: typing.Optional[str] = None, + base_timeout: typing.Callable[[], typing.Optional[float]], + base_headers: typing.Callable[[], typing.Dict[str, str]], + base_url: typing.Optional[typing.Callable[[], str]] = None, ): self.base_url = base_url self.base_timeout = base_timeout @@ -325,7 +338,10 @@ def __init__( self.httpx_client = httpx_client def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str: - base_url = self.base_url if maybe_base_url is None else maybe_base_url + base_url = maybe_base_url + if self.base_url is not None and base_url is None: + base_url = self.base_url() + if base_url is None: raise ValueError("A base_url is required to make this request, please provide one and try again.") return base_url @@ -350,7 +366,7 @@ async def request( timeout = ( request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self.base_timeout + else self.base_timeout() ) json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) @@ -362,9 +378,9 @@ async def request( headers=jsonable_encoder( remove_none_from_dict( { - **self.base_headers, + **self.base_headers(), **(headers if headers is not 
None else {}), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), + **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}), } ) ), @@ -375,7 +391,7 @@ async def request( { **(params if params is not None else {}), **( - request_options.get("additional_query_parameters", {}) + request_options.get("additional_query_parameters", {}) or {} if request_options is not None else {} ), @@ -388,7 +404,11 @@ async def request( json=json_body, data=data_body, content=content, - files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None, + files=( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if files is not None + else None + ), timeout=timeout, ) @@ -432,7 +452,7 @@ async def stream( timeout = ( request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self.base_timeout + else self.base_timeout() ) json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) @@ -443,7 +463,7 @@ async def stream( headers=jsonable_encoder( remove_none_from_dict( { - **self.base_headers, + **self.base_headers(), **(headers if headers is not None else {}), **(request_options.get("additional_headers", {}) if request_options is not None else {}), } @@ -469,7 +489,11 @@ async def stream( json=json_body, data=data_body, content=content, - files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None, + files=( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if files is not None + else None + ), timeout=timeout, ) as stream: yield stream diff --git a/src/label_studio_sdk/core/jsonable_encoder.py b/src/label_studio_sdk/core/jsonable_encoder.py index 7f4827326..1b631e901 100644 --- a/src/label_studio_sdk/core/jsonable_encoder.py +++ b/src/label_studio_sdk/core/jsonable_encoder.py @@ -8,33 +8,27 @@ https://github.com/tiangolo/fastapi/blob/master/fastapi/encoders.py """ +import base64 import dataclasses import datetime as dt -from collections import defaultdict from enum import Enum from pathlib import PurePath from types import GeneratorType -from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Set, Union + +import pydantic from .datetime_utils import serialize_datetime -from .pydantic_utilities import pydantic_v1 +from .pydantic_utilities import ( + IS_PYDANTIC_V2, + encode_by_type, + to_jsonable_with_fallback, +) SetIntStr = Set[Union[int, str]] DictIntStrAny = Dict[Union[int, str], Any] -def generate_encoders_by_class_tuples( - type_encoder_map: Dict[Any, Callable[[Any], Any]] -) -> Dict[Callable[[Any], Any], Tuple[Any, ...]]: - encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict(tuple) - for type_, encoder in type_encoder_map.items(): - encoders_by_class_tuples[encoder] += (type_,) - return encoders_by_class_tuples - - -encoders_by_class_tuples = generate_encoders_by_class_tuples(pydantic_v1.json.ENCODERS_BY_TYPE) - - def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None) -> Any: custom_encoder = custom_encoder or {} if custom_encoder: @@ -44,17 +38,24 @@ def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any] for encoder_type, encoder_instance in custom_encoder.items(): 
if isinstance(obj, encoder_type): return encoder_instance(obj) - if isinstance(obj, pydantic_v1.BaseModel): - encoder = getattr(obj.__config__, "json_encoders", {}) + if isinstance(obj, pydantic.BaseModel): + if IS_PYDANTIC_V2: + encoder = getattr(obj.model_config, "json_encoders", {}) # type: ignore # Pydantic v2 + else: + encoder = getattr(obj.__config__, "json_encoders", {}) # type: ignore # Pydantic v1 if custom_encoder: encoder.update(custom_encoder) obj_dict = obj.dict(by_alias=True) if "__root__" in obj_dict: obj_dict = obj_dict["__root__"] + if "root" in obj_dict: + obj_dict = obj_dict["root"] return jsonable_encoder(obj_dict, custom_encoder=encoder) if dataclasses.is_dataclass(obj): - obj_dict = dataclasses.asdict(obj) + obj_dict = dataclasses.asdict(obj) # type: ignore return jsonable_encoder(obj_dict, custom_encoder=custom_encoder) + if isinstance(obj, bytes): + return base64.b64encode(obj).decode("utf-8") if isinstance(obj, Enum): return obj.value if isinstance(obj, PurePath): @@ -80,20 +81,21 @@ def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any] encoded_list.append(jsonable_encoder(item, custom_encoder=custom_encoder)) return encoded_list - if type(obj) in pydantic_v1.json.ENCODERS_BY_TYPE: - return pydantic_v1.json.ENCODERS_BY_TYPE[type(obj)](obj) - for encoder, classes_tuple in encoders_by_class_tuples.items(): - if isinstance(obj, classes_tuple): - return encoder(obj) + def fallback_serializer(o: Any) -> Any: + attempt_encode = encode_by_type(o) + if attempt_encode is not None: + return attempt_encode - try: - data = dict(obj) - except Exception as e: - errors: List[Exception] = [] - errors.append(e) try: - data = vars(obj) + data = dict(o) except Exception as e: + errors: List[Exception] = [] errors.append(e) - raise ValueError(errors) from e - return jsonable_encoder(data, custom_encoder=custom_encoder) + try: + data = vars(o) + except Exception as e: + errors.append(e) + raise ValueError(errors) from e + return jsonable_encoder(data, custom_encoder=custom_encoder) + + return to_jsonable_with_fallback(obj, fallback_serializer) diff --git a/src/label_studio_sdk/core/pagination.py b/src/label_studio_sdk/core/pagination.py index c900ce322..5f482635a 100644 --- a/src/label_studio_sdk/core/pagination.py +++ b/src/label_studio_sdk/core/pagination.py @@ -4,11 +4,12 @@ from typing_extensions import Self -from .pydantic_utilities import pydantic_v1 +import pydantic # Generic to represent the underlying type of the results within a page T = typing.TypeVar("T") + # SDKs implement a Page ABC per-pagination request, the endpoint then returns a pager that wraps this type # for example, an endpoint will return SyncPager[UserPage] where UserPage implements the Page ABC.
ex: # @@ -18,16 +19,16 @@ # # This should be the outer function that returns the SyncPager again # get_next=lambda: list(..., cursor: response.cursor) (or list(..., offset: offset + 1)) # ) -class BasePage(pydantic_v1.BaseModel, typing.Generic[T]): +class BasePage(pydantic.BaseModel, typing.Generic[T]): has_next: bool items: typing.Optional[typing.List[T]] -class SyncPage(BasePage, typing.Generic[T]): +class SyncPage(BasePage[T], typing.Generic[T]): get_next: typing.Optional[typing.Callable[[], typing.Optional[Self]]] -class AsyncPage(BasePage, typing.Generic[T]): +class AsyncPage(BasePage[T], typing.Generic[T]): get_next: typing.Optional[typing.Callable[[], typing.Awaitable[typing.Optional[Self]]]] diff --git a/src/label_studio_sdk/core/pydantic_utilities.py b/src/label_studio_sdk/core/pydantic_utilities.py index a72c1a52f..ee8f0e410 100644 --- a/src/label_studio_sdk/core/pydantic_utilities.py +++ b/src/label_studio_sdk/core/pydantic_utilities.py @@ -1,28 +1,296 @@ # This file was auto-generated by Fern from our API Definition. +# nopycln: file +import datetime as dt import typing +from collections import defaultdict + +import typing_extensions import pydantic +from .datetime_utils import serialize_datetime +from .serialization import convert_and_respect_annotation_metadata + IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.") if IS_PYDANTIC_V2: - import pydantic.v1 as pydantic_v1 # type: ignore # nopycln: import + # isort will try to reformat the comments on these imports, which breaks mypy + # isort: off + from pydantic.v1.datetime_parse import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 + parse_date as parse_date, + ) + from pydantic.v1.datetime_parse import ( # pyright: ignore[reportMissingImports] # Pydantic v2 + parse_datetime as parse_datetime, + ) + from pydantic.v1.json import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 + ENCODERS_BY_TYPE as encoders_by_type, + ) + from pydantic.v1.typing import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 + get_args as get_args, + ) + from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 + get_origin as get_origin, + ) + from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 + is_literal_type as is_literal_type, + ) + from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 + is_union as is_union, + ) + from pydantic.v1.fields import ModelField as ModelField # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 else: - import pydantic as pydantic_v1 # type: ignore # nopycln: import + from pydantic.datetime_parse import parse_date as parse_date # type: ignore # Pydantic v1 + from pydantic.datetime_parse import parse_datetime as parse_datetime # type: ignore # Pydantic v1 + from pydantic.fields import ModelField as ModelField # type: ignore # Pydantic v1 + from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore # Pydantic v1 + from pydantic.typing import get_args as get_args # type: ignore # Pydantic v1 + from pydantic.typing import get_origin as get_origin # type: ignore # Pydantic v1 + from pydantic.typing import is_literal_type as is_literal_type # type: ignore # Pydantic v1 + from pydantic.typing import is_union as is_union # type: ignore # Pydantic v1 + + # isort: on + + +T = typing.TypeVar("T") +Model = typing.TypeVar("Model", bound=pydantic.BaseModel) + + +def parse_obj_as(type_: typing.Type[T], object_: typing.Any) -> T: + 
dealiased_object = convert_and_respect_annotation_metadata(object_=object_, annotation=type_, direction="read")
+    if IS_PYDANTIC_V2:
+        adapter = pydantic.TypeAdapter(type_)  # type: ignore # Pydantic v2
+        return adapter.validate_python(dealiased_object)
+    else:
+        return pydantic.parse_obj_as(type_, dealiased_object)
+
+
+def to_jsonable_with_fallback(
+    obj: typing.Any, fallback_serializer: typing.Callable[[typing.Any], typing.Any]
+) -> typing.Any:
+    if IS_PYDANTIC_V2:
+        from pydantic_core import to_jsonable_python
+
+        return to_jsonable_python(obj, fallback=fallback_serializer)
+    else:
+        return fallback_serializer(obj)
+
+
+class UniversalBaseModel(pydantic.BaseModel):
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(
+            # Allow fields beginning with `model_` to be used in the model
+            protected_namespaces=(),
+        )  # type: ignore # Pydantic v2
+
+        @pydantic.model_serializer(mode="wrap", when_used="json")  # type: ignore # Pydantic v2
+        def serialize_model(self, handler: pydantic.SerializerFunctionWrapHandler) -> typing.Any:  # type: ignore # Pydantic v2
+            serialized = handler(self)
+            data = {k: serialize_datetime(v) if isinstance(v, dt.datetime) else v for k, v in serialized.items()}
+            return data
+
+    else:
+
+        class Config:
+            smart_union = True
+            json_encoders = {dt.datetime: serialize_datetime}
+
+    @classmethod
+    def model_construct(
+        cls: typing.Type["Model"], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any
+    ) -> "Model":
+        dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read")
+        return cls.construct(_fields_set, **dealiased_object)
+
+    @classmethod
+    def construct(
+        cls: typing.Type["Model"], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any
+    ) -> "Model":
+        dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read")
+        if IS_PYDANTIC_V2:
+            return super().model_construct(_fields_set, **dealiased_object)  # type: ignore # Pydantic v2
+        else:
+            return super().construct(_fields_set, **dealiased_object)
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {
+            "by_alias": True,
+            "exclude_unset": True,
+            **kwargs,
+        }
+        if IS_PYDANTIC_V2:
+            return super().model_dump_json(**kwargs_with_defaults)  # type: ignore # Pydantic v2
+        else:
+            return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        """
+        Override the default dict method to `exclude_unset` by default. This function patches
+        `exclude_unset` to also include fields with non-None default values.
+        """
+        # Note: the logic here is multiplexed given the levers exposed in Pydantic V1 vs V2
+        # Pydantic V1's .dict can be extremely slow, so we do not want to call it twice.
+        #
+        # We'd ideally do the same for Pydantic V2, but it shells out to a library to serialize models
+        # that we have less control over, and this is less intrusive than custom serializers for now.
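+        #
+        # Sketch of the intended behavior (illustrative): a field with a non-None
+        # default that the caller never set still shows up in .dict(), because unset
+        # fields with meaningful defaults are folded back in by the logic below.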
+        if IS_PYDANTIC_V2:
+            kwargs_with_defaults_exclude_unset: typing.Any = {
+                **kwargs,
+                "by_alias": True,
+                "exclude_unset": True,
+                "exclude_none": False,
+            }
+            kwargs_with_defaults_exclude_none: typing.Any = {
+                **kwargs,
+                "by_alias": True,
+                "exclude_none": True,
+                "exclude_unset": False,
+            }
+            dict_dump = deep_union_pydantic_dicts(
+                super().model_dump(**kwargs_with_defaults_exclude_unset),  # type: ignore # Pydantic v2
+                super().model_dump(**kwargs_with_defaults_exclude_none),  # type: ignore # Pydantic v2
+            )
+
+        else:
+            _fields_set = self.__fields_set__.copy()
+
+            fields = _get_model_fields(self.__class__)
+            for name, field in fields.items():
+                if name not in _fields_set:
+                    default = _get_field_default(field)
+
+                    # If the default value is non-null, act like the field has been set.
+                    # This effectively allows exclude_unset to work like exclude_none, while
+                    # still passing through intentionally set None values.
+                    if default is not None or ("exclude_unset" in kwargs and not kwargs["exclude_unset"]):
+                        _fields_set.add(name)
+
+                        if default is not None:
+                            self.__fields_set__.add(name)
+
+            kwargs_with_defaults_exclude_unset_include_fields: typing.Any = {
+                "by_alias": True,
+                "exclude_unset": True,
+                "include": _fields_set,
+                **kwargs,
+            }
+
+            dict_dump = super().dict(**kwargs_with_defaults_exclude_unset_include_fields)
+
+        return convert_and_respect_annotation_metadata(object_=dict_dump, annotation=self.__class__, direction="write")
+
+
+def _union_list_of_pydantic_dicts(
+    source: typing.List[typing.Any], destination: typing.List[typing.Any]
+) -> typing.List[typing.Any]:
+    converted_list: typing.List[typing.Any] = []
+    for i, item in enumerate(source):
+        destination_value = destination[i]  # type: ignore
+        if isinstance(item, dict):
+            converted_list.append(deep_union_pydantic_dicts(item, destination_value))
+        elif isinstance(item, list):
+            converted_list.append(_union_list_of_pydantic_dicts(item, destination_value))
+        else:
+            converted_list.append(item)
+    return converted_list
 
 
 def deep_union_pydantic_dicts(
     source: typing.Dict[str, typing.Any], destination: typing.Dict[str, typing.Any]
 ) -> typing.Dict[str, typing.Any]:
     for key, value in source.items():
+        node = destination.setdefault(key, {})
         if isinstance(value, dict):
-            node = destination.setdefault(key, {})
             deep_union_pydantic_dicts(value, node)
+        # Note: we do not do this same processing for sets given we do not have sets of models
+        # and given the sets are unordered, the processing of the set and matching objects would
+        # be non-trivial.
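+        # Lists, by contrast, are merged element-by-element via
+        # _union_list_of_pydantic_dicts above, which recurses into nested dicts and lists.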
+ elif isinstance(value, list): + destination[key] = _union_list_of_pydantic_dicts(value, node) else: destination[key] = value return destination -__all__ = ["pydantic_v1"] +if IS_PYDANTIC_V2: + + class V2RootModel(UniversalBaseModel, pydantic.RootModel): # type: ignore # Pydantic v2 + pass + + UniversalRootModel: typing_extensions.TypeAlias = V2RootModel # type: ignore +else: + UniversalRootModel: typing_extensions.TypeAlias = UniversalBaseModel # type: ignore + + +def encode_by_type(o: typing.Any) -> typing.Any: + encoders_by_class_tuples: typing.Dict[typing.Callable[[typing.Any], typing.Any], typing.Tuple[typing.Any, ...]] = ( + defaultdict(tuple) + ) + for type_, encoder in encoders_by_type.items(): + encoders_by_class_tuples[encoder] += (type_,) + + if type(o) in encoders_by_type: + return encoders_by_type[type(o)](o) + for encoder, classes_tuple in encoders_by_class_tuples.items(): + if isinstance(o, classes_tuple): + return encoder(o) + + +def update_forward_refs(model: typing.Type["Model"], **localns: typing.Any) -> None: + if IS_PYDANTIC_V2: + model.model_rebuild(raise_errors=False) # type: ignore # Pydantic v2 + else: + model.update_forward_refs(**localns) + + +# Mirrors Pydantic's internal typing +AnyCallable = typing.Callable[..., typing.Any] + + +def universal_root_validator( + pre: bool = False, +) -> typing.Callable[[AnyCallable], AnyCallable]: + def decorator(func: AnyCallable) -> AnyCallable: + if IS_PYDANTIC_V2: + return pydantic.model_validator(mode="before" if pre else "after")(func) # type: ignore # Pydantic v2 + else: + return pydantic.root_validator(pre=pre)(func) # type: ignore # Pydantic v1 + + return decorator + + +def universal_field_validator(field_name: str, pre: bool = False) -> typing.Callable[[AnyCallable], AnyCallable]: + def decorator(func: AnyCallable) -> AnyCallable: + if IS_PYDANTIC_V2: + return pydantic.field_validator(field_name, mode="before" if pre else "after")(func) # type: ignore # Pydantic v2 + else: + return pydantic.validator(field_name, pre=pre)(func) # type: ignore # Pydantic v1 + + return decorator + + +PydanticField = typing.Union[ModelField, pydantic.fields.FieldInfo] + + +def _get_model_fields( + model: typing.Type["Model"], +) -> typing.Mapping[str, PydanticField]: + if IS_PYDANTIC_V2: + return model.model_fields # type: ignore # Pydantic v2 + else: + return model.__fields__ # type: ignore # Pydantic v1 + + +def _get_field_default(field: PydanticField) -> typing.Any: + try: + value = field.get_default() # type: ignore # Pydantic < v1.10.15 + except: + value = field.default + if IS_PYDANTIC_V2: + from pydantic_core import PydanticUndefined + + if value == PydanticUndefined: + return None + return value + return value diff --git a/src/label_studio_sdk/core/query_encoder.py b/src/label_studio_sdk/core/query_encoder.py index 1f5f766b4..3183001d4 100644 --- a/src/label_studio_sdk/core/query_encoder.py +++ b/src/label_studio_sdk/core/query_encoder.py @@ -1,33 +1,58 @@ # This file was auto-generated by Fern from our API Definition. 
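+# Illustrative sketch of the flattening performed below (comment only, not behavior):
+#   encode_query({"filter": {"tags": ["a", "b"]}})
+#   == [("filter[tags]", "a"), ("filter[tags]", "b")]
+# Returning (key, value) tuples instead of a dict lets repeated keys survive encoding.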
-from collections import ChainMap
-from typing import Any, Dict, Optional
+from typing import Any, Dict, List, Optional, Tuple
 
-from .pydantic_utilities import pydantic_v1
+import pydantic
 
 
-# Flattens dicts to be of the form {"key[subkey][subkey2]": value} where value is not a dict
-def traverse_query_dict(dict_flat: Dict[str, Any], key_prefix: Optional[str] = None) -> Dict[str, Any]:
-    result = {}
+# Flattens dicts into a list of ("key[subkey][subkey2]", value) tuples, where value is not a dict
+def traverse_query_dict(dict_flat: Dict[str, Any], key_prefix: Optional[str] = None) -> List[Tuple[str, Any]]:
+    result = []
     for k, v in dict_flat.items():
         key = f"{key_prefix}[{k}]" if key_prefix is not None else k
         if isinstance(v, dict):
-            result.update(traverse_query_dict(v, key))
+            result.extend(traverse_query_dict(v, key))
+        elif isinstance(v, list):
+            for arr_v in v:
+                if isinstance(arr_v, dict):
+                    result.extend(traverse_query_dict(arr_v, key))
+                else:
+                    result.append((key, arr_v))
         else:
-            result[key] = v
+            result.append((key, v))
     return result
 
 
-def single_query_encoder(query_key: str, query_value: Any) -> Dict[str, Any]:
-    if isinstance(query_value, pydantic_v1.BaseModel) or isinstance(query_value, dict):
-        if isinstance(query_value, pydantic_v1.BaseModel):
+def single_query_encoder(query_key: str, query_value: Any) -> List[Tuple[str, Any]]:
+    if isinstance(query_value, pydantic.BaseModel) or isinstance(query_value, dict):
+        if isinstance(query_value, pydantic.BaseModel):
             obj_dict = query_value.dict(by_alias=True)
         else:
             obj_dict = query_value
         return traverse_query_dict(obj_dict, query_key)
+    elif isinstance(query_value, list):
+        encoded_values: List[Tuple[str, Any]] = []
+        for value in query_value:
+            if isinstance(value, pydantic.BaseModel) or isinstance(value, dict):
+                if isinstance(value, pydantic.BaseModel):
+                    obj_dict = value.dict(by_alias=True)
+                elif isinstance(value, dict):
+                    obj_dict = value
 
-    return {query_key: query_value}
+                encoded_values.extend(single_query_encoder(query_key, obj_dict))
+            else:
+                encoded_values.append((query_key, value))
+        return encoded_values
 
-def encode_query(query: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
-    return dict(ChainMap(*[single_query_encoder(k, v) for k, v in query.items()])) if query is not None else None
+    return [(query_key, query_value)]
+
+
+def encode_query(query: Optional[Dict[str, Any]]) -> Optional[List[Tuple[str, Any]]]:
+    if query is None:
+        return None
+
+    encoded_query = []
+    for k, v in query.items():
+        encoded_query.extend(single_query_encoder(k, v))
+    return encoded_query
diff --git a/src/label_studio_sdk/core/request_options.py b/src/label_studio_sdk/core/request_options.py
index d0bf0dbce..1b3880443 100644
--- a/src/label_studio_sdk/core/request_options.py
+++ b/src/label_studio_sdk/core/request_options.py
@@ -23,6 +23,8 @@ class RequestOptions(typing.TypedDict, total=False):
     - additional_query_parameters: typing.Dict[str, typing.Any]. A dictionary containing additional parameters to spread into the request's query parameters dict
 
     - additional_body_parameters: typing.Dict[str, typing.Any]. A dictionary containing additional parameters to spread into the request's body parameters dict
+
+    - chunk_size: int. The size, in bytes, of each chunk of data processed while streaming back the response. This equates to leveraging `chunk_size` within `requests` or `httpx`, and is only leveraged for file downloads.
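+
+    An illustrative call (the endpoint here is a placeholder; any generated method accepts request_options):
+        client.projects.list(request_options={"timeout_in_seconds": 30, "additional_headers": {"X-Debug": "1"}})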
""" timeout_in_seconds: NotRequired[int] @@ -30,3 +32,4 @@ class RequestOptions(typing.TypedDict, total=False): additional_headers: NotRequired[typing.Dict[str, typing.Any]] additional_query_parameters: NotRequired[typing.Dict[str, typing.Any]] additional_body_parameters: NotRequired[typing.Dict[str, typing.Any]] + chunk_size: NotRequired[int] diff --git a/src/label_studio_sdk/core/serialization.py b/src/label_studio_sdk/core/serialization.py new file mode 100644 index 000000000..cb5dcbf93 --- /dev/null +++ b/src/label_studio_sdk/core/serialization.py @@ -0,0 +1,272 @@ +# This file was auto-generated by Fern from our API Definition. + +import collections +import inspect +import typing + +import typing_extensions + +import pydantic + + +class FieldMetadata: + """ + Metadata class used to annotate fields to provide additional information. + + Example: + class MyDict(TypedDict): + field: typing.Annotated[str, FieldMetadata(alias="field_name")] + + Will serialize: `{"field": "value"}` + To: `{"field_name": "value"}` + """ + + alias: str + + def __init__(self, *, alias: str) -> None: + self.alias = alias + + +def convert_and_respect_annotation_metadata( + *, + object_: typing.Any, + annotation: typing.Any, + inner_type: typing.Optional[typing.Any] = None, + direction: typing.Literal["read", "write"], +) -> typing.Any: + """ + Respect the metadata annotations on a field, such as aliasing. This function effectively + manipulates the dict-form of an object to respect the metadata annotations. This is primarily used for + TypedDicts, which cannot support aliasing out of the box, and can be extended for additional + utilities, such as defaults. + + Parameters + ---------- + object_ : typing.Any + + annotation : type + The type we're looking to apply typing annotations from + + inner_type : typing.Optional[type] + + Returns + ------- + typing.Any + """ + + if object_ is None: + return None + if inner_type is None: + inner_type = annotation + + clean_type = _remove_annotations(inner_type) + # Pydantic models + if ( + inspect.isclass(clean_type) + and issubclass(clean_type, pydantic.BaseModel) + and isinstance(object_, typing.Mapping) + ): + return _convert_mapping(object_, clean_type, direction) + # TypedDicts + if typing_extensions.is_typeddict(clean_type) and isinstance(object_, typing.Mapping): + return _convert_mapping(object_, clean_type, direction) + + if ( + typing_extensions.get_origin(clean_type) == typing.Dict + or typing_extensions.get_origin(clean_type) == dict + or clean_type == typing.Dict + ) and isinstance(object_, typing.Dict): + key_type = typing_extensions.get_args(clean_type)[0] + value_type = typing_extensions.get_args(clean_type)[1] + + return { + key: convert_and_respect_annotation_metadata( + object_=value, + annotation=annotation, + inner_type=value_type, + direction=direction, + ) + for key, value in object_.items() + } + + # If you're iterating on a string, do not bother to coerce it to a sequence. 
+    if not isinstance(object_, str):
+        if (
+            typing_extensions.get_origin(clean_type) == typing.Set
+            or typing_extensions.get_origin(clean_type) == set
+            or clean_type == typing.Set
+        ) and isinstance(object_, typing.Set):
+            inner_type = typing_extensions.get_args(clean_type)[0]
+            return {
+                convert_and_respect_annotation_metadata(
+                    object_=item,
+                    annotation=annotation,
+                    inner_type=inner_type,
+                    direction=direction,
+                )
+                for item in object_
+            }
+        elif (
+            (
+                typing_extensions.get_origin(clean_type) == typing.List
+                or typing_extensions.get_origin(clean_type) == list
+                or clean_type == typing.List
+            )
+            and isinstance(object_, typing.List)
+        ) or (
+            (
+                typing_extensions.get_origin(clean_type) == typing.Sequence
+                or typing_extensions.get_origin(clean_type) == collections.abc.Sequence
+                or clean_type == typing.Sequence
+            )
+            and isinstance(object_, typing.Sequence)
+        ):
+            inner_type = typing_extensions.get_args(clean_type)[0]
+            return [
+                convert_and_respect_annotation_metadata(
+                    object_=item,
+                    annotation=annotation,
+                    inner_type=inner_type,
+                    direction=direction,
+                )
+                for item in object_
+            ]
+
+    if typing_extensions.get_origin(clean_type) == typing.Union:
+        # We should be able to ~relatively~ safely try to convert keys against all
+        # member types in the union; the edge case is when one member aliases a field
+        # of the same name to a different name than another member, or when one member
+        # aliases a field that another member does not alias at all.
+        for member in typing_extensions.get_args(clean_type):
+            object_ = convert_and_respect_annotation_metadata(
+                object_=object_,
+                annotation=annotation,
+                inner_type=member,
+                direction=direction,
+            )
+        return object_
+
+    annotated_type = _get_annotation(annotation)
+    if annotated_type is None:
+        return object_
+
+    # The object is annotated but is not a TypedDict, a Union, or another container
+    # (list, set, sequence, etc.), so there is nothing further to convert; return it as-is.
+ return object_ + + +def _convert_mapping( + object_: typing.Mapping[str, object], + expected_type: typing.Any, + direction: typing.Literal["read", "write"], +) -> typing.Mapping[str, object]: + converted_object: typing.Dict[str, object] = {} + annotations = typing_extensions.get_type_hints(expected_type, include_extras=True) + aliases_to_field_names = _get_alias_to_field_name(annotations) + for key, value in object_.items(): + if direction == "read" and key in aliases_to_field_names: + dealiased_key = aliases_to_field_names.get(key) + if dealiased_key is not None: + type_ = annotations.get(dealiased_key) + else: + type_ = annotations.get(key) + # Note you can't get the annotation by the field name if you're in read mode, so you must check the aliases map + # + # So this is effectively saying if we're in write mode, and we don't have a type, or if we're in read mode and we don't have an alias + # then we can just pass the value through as is + if type_ is None: + converted_object[key] = value + elif direction == "read" and key not in aliases_to_field_names: + converted_object[key] = convert_and_respect_annotation_metadata( + object_=value, annotation=type_, direction=direction + ) + else: + converted_object[_alias_key(key, type_, direction, aliases_to_field_names)] = ( + convert_and_respect_annotation_metadata(object_=value, annotation=type_, direction=direction) + ) + return converted_object + + +def _get_annotation(type_: typing.Any) -> typing.Optional[typing.Any]: + maybe_annotated_type = typing_extensions.get_origin(type_) + if maybe_annotated_type is None: + return None + + if maybe_annotated_type == typing_extensions.NotRequired: + type_ = typing_extensions.get_args(type_)[0] + maybe_annotated_type = typing_extensions.get_origin(type_) + + if maybe_annotated_type == typing_extensions.Annotated: + return type_ + + return None + + +def _remove_annotations(type_: typing.Any) -> typing.Any: + maybe_annotated_type = typing_extensions.get_origin(type_) + if maybe_annotated_type is None: + return type_ + + if maybe_annotated_type == typing_extensions.NotRequired: + return _remove_annotations(typing_extensions.get_args(type_)[0]) + + if maybe_annotated_type == typing_extensions.Annotated: + return _remove_annotations(typing_extensions.get_args(type_)[0]) + + return type_ + + +def get_alias_to_field_mapping(type_: typing.Any) -> typing.Dict[str, str]: + annotations = typing_extensions.get_type_hints(type_, include_extras=True) + return _get_alias_to_field_name(annotations) + + +def get_field_to_alias_mapping(type_: typing.Any) -> typing.Dict[str, str]: + annotations = typing_extensions.get_type_hints(type_, include_extras=True) + return _get_field_to_alias_name(annotations) + + +def _get_alias_to_field_name( + field_to_hint: typing.Dict[str, typing.Any], +) -> typing.Dict[str, str]: + aliases = {} + for field, hint in field_to_hint.items(): + maybe_alias = _get_alias_from_type(hint) + if maybe_alias is not None: + aliases[maybe_alias] = field + return aliases + + +def _get_field_to_alias_name( + field_to_hint: typing.Dict[str, typing.Any], +) -> typing.Dict[str, str]: + aliases = {} + for field, hint in field_to_hint.items(): + maybe_alias = _get_alias_from_type(hint) + if maybe_alias is not None: + aliases[field] = maybe_alias + return aliases + + +def _get_alias_from_type(type_: typing.Any) -> typing.Optional[str]: + maybe_annotated_type = _get_annotation(type_) + + if maybe_annotated_type is not None: + # The actual annotations are 1 onward, the first is the annotated type + annotations = 
typing_extensions.get_args(maybe_annotated_type)[1:] + + for annotation in annotations: + if isinstance(annotation, FieldMetadata) and annotation.alias is not None: + return annotation.alias + return None + + +def _alias_key( + key: str, + type_: typing.Any, + direction: typing.Literal["read", "write"], + aliases_to_field_names: typing.Dict[str, str], +) -> str: + if direction == "read": + return aliases_to_field_names.get(key, key) + return _get_alias_from_type(type_=type_) or key diff --git a/src/label_studio_sdk/errors/bad_request_error.py b/src/label_studio_sdk/errors/bad_request_error.py index 44a05f49b..9c13c61f9 100644 --- a/src/label_studio_sdk/errors/bad_request_error.py +++ b/src/label_studio_sdk/errors/bad_request_error.py @@ -1,10 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -import typing - from ..core.api_error import ApiError +import typing class BadRequestError(ApiError): - def __init__(self, body: typing.Any): + def __init__(self, body: typing.Optional[typing.Any]): super().__init__(status_code=400, body=body) diff --git a/src/label_studio_sdk/export_storage/azure/client.py b/src/label_studio_sdk/export_storage/azure/client.py index fb58ccb1d..ceb4d337c 100644 --- a/src/label_studio_sdk/export_storage/azure/client.py +++ b/src/label_studio_sdk/export_storage/azure/client.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.azure_blob_export_storage import AzureBlobExportStorage +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.azure_create_response import AzureCreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.azure_update_response import AzureUpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -24,6 +24,7 @@ def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[AzureBlobExportStorage]: """ + You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. Use this API request to get a list of all Azure export (target) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). 
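+
+    Illustrative error handling (ApiError is the base class raised by this client):
+
+        from label_studio_sdk.core.api_error import ApiError
+
+        try:
+            client.export_storage.azure.list()
+        except ApiError as e:
+            print(e.status_code, e.body)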
@@ -45,7 +46,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -53,11 +54,22 @@ def list( client.export_storage.azure.list() """ _response = self._client_wrapper.httpx_client.request( - "api/storages/export/azure", method="GET", params={"project": project}, request_options=request_options + "api/storages/export/azure", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[AzureBlobExportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[AzureBlobExportStorage], + parse_obj_as( + type_=typing.List[AzureBlobExportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -77,6 +89,7 @@ def create( request_options: typing.Optional[RequestOptions] = None, ) -> AzureCreateResponse: """ + Create a new target storage connection to Microsoft Azure Blob storage. For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. @@ -119,7 +132,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -139,12 +152,21 @@ def create( "account_name": account_name, "account_key": account_key, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureCreateResponse, _response.json()) # type: ignore + return typing.cast( + AzureCreateResponse, + parse_obj_as( + type_=AzureCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -165,6 +187,7 @@ def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific Azure export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. Parameters @@ -205,7 +228,7 @@ def validate( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -226,6 +249,9 @@ def validate( "account_name": account_name, "account_key": account_key, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -239,6 +265,7 @@ def validate( def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobExportStorage: """ + Get a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
@@ -258,7 +285,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -268,11 +295,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureBlobExportStorage, _response.json()) # type: ignore + return typing.cast( + AzureBlobExportStorage, + parse_obj_as( + type_=AzureBlobExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -280,6 +315,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. @@ -298,7 +334,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -308,7 +344,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -333,6 +371,7 @@ def update( request_options: typing.Optional[RequestOptions] = None, ) -> AzureUpdateResponse: """ + Update a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
@@ -376,7 +415,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -398,12 +437,21 @@ def update( "account_name": account_name, "account_key": account_key, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureUpdateResponse, _response.json()) # type: ignore + return typing.cast( + AzureUpdateResponse, + parse_obj_as( + type_=AzureUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -411,6 +459,7 @@ def update( def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobExportStorage: """ + Sync tasks to an Azure export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Sync operations with external containers only go one way. They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. @@ -431,7 +480,7 @@ def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = No Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -441,11 +490,19 @@ def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = No ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/export/azure/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureBlobExportStorage, _response.json()) # type: ignore + return typing.cast( + AzureBlobExportStorage, + parse_obj_as( + type_=AzureBlobExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -460,6 +517,7 @@ async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[AzureBlobExportStorage]: """ + You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. Use this API request to get a list of all Azure export (target) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). 
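+
+    An illustrative end-to-end flow (all values are placeholders):
+
+        client.export_storage.azure.validate(project=1, container="my-container")
+        storage = client.export_storage.azure.create(project=1, container="my-container")
+        client.export_storage.azure.sync(id="1")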
@@ -481,19 +539,38 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.azure.list() + + + async def main() -> None: + await client.export_storage.azure.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/azure", method="GET", params={"project": project}, request_options=request_options + "api/storages/export/azure", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[AzureBlobExportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[AzureBlobExportStorage], + parse_obj_as( + type_=typing.List[AzureBlobExportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -513,6 +590,7 @@ async def create( request_options: typing.Optional[RequestOptions] = None, ) -> AzureCreateResponse: """ + Create a new target storage connection to Microsoft Azure Blob storage. For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. @@ -555,12 +633,20 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.azure.create() + + + async def main() -> None: + await client.export_storage.azure.create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/export/azure", @@ -575,12 +661,21 @@ async def create( "account_name": account_name, "account_key": account_key, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureCreateResponse, _response.json()) # type: ignore + return typing.cast( + AzureCreateResponse, + parse_obj_as( + type_=AzureCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -601,6 +696,7 @@ async def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific Azure export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. 
Parameters @@ -641,12 +737,20 @@ async def validate( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.azure.validate() + + + async def main() -> None: + await client.export_storage.azure.validate() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/export/azure/validate", @@ -662,6 +766,9 @@ async def validate( "account_name": account_name, "account_key": account_key, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -675,6 +782,7 @@ async def validate( async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobExportStorage: """ + Get a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -694,21 +802,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.azure.get( - id=1, - ) + + + async def main() -> None: + await client.export_storage.azure.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureBlobExportStorage, _response.json()) # type: ignore + return typing.cast( + AzureBlobExportStorage, + parse_obj_as( + type_=AzureBlobExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -716,6 +840,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. 
@@ -734,17 +859,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.azure.delete( - id=1, - ) + + + async def main() -> None: + await client.export_storage.azure.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -769,6 +904,7 @@ async def update( request_options: typing.Optional[RequestOptions] = None, ) -> AzureUpdateResponse: """ + Update a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -812,14 +948,22 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.azure.update( - id=1, - ) + + + async def main() -> None: + await client.export_storage.azure.update( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/azure/{jsonable_encoder(id)}", @@ -834,12 +978,21 @@ async def update( "account_name": account_name, "account_key": account_key, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureUpdateResponse, _response.json()) # type: ignore + return typing.cast( + AzureUpdateResponse, + parse_obj_as( + type_=AzureUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -847,6 +1000,7 @@ async def update( async def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobExportStorage: """ + Sync tasks to an Azure export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Sync operations with external containers only go one way. They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. 
@@ -867,21 +1021,37 @@ async def sync(self, id: str, *, request_options: typing.Optional[RequestOptions Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.azure.sync( - id="id", - ) + + + async def main() -> None: + await client.export_storage.azure.sync( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/export/azure/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureBlobExportStorage, _response.json()) # type: ignore + return typing.cast( + AzureBlobExportStorage, + parse_obj_as( + type_=AzureBlobExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/export_storage/azure/types/azure_create_response.py b/src/label_studio_sdk/export_storage/azure/types/azure_create_response.py index 6c2906c26..d82c30787 100644 --- a/src/label_studio_sdk/export_storage/azure/types/azure_create_response.py +++ b/src/label_studio_sdk/export_storage/azure/types/azure_create_response.py @@ -1,67 +1,57 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class AzureCreateResponse(pydantic_v1.BaseModel): - can_delete_objects: typing.Optional[bool] = pydantic_v1.Field(default=None) +class AzureCreateResponse(UniversalBaseModel): + can_delete_objects: typing.Optional[bool] = pydantic.Field(default=None) """ Deletion from storage enabled """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - container: typing.Optional[str] = pydantic_v1.Field(default=None) + container: typing.Optional[str] = pydantic.Field(default=None) """ Azure blob container """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ Azure blob prefix name """ - account_name: typing.Optional[str] = pydantic_v1.Field(default=None) + account_name: typing.Optional[str] = pydantic.Field(default=None) """ Azure Blob account name """ - account_key: typing.Optional[str] = pydantic_v1.Field(default=None) + account_key: typing.Optional[str] = pydantic.Field(default=None) """ Azure Blob account key """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> 
typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/export_storage/azure/types/azure_update_response.py b/src/label_studio_sdk/export_storage/azure/types/azure_update_response.py index 6aaf7218a..e530bafb9 100644 --- a/src/label_studio_sdk/export_storage/azure/types/azure_update_response.py +++ b/src/label_studio_sdk/export_storage/azure/types/azure_update_response.py @@ -1,67 +1,57 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class AzureUpdateResponse(pydantic_v1.BaseModel): - can_delete_objects: typing.Optional[bool] = pydantic_v1.Field(default=None) +class AzureUpdateResponse(UniversalBaseModel): + can_delete_objects: typing.Optional[bool] = pydantic.Field(default=None) """ Deletion from storage enabled """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - container: typing.Optional[str] = pydantic_v1.Field(default=None) + container: typing.Optional[str] = pydantic.Field(default=None) """ Azure blob container """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ Azure blob prefix name """ - account_name: typing.Optional[str] = pydantic_v1.Field(default=None) + account_name: typing.Optional[str] = pydantic.Field(default=None) """ Azure Blob account name """ - account_key: typing.Optional[str] = pydantic_v1.Field(default=None) + account_key: typing.Optional[str] = pydantic.Field(default=None) """ Azure Blob account key """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + 
model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/export_storage/client.py b/src/label_studio_sdk/export_storage/client.py index c683ebb8c..abb4cd3d9 100644 --- a/src/label_studio_sdk/export_storage/client.py +++ b/src/label_studio_sdk/export_storage/client.py @@ -1,19 +1,25 @@ # This file was auto-generated by Fern from our API Definition. +from ..core.client_wrapper import SyncClientWrapper +from .azure.client import AzureClient +from .gcs.client import GcsClient +from .local.client import LocalClient +from .redis.client import RedisClient +from .s3.client import S3Client +from .s3s.client import S3SClient import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.pydantic_utilities import pydantic_v1 from ..core.request_options import RequestOptions -from .azure.client import AsyncAzureClient, AzureClient -from .gcs.client import AsyncGcsClient, GcsClient -from .local.client import AsyncLocalClient, LocalClient -from .redis.client import AsyncRedisClient, RedisClient -from .s3.client import AsyncS3Client, S3Client -from .s3s.client import AsyncS3SClient, S3SClient from .types.export_storage_list_types_response_item import ExportStorageListTypesResponseItem +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper +from .azure.client import AsyncAzureClient +from .gcs.client import AsyncGcsClient +from .local.client import AsyncLocalClient +from .redis.client import AsyncRedisClient +from .s3.client import AsyncS3Client +from .s3s.client import AsyncS3SClient class ExportStorageClient: @@ -44,7 +50,7 @@ def list_types( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -52,11 +58,19 @@ def list_types( client.export_storage.list_types() """ _response = self._client_wrapper.httpx_client.request( - "api/storages/export/types", method="GET", request_options=request_options + "api/storages/export/types", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[ExportStorageListTypesResponseItem], _response.json()) # type: ignore + return typing.cast( + typing.List[ExportStorageListTypesResponseItem], + parse_obj_as( + type_=typing.List[ExportStorageListTypesResponseItem], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -91,19 +105,35 @@ async def list_types( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.list_types() + + + async def main() -> None: + await client.export_storage.list_types() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - 
"api/storages/export/types", method="GET", request_options=request_options + "api/storages/export/types", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[ExportStorageListTypesResponseItem], _response.json()) # type: ignore + return typing.cast( + typing.List[ExportStorageListTypesResponseItem], + parse_obj_as( + type_=typing.List[ExportStorageListTypesResponseItem], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/export_storage/gcs/client.py b/src/label_studio_sdk/export_storage/gcs/client.py index f7a687f3a..ac53b5756 100644 --- a/src/label_studio_sdk/export_storage/gcs/client.py +++ b/src/label_studio_sdk/export_storage/gcs/client.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.gcs_export_storage import GcsExportStorage +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.gcs_create_response import GcsCreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.gcs_update_response import GcsUpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -24,6 +24,7 @@ def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[GcsExportStorage]: """ + You can connect your Google Cloud Storage bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all GCS export (target) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). 
@@ -45,7 +46,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -53,11 +54,22 @@ def list( client.export_storage.gcs.list() """ _response = self._client_wrapper.httpx_client.request( - "api/storages/export/gcs", method="GET", params={"project": project}, request_options=request_options + "api/storages/export/gcs", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[GcsExportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[GcsExportStorage], + parse_obj_as( + type_=typing.List[GcsExportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -77,6 +89,7 @@ def create( request_options: typing.Optional[RequestOptions] = None, ) -> GcsCreateResponse: """ + Create a new target storage connection to Google Cloud Storage. For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. @@ -119,7 +132,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -139,12 +152,21 @@ def create( "google_application_credentials": google_application_credentials, "google_project_id": google_project_id, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsCreateResponse, _response.json()) # type: ignore + return typing.cast( + GcsCreateResponse, + parse_obj_as( + type_=GcsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -165,6 +187,7 @@ def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific GCS export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. Parameters @@ -205,7 +228,7 @@ def validate( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -226,6 +249,9 @@ def validate( "google_application_credentials": google_application_credentials, "google_project_id": google_project_id, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -239,6 +265,7 @@ def validate( def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsExportStorage: """ + Get a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
@@ -258,7 +285,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -268,11 +295,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsExportStorage, _response.json()) # type: ignore + return typing.cast( + GcsExportStorage, + parse_obj_as( + type_=GcsExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -280,6 +315,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. @@ -298,7 +334,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -308,7 +344,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -333,6 +371,7 @@ def update( request_options: typing.Optional[RequestOptions] = None, ) -> GcsUpdateResponse: """ + Update a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
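One practical note on the credential parameters that create, validate, and update all accept: per the field docs in the response models further down, `google_application_credentials` takes the content of the service-account JSON key file, not a path to it. A usage sketch under that assumption (bucket name and file path are placeholders):

import json
from pathlib import Path

from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Pass the *content* of the key file, not its filesystem path.
creds = Path("service-account.json").read_text()

client.export_storage.gcs.create(
    project=1,
    bucket="my-bucket",
    google_application_credentials=creds,
    # Service-account key files typically carry the project ID themselves.
    google_project_id=json.loads(creds).get("project_id"),
)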
@@ -376,7 +415,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -398,12 +437,21 @@ def update( "google_application_credentials": google_application_credentials, "google_project_id": google_project_id, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsUpdateResponse, _response.json()) # type: ignore + return typing.cast( + GcsUpdateResponse, + parse_obj_as( + type_=GcsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -411,6 +459,7 @@ def update( def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> GcsExportStorage: """ + Sync tasks to a GCS export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. @@ -431,7 +480,7 @@ def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = No Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -441,11 +490,19 @@ def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = No ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/export/gcs/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsExportStorage, _response.json()) # type: ignore + return typing.cast( + GcsExportStorage, + parse_obj_as( + type_=GcsExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -460,6 +517,7 @@ async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[GcsExportStorage]: """ + You can connect your Google Cloud Storage bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all GCS export (target) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). 
@@ -481,19 +539,38 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.gcs.list() + + + async def main() -> None: + await client.export_storage.gcs.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/gcs", method="GET", params={"project": project}, request_options=request_options + "api/storages/export/gcs", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[GcsExportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[GcsExportStorage], + parse_obj_as( + type_=typing.List[GcsExportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -513,6 +590,7 @@ async def create( request_options: typing.Optional[RequestOptions] = None, ) -> GcsCreateResponse: """ + Create a new target storage connection to Google Cloud Storage. For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. @@ -555,12 +633,20 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.gcs.create() + + + async def main() -> None: + await client.export_storage.gcs.create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/export/gcs", @@ -575,12 +661,21 @@ async def create( "google_application_credentials": google_application_credentials, "google_project_id": google_project_id, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsCreateResponse, _response.json()) # type: ignore + return typing.cast( + GcsCreateResponse, + parse_obj_as( + type_=GcsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -601,6 +696,7 @@ async def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific GCS export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. 
Parameters @@ -641,12 +737,20 @@ async def validate( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.gcs.validate() + + + async def main() -> None: + await client.export_storage.gcs.validate() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/export/gcs/validate", @@ -662,6 +766,9 @@ async def validate( "google_application_credentials": google_application_credentials, "google_project_id": google_project_id, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -675,6 +782,7 @@ async def validate( async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsExportStorage: """ + Get a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -694,21 +802,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.gcs.get( - id=1, - ) + + + async def main() -> None: + await client.export_storage.gcs.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsExportStorage, _response.json()) # type: ignore + return typing.cast( + GcsExportStorage, + parse_obj_as( + type_=GcsExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -716,6 +840,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. 
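Since deleting a target storage only removes the connection record, a quick way to confirm the removal is to re-list the project's connections. A small sketch built from the delete and list methods above, assuming (not shown in this diff) that `GcsExportStorage` exposes an `id` field:

from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Remove the GCS target storage connection with ID 1 (placeholder ID)...
client.export_storage.gcs.delete(id=1)

# ...then verify it no longer appears among the project's GCS export storages.
remaining = client.export_storage.gcs.list(project=1)
assert all(storage.id != 1 for storage in remaining)  # `id` field assumed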
@@ -734,17 +859,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.gcs.delete( - id=1, - ) + + + async def main() -> None: + await client.export_storage.gcs.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -769,6 +904,7 @@ async def update( request_options: typing.Optional[RequestOptions] = None, ) -> GcsUpdateResponse: """ + Update a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -812,14 +948,22 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.gcs.update( - id=1, - ) + + + async def main() -> None: + await client.export_storage.gcs.update( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/gcs/{jsonable_encoder(id)}", @@ -834,12 +978,21 @@ async def update( "google_application_credentials": google_application_credentials, "google_project_id": google_project_id, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsUpdateResponse, _response.json()) # type: ignore + return typing.cast( + GcsUpdateResponse, + parse_obj_as( + type_=GcsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -847,6 +1000,7 @@ async def update( async def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> GcsExportStorage: """ + Sync tasks to a GCS export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. 
@@ -867,21 +1021,37 @@ async def sync(self, id: str, *, request_options: typing.Optional[RequestOptions Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.gcs.sync( - id="id", - ) + + + async def main() -> None: + await client.export_storage.gcs.sync( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/export/gcs/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsExportStorage, _response.json()) # type: ignore + return typing.cast( + GcsExportStorage, + parse_obj_as( + type_=GcsExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/export_storage/gcs/types/gcs_create_response.py b/src/label_studio_sdk/export_storage/gcs/types/gcs_create_response.py index ee08f295f..955c9d0cf 100644 --- a/src/label_studio_sdk/export_storage/gcs/types/gcs_create_response.py +++ b/src/label_studio_sdk/export_storage/gcs/types/gcs_create_response.py @@ -1,67 +1,57 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class GcsCreateResponse(pydantic_v1.BaseModel): - can_delete_objects: typing.Optional[bool] = pydantic_v1.Field(default=None) +class GcsCreateResponse(UniversalBaseModel): + can_delete_objects: typing.Optional[bool] = pydantic.Field(default=None) """ Deletion from storage enabled. """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - bucket: typing.Optional[str] = pydantic_v1.Field(default=None) + bucket: typing.Optional[str] = pydantic.Field(default=None) """ GCS bucket name """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ GCS bucket prefix """ - google_application_credentials: typing.Optional[str] = pydantic_v1.Field(default=None) + google_application_credentials: typing.Optional[str] = pydantic.Field(default=None) """ The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. 
""" - google_project_id: typing.Optional[str] = pydantic_v1.Field(default=None) + google_project_id: typing.Optional[str] = pydantic.Field(default=None) """ Google project ID """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/export_storage/gcs/types/gcs_update_response.py b/src/label_studio_sdk/export_storage/gcs/types/gcs_update_response.py index 0d48743bf..48f05d0f3 100644 --- a/src/label_studio_sdk/export_storage/gcs/types/gcs_update_response.py +++ b/src/label_studio_sdk/export_storage/gcs/types/gcs_update_response.py @@ -1,67 +1,57 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class GcsUpdateResponse(pydantic_v1.BaseModel): - can_delete_objects: typing.Optional[bool] = pydantic_v1.Field(default=None) +class GcsUpdateResponse(UniversalBaseModel): + can_delete_objects: typing.Optional[bool] = pydantic.Field(default=None) """ Deletion from storage enabled. """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - bucket: typing.Optional[str] = pydantic_v1.Field(default=None) + bucket: typing.Optional[str] = pydantic.Field(default=None) """ GCS bucket name """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ GCS bucket prefix """ - google_application_credentials: typing.Optional[str] = pydantic_v1.Field(default=None) + google_application_credentials: typing.Optional[str] = pydantic.Field(default=None) """ The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. 
""" - google_project_id: typing.Optional[str] = pydantic_v1.Field(default=None) + google_project_id: typing.Optional[str] = pydantic.Field(default=None) """ Google project ID """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/export_storage/local/client.py b/src/label_studio_sdk/export_storage/local/client.py index 08fc66798..cbf6de190 100644 --- a/src/label_studio_sdk/export_storage/local/client.py +++ b/src/label_studio_sdk/export_storage/local/client.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.local_files_export_storage import LocalFilesExportStorage +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.local_create_response import LocalCreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.local_update_response import LocalUpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -24,6 +24,7 @@ def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[LocalFilesExportStorage]: """ + You can connect a local file directory to Label Studio as a source storage or target storage. Use this API request to get a list of all local file export (target) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). 
@@ -45,7 +46,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -53,11 +54,22 @@ def list( client.export_storage.local.list() """ _response = self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles", method="GET", params={"project": project}, request_options=request_options + "api/storages/export/localfiles", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[LocalFilesExportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[LocalFilesExportStorage], + parse_obj_as( + type_=typing.List[LocalFilesExportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -75,6 +87,7 @@ def create( request_options: typing.Optional[RequestOptions] = None, ) -> LocalCreateResponse: """ + Create a new target storage connection to a local file directory. For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. @@ -111,7 +124,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -129,12 +142,21 @@ def create( "regex_filter": regex_filter, "use_blob_urls": use_blob_urls, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalCreateResponse, _response.json()) # type: ignore + return typing.cast( + LocalCreateResponse, + parse_obj_as( + type_=LocalCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -153,6 +175,7 @@ def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific local file export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. Parameters @@ -187,7 +210,7 @@ def validate( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -206,6 +229,9 @@ def validate( "regex_filter": regex_filter, "use_blob_urls": use_blob_urls, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -219,6 +245,7 @@ def validate( def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesExportStorage: """ + Get a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
@@ -238,7 +265,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -248,11 +275,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalFilesExportStorage, _response.json()) # type: ignore + return typing.cast( + LocalFilesExportStorage, + parse_obj_as( + type_=LocalFilesExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -260,6 +295,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. @@ -278,7 +314,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -288,7 +324,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -311,6 +349,7 @@ def update( request_options: typing.Optional[RequestOptions] = None, ) -> LocalUpdateResponse: """ + Update a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
@@ -348,7 +387,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -368,12 +407,21 @@ def update( "regex_filter": regex_filter, "use_blob_urls": use_blob_urls, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalUpdateResponse, _response.json()) # type: ignore + return typing.cast( + LocalUpdateResponse, + parse_obj_as( + type_=LocalUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -381,6 +429,7 @@ def update( def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesExportStorage: """ + Sync tasks to a local file export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Sync operations with external local file directories only go one way. They either create tasks from objects in the directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results. @@ -401,7 +450,7 @@ def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = No Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -417,7 +466,13 @@ def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = No ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalFilesExportStorage, _response.json()) # type: ignore + return typing.cast( + LocalFilesExportStorage, + parse_obj_as( + type_=LocalFilesExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -432,6 +487,7 @@ async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[LocalFilesExportStorage]: """ + You can connect a local file directory to Label Studio as a source storage or target storage. Use this API request to get a list of all local file export (target) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
@@ -453,19 +509,38 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.local.list() + + + async def main() -> None: + await client.export_storage.local.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles", method="GET", params={"project": project}, request_options=request_options + "api/storages/export/localfiles", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[LocalFilesExportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[LocalFilesExportStorage], + parse_obj_as( + type_=typing.List[LocalFilesExportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -483,6 +558,7 @@ async def create( request_options: typing.Optional[RequestOptions] = None, ) -> LocalCreateResponse: """ + Create a new target storage connection to a local file directory. For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. @@ -519,12 +595,20 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.local.create() + + + async def main() -> None: + await client.export_storage.local.create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/export/localfiles", @@ -537,12 +621,21 @@ async def create( "regex_filter": regex_filter, "use_blob_urls": use_blob_urls, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalCreateResponse, _response.json()) # type: ignore + return typing.cast( + LocalCreateResponse, + parse_obj_as( + type_=LocalCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -561,6 +654,7 @@ async def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific local file export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. 
Parameters @@ -595,12 +689,20 @@ async def validate( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.local.validate() + + + async def main() -> None: + await client.export_storage.local.validate() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/export/localfiles/validate", @@ -614,6 +716,9 @@ async def validate( "regex_filter": regex_filter, "use_blob_urls": use_blob_urls, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -627,6 +732,7 @@ async def validate( async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesExportStorage: """ + Get a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -646,21 +752,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.local.get( - id=1, - ) + + + async def main() -> None: + await client.export_storage.local.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalFilesExportStorage, _response.json()) # type: ignore + return typing.cast( + LocalFilesExportStorage, + parse_obj_as( + type_=LocalFilesExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -668,6 +790,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. 
@@ -686,17 +809,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.local.delete( - id=1, - ) + + + async def main() -> None: + await client.export_storage.local.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -719,6 +852,7 @@ async def update( request_options: typing.Optional[RequestOptions] = None, ) -> LocalUpdateResponse: """ + Update a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -756,14 +890,22 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.local.update( - id=1, - ) + + + async def main() -> None: + await client.export_storage.local.update( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/localfiles/{jsonable_encoder(id)}", @@ -776,12 +918,21 @@ async def update( "regex_filter": regex_filter, "use_blob_urls": use_blob_urls, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalUpdateResponse, _response.json()) # type: ignore + return typing.cast( + LocalUpdateResponse, + parse_obj_as( + type_=LocalUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -791,6 +942,7 @@ async def sync( self, id: str, *, request_options: typing.Optional[RequestOptions] = None ) -> LocalFilesExportStorage: """ + Sync tasks to a local file export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Sync operations with external local file directories only go one way. They either create tasks from objects in the directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results.
@@ -811,14 +963,22 @@ async def sync( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.local.sync( - id="id", - ) + + + async def main() -> None: + await client.export_storage.local.sync( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/localfiles/{jsonable_encoder(id)}/sync", @@ -827,7 +987,13 @@ async def sync( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalFilesExportStorage, _response.json()) # type: ignore + return typing.cast( + LocalFilesExportStorage, + parse_obj_as( + type_=LocalFilesExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/export_storage/local/types/local_create_response.py b/src/label_studio_sdk/export_storage/local/types/local_create_response.py index eba7c780b..95051747a 100644 --- a/src/label_studio_sdk/export_storage/local/types/local_create_response.py +++ b/src/label_studio_sdk/export_storage/local/types/local_create_response.py @@ -1,57 +1,47 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class LocalCreateResponse(pydantic_v1.BaseModel): - title: typing.Optional[str] = pydantic_v1.Field(default=None) +class LocalCreateResponse(UniversalBaseModel): + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - path: typing.Optional[str] = pydantic_v1.Field(default=None) + path: typing.Optional[str] = pydantic.Field(default=None) """ Path to local directory """ - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Regex for filtering objects """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/export_storage/local/types/local_update_response.py b/src/label_studio_sdk/export_storage/local/types/local_update_response.py index a9d7c3970..e5dd8df6c 100644 --- a/src/label_studio_sdk/export_storage/local/types/local_update_response.py +++ b/src/label_studio_sdk/export_storage/local/types/local_update_response.py @@ -1,57 +1,47 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class LocalUpdateResponse(pydantic_v1.BaseModel): - title: typing.Optional[str] = pydantic_v1.Field(default=None) +class LocalUpdateResponse(UniversalBaseModel): + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - path: typing.Optional[str] = pydantic_v1.Field(default=None) + path: typing.Optional[str] = pydantic.Field(default=None) """ Path to local directory """ - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Regex for filtering objects """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/export_storage/redis/client.py b/src/label_studio_sdk/export_storage/redis/client.py index 1b515f40c..3e7f4b0ae 100644 --- a/src/label_studio_sdk/export_storage/redis/client.py +++ b/src/label_studio_sdk/export_storage/redis/client.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.redis_export_storage import RedisExportStorage +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.redis_create_response import RedisCreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.redis_update_response import RedisUpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -24,6 +24,7 @@ def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[RedisExportStorage]: """ + You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis export (target) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). 
@@ -45,7 +46,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -53,11 +54,22 @@ def list( client.export_storage.redis.list() """ _response = self._client_wrapper.httpx_client.request( - "api/storages/export/redis", method="GET", params={"project": project}, request_options=request_options + "api/storages/export/redis", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[RedisExportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[RedisExportStorage], + parse_obj_as( + type_=typing.List[RedisExportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -78,6 +90,7 @@ def create( request_options: typing.Optional[RequestOptions] = None, ) -> RedisCreateResponse: """ + Create a new target storage connection to Redis. For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. @@ -123,7 +136,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -144,12 +157,21 @@ def create( "port": port, "password": password, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisCreateResponse, _response.json()) # type: ignore + return typing.cast( + RedisCreateResponse, + parse_obj_as( + type_=RedisCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -171,6 +193,7 @@ def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific Redis export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. Parameters @@ -214,7 +237,7 @@ def validate( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -236,6 +259,9 @@ def validate( "port": port, "password": password, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -249,6 +275,7 @@ def validate( def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisExportStorage: """ + Get a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
@@ -268,7 +295,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -278,11 +305,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisExportStorage, _response.json()) # type: ignore + return typing.cast( + RedisExportStorage, + parse_obj_as( + type_=RedisExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -290,6 +325,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. @@ -308,7 +344,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -318,7 +354,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -344,6 +382,7 @@ def update( request_options: typing.Optional[RequestOptions] = None, ) -> RedisUpdateResponse: """ + Update a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
@@ -390,7 +429,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -413,12 +452,21 @@ def update( "port": port, "password": password, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisUpdateResponse, _response.json()) # type: ignore + return typing.cast( + RedisUpdateResponse, + parse_obj_as( + type_=RedisUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -426,6 +474,7 @@ def update( def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> RedisExportStorage: """ + Sync tasks to a Redis export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Sync operations with external databases only go one way. They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results. @@ -446,7 +495,7 @@ def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = No Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -456,11 +505,19 @@ def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = No ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/export/redis/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisExportStorage, _response.json()) # type: ignore + return typing.cast( + RedisExportStorage, + parse_obj_as( + type_=RedisExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -475,6 +532,7 @@ async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[RedisExportStorage]: """ + You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis export (target) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
@@ -496,19 +554,38 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.redis.list() + + + async def main() -> None: + await client.export_storage.redis.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/redis", method="GET", params={"project": project}, request_options=request_options + "api/storages/export/redis", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[RedisExportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[RedisExportStorage], + parse_obj_as( + type_=typing.List[RedisExportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -529,6 +606,7 @@ async def create( request_options: typing.Optional[RequestOptions] = None, ) -> RedisCreateResponse: """ + Create a new target storage connection to Redis. For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. @@ -574,12 +652,20 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.redis.create() + + + async def main() -> None: + await client.export_storage.redis.create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/export/redis", @@ -595,12 +681,21 @@ async def create( "port": port, "password": password, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisCreateResponse, _response.json()) # type: ignore + return typing.cast( + RedisCreateResponse, + parse_obj_as( + type_=RedisCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -622,6 +717,7 @@ async def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific Redis export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. 
Parameters @@ -665,12 +761,20 @@ async def validate( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.redis.validate() + + + async def main() -> None: + await client.export_storage.redis.validate() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/export/redis/validate", @@ -687,6 +791,9 @@ async def validate( "port": port, "password": password, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -700,6 +807,7 @@ async def validate( async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisExportStorage: """ + Get a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -719,21 +827,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.redis.get( - id=1, - ) + + + async def main() -> None: + await client.export_storage.redis.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisExportStorage, _response.json()) # type: ignore + return typing.cast( + RedisExportStorage, + parse_obj_as( + type_=RedisExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -741,6 +865,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. 
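Note the reshaped async examples throughout these docstrings: a bare top-level `await` only works inside an already-running event loop (such as an IPython session), so each snippet now wraps the call in a coroutine and drives it with `asyncio.run`. A runnable version of the documented pattern, assuming a reachable Label Studio instance and a valid API key:

import asyncio

from label_studio_sdk import AsyncLabelStudio

client = AsyncLabelStudio(
    api_key="YOUR_API_KEY",
)


async def main() -> None:
    # List Redis export (target) storages; the project filter is optional.
    storages = await client.export_storage.redis.list(project=1)
    print(f"{len(storages)} Redis export storage(s)")


asyncio.run(main())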
@@ -759,17 +884,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.redis.delete( - id=1, - ) + + + async def main() -> None: + await client.export_storage.redis.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -795,6 +930,7 @@ async def update( request_options: typing.Optional[RequestOptions] = None, ) -> RedisUpdateResponse: """ + Update a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -841,14 +977,22 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.redis.update( - id=1, - ) + + + async def main() -> None: + await client.export_storage.redis.update( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/redis/{jsonable_encoder(id)}", @@ -864,12 +1008,21 @@ async def update( "port": port, "password": password, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisUpdateResponse, _response.json()) # type: ignore + return typing.cast( + RedisUpdateResponse, + parse_obj_as( + type_=RedisUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -877,6 +1030,7 @@ async def update( async def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> RedisExportStorage: """ + Sync tasks to a Redis export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Sync operations with external databases only go one way. They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results.
@@ -897,21 +1051,37 @@ async def sync(self, id: str, *, request_options: typing.Optional[RequestOptions Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.redis.sync( - id="id", - ) + + + async def main() -> None: + await client.export_storage.redis.sync( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/export/redis/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisExportStorage, _response.json()) # type: ignore + return typing.cast( + RedisExportStorage, + parse_obj_as( + type_=RedisExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/export_storage/redis/types/redis_create_response.py b/src/label_studio_sdk/export_storage/redis/types/redis_create_response.py index 35f0b2d47..7aab4a479 100644 --- a/src/label_studio_sdk/export_storage/redis/types/redis_create_response.py +++ b/src/label_studio_sdk/export_storage/redis/types/redis_create_response.py @@ -1,72 +1,62 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class RedisCreateResponse(pydantic_v1.BaseModel): - db: typing.Optional[int] = pydantic_v1.Field(default=None) +class RedisCreateResponse(UniversalBaseModel): + db: typing.Optional[int] = pydantic.Field(default=None) """ Database ID of database to use """ - can_delete_objects: typing.Optional[bool] = pydantic_v1.Field(default=None) + can_delete_objects: typing.Optional[bool] = pydantic.Field(default=None) """ Deletion from storage enabled. 
""" - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - path: typing.Optional[str] = pydantic_v1.Field(default=None) + path: typing.Optional[str] = pydantic.Field(default=None) """ Storage prefix (optional) """ - host: typing.Optional[str] = pydantic_v1.Field(default=None) + host: typing.Optional[str] = pydantic.Field(default=None) """ Server Host IP (optional) """ - port: typing.Optional[str] = pydantic_v1.Field(default=None) + port: typing.Optional[str] = pydantic.Field(default=None) """ Server Port (optional) """ - password: typing.Optional[str] = pydantic_v1.Field(default=None) + password: typing.Optional[str] = pydantic.Field(default=None) """ Server Password (optional) """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/export_storage/redis/types/redis_update_response.py b/src/label_studio_sdk/export_storage/redis/types/redis_update_response.py index 034a973da..8eec3c821 100644 --- a/src/label_studio_sdk/export_storage/redis/types/redis_update_response.py +++ b/src/label_studio_sdk/export_storage/redis/types/redis_update_response.py @@ -1,72 +1,62 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class RedisUpdateResponse(pydantic_v1.BaseModel): - db: typing.Optional[int] = pydantic_v1.Field(default=None) +class RedisUpdateResponse(UniversalBaseModel): + db: typing.Optional[int] = pydantic.Field(default=None) """ Database ID of database to use """ - can_delete_objects: typing.Optional[bool] = pydantic_v1.Field(default=None) + can_delete_objects: typing.Optional[bool] = pydantic.Field(default=None) """ Deletion from storage enabled. 
""" - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - path: typing.Optional[str] = pydantic_v1.Field(default=None) + path: typing.Optional[str] = pydantic.Field(default=None) """ Storage prefix (optional) """ - host: typing.Optional[str] = pydantic_v1.Field(default=None) + host: typing.Optional[str] = pydantic.Field(default=None) """ Server Host IP (optional) """ - port: typing.Optional[str] = pydantic_v1.Field(default=None) + port: typing.Optional[str] = pydantic.Field(default=None) """ Server Port (optional) """ - password: typing.Optional[str] = pydantic_v1.Field(default=None) + password: typing.Optional[str] = pydantic.Field(default=None) """ Server Password (optional) """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/export_storage/s3/client.py b/src/label_studio_sdk/export_storage/s3/client.py index 42a248c0a..3619a374b 100644 --- a/src/label_studio_sdk/export_storage/s3/client.py +++ b/src/label_studio_sdk/export_storage/s3/client.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.s3export_storage import S3ExportStorage +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.s3create_response import S3CreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.s3update_response import S3UpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -24,6 +24,7 @@ def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[S3ExportStorage]: """ + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). @@ -45,7 +46,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -53,11 +54,22 @@ def list( client.export_storage.s3.list() """ _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3", method="GET", params={"project": project}, request_options=request_options + "api/storages/export/s3", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[S3ExportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[S3ExportStorage], + parse_obj_as( + type_=typing.List[S3ExportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -81,6 +93,7 @@ def create( request_options: typing.Optional[RequestOptions] = None, ) -> S3CreateResponse: """ + Create a new target storage connection to S3 storage. For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. @@ -135,7 +148,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -159,12 +172,21 @@ def create( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3CreateResponse, _response.json()) # type: ignore + return typing.cast( + S3CreateResponse, + parse_obj_as( + type_=S3CreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -189,6 +211,7 @@ def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. Parameters @@ -241,7 +264,7 @@ def validate( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -266,6 +289,9 @@ def validate( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -279,6 +305,7 @@ def validate( def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ExportStorage: """ + Get a specific S3 export storage connection. You will need to provide the export storage ID. 
You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -298,7 +325,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -308,11 +335,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3ExportStorage, _response.json()) # type: ignore + return typing.cast( + S3ExportStorage, + parse_obj_as( + type_=S3ExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -320,6 +355,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. @@ -338,7 +374,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -348,7 +384,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -377,6 +415,7 @@ def update( request_options: typing.Optional[RequestOptions] = None, ) -> S3UpdateResponse: """ + Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
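Another systematic change in the request hunks: `create`, `validate`, and `update` now send an explicit `content-type: application/json` header rather than leaving the encoding implicit. At the HTTP level the request is roughly the following; the base URL is a placeholder and the token-style `Authorization` header is an assumption about the deployment:

import httpx

response = httpx.request(
    "POST",
    "https://label-studio.example.com/api/storages/export/s3/validate",
    json={"project": 1, "bucket": "my-bucket"},
    headers={
        "content-type": "application/json",
        "Authorization": "Token YOUR_API_KEY",  # assumed auth scheme
    },
)
print(response.status_code)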
@@ -432,7 +471,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -458,12 +497,21 @@ def update( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3UpdateResponse, _response.json()) # type: ignore + return typing.cast( + S3UpdateResponse, + parse_obj_as( + type_=S3UpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -471,6 +519,7 @@ def update( def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> S3ExportStorage: """ + Sync tasks to an S3 export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. @@ -491,7 +540,7 @@ def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = No Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -501,11 +550,19 @@ def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = No ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/export/s3/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3ExportStorage, _response.json()) # type: ignore + return typing.cast( + S3ExportStorage, + parse_obj_as( + type_=S3ExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -520,6 +577,7 @@ async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[S3ExportStorage]: """ + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). 
@@ -541,19 +599,38 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.s3.list() + + + async def main() -> None: + await client.export_storage.s3.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/s3", method="GET", params={"project": project}, request_options=request_options + "api/storages/export/s3", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[S3ExportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[S3ExportStorage], + parse_obj_as( + type_=typing.List[S3ExportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -577,6 +654,7 @@ async def create( request_options: typing.Optional[RequestOptions] = None, ) -> S3CreateResponse: """ + Create a new target storage connection to S3 storage. For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. @@ -631,12 +709,20 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.s3.create() + + + async def main() -> None: + await client.export_storage.s3.create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/export/s3", @@ -655,12 +741,21 @@ async def create( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3CreateResponse, _response.json()) # type: ignore + return typing.cast( + S3CreateResponse, + parse_obj_as( + type_=S3CreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -685,6 +780,7 @@ async def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. 
Parameters @@ -737,12 +833,20 @@ async def validate( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.s3.validate() + + + async def main() -> None: + await client.export_storage.s3.validate() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/export/s3/validate", @@ -762,6 +866,9 @@ async def validate( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -775,6 +882,7 @@ async def validate( async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ExportStorage: """ + Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -794,21 +902,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.s3.get( - id=1, - ) + + + async def main() -> None: + await client.export_storage.s3.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3ExportStorage, _response.json()) # type: ignore + return typing.cast( + S3ExportStorage, + parse_obj_as( + type_=S3ExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -816,6 +940,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. 
@@ -834,17 +959,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.s3.delete( - id=1, - ) + + + async def main() -> None: + await client.export_storage.s3.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -873,6 +1008,7 @@ async def update( request_options: typing.Optional[RequestOptions] = None, ) -> S3UpdateResponse: """ + Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -928,14 +1064,22 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.s3.update( - id=1, - ) + + + async def main() -> None: + await client.export_storage.s3.update( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/s3/{jsonable_encoder(id)}", @@ -954,12 +1098,21 @@ async def update( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3UpdateResponse, _response.json()) # type: ignore + return typing.cast( + S3UpdateResponse, + parse_obj_as( + type_=S3UpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -967,6 +1120,7 @@ async def update( async def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> S3ExportStorage: """ + Sync tasks to an S3 export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. 
@@ -987,21 +1141,37 @@ async def sync(self, id: str, *, request_options: typing.Optional[RequestOptions Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.s3.sync( - id="id", - ) + + + async def main() -> None: + await client.export_storage.s3.sync( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/export/s3/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3ExportStorage, _response.json()) # type: ignore + return typing.cast( + S3ExportStorage, + parse_obj_as( + type_=S3ExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/export_storage/s3/types/s3create_response.py b/src/label_studio_sdk/export_storage/s3/types/s3create_response.py index 0b35dccf1..cc74a5583 100644 --- a/src/label_studio_sdk/export_storage/s3/types/s3create_response.py +++ b/src/label_studio_sdk/export_storage/s3/types/s3create_response.py @@ -1,89 +1,81 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +import typing_extensions +from ....core.serialization import FieldMetadata +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class S3CreateResponse(pydantic_v1.BaseModel): - can_delete_objects: typing.Optional[bool] = pydantic_v1.Field(default=None) +class S3CreateResponse(UniversalBaseModel): + can_delete_objects: typing.Optional[bool] = pydantic.Field(default=None) """ Deletion from storage enabled. 
""" - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - bucket: typing.Optional[str] = pydantic_v1.Field(default=None) + bucket: typing.Optional[str] = pydantic.Field(default=None) """ S3 bucket name """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ S3 bucket prefix """ - aws_access_key_id: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_access_key_id: typing.Optional[str] = pydantic.Field(default=None) """ AWS_ACCESS_KEY_ID """ - aws_secret_access_key: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_secret_access_key: typing.Optional[str] = pydantic.Field(default=None) """ AWS_SECRET_ACCESS_KEY """ - aws_session_token: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_session_token: typing.Optional[str] = pydantic.Field(default=None) """ AWS_SESSION_TOKEN """ - aws_sse_kms_key_id: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_sse_kms_key_id: typing.Optional[str] = pydantic.Field(default=None) """ AWS SSE KMS Key ID """ - region_name: typing.Optional[str] = pydantic_v1.Field(default=None) + region_name: typing.Optional[str] = pydantic.Field(default=None) """ AWS Region """ - s3endpoint: typing.Optional[str] = pydantic_v1.Field(alias="s3_endpoint", default=None) + s3endpoint: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="s3_endpoint")] = pydantic.Field( + default=None + ) """ S3 Endpoint """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/export_storage/s3/types/s3update_response.py b/src/label_studio_sdk/export_storage/s3/types/s3update_response.py index 6d4ed3931..466c2b64c 100644 --- a/src/label_studio_sdk/export_storage/s3/types/s3update_response.py +++ b/src/label_studio_sdk/export_storage/s3/types/s3update_response.py @@ -1,89 +1,81 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +import typing_extensions +from ....core.serialization import FieldMetadata +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class S3UpdateResponse(pydantic_v1.BaseModel): - can_delete_objects: typing.Optional[bool] = pydantic_v1.Field(default=None) +class S3UpdateResponse(UniversalBaseModel): + can_delete_objects: typing.Optional[bool] = pydantic.Field(default=None) """ Deletion from storage enabled. """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - bucket: typing.Optional[str] = pydantic_v1.Field(default=None) + bucket: typing.Optional[str] = pydantic.Field(default=None) """ S3 bucket name """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ S3 bucket prefix """ - aws_access_key_id: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_access_key_id: typing.Optional[str] = pydantic.Field(default=None) """ AWS_ACCESS_KEY_ID """ - aws_secret_access_key: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_secret_access_key: typing.Optional[str] = pydantic.Field(default=None) """ AWS_SECRET_ACCESS_KEY """ - aws_session_token: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_session_token: typing.Optional[str] = pydantic.Field(default=None) """ AWS_SESSION_TOKEN """ - aws_sse_kms_key_id: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_sse_kms_key_id: typing.Optional[str] = pydantic.Field(default=None) """ AWS SSE KMS Key ID """ - region_name: typing.Optional[str] = pydantic_v1.Field(default=None) + region_name: typing.Optional[str] = pydantic.Field(default=None) """ AWS Region """ - s3endpoint: typing.Optional[str] = pydantic_v1.Field(alias="s3_endpoint", default=None) + s3endpoint: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="s3_endpoint")] = pydantic.Field( + default=None + ) """ S3 Endpoint """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + 
smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/export_storage/s3s/client.py b/src/label_studio_sdk/export_storage/s3s/client.py index 6a42d4726..460586a50 100644 --- a/src/label_studio_sdk/export_storage/s3s/client.py +++ b/src/label_studio_sdk/export_storage/s3s/client.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. import typing +from ...core.client_wrapper import SyncClientWrapper +from ...core.request_options import RequestOptions +from ...types.s3s_export_storage import S3SExportStorage +from ...core.pydantic_utilities import parse_obj_as from json.decoder import JSONDecodeError - from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 -from ...core.request_options import RequestOptions -from ...types.s3s_export_storage import S3SExportStorage +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -22,6 +22,7 @@ def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[S3SExportStorage]: """ + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). @@ -43,7 +44,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -51,11 +52,22 @@ def list( client.export_storage.s3s.list() """ _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3s", method="GET", params={"project": project}, request_options=request_options + "api/storages/export/s3s", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[S3SExportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[S3SExportStorage], + parse_obj_as( + type_=typing.List[S3SExportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -77,6 +89,7 @@ def create( request_options: typing.Optional[RequestOptions] = None, ) -> S3SExportStorage: """ + Create a new target storage connection to an S3 bucket with IAM role access. For information about the required fields and prerequisites, see [Amazon S3](https://docs.humansignal.com/guide/storage#Set-up-an-S3-connection-with-IAM-role-access) in the Label Studio documentation.
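The `s3_endpoint` fields in the response models above also show the new aliasing style: `pydantic_v1.Field(alias="s3_endpoint")` becomes `typing_extensions.Annotated[..., FieldMetadata(alias="s3_endpoint")]`, carrying the wire name as annotation metadata that works on both Pydantic majors. Pydantic itself ignores unknown `Annotated` metadata, so a serializer must read it back out of the annotations; a minimal sketch with a hypothetical stand-in for the SDK's `core.serialization.FieldMetadata`:

import typing

import pydantic
import typing_extensions


class FieldMetadata:
    # Hypothetical stand-in: records the JSON key for a Python attribute.
    def __init__(self, *, alias: str) -> None:
        self.alias = alias


class Example(pydantic.BaseModel):
    # Attribute is `s3endpoint` in Python, `s3_endpoint` on the wire.
    s3endpoint: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="s3_endpoint")] = pydantic.Field(default=None)


# What a custom serializer can do: pull the alias out of the annotation.
hints = typing.get_type_hints(Example, include_extras=True)
for meta in typing.get_args(hints["s3endpoint"])[1:]:
    if isinstance(meta, FieldMetadata):
        print(meta.alias)  # -> s3_endpoint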
@@ -123,7 +136,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -145,12 +158,21 @@ def create( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3SExportStorage, _response.json()) # type: ignore + return typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -158,6 +180,7 @@ def create( def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SExportStorage: """ + Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Parameters @@ -175,7 +198,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -185,11 +208,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3SExportStorage, _response.json()) # type: ignore + return typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -197,6 +228,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Parameters @@ -213,7 +245,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -223,7 +255,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -250,6 +284,7 @@ def update( request_options: typing.Optional[RequestOptions] = None, ) -> S3SExportStorage: """ + Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). 
Parameters @@ -297,7 +332,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -321,12 +356,21 @@ def update( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3SExportStorage, _response.json()) # type: ignore + return typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -348,6 +392,7 @@ def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. Parameters @@ -391,7 +436,7 @@ def validate( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -413,6 +458,9 @@ def validate( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -433,6 +481,7 @@ async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[S3SExportStorage]: """ + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). @@ -454,19 +503,38 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.s3s.list() + + + async def main() -> None: + await client.export_storage.s3s.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/s3s", method="GET", params={"project": project}, request_options=request_options + "api/storages/export/s3s", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[S3SExportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[S3SExportStorage], + parse_obj_as( + type_=typing.List[S3SExportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -488,6 +556,7 @@ async def create( request_options: typing.Optional[RequestOptions] = None, ) -> S3SExportStorage: """ + Create a new target storage connection to an S3 bucket with IAM role access. For information about the required fields and prerequisites, see [Amazon S3](https://docs.humansignal.com/guide/storage#Set-up-an-S3-connection-with-IAM-role-access) in the Label Studio documentation.
@@ -534,12 +603,20 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.s3s.create() + + + async def main() -> None: + await client.export_storage.s3s.create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/export/s3s", @@ -556,12 +633,21 @@ async def create( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3SExportStorage, _response.json()) # type: ignore + return typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -569,6 +655,7 @@ async def create( async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SExportStorage: """ + Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Parameters @@ -586,21 +673,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.s3s.get( - id=1, - ) + + + async def main() -> None: + await client.export_storage.s3s.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3SExportStorage, _response.json()) # type: ignore + return typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -608,6 +711,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). 
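The async examples above each wrap a single call in `asyncio.run(main())`. A hedged sketch of why `AsyncLabelStudio` is useful in practice: independent requests can run concurrently. The project IDs below are placeholders.

```python
# Sketch: issue two list() calls concurrently with the async client.
import asyncio

from label_studio_sdk import AsyncLabelStudio

client = AsyncLabelStudio(api_key="YOUR_API_KEY")


async def main() -> None:
    # Fetch S3 export storages for two projects in parallel.
    storages_a, storages_b = await asyncio.gather(
        client.export_storage.s3s.list(project=1),
        client.export_storage.s3s.list(project=2),
    )
    print(len(storages_a), len(storages_b))


asyncio.run(main())
```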
Parameters @@ -624,17 +728,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.s3s.delete( - id=1, - ) + + + async def main() -> None: + await client.export_storage.s3s.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -661,6 +775,7 @@ async def update( request_options: typing.Optional[RequestOptions] = None, ) -> S3SExportStorage: """ + Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). Parameters @@ -708,14 +823,22 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.s3s.update( - id=1, - ) + + + async def main() -> None: + await client.export_storage.s3s.update( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/s3s/{jsonable_encoder(id)}", @@ -732,12 +855,21 @@ async def update( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3SExportStorage, _response.json()) # type: ignore + return typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -759,6 +891,7 @@ async def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. 
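For the update endpoint above, a hedged usage sketch; `title` is an assumed keyword argument (the full parameter list is not visible in this hunk), and the storage ID is a placeholder.

```python
# Sketch: rename an existing S3 target storage connection.
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

storage = client.export_storage.s3s.update(
    id=1,                    # placeholder storage ID
    title="Renamed target",  # assumed keyword argument
)
print(storage.title)
```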
Parameters @@ -802,12 +935,20 @@ async def validate( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.export_storage.s3s.validate() + + + async def main() -> None: + await client.export_storage.s3s.validate() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/export/s3s/validate", @@ -824,6 +965,9 @@ async def validate( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) diff --git a/src/label_studio_sdk/export_storage/types/export_storage_list_types_response_item.py b/src/label_studio_sdk/export_storage/types/export_storage_list_types_response_item.py index 1231710f2..684e9172a 100644 --- a/src/label_studio_sdk/export_storage/types/export_storage_list_types_response_item.py +++ b/src/label_studio_sdk/export_storage/types/export_storage_list_types_response_item.py @@ -1,30 +1,20 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel import typing +from ...core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class ExportStorageListTypesResponseItem(pydantic_v1.BaseModel): +class ExportStorageListTypesResponseItem(UniversalBaseModel): name: typing.Optional[str] = None title: typing.Optional[str] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/files/client.py b/src/label_studio_sdk/files/client.py index 4a68269a4..a832258c3 100644 --- a/src/label_studio_sdk/files/client.py +++ b/src/label_studio_sdk/files/client.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. 
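The model rewrite above replaces the hand-rolled `json()`/`dict()` overrides with a version-gated config. A standalone analogue of the pattern, assuming `IS_PYDANTIC_V2` simply reflects the installed major version (the SDK's own flag lives in `core.pydantic_utilities`):

```python
# Sketch of the dual Pydantic v1/v2 configuration the regenerated types use.
import typing

import pydantic

IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")


class ExampleItem(pydantic.BaseModel):  # UniversalBaseModel stand-in
    name: typing.Optional[str] = None
    title: typing.Optional[str] = None

    if IS_PYDANTIC_V2:
        # Pydantic v2: configuration is a ConfigDict on the class.
        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(
            extra="allow", frozen=True
        )
    else:
        # Pydantic v1: configuration is an inner Config class.
        class Config:
            frozen = True
            smart_union = True
            extra = pydantic.Extra.allow
```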
import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import pydantic_v1 +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions from ..types.file_upload import FileUpload +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -37,7 +37,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -47,11 +47,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/import/file-upload/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FileUpload, _response.json()) # type: ignore + return typing.cast( + FileUpload, + parse_obj_as( + type_=FileUpload, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -75,7 +83,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -85,7 +93,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/import/file-upload/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -104,10 +114,10 @@ def update( request_options: typing.Optional[RequestOptions] = None, ) -> FileUpload: """ + Update a specific uploaded file. To get the file upload ID, use [Get files list](list). You will need to include the file data in the request body. 
For example: - ```bash curl -H 'Authorization: Token abc123' \ -X POST 'https://localhost:8080/api/import/file-upload/245' -F 'file=@path/to/my_file.csv' ``` @@ -131,7 +141,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -143,13 +153,22 @@ def update( _response = self._client_wrapper.httpx_client.request( f"api/import/file-upload/{jsonable_encoder(id_)}", method="PATCH", - json={"id": id, "file": file}, + json={ + "id": id, + "file": file, + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FileUpload, _response.json()) # type: ignore + return typing.cast( + FileUpload, + parse_obj_as( + type_=FileUpload, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -164,6 +183,7 @@ def list( request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[FileUpload]: """ + Retrieve the list of uploaded files used to create labeling tasks for a specific project. These are files that have been uploaded directly to Label Studio. You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). @@ -189,7 +209,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -201,12 +221,21 @@ def list( _response = self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/file-uploads", method="GET", - params={"all": all_, "ids": ids}, + params={ + "all": all_, + "ids": ids, + }, request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[FileUpload], _response.json()) # type: ignore + return typing.cast( + typing.List[FileUpload], + parse_obj_as( + type_=typing.List[FileUpload], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -214,6 +243,7 @@ def list( def delete_many(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete uploaded files for a specific project. These are files that have been uploaded directly to Label Studio. You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list).
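The curl example above performs a multipart upload; a hedged Python equivalent using `requests` follows (URL and token are copied from that example, the file path is a placeholder). Note that the generated `update()` method itself sends a JSON body instead.

```python
# Sketch: the multipart upload from the curl example, via requests.
import requests

with open("path/to/my_file.csv", "rb") as f:
    response = requests.post(
        "https://localhost:8080/api/import/file-upload/245",
        headers={"Authorization": "Token abc123"},
        files={"file": f},
    )
response.raise_for_status()
print(response.status_code)
```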
@@ -232,7 +262,7 @@ def delete_many(self, id: int, *, request_options: typing.Optional[RequestOption Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -242,7 +272,9 @@ def delete_many(self, id: int, *, request_options: typing.Optional[RequestOption ) """ _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/file-uploads", method="DELETE", request_options=request_options + f"api/projects/{jsonable_encoder(id)}/file-uploads", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -269,7 +301,7 @@ def download(self, filename: str, *, request_options: typing.Optional[RequestOpt Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -279,7 +311,9 @@ def download(self, filename: str, *, request_options: typing.Optional[RequestOpt ) """ _response = self._client_wrapper.httpx_client.request( - f"data/upload/{jsonable_encoder(filename)}", method="GET", request_options=request_options + f"data/upload/{jsonable_encoder(filename)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -313,21 +347,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.files.get( - id=1, - ) + + + async def main() -> None: + await client.files.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/import/file-upload/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FileUpload, _response.json()) # type: ignore + return typing.cast( + FileUpload, + parse_obj_as( + type_=FileUpload, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -351,17 +401,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.files.delete( - id=1, - ) + + + async def main() -> None: + await client.files.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/import/file-upload/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -380,10 +440,10 @@ async def update( request_options: typing.Optional[RequestOptions] = None, ) -> FileUpload: """ + Update a specific uploaded file. To get the file upload ID, use [Get files list](list). You will need to include the file data in the request body. 
For example: - ```bash curl -H 'Authorization: Token abc123' \ -X POST 'https://localhost:8080/api/import/file-upload/245' -F 'file=@path/to/my_file.csv' ``` @@ -407,25 +467,42 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.files.update( - id_=1, - ) + + + async def main() -> None: + await client.files.update( + id_=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/import/file-upload/{jsonable_encoder(id_)}", method="PATCH", - json={"id": id, "file": file}, + json={ + "id": id, + "file": file, + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FileUpload, _response.json()) # type: ignore + return typing.cast( + FileUpload, + parse_obj_as( + type_=FileUpload, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -440,6 +517,7 @@ async def list( request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[FileUpload]: """ + Retrieve the list of uploaded files used to create labeling tasks for a specific project. These are files that have been uploaded directly to Label Studio. You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). @@ -465,24 +543,41 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.files.list( - id=1, - ) + + + async def main() -> None: + await client.files.list( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/file-uploads", method="GET", - params={"all": all_, "ids": ids}, + params={ + "all": all_, + "ids": ids, + }, request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[FileUpload], _response.json()) # type: ignore + return typing.cast( + typing.List[FileUpload], + parse_obj_as( + type_=typing.List[FileUpload], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -490,6 +585,7 @@ async def list( async def delete_many(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete uploaded files for a specific project. These are files that have been uploaded directly to Label Studio. You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list).
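A hedged sketch of the list endpoint shown above; the project ID is a placeholder and `all_` mirrors the query parameter visible in the signature.

```python
# Sketch: list every file uploaded to a project.
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

uploads = client.files.list(id=1, all_=True)  # id=1 is a placeholder project ID
for upload in uploads:
    print(upload)  # each item is a parsed FileUpload model
```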
@@ -508,17 +604,27 @@ async def delete_many(self, id: int, *, request_options: typing.Optional[Request Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.files.delete_many( - id=1, - ) + + + async def main() -> None: + await client.files.delete_many( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/file-uploads", method="DELETE", request_options=request_options + f"api/projects/{jsonable_encoder(id)}/file-uploads", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -545,17 +651,27 @@ async def download(self, filename: str, *, request_options: typing.Optional[Requ Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.files.download( - filename="filename", - ) + + + async def main() -> None: + await client.files.download( + filename="filename", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"data/upload/{jsonable_encoder(filename)}", method="GET", request_options=request_options + f"data/upload/{jsonable_encoder(filename)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: diff --git a/src/label_studio_sdk/import_storage/azure/client.py b/src/label_studio_sdk/import_storage/azure/client.py index 5518a133f..655b8b5d4 100644 --- a/src/label_studio_sdk/import_storage/azure/client.py +++ b/src/label_studio_sdk/import_storage/azure/client.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.azure_blob_import_storage import AzureBlobImportStorage +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.azure_create_response import AzureCreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.azure_update_response import AzureUpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -24,6 +24,7 @@ def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[AzureBlobImportStorage]: """ + You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. Use this API request to get a list of all Azure import (source) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). 
@@ -45,7 +46,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -53,11 +54,22 @@ def list( client.import_storage.azure.list() """ _response = self._client_wrapper.httpx_client.request( - "api/storages/azure/", method="GET", params={"project": project}, request_options=request_options + "api/storages/azure/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[AzureBlobImportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[AzureBlobImportStorage], + parse_obj_as( + type_=typing.List[AzureBlobImportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -80,6 +92,7 @@ def create( request_options: typing.Optional[RequestOptions] = None, ) -> AzureCreateResponse: """ + Create a new source storage connection to Microsoft Azure Blob storage. For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. @@ -133,7 +146,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -156,12 +169,21 @@ def create( "account_name": account_name, "account_key": account_key, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureCreateResponse, _response.json()) # type: ignore + return typing.cast( + AzureCreateResponse, + parse_obj_as( + type_=AzureCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -185,6 +207,7 @@ def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific Azure import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. Parameters @@ -234,7 +257,7 @@ def validate( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -258,6 +281,9 @@ def validate( "account_name": account_name, "account_key": account_key, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -271,6 +297,7 @@ def validate( def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobImportStorage: """ + Get a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
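The validate/create pair above suggests a natural workflow: check the Azure credentials first, then create the connection. A sketch with placeholder values; the keyword names mirror the JSON payload in the hunks above.

```python
# Sketch: validate Azure Blob settings, then create the source storage.
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

settings = dict(
    project=1,                  # placeholder project ID
    container="my-container",
    prefix="tasks/",
    regex_filter=".*\\.json",   # required, or no objects are imported
    use_blob_urls=True,
    account_name="mystorageaccount",
    account_key="***",
)

client.import_storage.azure.validate(**settings)  # raises ApiError if misconfigured
storage = client.import_storage.azure.create(**settings)
print(storage.project, storage.container)
```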
@@ -290,7 +317,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -300,11 +327,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/azure/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureBlobImportStorage, _response.json()) # type: ignore + return typing.cast( + AzureBlobImportStorage, + parse_obj_as( + type_=AzureBlobImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -312,6 +347,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. @@ -332,7 +368,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -342,7 +378,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/azure/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -370,6 +408,7 @@ def update( request_options: typing.Optional[RequestOptions] = None, ) -> AzureUpdateResponse: """ + Update a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
@@ -422,7 +461,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -447,12 +486,21 @@ def update( "account_name": account_name, "account_key": account_key, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureUpdateResponse, _response.json()) # type: ignore + return typing.cast( + AzureUpdateResponse, + parse_obj_as( + type_=AzureUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -460,6 +508,7 @@ def update( def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobImportStorage: """ + Sync tasks from an Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Sync operations with external containers only go one way. They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. @@ -481,7 +530,7 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -491,11 +540,19 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/azure/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureBlobImportStorage, _response.json()) # type: ignore + return typing.cast( + AzureBlobImportStorage, + parse_obj_as( + type_=AzureBlobImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -510,6 +567,7 @@ async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[AzureBlobImportStorage]: """ + You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. Use this API request to get a list of all Azure import (source) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). 
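For the one-way sync endpoint above, a short hedged sketch; the storage ID is a placeholder, and it assumes the returned `AzureBlobImportStorage` exposes a `status` field with the in_progress/failed/completed values seen elsewhere in this diff.

```python
# Sketch: trigger a sync for an existing Azure source storage.
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

storage = client.import_storage.azure.sync(id=1)  # placeholder storage ID
print(storage.status)  # e.g. "in_progress" or "completed" (assumed field)
```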
@@ -531,19 +589,38 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.azure.list() + + + async def main() -> None: + await client.import_storage.azure.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/storages/azure/", method="GET", params={"project": project}, request_options=request_options + "api/storages/azure/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[AzureBlobImportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[AzureBlobImportStorage], + parse_obj_as( + type_=typing.List[AzureBlobImportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -566,6 +643,7 @@ async def create( request_options: typing.Optional[RequestOptions] = None, ) -> AzureCreateResponse: """ + Create a new source storage connection to Microsoft Azure Blob storage. For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. @@ -619,12 +697,20 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.azure.create() + + + async def main() -> None: + await client.import_storage.azure.create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/azure/", @@ -642,12 +728,21 @@ async def create( "account_name": account_name, "account_key": account_key, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureCreateResponse, _response.json()) # type: ignore + return typing.cast( + AzureCreateResponse, + parse_obj_as( + type_=AzureCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -671,6 +766,7 @@ async def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific Azure import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. 
Parameters @@ -720,12 +816,20 @@ async def validate( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.azure.validate() + + + async def main() -> None: + await client.import_storage.azure.validate() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/azure/validate", @@ -744,6 +848,9 @@ async def validate( "account_name": account_name, "account_key": account_key, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -757,6 +864,7 @@ async def validate( async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobImportStorage: """ + Get a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -776,21 +884,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.azure.get( - id=1, - ) + + + async def main() -> None: + await client.import_storage.azure.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/azure/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureBlobImportStorage, _response.json()) # type: ignore + return typing.cast( + AzureBlobImportStorage, + parse_obj_as( + type_=AzureBlobImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -798,6 +922,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. 
@@ -818,17 +943,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.azure.delete( - id=1, - ) + + + async def main() -> None: + await client.import_storage.azure.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/azure/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -856,6 +991,7 @@ async def update( request_options: typing.Optional[RequestOptions] = None, ) -> AzureUpdateResponse: """ + Update a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -908,14 +1044,22 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.azure.update( - id=1, - ) + + + async def main() -> None: + await client.import_storage.azure.update( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/storages/azure/{jsonable_encoder(id)}", @@ -933,12 +1077,21 @@ async def update( "account_name": account_name, "account_key": account_key, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureUpdateResponse, _response.json()) # type: ignore + return typing.cast( + AzureUpdateResponse, + parse_obj_as( + type_=AzureUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -946,6 +1099,7 @@ async def update( async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobImportStorage: """ + Sync tasks from an Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Sync operations with external containers only go one way. They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. 
@@ -967,21 +1121,37 @@ async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.azure.sync( - id=1, - ) + + + async def main() -> None: + await client.import_storage.azure.sync( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/azure/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureBlobImportStorage, _response.json()) # type: ignore + return typing.cast( + AzureBlobImportStorage, + parse_obj_as( + type_=AzureBlobImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/import_storage/azure/types/azure_create_response.py b/src/label_studio_sdk/import_storage/azure/types/azure_create_response.py index 950ca525e..b59cf0117 100644 --- a/src/label_studio_sdk/import_storage/azure/types/azure_create_response.py +++ b/src/label_studio_sdk/import_storage/azure/types/azure_create_response.py @@ -1,82 +1,72 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class AzureCreateResponse(pydantic_v1.BaseModel): - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) +class AzureCreateResponse(UniversalBaseModel): + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
""" - presign: typing.Optional[bool] = pydantic_v1.Field(default=None) + presign: typing.Optional[bool] = pydantic.Field(default=None) """ Presign URLs for direct download """ - presign_ttl: typing.Optional[int] = pydantic_v1.Field(default=None) + presign_ttl: typing.Optional[int] = pydantic.Field(default=None) """ Presign TTL in minutes """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - container: typing.Optional[str] = pydantic_v1.Field(default=None) + container: typing.Optional[str] = pydantic.Field(default=None) """ Azure blob container """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ Azure blob prefix name """ - account_name: typing.Optional[str] = pydantic_v1.Field(default=None) + account_name: typing.Optional[str] = pydantic.Field(default=None) """ Azure Blob account name """ - account_key: typing.Optional[str] = pydantic_v1.Field(default=None) + account_key: typing.Optional[str] = pydantic.Field(default=None) """ Azure Blob account key """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/import_storage/azure/types/azure_update_response.py b/src/label_studio_sdk/import_storage/azure/types/azure_update_response.py index 15d0db36f..afacbeb28 100644 --- a/src/label_studio_sdk/import_storage/azure/types/azure_update_response.py +++ b/src/label_studio_sdk/import_storage/azure/types/azure_update_response.py @@ -1,82 +1,72 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class AzureUpdateResponse(pydantic_v1.BaseModel): - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) +class AzureUpdateResponse(UniversalBaseModel): + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. 
""" - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. """ - presign: typing.Optional[bool] = pydantic_v1.Field(default=None) + presign: typing.Optional[bool] = pydantic.Field(default=None) """ Presign URLs for direct download """ - presign_ttl: typing.Optional[int] = pydantic_v1.Field(default=None) + presign_ttl: typing.Optional[int] = pydantic.Field(default=None) """ Presign TTL in minutes """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - container: typing.Optional[str] = pydantic_v1.Field(default=None) + container: typing.Optional[str] = pydantic.Field(default=None) """ Azure blob container """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ Azure blob prefix name """ - account_name: typing.Optional[str] = pydantic_v1.Field(default=None) + account_name: typing.Optional[str] = pydantic.Field(default=None) """ Azure Blob account name """ - account_key: typing.Optional[str] = pydantic_v1.Field(default=None) + account_key: typing.Optional[str] = pydantic.Field(default=None) """ Azure Blob account key """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/import_storage/client.py b/src/label_studio_sdk/import_storage/client.py index 7fb7adbca..6ac60efe9 100644 --- a/src/label_studio_sdk/import_storage/client.py +++ b/src/label_studio_sdk/import_storage/client.py @@ -1,19 +1,25 @@ # This file was auto-generated by Fern from our API Definition. 
+from ..core.client_wrapper import SyncClientWrapper +from .azure.client import AzureClient +from .gcs.client import GcsClient +from .local.client import LocalClient +from .redis.client import RedisClient +from .s3.client import S3Client +from .s3s.client import S3SClient import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.pydantic_utilities import pydantic_v1 from ..core.request_options import RequestOptions -from .azure.client import AsyncAzureClient, AzureClient -from .gcs.client import AsyncGcsClient, GcsClient -from .local.client import AsyncLocalClient, LocalClient -from .redis.client import AsyncRedisClient, RedisClient -from .s3.client import AsyncS3Client, S3Client -from .s3s.client import AsyncS3SClient, S3SClient from .types.import_storage_list_types_response_item import ImportStorageListTypesResponseItem +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper +from .azure.client import AsyncAzureClient +from .gcs.client import AsyncGcsClient +from .local.client import AsyncLocalClient +from .redis.client import AsyncRedisClient +from .s3.client import AsyncS3Client +from .s3s.client import AsyncS3SClient class ImportStorageClient: @@ -44,7 +50,7 @@ def list_types( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -52,11 +58,19 @@ def list_types( client.import_storage.list_types() """ _response = self._client_wrapper.httpx_client.request( - "api/storages/types", method="GET", request_options=request_options + "api/storages/types", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[ImportStorageListTypesResponseItem], _response.json()) # type: ignore + return typing.cast( + typing.List[ImportStorageListTypesResponseItem], + parse_obj_as( + type_=typing.List[ImportStorageListTypesResponseItem], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -91,19 +105,35 @@ async def list_types( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.list_types() + + + async def main() -> None: + await client.import_storage.list_types() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/storages/types", method="GET", request_options=request_options + "api/storages/types", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[ImportStorageListTypesResponseItem], _response.json()) # type: ignore + return typing.cast( + typing.List[ImportStorageListTypesResponseItem], + parse_obj_as( + type_=typing.List[ImportStorageListTypesResponseItem], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/import_storage/gcs/client.py 
b/src/label_studio_sdk/import_storage/gcs/client.py index b36dbe5a8..5262a9ea0 100644 --- a/src/label_studio_sdk/import_storage/gcs/client.py +++ b/src/label_studio_sdk/import_storage/gcs/client.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.gcs_import_storage import GcsImportStorage +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.gcs_create_response import GcsCreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.gcs_update_response import GcsUpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -24,6 +24,7 @@ def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[GcsImportStorage]: """ + You can connect your Google Cloud Storage bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). @@ -45,7 +46,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -53,11 +54,22 @@ def list( client.import_storage.gcs.list() """ _response = self._client_wrapper.httpx_client.request( - "api/storages/gcs/", method="GET", params={"project": project}, request_options=request_options + "api/storages/gcs/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[GcsImportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[GcsImportStorage], + parse_obj_as( + type_=typing.List[GcsImportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -80,6 +92,7 @@ def create( request_options: typing.Optional[RequestOptions] = None, ) -> GcsCreateResponse: """ + Create a new source storage connection to a Google Cloud Storage bucket. For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. 
@@ -133,7 +146,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -156,12 +169,21 @@ def create( "google_application_credentials": google_application_credentials, "google_project_id": google_project_id, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsCreateResponse, _response.json()) # type: ignore + return typing.cast( + GcsCreateResponse, + parse_obj_as( + type_=GcsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -185,6 +207,7 @@ def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific GCS import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. Parameters @@ -234,7 +257,7 @@ def validate( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -258,6 +281,9 @@ def validate( "google_application_credentials": google_application_credentials, "google_project_id": google_project_id, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -271,6 +297,7 @@ def validate( def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsImportStorage: """ + Get a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -290,7 +317,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -300,11 +327,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/gcs/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsImportStorage, _response.json()) # type: ignore + return typing.cast( + GcsImportStorage, + parse_obj_as( + type_=GcsImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -312,6 +347,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Deleting a source storage connection does not affect tasks with synced data in Label Studio. 
The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. @@ -332,7 +368,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -342,7 +378,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/gcs/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -370,6 +408,7 @@ def update( request_options: typing.Optional[RequestOptions] = None, ) -> GcsUpdateResponse: """ + Update a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -422,7 +461,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -447,12 +486,21 @@ def update( "google_application_credentials": google_application_credentials, "google_project_id": google_project_id, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsUpdateResponse, _response.json()) # type: ignore + return typing.cast( + GcsUpdateResponse, + parse_obj_as( + type_=GcsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -460,6 +508,7 @@ def update( def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsImportStorage: """ + Sync tasks from a GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. 
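Every response-parsing site in these hunks swaps `pydantic_v1.parse_obj_as(T, data)` for `typing.cast(T, parse_obj_as(type_=T, object_=data))`. A standalone sketch of the same pattern outside the generated client, assuming the absolute import paths that the relative imports above resolve to; the raw payload and field access are illustrative:

```python
import typing

from label_studio_sdk.core.pydantic_utilities import parse_obj_as
from label_studio_sdk.types.gcs_import_storage import GcsImportStorage

raw = [{"bucket": "my-bucket"}]  # stand-in for _response.json()

# parse_obj_as validates against the target type on both pydantic v1 and v2;
# typing.cast then narrows the return type for static checkers, which is why
# the generated code always pairs the two.
storages = typing.cast(
    typing.List[GcsImportStorage],
    parse_obj_as(type_=typing.List[GcsImportStorage], object_=raw),
)
print(storages[0].bucket)
```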
@@ -481,7 +530,7 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -491,11 +540,19 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/gcs/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsImportStorage, _response.json()) # type: ignore + return typing.cast( + GcsImportStorage, + parse_obj_as( + type_=GcsImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -510,6 +567,7 @@ async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[GcsImportStorage]: """ + You can connect your Google Cloud Storage bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). @@ -531,19 +589,38 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.gcs.list() + + + async def main() -> None: + await client.import_storage.gcs.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/storages/gcs/", method="GET", params={"project": project}, request_options=request_options + "api/storages/gcs/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[GcsImportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[GcsImportStorage], + parse_obj_as( + type_=typing.List[GcsImportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -566,6 +643,7 @@ async def create( request_options: typing.Optional[RequestOptions] = None, ) -> GcsCreateResponse: """ + Create a new source storage connection to a Google Cloud Storage bucket. For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. 
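The async docstring examples are now wrapped in an `async def main()` driven by `asyncio.run`, instead of a bare top-level `await`. The same shape works for any combination of calls; the project ID is a placeholder:

```python
import asyncio

from label_studio_sdk import AsyncLabelStudio

client = AsyncLabelStudio(api_key="YOUR_API_KEY")


async def main() -> None:
    # every method on the async client returns an awaitable
    storages = await client.import_storage.gcs.list(project=1)
    for storage in storages:
        print(storage.title)


asyncio.run(main())
```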
@@ -619,12 +697,20 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.gcs.create() + + + async def main() -> None: + await client.import_storage.gcs.create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/gcs/", @@ -642,12 +728,21 @@ async def create( "google_application_credentials": google_application_credentials, "google_project_id": google_project_id, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsCreateResponse, _response.json()) # type: ignore + return typing.cast( + GcsCreateResponse, + parse_obj_as( + type_=GcsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -671,6 +766,7 @@ async def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific GCS import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. Parameters @@ -720,12 +816,20 @@ async def validate( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.gcs.validate() + + + async def main() -> None: + await client.import_storage.gcs.validate() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/gcs/validate", @@ -744,6 +848,9 @@ async def validate( "google_application_credentials": google_application_credentials, "google_project_id": google_project_id, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -757,6 +864,7 @@ async def validate( async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsImportStorage: """ + Get a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
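Since `validate` takes the same keyword arguments as `create` and returns `None` on success, it supports a check-before-create flow; per the generated error path, a non-2xx response surfaces as `ApiError`. A sync-client sketch with placeholder settings:

```python
from label_studio_sdk import LabelStudio
from label_studio_sdk.core.api_error import ApiError

client = LabelStudio(api_key="YOUR_API_KEY")

settings = {"project": 1, "bucket": "my-bucket", "prefix": "tasks/"}

try:
    client.import_storage.gcs.validate(**settings)  # returns None on success
except ApiError as exc:
    print(f"storage settings rejected: {exc.status_code} {exc.body}")
else:
    client.import_storage.gcs.create(**settings)
```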
@@ -776,21 +884,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.gcs.get( - id=1, - ) + + + async def main() -> None: + await client.import_storage.gcs.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/gcs/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsImportStorage, _response.json()) # type: ignore + return typing.cast( + GcsImportStorage, + parse_obj_as( + type_=GcsImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -798,6 +922,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. @@ -818,17 +943,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.gcs.delete( - id=1, - ) + + + async def main() -> None: + await client.import_storage.gcs.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/gcs/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -856,6 +991,7 @@ async def update( request_options: typing.Optional[RequestOptions] = None, ) -> GcsUpdateResponse: """ + Update a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
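These clients define `OMIT = typing.cast(typing.Any, ...)` as the default for optional parameters and pass `omit=OMIT` to the request, so an update only serializes the fields you actually supply. A partial-update sketch (ID and title are placeholders):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Only `title` is sent in the request body; every other optional field keeps
# the OMIT sentinel and is dropped before serialization.
client.import_storage.gcs.update(
    id=1,
    title="Renamed GCS source",
)
```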
@@ -908,14 +1044,22 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.gcs.update( - id=1, - ) + + + async def main() -> None: + await client.import_storage.gcs.update( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/storages/gcs/{jsonable_encoder(id)}", @@ -933,12 +1077,21 @@ async def update( "google_application_credentials": google_application_credentials, "google_project_id": google_project_id, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsUpdateResponse, _response.json()) # type: ignore + return typing.cast( + GcsUpdateResponse, + parse_obj_as( + type_=GcsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -946,6 +1099,7 @@ async def update( async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsImportStorage: """ + Sync tasks from a GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. @@ -967,21 +1121,37 @@ async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.gcs.sync( - id=1, - ) + + + async def main() -> None: + await client.import_storage.gcs.sync( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/gcs/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsImportStorage, _response.json()) # type: ignore + return typing.cast( + GcsImportStorage, + parse_obj_as( + type_=GcsImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/import_storage/gcs/types/gcs_create_response.py b/src/label_studio_sdk/import_storage/gcs/types/gcs_create_response.py index 979e4d829..58c05a731 100644 --- a/src/label_studio_sdk/import_storage/gcs/types/gcs_create_response.py +++ b/src/label_studio_sdk/import_storage/gcs/types/gcs_create_response.py @@ -1,82 +1,72 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class GcsCreateResponse(pydantic_v1.BaseModel): - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) +class GcsCreateResponse(UniversalBaseModel): + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. """ - presign: typing.Optional[bool] = pydantic_v1.Field(default=None) + presign: typing.Optional[bool] = pydantic.Field(default=None) """ Presign URLs for direct download """ - presign_ttl: typing.Optional[int] = pydantic_v1.Field(default=None) + presign_ttl: typing.Optional[int] = pydantic.Field(default=None) """ Presign TTL in minutes """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - bucket: typing.Optional[str] = pydantic_v1.Field(default=None) + bucket: typing.Optional[str] = pydantic.Field(default=None) """ GCS bucket name """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ GCS bucket prefix """ - google_application_credentials: typing.Optional[str] = pydantic_v1.Field(default=None) + google_application_credentials: typing.Optional[str] = pydantic.Field(default=None) """ The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. 
""" - google_project_id: typing.Optional[str] = pydantic_v1.Field(default=None) + google_project_id: typing.Optional[str] = pydantic.Field(default=None) """ Google project ID """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/import_storage/gcs/types/gcs_update_response.py b/src/label_studio_sdk/import_storage/gcs/types/gcs_update_response.py index 1aa166421..54c7e415c 100644 --- a/src/label_studio_sdk/import_storage/gcs/types/gcs_update_response.py +++ b/src/label_studio_sdk/import_storage/gcs/types/gcs_update_response.py @@ -1,82 +1,72 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class GcsUpdateResponse(pydantic_v1.BaseModel): - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) +class GcsUpdateResponse(UniversalBaseModel): + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
""" - presign: typing.Optional[bool] = pydantic_v1.Field(default=None) + presign: typing.Optional[bool] = pydantic.Field(default=None) """ Presign URLs for direct download """ - presign_ttl: typing.Optional[int] = pydantic_v1.Field(default=None) + presign_ttl: typing.Optional[int] = pydantic.Field(default=None) """ Presign TTL in minutes """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - bucket: typing.Optional[str] = pydantic_v1.Field(default=None) + bucket: typing.Optional[str] = pydantic.Field(default=None) """ GCS bucket name """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ GCS bucket prefix """ - google_application_credentials: typing.Optional[str] = pydantic_v1.Field(default=None) + google_application_credentials: typing.Optional[str] = pydantic.Field(default=None) """ The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. """ - google_project_id: typing.Optional[str] = pydantic_v1.Field(default=None) + google_project_id: typing.Optional[str] = pydantic.Field(default=None) """ Google project ID """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/import_storage/local/client.py b/src/label_studio_sdk/import_storage/local/client.py index d0613d8c0..ad230f7ed 100644 --- a/src/label_studio_sdk/import_storage/local/client.py +++ b/src/label_studio_sdk/import_storage/local/client.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.local_files_import_storage import LocalFilesImportStorage +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.local_create_response import LocalCreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.local_update_response import LocalUpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -24,6 +24,7 @@ def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[LocalFilesImportStorage]: """ + If you have local files that you want to add to Label Studio from a specific directory, you can set up a specific local directory on the machine where LS is running as source or target storage. Use this API request to get a list of all local file import (source) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). @@ -45,7 +46,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -53,11 +54,22 @@ def list( client.import_storage.local.list() """ _response = self._client_wrapper.httpx_client.request( - "api/storages/localfiles/", method="GET", params={"project": project}, request_options=request_options + "api/storages/localfiles/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[LocalFilesImportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[LocalFilesImportStorage], + parse_obj_as( + type_=typing.List[LocalFilesImportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -75,6 +87,7 @@ def create( request_options: typing.Optional[RequestOptions] = None, ) -> LocalCreateResponse: """ + Create a new source storage connection to a local file directory. For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. 
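The response-model files rewritten above replace the hand-rolled `json()`/`dict()` overrides with a version-gated config on `UniversalBaseModel`. A minimal standalone sketch of that pattern, using `pydantic.VERSION` in place of the SDK-internal `IS_PYDANTIC_V2` flag and a plain `pydantic.BaseModel` in place of `UniversalBaseModel`:

```python
import typing

import pydantic

IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")


class ExampleResponse(pydantic.BaseModel):
    title: typing.Optional[str] = pydantic.Field(default=None)

    # Only the branch matching the installed pydantic major version is
    # evaluated, so each config style is referenced only where it exists.
    if IS_PYDANTIC_V2:
        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(
            extra="allow", frozen=True
        )  # type: ignore  # pydantic v2 only
    else:

        class Config:
            frozen = True
            smart_union = True
            extra = pydantic.Extra.allow
```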
@@ -111,7 +124,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -129,12 +142,21 @@ def create( "regex_filter": regex_filter, "use_blob_urls": use_blob_urls, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalCreateResponse, _response.json()) # type: ignore + return typing.cast( + LocalCreateResponse, + parse_obj_as( + type_=LocalCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -153,6 +175,7 @@ def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific local file import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. Parameters @@ -187,7 +210,7 @@ def validate( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -206,6 +229,9 @@ def validate( "regex_filter": regex_filter, "use_blob_urls": use_blob_urls, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -219,6 +245,7 @@ def validate( def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesImportStorage: """ + Get a specific local file import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -238,7 +265,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -248,11 +275,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalFilesImportStorage, _response.json()) # type: ignore + return typing.cast( + LocalFilesImportStorage, + parse_obj_as( + type_=LocalFilesImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -260,6 +295,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Deleting a source storage connection does not affect tasks with synced data in Label Studio. 
The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. @@ -280,7 +316,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -290,7 +326,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -313,6 +351,7 @@ def update( request_options: typing.Optional[RequestOptions] = None, ) -> LocalUpdateResponse: """ + Update a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -350,7 +389,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -370,12 +409,21 @@ def update( "regex_filter": regex_filter, "use_blob_urls": use_blob_urls, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalUpdateResponse, _response.json()) # type: ignore + return typing.cast( + LocalUpdateResponse, + parse_obj_as( + type_=LocalUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -383,6 +431,7 @@ def update( def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesImportStorage: """ + Sync tasks from a local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Sync operations with external sources only go one way. They either create tasks from objects in the source directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results. 
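Putting the local-storage endpoints together: create the connection, then trigger a sync. As the docstring notes, sync is one-way; it imports new or updated files as tasks and never deletes. The path, filter, and IDs are placeholders:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# The directory must already be accessible to the Label Studio instance; see
# the Local storage guide linked above.
storage = client.import_storage.local.create(
    project=1,
    path="/data/local-files/tasks",
    regex_filter=r".*\.json",
    use_blob_urls=False,
)
print(storage.path)

# One-way sync: imports from disk into the project, never the reverse.
client.import_storage.local.sync(id=1)  # placeholder storage ID
```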
@@ -404,7 +453,7 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -414,11 +463,19 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/localfiles/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalFilesImportStorage, _response.json()) # type: ignore + return typing.cast( + LocalFilesImportStorage, + parse_obj_as( + type_=LocalFilesImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -433,6 +490,7 @@ async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[LocalFilesImportStorage]: """ + If you have local files that you want to add to Label Studio from a specific directory, you can set up a specific local directory on the machine where LS is running as source or target storage. Use this API request to get a list of all local file import (source) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). @@ -454,19 +512,38 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.local.list() + + + async def main() -> None: + await client.import_storage.local.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/storages/localfiles/", method="GET", params={"project": project}, request_options=request_options + "api/storages/localfiles/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[LocalFilesImportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[LocalFilesImportStorage], + parse_obj_as( + type_=typing.List[LocalFilesImportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -484,6 +561,7 @@ async def create( request_options: typing.Optional[RequestOptions] = None, ) -> LocalCreateResponse: """ + Create a new source storage connection to a local file directory. For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. 
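Every method also threads through an optional `request_options`. A per-call tuning sketch, assuming the usual keys of the Fern-generated `RequestOptions` TypedDict (`timeout_in_seconds`, `max_retries`), which this diff does not itself show:

```python
import asyncio

from label_studio_sdk import AsyncLabelStudio

client = AsyncLabelStudio(api_key="YOUR_API_KEY")


async def main() -> None:
    # placeholder project and path; the request_options keys are assumed
    # from typical Fern-generated SDKs, not confirmed by this diff
    await client.import_storage.local.create(
        project=1,
        path="/data/local-files/tasks",
        request_options={"timeout_in_seconds": 60, "max_retries": 2},
    )


asyncio.run(main())
```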
@@ -520,12 +598,20 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.local.create() + + + async def main() -> None: + await client.import_storage.local.create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/localfiles/", @@ -538,12 +624,21 @@ async def create( "regex_filter": regex_filter, "use_blob_urls": use_blob_urls, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalCreateResponse, _response.json()) # type: ignore + return typing.cast( + LocalCreateResponse, + parse_obj_as( + type_=LocalCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -562,6 +657,7 @@ async def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific local file import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. Parameters @@ -596,12 +692,20 @@ async def validate( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.local.validate() + + + async def main() -> None: + await client.import_storage.local.validate() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/localfiles/validate", @@ -615,6 +719,9 @@ async def validate( "regex_filter": regex_filter, "use_blob_urls": use_blob_urls, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -628,6 +735,7 @@ async def validate( async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesImportStorage: """ + Get a specific local file import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
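One practical payoff of the async client: independent reads can be fanned out with `asyncio.gather`. The storage IDs are placeholders:

```python
import asyncio

from label_studio_sdk import AsyncLabelStudio

client = AsyncLabelStudio(api_key="YOUR_API_KEY")


async def main() -> None:
    # both GET requests run concurrently on the shared client
    first, second = await asyncio.gather(
        client.import_storage.local.get(id=1),
        client.import_storage.local.get(id=2),
    )
    print(first, second)


asyncio.run(main())
```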
@@ -647,21 +755,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.local.get( - id=1, - ) + + + async def main() -> None: + await client.import_storage.local.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalFilesImportStorage, _response.json()) # type: ignore + return typing.cast( + LocalFilesImportStorage, + parse_obj_as( + type_=LocalFilesImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -669,6 +793,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. @@ -689,17 +814,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.local.delete( - id=1, - ) + + + async def main() -> None: + await client.import_storage.local.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -722,6 +857,7 @@ async def update( request_options: typing.Optional[RequestOptions] = None, ) -> LocalUpdateResponse: """ + Update a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
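A typical update flow: change the filter on an existing connection, then re-sync so the new filter is applied on the next import pass (the ID and pattern are placeholders):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Tighten the object filter, then pull in whatever now matches.
client.import_storage.local.update(id=1, regex_filter=r".*\.jsonl?")
client.import_storage.local.sync(id=1)
```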
@@ -759,14 +895,22 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.local.update( - id=1, - ) + + + async def main() -> None: + await client.import_storage.local.update( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/storages/localfiles/{jsonable_encoder(id)}", @@ -779,12 +923,21 @@ async def update( "regex_filter": regex_filter, "use_blob_urls": use_blob_urls, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalUpdateResponse, _response.json()) # type: ignore + return typing.cast( + LocalUpdateResponse, + parse_obj_as( + type_=LocalUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -794,6 +947,7 @@ async def sync( self, id: int, *, request_options: typing.Optional[RequestOptions] = None ) -> LocalFilesImportStorage: """ + Sync tasks from a local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Sync operations with external sources only go one way. They either create tasks from objects in the source directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results. @@ -815,21 +969,37 @@ async def sync( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.local.sync( - id=1, - ) + + + async def main() -> None: + await client.import_storage.local.sync( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/localfiles/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalFilesImportStorage, _response.json()) # type: ignore + return typing.cast( + LocalFilesImportStorage, + parse_obj_as( + type_=LocalFilesImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/import_storage/local/types/local_create_response.py b/src/label_studio_sdk/import_storage/local/types/local_create_response.py index eba7c780b..95051747a 100644 --- a/src/label_studio_sdk/import_storage/local/types/local_create_response.py +++ b/src/label_studio_sdk/import_storage/local/types/local_create_response.py @@ -1,57 +1,47 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class LocalCreateResponse(pydantic_v1.BaseModel): - title: typing.Optional[str] = pydantic_v1.Field(default=None) +class LocalCreateResponse(UniversalBaseModel): + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - path: typing.Optional[str] = pydantic_v1.Field(default=None) + path: typing.Optional[str] = pydantic.Field(default=None) """ Path to local directory """ - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Regex for filtering objects """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/import_storage/local/types/local_update_response.py b/src/label_studio_sdk/import_storage/local/types/local_update_response.py index a9d7c3970..e5dd8df6c 100644 --- a/src/label_studio_sdk/import_storage/local/types/local_update_response.py +++ b/src/label_studio_sdk/import_storage/local/types/local_update_response.py @@ -1,57 +1,47 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class LocalUpdateResponse(pydantic_v1.BaseModel): - title: typing.Optional[str] = pydantic_v1.Field(default=None) +class LocalUpdateResponse(UniversalBaseModel): + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - path: typing.Optional[str] = pydantic_v1.Field(default=None) + path: typing.Optional[str] = pydantic.Field(default=None) """ Path to local directory """ - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Regex for filtering objects """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/import_storage/redis/client.py b/src/label_studio_sdk/import_storage/redis/client.py index 338d9bdfd..dd9d6225a 100644 --- a/src/label_studio_sdk/import_storage/redis/client.py +++ b/src/label_studio_sdk/import_storage/redis/client.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.redis_import_storage import RedisImportStorage +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.redis_create_response import RedisCreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.redis_update_response import RedisUpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -24,6 +24,7 @@ def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[RedisImportStorage]: """ + You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis import (source) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). @@ -45,7 +46,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -53,11 +54,22 @@ def list( client.import_storage.redis.list() """ _response = self._client_wrapper.httpx_client.request( - "api/storages/redis/", method="GET", params={"project": project}, request_options=request_options + "api/storages/redis/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[RedisImportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[RedisImportStorage], + parse_obj_as( + type_=typing.List[RedisImportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -78,6 +90,7 @@ def create( request_options: typing.Optional[RequestOptions] = None, ) -> RedisCreateResponse: """ + Create a new source storage connection to a Redis database. For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. 
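A creation sketch for the Redis connection; the host, password, and path are placeholders, and all fields are optional keyword arguments:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

client.import_storage.redis.create(
    project=1,
    host="redis.internal.example.com",  # placeholder host
    password="REDIS_PASSWORD",          # placeholder credential
    path="tasks",                       # placeholder key prefix to read from
)
```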
@@ -123,7 +136,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -144,12 +157,21 @@ def create( "port": port, "password": password, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisCreateResponse, _response.json()) # type: ignore + return typing.cast( + RedisCreateResponse, + parse_obj_as( + type_=RedisCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -171,6 +193,7 @@ def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific Redis import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. Parameters @@ -214,7 +237,7 @@ def validate( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -236,6 +259,9 @@ def validate( "port": port, "password": password, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -249,6 +275,7 @@ def validate( def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisImportStorage: """ + Get a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -268,7 +295,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -278,11 +305,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/redis/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisImportStorage, _response.json()) # type: ignore + return typing.cast( + RedisImportStorage, + parse_obj_as( + type_=RedisImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -290,6 +325,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. 
Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. @@ -310,7 +346,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -320,7 +356,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/redis/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -346,6 +384,7 @@ def update( request_options: typing.Optional[RequestOptions] = None, ) -> RedisUpdateResponse: """ + Update a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -392,7 +431,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -415,12 +454,21 @@ def update( "port": port, "password": password, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisUpdateResponse, _response.json()) # type: ignore + return typing.cast( + RedisUpdateResponse, + parse_obj_as( + type_=RedisUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -428,6 +476,7 @@ def update( def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisImportStorage: """ + Sync tasks from a Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Sync operations with external databases only go one way. They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results. 
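Because sync is strictly one-way, a sensible pattern is to validate the connection settings first and only trigger a sync once validation passes. A short sketch, under the assumption that validate() accepts the same connection fields as create():

from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# validate() returns None on success and raises ApiError on failure,
# so reaching the next statement means the settings are usable.
client.import_storage.redis.validate(
    project=1,  # assumed project ID
    host="10.0.0.5",
    port="6379",
    password="secret",
)

# One-way sync: objects in Redis become Label Studio tasks; nothing
# is written back to the database.
storage = client.import_storage.redis.sync(id=1)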
@@ -449,7 +498,7 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -459,11 +508,19 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/redis/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisImportStorage, _response.json()) # type: ignore + return typing.cast( + RedisImportStorage, + parse_obj_as( + type_=RedisImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -478,6 +535,7 @@ async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[RedisImportStorage]: """ + You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis import (source) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). @@ -499,19 +557,38 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.redis.list() + + + async def main() -> None: + await client.import_storage.redis.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/storages/redis/", method="GET", params={"project": project}, request_options=request_options + "api/storages/redis/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[RedisImportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[RedisImportStorage], + parse_obj_as( + type_=typing.List[RedisImportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -532,6 +609,7 @@ async def create( request_options: typing.Optional[RequestOptions] = None, ) -> RedisCreateResponse: """ + Create a new source storage connection to a Redis database. For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. 
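The async docstring examples now wrap every call in an explicit asyncio.run(main()) entry point. A sketch of the same pattern extended to concurrent calls; asyncio.gather is standard library, but whether AsyncLabelStudio is intended for this kind of fan-out is an assumption here, not something this diff states.

import asyncio

from label_studio_sdk import AsyncLabelStudio

client = AsyncLabelStudio(api_key="YOUR_API_KEY")


async def main() -> None:
    # Issue the Redis and S3 list requests concurrently; each awaits
    # its own HTTP round trip through the shared client wrapper.
    redis_storages, s3_storages = await asyncio.gather(
        client.import_storage.redis.list(),
        client.import_storage.s3.list(),
    )
    print(len(redis_storages), len(s3_storages))


asyncio.run(main())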
@@ -577,12 +655,20 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.redis.create() + + + async def main() -> None: + await client.import_storage.redis.create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/redis/", @@ -598,12 +684,21 @@ async def create( "port": port, "password": password, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisCreateResponse, _response.json()) # type: ignore + return typing.cast( + RedisCreateResponse, + parse_obj_as( + type_=RedisCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -625,6 +720,7 @@ async def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific Redis import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. Parameters @@ -668,12 +764,20 @@ async def validate( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.redis.validate() + + + async def main() -> None: + await client.import_storage.redis.validate() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/redis/validate", @@ -690,6 +794,9 @@ async def validate( "port": port, "password": password, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -703,6 +810,7 @@ async def validate( async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisImportStorage: """ + Get a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
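Every method also threads through an optional request_options argument. In Fern-generated SDKs this is conventionally a typed dict of per-request overrides; the key names below (timeout_in_seconds, additional_headers) follow that convention and are assumptions, since this diff never shows the RequestOptions definition.

from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Hypothetical per-request overrides; verify the accepted keys against
# label_studio_sdk.core.request_options in the released package.
storage = client.import_storage.redis.get(
    id=1,
    request_options={
        "timeout_in_seconds": 30,
        "additional_headers": {"x-request-id": "debug-123"},
    },
)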
@@ -722,21 +830,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.redis.get( - id=1, - ) + + + async def main() -> None: + await client.import_storage.redis.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/redis/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisImportStorage, _response.json()) # type: ignore + return typing.cast( + RedisImportStorage, + parse_obj_as( + type_=RedisImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -744,6 +868,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. @@ -764,17 +889,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.redis.delete( - id=1, - ) + + + async def main() -> None: + await client.import_storage.redis.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/redis/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -800,6 +935,7 @@ async def update( request_options: typing.Optional[RequestOptions] = None, ) -> RedisUpdateResponse: """ + Update a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
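Update is a partial edit keyed by the storage ID. A sketch, assuming update() accepts the same optional fields that RedisUpdateResponse exposes later in this diff (title, description, path, host, port, password):

from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Only the fields passed here change; the ID comes from list() or
# from an earlier create() response.
updated = client.import_storage.redis.update(
    id=1,
    title="Renamed Redis source",
    password="rotated-secret",
)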
@@ -846,14 +982,22 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.redis.update( - id=1, - ) + + + async def main() -> None: + await client.import_storage.redis.update( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/storages/redis/{jsonable_encoder(id)}", @@ -869,12 +1013,21 @@ async def update( "port": port, "password": password, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisUpdateResponse, _response.json()) # type: ignore + return typing.cast( + RedisUpdateResponse, + parse_obj_as( + type_=RedisUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -882,6 +1035,7 @@ async def update( async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisImportStorage: """ + Sync tasks from a Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Sync operations with external databases only go one way. They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results. @@ -903,21 +1057,37 @@ async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.redis.sync( - id=1, - ) + + + async def main() -> None: + await client.import_storage.redis.sync( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/redis/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisImportStorage, _response.json()) # type: ignore + return typing.cast( + RedisImportStorage, + parse_obj_as( + type_=RedisImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/import_storage/redis/types/redis_create_response.py b/src/label_studio_sdk/import_storage/redis/types/redis_create_response.py index e2f925211..fa8fba715 100644 --- a/src/label_studio_sdk/import_storage/redis/types/redis_create_response.py +++ b/src/label_studio_sdk/import_storage/redis/types/redis_create_response.py @@ -1,72 +1,62 @@ # This file was auto-generated by Fern from our API Definition. 
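The model files that follow drop the pydantic_v1 shim and the hand-rolled json()/dict() overrides in favor of UniversalBaseModel plus an IS_PYDANTIC_V2 switch. A minimal sketch of the branching pattern these generated models rely on; the real UniversalBaseModel in label_studio_sdk.core.pydantic_utilities also handles serialization defaults, so this is illustration, not the SDK source.

import pydantic

# True on pydantic 2.x, False on 1.x; pydantic.VERSION is a plain
# string in both major versions.
IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")

if IS_PYDANTIC_V2:

    class UniversalBaseModel(pydantic.BaseModel):
        model_config = pydantic.ConfigDict(populate_by_name=True)

else:

    class UniversalBaseModel(pydantic.BaseModel):  # type: ignore[no-redef]
        class Config:
            allow_population_by_field_name = True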
-import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class RedisCreateResponse(pydantic_v1.BaseModel): - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) +class RedisCreateResponse(UniversalBaseModel): + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - path: typing.Optional[str] = pydantic_v1.Field(default=None) + path: typing.Optional[str] = pydantic.Field(default=None) """ Storage prefix (optional) """ - host: typing.Optional[str] = pydantic_v1.Field(default=None) + host: typing.Optional[str] = pydantic.Field(default=None) """ Server Host IP (optional) """ - port: typing.Optional[str] = pydantic_v1.Field(default=None) + port: typing.Optional[str] = pydantic.Field(default=None) """ Server Port (optional) """ - password: typing.Optional[str] = pydantic_v1.Field(default=None) + password: typing.Optional[str] = pydantic.Field(default=None) """ Server Password (optional) """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/import_storage/redis/types/redis_update_response.py b/src/label_studio_sdk/import_storage/redis/types/redis_update_response.py index e2047071d..247ff9057 100644 --- a/src/label_studio_sdk/import_storage/redis/types/redis_update_response.py +++ b/src/label_studio_sdk/import_storage/redis/types/redis_update_response.py @@ -1,72 +1,62 @@ # This file was auto-generated by Fern from our API 
Definition. -import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class RedisUpdateResponse(pydantic_v1.BaseModel): - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) +class RedisUpdateResponse(UniversalBaseModel): + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - path: typing.Optional[str] = pydantic_v1.Field(default=None) + path: typing.Optional[str] = pydantic.Field(default=None) """ Storage prefix (optional) """ - host: typing.Optional[str] = pydantic_v1.Field(default=None) + host: typing.Optional[str] = pydantic.Field(default=None) """ Server Host IP (optional) """ - port: typing.Optional[str] = pydantic_v1.Field(default=None) + port: typing.Optional[str] = pydantic.Field(default=None) """ Server Port (optional) """ - password: typing.Optional[str] = pydantic_v1.Field(default=None) + password: typing.Optional[str] = pydantic.Field(default=None) """ Server Password (optional) """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/import_storage/s3/client.py b/src/label_studio_sdk/import_storage/s3/client.py index 5c4a68ad8..227044611 100644 --- a/src/label_studio_sdk/import_storage/s3/client.py +++ b/src/label_studio_sdk/import_storage/s3/client.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.s3import_storage import S3ImportStorage +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.s3create_response import S3CreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.s3update_response import S3UpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -24,6 +24,7 @@ def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[S3ImportStorage]: """ + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 import (source) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). @@ -45,7 +46,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -53,11 +54,22 @@ def list( client.import_storage.s3.list() """ _response = self._client_wrapper.httpx_client.request( - "api/storages/s3/", method="GET", params={"project": project}, request_options=request_options + "api/storages/s3/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[S3ImportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[S3ImportStorage], + parse_obj_as( + type_=typing.List[S3ImportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -85,6 +97,7 @@ def create( request_options: typing.Optional[RequestOptions] = None, ) -> S3CreateResponse: """ + Create a new source storage connection to an S3 bucket. For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation.
@@ -153,7 +166,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -181,12 +194,21 @@ def create( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3CreateResponse, _response.json()) # type: ignore + return typing.cast( + S3CreateResponse, + parse_obj_as( + type_=S3CreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -215,6 +237,7 @@ def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. Parameters @@ -279,7 +302,7 @@ def validate( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -308,6 +331,9 @@ def validate( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -321,6 +347,7 @@ def validate( def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ImportStorage: """ + Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -340,7 +367,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -350,11 +377,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/s3/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3ImportStorage, _response.json()) # type: ignore + return typing.cast( + S3ImportStorage, + parse_obj_as( + type_=S3ImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -362,6 +397,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. 
Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. @@ -382,7 +418,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -392,7 +428,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/s3/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -425,6 +463,7 @@ def update( request_options: typing.Optional[RequestOptions] = None, ) -> S3UpdateResponse: """ + Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -492,7 +531,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -522,12 +561,21 @@ def update( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3UpdateResponse, _response.json()) # type: ignore + return typing.cast( + S3UpdateResponse, + parse_obj_as( + type_=S3UpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -535,6 +583,7 @@ def update( def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ImportStorage: """ + Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. 
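All of these methods share the error contract visible in the hunks above: a non-2xx response raises ApiError carrying the parsed JSON body, falling back to the raw text when the body is not JSON. A short sketch of handling that at the call site:

from label_studio_sdk import LabelStudio
from label_studio_sdk.core.api_error import ApiError

client = LabelStudio(api_key="YOUR_API_KEY")

try:
    client.import_storage.s3.sync(id=1)
except ApiError as err:
    # status_code and body are the attributes populated at the raise
    # sites in the generated clients.
    print(f"Sync failed with HTTP {err.status_code}: {err.body}")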
@@ -556,7 +605,7 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -566,11 +615,19 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/s3/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3ImportStorage, _response.json()) # type: ignore + return typing.cast( + S3ImportStorage, + parse_obj_as( + type_=S3ImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -585,6 +642,7 @@ async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[S3ImportStorage]: """ + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 import (source) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). @@ -606,19 +664,38 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.s3.list() + + + async def main() -> None: + await client.import_storage.s3.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/storages/s3/", method="GET", params={"project": project}, request_options=request_options + "api/storages/s3/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[S3ImportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[S3ImportStorage], + parse_obj_as( + type_=typing.List[S3ImportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -646,6 +723,7 @@ async def create( request_options: typing.Optional[RequestOptions] = None, ) -> S3CreateResponse: """ + Create a new source storage connection to an S3 bucket. For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation.
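A sketch of creating an S3 source storage with the sync client. The bucket, prefix, and credential field names are taken from the S3CreateResponse model later in this diff, on the assumption that create() accepts matching keyword arguments.

from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

created = client.import_storage.s3.create(
    project=1,  # assumed project ID
    bucket="my-label-data",
    prefix="tasks/",
    # Per the field docs below, omitting regex_filter means no
    # objects are imported.
    regex_filter=r".*\.jpg",
    use_blob_urls=True,
    aws_access_key_id="AKIA...",  # placeholder credential
    aws_secret_access_key="...",  # placeholder credential
    region_name="us-east-1",
)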
@@ -714,12 +792,20 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.s3.create() + + + async def main() -> None: + await client.import_storage.s3.create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/s3/", @@ -742,12 +828,21 @@ async def create( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3CreateResponse, _response.json()) # type: ignore + return typing.cast( + S3CreateResponse, + parse_obj_as( + type_=S3CreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -776,6 +871,7 @@ async def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. Parameters @@ -840,12 +936,20 @@ async def validate( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.s3.validate() + + + async def main() -> None: + await client.import_storage.s3.validate() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/s3/validate", @@ -869,6 +973,9 @@ async def validate( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -882,6 +989,7 @@ async def validate( async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ImportStorage: """ + Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
@@ -901,21 +1009,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.s3.get( - id=1, - ) + + + async def main() -> None: + await client.import_storage.s3.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/s3/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3ImportStorage, _response.json()) # type: ignore + return typing.cast( + S3ImportStorage, + parse_obj_as( + type_=S3ImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -923,6 +1047,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. @@ -943,17 +1068,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.s3.delete( - id=1, - ) + + + async def main() -> None: + await client.import_storage.s3.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/s3/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -986,6 +1121,7 @@ async def update( request_options: typing.Optional[RequestOptions] = None, ) -> S3UpdateResponse: """ + Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
@@ -1053,14 +1189,22 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.s3.update( - id=1, - ) + + + async def main() -> None: + await client.import_storage.s3.update( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/storages/s3/{jsonable_encoder(id)}", @@ -1083,12 +1227,21 @@ async def update( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3UpdateResponse, _response.json()) # type: ignore + return typing.cast( + S3UpdateResponse, + parse_obj_as( + type_=S3UpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -1096,6 +1249,7 @@ async def update( async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ImportStorage: """ + Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. @@ -1117,21 +1271,37 @@ async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.s3.sync( - id=1, - ) + + + async def main() -> None: + await client.import_storage.s3.sync( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/s3/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3ImportStorage, _response.json()) # type: ignore + return typing.cast( + S3ImportStorage, + parse_obj_as( + type_=S3ImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/import_storage/s3/types/s3create_response.py b/src/label_studio_sdk/import_storage/s3/types/s3create_response.py index 9aed6c6a2..86b6e5fd3 100644 --- a/src/label_studio_sdk/import_storage/s3/types/s3create_response.py +++ b/src/label_studio_sdk/import_storage/s3/types/s3create_response.py @@ -1,109 +1,101 @@ # This file was auto-generated by Fern from our API Definition. 
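One behavioral detail in the S3 response models below: the s3endpoint attribute keeps its wire name s3_endpoint, but the alias moves from pydantic_v1.Field(alias=...) onto the type itself via typing_extensions.Annotated and FieldMetadata. FieldMetadata is SDK-internal, so this sketch substitutes pydantic v2's own Annotated alias support to show the general idea.

import typing

import pydantic
import typing_extensions


class Example(pydantic.BaseModel):
    model_config = pydantic.ConfigDict(populate_by_name=True)

    # Python attribute `s3endpoint`, wire name `s3_endpoint`: the alias
    # rides on the annotation instead of the field default.
    s3endpoint: typing_extensions.Annotated[
        typing.Optional[str], pydantic.Field(alias="s3_endpoint")
    ] = None


parsed = Example.model_validate({"s3_endpoint": "https://minio.local"})
assert parsed.s3endpoint == "https://minio.local"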
-import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +import typing_extensions +from ....core.serialization import FieldMetadata +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class S3CreateResponse(pydantic_v1.BaseModel): - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) +class S3CreateResponse(UniversalBaseModel): + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. """ - presign: typing.Optional[bool] = pydantic_v1.Field(default=None) + presign: typing.Optional[bool] = pydantic.Field(default=None) """ Presign URLs for download """ - presign_ttl: typing.Optional[int] = pydantic_v1.Field(default=None) + presign_ttl: typing.Optional[int] = pydantic.Field(default=None) """ Presign TTL in minutes """ - recursive_scan: typing.Optional[bool] = pydantic_v1.Field(default=None) + recursive_scan: typing.Optional[bool] = pydantic.Field(default=None) """ Scan recursively """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - bucket: typing.Optional[str] = pydantic_v1.Field(default=None) + bucket: typing.Optional[str] = pydantic.Field(default=None) """ S3 bucket name """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ S3 bucket prefix """ - aws_access_key_id: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_access_key_id: typing.Optional[str] = pydantic.Field(default=None) """ AWS_ACCESS_KEY_ID """ - aws_secret_access_key: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_secret_access_key: typing.Optional[str] = pydantic.Field(default=None) """ AWS_SECRET_ACCESS_KEY """ - aws_session_token: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_session_token: typing.Optional[str] = pydantic.Field(default=None) """ AWS_SESSION_TOKEN """ - aws_sse_kms_key_id: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_sse_kms_key_id: typing.Optional[str] = pydantic.Field(default=None) """ AWS SSE KMS Key ID """ - region_name: typing.Optional[str] = pydantic_v1.Field(default=None) + region_name: typing.Optional[str] = pydantic.Field(default=None) """ AWS Region """ - s3endpoint: typing.Optional[str] = pydantic_v1.Field(alias="s3_endpoint", default=None) + s3endpoint: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="s3_endpoint")] = pydantic.Field( + default=None + ) """ S3 Endpoint """ - def json(self, **kwargs: typing.Any) -> str: - 
kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/import_storage/s3/types/s3update_response.py b/src/label_studio_sdk/import_storage/s3/types/s3update_response.py index ee47545ef..3ed56596d 100644 --- a/src/label_studio_sdk/import_storage/s3/types/s3update_response.py +++ b/src/label_studio_sdk/import_storage/s3/types/s3update_response.py @@ -1,109 +1,101 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +import typing_extensions +from ....core.serialization import FieldMetadata +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class S3UpdateResponse(pydantic_v1.BaseModel): - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) +class S3UpdateResponse(UniversalBaseModel): + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
""" - presign: typing.Optional[bool] = pydantic_v1.Field(default=None) + presign: typing.Optional[bool] = pydantic.Field(default=None) """ Presign URLs for download """ - presign_ttl: typing.Optional[int] = pydantic_v1.Field(default=None) + presign_ttl: typing.Optional[int] = pydantic.Field(default=None) """ Presign TTL in minutes """ - recursive_scan: typing.Optional[bool] = pydantic_v1.Field(default=None) + recursive_scan: typing.Optional[bool] = pydantic.Field(default=None) """ Scan recursively """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Storage description """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - bucket: typing.Optional[str] = pydantic_v1.Field(default=None) + bucket: typing.Optional[str] = pydantic.Field(default=None) """ S3 bucket name """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ S3 bucket prefix """ - aws_access_key_id: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_access_key_id: typing.Optional[str] = pydantic.Field(default=None) """ AWS_ACCESS_KEY_ID """ - aws_secret_access_key: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_secret_access_key: typing.Optional[str] = pydantic.Field(default=None) """ AWS_SECRET_ACCESS_KEY """ - aws_session_token: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_session_token: typing.Optional[str] = pydantic.Field(default=None) """ AWS_SESSION_TOKEN """ - aws_sse_kms_key_id: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_sse_kms_key_id: typing.Optional[str] = pydantic.Field(default=None) """ AWS SSE KMS Key ID """ - region_name: typing.Optional[str] = pydantic_v1.Field(default=None) + region_name: typing.Optional[str] = pydantic.Field(default=None) """ AWS Region """ - s3endpoint: typing.Optional[str] = pydantic_v1.Field(alias="s3_endpoint", default=None) + s3endpoint: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="s3_endpoint")] = pydantic.Field( + default=None + ) """ S3 Endpoint """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/import_storage/s3s/client.py 
b/src/label_studio_sdk/import_storage/s3s/client.py index 80c617a70..eca17a326 100644 --- a/src/label_studio_sdk/import_storage/s3s/client.py +++ b/src/label_studio_sdk/import_storage/s3s/client.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. import typing +from ...core.client_wrapper import SyncClientWrapper +from ...core.request_options import RequestOptions +from ...types.s3s_import_storage import S3SImportStorage +from ...core.pydantic_utilities import parse_obj_as from json.decoder import JSONDecodeError - from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 -from ...core.request_options import RequestOptions -from ...types.s3s_import_storage import S3SImportStorage +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -22,6 +22,7 @@ def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[S3SImportStorage]: """ + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 import (source) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). @@ -43,7 +44,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -51,11 +52,22 @@ def list( client.import_storage.s3s.list() """ _response = self._client_wrapper.httpx_client.request( - "api/storages/s3s", method="GET", params={"project": project}, request_options=request_options + "api/storages/s3s", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[S3SImportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[S3SImportStorage], + parse_obj_as( + type_=typing.List[S3SImportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -81,6 +93,7 @@ def create( request_options: typing.Optional[RequestOptions] = None, ) -> S3SImportStorage: """ + Create a new source storage connection to an S3 bucket. For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation.
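Note that, unlike the plain s3 client, these s3s endpoints return S3SImportStorage directly from create() and update() instead of dedicated response models. A sketch of chaining that into a sync, assuming the returned record exposes an id attribute and that create() mirrors the s3 keyword arguments:

from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# create() hands back the S3SImportStorage record itself, so its ID
# can feed straight into sync().
storage = client.import_storage.s3s.create(
    project=1,  # assumed project ID
    bucket="my-label-data",
    regex_filter=r".*\.json",
)
synced = client.import_storage.s3s.sync(id=storage.id)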
@@ -143,7 +156,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -169,12 +182,21 @@ def create( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json()) # type: ignore + return typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -182,6 +204,7 @@ def create( def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: """ + Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Parameters @@ -199,7 +222,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -209,11 +232,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/s3s/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json()) # type: ignore + return typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -221,6 +252,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. 
@@ -241,7 +273,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -251,7 +283,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/s3s/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -282,6 +316,7 @@ def update( request_options: typing.Optional[RequestOptions] = None, ) -> S3SImportStorage: """ + Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -343,7 +378,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -371,12 +406,21 @@ def update( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json()) # type: ignore + return typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -402,6 +446,7 @@ def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. Parameters @@ -457,7 +502,7 @@ def validate( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -483,6 +528,9 @@ def validate( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -496,6 +544,7 @@ def validate( def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: """ + Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). 
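Since `validate` accepts the same connection fields as `create` and returns `None` on success, it can gate the actual creation. A sketch of validate-then-sync; the `status` field is assumed from the import-storage status enum in the API definition (`in_progress`, `failed`, `completed`):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Raises an ApiError if Label Studio rejects the configuration.
client.import_storage.s3s.validate(
    region_name="us-east-1",  # hypothetical; mirrors the create() fields
)

# Trigger a sync on connection 1 and inspect the returned connection state.
storage = client.import_storage.s3s.sync(id=1)
print(storage.status)  # field name assumed from the API definition
```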
Parameters @@ -513,7 +562,7 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -523,11 +572,19 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No ) """ _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/s3s/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json()) # type: ignore + return typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -542,6 +599,7 @@ async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[S3SImportStorage]: """ + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 import (source) storage connections for a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). @@ -563,19 +621,38 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.s3s.list() + + + async def main() -> None: + await client.import_storage.s3s.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/storages/s3s", method="GET", params={"project": project}, request_options=request_options + "api/storages/s3s", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[S3SImportStorage], _response.json()) # type: ignore + return typing.cast( + typing.List[S3SImportStorage], + parse_obj_as( + type_=typing.List[S3SImportStorage], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -601,6 +678,7 @@ async def create( request_options: typing.Optional[RequestOptions] = None, ) -> S3SImportStorage: """ + Create a new source storage connection to an S3 bucket. For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation.
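The async docstrings no longer show a bare top-level `await`, which was never valid outside an event loop; they now wrap every call in a `main()` coroutine driven by `asyncio.run()`. One practical payoff of the async client, sketched below with the same placeholder credentials, is issuing independent requests concurrently (both endpoints appear in this diff):

```python
import asyncio

from label_studio_sdk import AsyncLabelStudio

client = AsyncLabelStudio(api_key="YOUR_API_KEY")


async def main() -> None:
    # Run two independent listings concurrently on the shared httpx client.
    s3_storages, ml_backends = await asyncio.gather(
        client.import_storage.s3s.list(project=1),
        client.ml.list(project=1),
    )
    print(len(s3_storages), len(ml_backends))


asyncio.run(main())
```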
@@ -663,12 +741,20 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.s3s.create() + + + async def main() -> None: + await client.import_storage.s3s.create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/s3s", @@ -689,12 +775,21 @@ async def create( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json()) # type: ignore + return typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -702,6 +797,7 @@ async def create( async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: """ + Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Parameters @@ -719,21 +815,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.s3s.get( - id=1, - ) + + + async def main() -> None: + await client.import_storage.s3s.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/storages/s3s/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json()) # type: ignore + return typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -741,6 +853,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. 
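Every method in these clients threads a `request_options` parameter through to the httpx call, which allows per-request overrides without reconfiguring the client. A sketch under stated assumptions: the keys below follow Fern's usual `RequestOptions` TypedDict and are not shown anywhere in this diff:

```python
from label_studio_sdk import LabelStudio
from label_studio_sdk.core.request_options import RequestOptions

client = LabelStudio(api_key="YOUR_API_KEY")

# Per-call overrides; timeout_in_seconds and max_retries are assumptions
# based on Fern's conventional RequestOptions shape.
options: RequestOptions = {
    "timeout_in_seconds": 120,
    "max_retries": 2,
}

client.import_storage.s3s.sync(id=1, request_options=options)
```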
@@ -761,17 +874,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.s3s.delete( - id=1, - ) + + + async def main() -> None: + await client.import_storage.s3s.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/storages/s3s/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -802,6 +925,7 @@ async def update( request_options: typing.Optional[RequestOptions] = None, ) -> S3SImportStorage: """ + Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). @@ -863,14 +987,22 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.s3s.update( - id=1, - ) + + + async def main() -> None: + await client.import_storage.s3s.update( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/storages/s3s/{jsonable_encoder(id)}", @@ -891,12 +1023,21 @@ async def update( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json()) # type: ignore + return typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -922,6 +1063,7 @@ async def validate( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. Parameters @@ -977,12 +1119,20 @@ async def validate( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.s3s.validate() + + + async def main() -> None: + await client.import_storage.s3s.validate() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/storages/s3s/validate", @@ -1003,6 +1153,9 @@ async def validate( "region_name": region_name, "s3_endpoint": s3endpoint, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -1016,6 +1169,7 @@ async def validate( async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: """ + Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. 
You can find this using [List import storages](list). Parameters @@ -1033,21 +1187,37 @@ async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.import_storage.s3s.sync( - id=1, - ) + + + async def main() -> None: + await client.import_storage.s3s.sync( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + f"api/storages/s3s/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json()) # type: ignore + return typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/import_storage/types/import_storage_list_types_response_item.py b/src/label_studio_sdk/import_storage/types/import_storage_list_types_response_item.py index 89caadc22..21112358a 100644 --- a/src/label_studio_sdk/import_storage/types/import_storage_list_types_response_item.py +++ b/src/label_studio_sdk/import_storage/types/import_storage_list_types_response_item.py @@ -1,30 +1,20 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel import typing +from ...core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class ImportStorageListTypesResponseItem(pydantic_v1.BaseModel): +class ImportStorageListTypesResponseItem(UniversalBaseModel): name: typing.Optional[str] = None title: typing.Optional[str] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/ml/client.py b/src/label_studio_sdk/ml/client.py index f48afbecf..6f635314a 100644 --- a/src/label_studio_sdk/ml/client.py +++ b/src/label_studio_sdk/ml/client.py @@ -1,19 +1,19 @@ # This file was auto-generated by Fern from our API Definition. 
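The model files in this diff, such as `ImportStorageListTypesResponseItem` above, drop the hand-written `json()`/`dict()` overrides in favor of `UniversalBaseModel` plus a version branch. The same dispatch pattern can be sketched in isolation; here `IS_PYDANTIC_V2` is derived locally from `pydantic.VERSION` as an assumption, whereas the SDK imports it from `core.pydantic_utilities`:

```python
import typing

import pydantic

# Local stand-in for the flag the SDK imports from core.pydantic_utilities.
IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")


class ExampleItem(pydantic.BaseModel):
    name: typing.Optional[str] = None
    title: typing.Optional[str] = None

    if IS_PYDANTIC_V2:
        # Pydantic v2 reads configuration from model_config...
        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(
            extra="allow", frozen=True
        )  # type: ignore
    else:
        # ...while v1 still reads the inner Config class.
        class Config:
            frozen = True
            smart_union = True
            extra = pydantic.Extra.allow
```

Because only one branch executes at class creation, `pydantic.ConfigDict` is never touched on a v1 install, which is what makes the single code path work across both major versions.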
import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import pydantic_v1 +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions -from ..errors.internal_server_error import InternalServerError from ..types.ml_backend import MlBackend +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError from .types.ml_create_request_auth_method import MlCreateRequestAuthMethod from .types.ml_create_response import MlCreateResponse +from ..core.jsonable_encoder import jsonable_encoder from .types.ml_update_request_auth_method import MlUpdateRequestAuthMethod from .types.ml_update_response import MlUpdateResponse +from ..errors.internal_server_error import InternalServerError +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -27,8 +27,10 @@ def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[MlBackend]: """ + List all configured Machine Learning (ML) backends for a specific project by ID. For more information about ML backends, see [Machine learning integration](https://labelstud.io/guide/ml). + You will need to provide the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). Parameters @@ -46,7 +48,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -54,11 +56,22 @@ def list( client.ml.list() """ _response = self._client_wrapper.httpx_client.request( - "api/ml/", method="GET", params={"project": project}, request_options=request_options + "api/ml/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[MlBackend], _response.json()) # type: ignore + return typing.cast( + typing.List[MlBackend], + parse_obj_as( + type_=typing.List[MlBackend], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -75,11 +88,12 @@ def create( auth_method: typing.Optional[MlCreateRequestAuthMethod] = OMIT, basic_auth_user: typing.Optional[str] = OMIT, basic_auth_pass: typing.Optional[str] = OMIT, - extra_params: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + extra_params: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, timeout: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> MlCreateResponse: """ + Add an ML backend to a project. For more information about what you need to configure when adding an ML backend, see [Connect the model to Label Studio](https://labelstud.io/guide/ml#Connect-the-model-to-Label-Studio). If you are using Docker Compose, you may need to adjust your ML backend URL. See [localhost and Docker containers](https://labelstud.io/guide/ml#localhost-and-Docker-containers).
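The widened `extra_params` annotation (`Dict[str, Optional[Any]]`) makes explicit that `None` values are legal in the payload. A sketch of registering and listing backends; `url` and `title` are placeholder values, with the parameter names inferred from the response fields documented later in this diff:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Register a backend for project 1; None is now an allowed extra_params value.
backend = client.ml.create(
    url="http://localhost:9090",  # hypothetical backend address
    project=1,
    title="example-backend",
    extra_params={"model_version": "v2", "threshold": None},
)

for ml in client.ml.list(project=1):
    print(ml.id)  # MlBackend field name assumed
```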
@@ -112,7 +126,7 @@ def create( basic_auth_pass : typing.Optional[str] Basic auth password - extra_params : typing.Optional[typing.Dict[str, typing.Any]] + extra_params : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] Extra parameters timeout : typing.Optional[int] @@ -128,7 +142,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -150,12 +164,21 @@ def create( "extra_params": extra_params, "timeout": timeout, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MlCreateResponse, _response.json()) # type: ignore + return typing.cast( + MlCreateResponse, + parse_obj_as( + type_=MlCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -163,6 +186,7 @@ def create( def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> MlBackend: """ + Get details about a specific ML backend. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). For more information, see [Machine learning integration](https://labelstud.io/guide/ml). @@ -182,7 +206,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -192,11 +216,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/ml/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MlBackend, _response.json()) # type: ignore + return typing.cast( + MlBackend, + parse_obj_as( + type_=MlBackend, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -204,6 +236,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Remove an existing ML backend connection. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). For more information, see [Machine learning integration](https://labelstud.io/guide/ml). 
@@ -222,7 +255,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -232,7 +265,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/ml/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -254,11 +289,12 @@ def update( auth_method: typing.Optional[MlUpdateRequestAuthMethod] = OMIT, basic_auth_user: typing.Optional[str] = OMIT, basic_auth_pass: typing.Optional[str] = OMIT, - extra_params: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + extra_params: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, timeout: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> MlUpdateResponse: """ + Update the ML backend parameters. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). For more information, see [Machine learning integration](https://labelstud.io/guide/ml). @@ -292,7 +328,7 @@ def update( basic_auth_pass : typing.Optional[str] Basic auth password - extra_params : typing.Optional[typing.Dict[str, typing.Any]] + extra_params : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] Extra parameters timeout : typing.Optional[int] @@ -308,7 +344,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -332,12 +368,21 @@ def update( "extra_params": extra_params, "timeout": timeout, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MlUpdateResponse, _response.json()) # type: ignore + return typing.cast( + MlUpdateResponse, + parse_obj_as( + type_=MlUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -348,10 +393,11 @@ def predict_interactive( id: int, *, task: int, - context: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + context: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Enable interactive pre-annotations for a specific task. ML-assisted labeling with interactive pre-annotations works with image segmentation and object detection tasks using rectangles, ellipses, polygons, brush masks, and keypoints, as well as with HTML and text named entity recognition tasks. Your ML backend must support the type of labeling that you’re performing, recognize the input that you create, and be able to respond with the relevant output for a prediction. For more information, see [Interactive pre-annotations](https://labelstud.io/guide/ml.html#Interactive-pre-annotations). 
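The `context` payload for interactive pre-annotations is free-form (`Dict[str, Optional[Any]]`), so its keys are defined by your ML backend. A minimal sketch; the region-like keys below are purely illustrative:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Ask backend 1 for an interactive pre-annotation of task 1. The context keys
# are hypothetical; use whatever shape your backend expects.
client.ml.predict_interactive(
    id=1,
    task=1,
    context={"x": 10, "y": 20, "width": 100, "height": 50},
)
```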
@@ -368,7 +414,7 @@ def predict_interactive( task : int ID of task to annotate - context : typing.Optional[typing.Dict[str, typing.Any]] + context : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] Context for ML model request_options : typing.Optional[RequestOptions] @@ -380,7 +426,7 @@ def predict_interactive( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -393,7 +439,13 @@ def predict_interactive( _response = self._client_wrapper.httpx_client.request( f"api/ml/{jsonable_encoder(id)}/interactive-annotating", method="POST", - json={"task": task, "context": context}, + json={ + "task": task, + "context": context, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -413,6 +465,7 @@ def train( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + After you connect a model to Label Studio as a machine learning backend and annotate at least one task, you can start training the model. Training logs appear in stdout and the console. For more information, see [Model training](https://labelstud.io/guide/ml.html#Model-training). @@ -436,7 +489,7 @@ def train( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -448,7 +501,12 @@ def train( _response = self._client_wrapper.httpx_client.request( f"api/ml/{jsonable_encoder(id)}/train", method="POST", - json={"use_ground_truth": use_ground_truth}, + json={ + "use_ground_truth": use_ground_truth, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -456,7 +514,15 @@ def train( if 200 <= _response.status_code < 300: return if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(str, _response.json())) # type: ignore + raise InternalServerError( + typing.cast( + str, + parse_obj_as( + type_=str, # type: ignore + object_=_response.json(), + ), + ) + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -464,6 +530,7 @@ def train( def list_model_versions(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Get available versions of the model. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). Parameters @@ -479,7 +546,7 @@ def list_model_versions(self, id: str, *, request_options: typing.Optional[Reque Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -489,7 +556,9 @@ def list_model_versions(self, id: str, *, request_options: typing.Optional[Reque ) """ _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/versions", method="GET", request_options=request_options + f"api/ml/{jsonable_encoder(id)}/versions", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -508,8 +577,10 @@ async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[MlBackend]: """ + List all configured Machine Learning (ML) backends for a specific project by ID. 
For more information about ML backends, see [Machine learning integration](https://labelstud.io/guide/ml). + You will need to provide the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). Parameters @@ -527,19 +598,38 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.ml.list() + + + async def main() -> None: + await client.ml.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/ml/", method="GET", params={"project": project}, request_options=request_options + "api/ml/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[MlBackend], _response.json()) # type: ignore + return typing.cast( + typing.List[MlBackend], + parse_obj_as( + type_=typing.List[MlBackend], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -556,11 +646,12 @@ async def create( auth_method: typing.Optional[MlCreateRequestAuthMethod] = OMIT, basic_auth_user: typing.Optional[str] = OMIT, basic_auth_pass: typing.Optional[str] = OMIT, - extra_params: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + extra_params: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, timeout: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> MlCreateResponse: """ + Add an ML backend to a project. For more information about what you need to configure when adding an ML backend, see [Connect the model to Label Studio](https://labelstud.io/guide/ml#Connect-the-model-to-Label-Studio). If you are using Docker Compose, you may need to adjust your ML backend URL. See [localhost and Docker containers](https://labelstud.io/guide/ml#localhost-and-Docker-containers).
@@ -593,7 +684,7 @@ async def create( basic_auth_pass : typing.Optional[str] Basic auth password - extra_params : typing.Optional[typing.Dict[str, typing.Any]] + extra_params : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] Extra parameters timeout : typing.Optional[int] @@ -609,12 +700,20 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.ml.create() + + + async def main() -> None: + await client.ml.create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/ml/", @@ -631,12 +730,21 @@ async def create( "extra_params": extra_params, "timeout": timeout, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MlCreateResponse, _response.json()) # type: ignore + return typing.cast( + MlCreateResponse, + parse_obj_as( + type_=MlCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -644,6 +752,7 @@ async def create( async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> MlBackend: """ + Get details about a specific ML backend. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). For more information, see [Machine learning integration](https://labelstud.io/guide/ml). @@ -663,21 +772,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.ml.get( - id=1, - ) + + + async def main() -> None: + await client.ml.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/ml/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MlBackend, _response.json()) # type: ignore + return typing.cast( + MlBackend, + parse_obj_as( + type_=MlBackend, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -685,6 +810,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Remove an existing ML backend connection. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). For more information, see [Machine learning integration](https://labelstud.io/guide/ml). 
@@ -703,17 +829,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.ml.delete( - id=1, - ) + + + async def main() -> None: + await client.ml.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/ml/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -735,11 +871,12 @@ async def update( auth_method: typing.Optional[MlUpdateRequestAuthMethod] = OMIT, basic_auth_user: typing.Optional[str] = OMIT, basic_auth_pass: typing.Optional[str] = OMIT, - extra_params: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + extra_params: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, timeout: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> MlUpdateResponse: """ + Update the ML backend parameters. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). For more information, see [Machine learning integration](https://labelstud.io/guide/ml). @@ -773,7 +910,7 @@ async def update( basic_auth_pass : typing.Optional[str] Basic auth password - extra_params : typing.Optional[typing.Dict[str, typing.Any]] + extra_params : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] Extra parameters timeout : typing.Optional[int] @@ -789,14 +926,22 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.ml.update( - id=1, - ) + + + async def main() -> None: + await client.ml.update( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/ml/{jsonable_encoder(id)}", @@ -813,12 +958,21 @@ async def update( "extra_params": extra_params, "timeout": timeout, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MlUpdateResponse, _response.json()) # type: ignore + return typing.cast( + MlUpdateResponse, + parse_obj_as( + type_=MlUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -829,10 +983,11 @@ async def predict_interactive( id: int, *, task: int, - context: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + context: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Enable interactive pre-annotations for a specific task. ML-assisted labeling with interactive pre-annotations works with image segmentation and object detection tasks using rectangles, ellipses, polygons, brush masks, and keypoints, as well as with HTML and text named entity recognition tasks. Your ML backend must support the type of labeling that you’re performing, recognize the input that you create, and be able to respond with the relevant output for a prediction. 
For more information, see [Interactive pre-annotations](https://labelstud.io/guide/ml.html#Interactive-pre-annotations). @@ -849,7 +1004,7 @@ async def predict_interactive( task : int ID of task to annotate - context : typing.Optional[typing.Dict[str, typing.Any]] + context : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] Context for ML model request_options : typing.Optional[RequestOptions] @@ -861,20 +1016,34 @@ async def predict_interactive( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.ml.predict_interactive( - id=1, - task=1, - ) + + + async def main() -> None: + await client.ml.predict_interactive( + id=1, + task=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/ml/{jsonable_encoder(id)}/interactive-annotating", method="POST", - json={"task": task, "context": context}, + json={ + "task": task, + "context": context, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -894,6 +1063,7 @@ async def train( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + After you connect a model to Label Studio as a machine learning backend and annotate at least one task, you can start training the model. Training logs appear in stdout and the console. For more information, see [Model training](https://labelstud.io/guide/ml.html#Model-training). @@ -917,19 +1087,32 @@ async def train( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.ml.train( - id=1, - ) + + + async def main() -> None: + await client.ml.train( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/ml/{jsonable_encoder(id)}/train", method="POST", - json={"use_ground_truth": use_ground_truth}, + json={ + "use_ground_truth": use_ground_truth, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -937,7 +1120,15 @@ async def train( if 200 <= _response.status_code < 300: return if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(str, _response.json())) # type: ignore + raise InternalServerError( + typing.cast( + str, + parse_obj_as( + type_=str, # type: ignore + object_=_response.json(), + ), + ) + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -945,6 +1136,7 @@ async def train( async def list_model_versions(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Get available versions of the model. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). 
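`train` is the one endpoint in this file that maps a 500 response onto a typed `InternalServerError`, now parsed with `parse_obj_as` instead of `pydantic_v1.parse_obj_as`. A sketch of handling it, assuming `InternalServerError` subclasses `ApiError` as Fern errors typically do:

```python
from label_studio_sdk import LabelStudio
from label_studio_sdk.core.api_error import ApiError
from label_studio_sdk.errors.internal_server_error import InternalServerError

client = LabelStudio(api_key="YOUR_API_KEY")

try:
    # Start training on backend 1 using ground-truth annotations.
    client.ml.train(id=1, use_ground_truth=True)
except InternalServerError as exc:
    # The "Predicting error" string from the API definition arrives here.
    print("training failed:", exc)
except ApiError as exc:
    print("unexpected response:", exc.status_code)
```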
Parameters @@ -960,17 +1152,27 @@ async def list_model_versions(self, id: str, *, request_options: typing.Optional Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.ml.list_model_versions( - id="id", - ) + + + async def main() -> None: + await client.ml.list_model_versions( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/versions", method="GET", request_options=request_options + f"api/ml/{jsonable_encoder(id)}/versions", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: diff --git a/src/label_studio_sdk/ml/types/ml_create_response.py b/src/label_studio_sdk/ml/types/ml_create_response.py index 7bfc34bf7..030fa3d3c 100644 --- a/src/label_studio_sdk/ml/types/ml_create_response.py +++ b/src/label_studio_sdk/ml/types/ml_create_response.py @@ -1,78 +1,68 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel import typing - -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic from .ml_create_response_auth_method import MlCreateResponseAuthMethod +from ...core.pydantic_utilities import IS_PYDANTIC_V2 -class MlCreateResponse(pydantic_v1.BaseModel): - url: typing.Optional[str] = pydantic_v1.Field(default=None) +class MlCreateResponse(UniversalBaseModel): + url: typing.Optional[str] = pydantic.Field(default=None) """ ML backend URL """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - is_interactive: typing.Optional[bool] = pydantic_v1.Field(default=None) + is_interactive: typing.Optional[bool] = pydantic.Field(default=None) """ Is interactive """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Description """ - auth_method: typing.Optional[MlCreateResponseAuthMethod] = pydantic_v1.Field(default=None) + auth_method: typing.Optional[MlCreateResponseAuthMethod] = pydantic.Field(default=None) """ Auth method """ - basic_auth_user: typing.Optional[str] = pydantic_v1.Field(default=None) + basic_auth_user: typing.Optional[str] = pydantic.Field(default=None) """ Basic auth user """ - basic_auth_pass: typing.Optional[str] = pydantic_v1.Field(default=None) + basic_auth_pass: typing.Optional[str] = pydantic.Field(default=None) """ Basic auth password """ - extra_params: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + extra_params: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Extra parameters """ - timeout: typing.Optional[int] = pydantic_v1.Field(default=None) + timeout: typing.Optional[int] = pydantic.Field(default=None) """ Response model timeout """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - 
kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/ml/types/ml_update_response.py b/src/label_studio_sdk/ml/types/ml_update_response.py index 058799537..f23e5dadd 100644 --- a/src/label_studio_sdk/ml/types/ml_update_response.py +++ b/src/label_studio_sdk/ml/types/ml_update_response.py @@ -1,78 +1,68 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel import typing - -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic from .ml_update_response_auth_method import MlUpdateResponseAuthMethod +from ...core.pydantic_utilities import IS_PYDANTIC_V2 -class MlUpdateResponse(pydantic_v1.BaseModel): - url: typing.Optional[str] = pydantic_v1.Field(default=None) +class MlUpdateResponse(UniversalBaseModel): + url: typing.Optional[str] = pydantic.Field(default=None) """ ML backend URL """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID """ - is_interactive: typing.Optional[bool] = pydantic_v1.Field(default=None) + is_interactive: typing.Optional[bool] = pydantic.Field(default=None) """ Is interactive """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Description """ - auth_method: typing.Optional[MlUpdateResponseAuthMethod] = pydantic_v1.Field(default=None) + auth_method: typing.Optional[MlUpdateResponseAuthMethod] = pydantic.Field(default=None) """ Auth method """ - basic_auth_user: typing.Optional[str] = pydantic_v1.Field(default=None) + basic_auth_user: typing.Optional[str] = pydantic.Field(default=None) """ Basic auth user """ - basic_auth_pass: typing.Optional[str] = pydantic_v1.Field(default=None) + basic_auth_pass: typing.Optional[str] = pydantic.Field(default=None) """ Basic auth password """ - extra_params: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + extra_params: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Extra parameters """ - timeout: typing.Optional[int] = pydantic_v1.Field(default=None) + timeout: typing.Optional[int] = pydantic.Field(default=None) """ Response model timeout """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = 
{"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/model_providers/client.py b/src/label_studio_sdk/model_providers/client.py index 838b777d7..92910f2e5 100644 --- a/src/label_studio_sdk/model_providers/client.py +++ b/src/label_studio_sdk/model_providers/client.py @@ -1,20 +1,21 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import pydantic_v1 +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions from ..types.model_provider_connection import ModelProviderConnection -from ..types.model_provider_connection_budget_reset_period import ModelProviderConnectionBudgetResetPeriod -from ..types.model_provider_connection_created_by import ModelProviderConnectionCreatedBy -from ..types.model_provider_connection_organization import ModelProviderConnectionOrganization +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError from ..types.model_provider_connection_provider import ModelProviderConnectionProvider from ..types.model_provider_connection_scope import ModelProviderConnectionScope +from ..types.model_provider_connection_organization import ModelProviderConnectionOrganization +from ..types.model_provider_connection_created_by import ModelProviderConnectionCreatedBy +import datetime as dt +from ..types.model_provider_connection_budget_reset_period import ModelProviderConnectionBudgetResetPeriod +from ..core.serialization import convert_and_respect_annotation_metadata +from ..core.jsonable_encoder import jsonable_encoder +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -40,7 +41,7 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -48,11 +49,19 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty client.model_providers.list() """ _response = self._client_wrapper.httpx_client.request( - "api/model-provider-connections/", method="GET", request_options=request_options + "api/model-provider-connections/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[ModelProviderConnection], _response.json()) # type: ignore + return typing.cast( + typing.List[ModelProviderConnection], + parse_obj_as( + type_=typing.List[ModelProviderConnection], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -129,7 +138,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -147,8 +156,12 @@ def create( "deployment_name": deployment_name, "endpoint": endpoint, "scope": scope, - "organization": organization, - "created_by": created_by, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" + ), + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=ModelProviderConnectionCreatedBy, direction="write" + ), "created_at": created_at, "updated_at": updated_at, "is_internal": is_internal, @@ -163,7 +176,13 @@ def create( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ModelProviderConnection, _response.json()) # type: ignore + return typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -188,7 +207,7 @@ def get(self, pk: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -198,11 +217,19 @@ def get(self, pk: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", method="GET", request_options=request_options + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ModelProviderConnection, _response.json()) # type: ignore + return typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -226,7 +253,7 @@ def delete(self, pk: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( 
api_key="YOUR_API_KEY", @@ -236,7 +263,9 @@ def delete(self, pk: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", method="DELETE", request_options=request_options + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -321,7 +350,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -340,8 +369,12 @@ def update( "deployment_name": deployment_name, "endpoint": endpoint, "scope": scope, - "organization": organization, - "created_by": created_by, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" + ), + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=ModelProviderConnectionCreatedBy, direction="write" + ), "created_at": created_at, "updated_at": updated_at, "is_internal": is_internal, @@ -356,7 +389,13 @@ def update( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ModelProviderConnection, _response.json()) # type: ignore + return typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -385,19 +424,35 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.model_providers.list() + + + async def main() -> None: + await client.model_providers.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/model-provider-connections/", method="GET", request_options=request_options + "api/model-provider-connections/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[ModelProviderConnection], _response.json()) # type: ignore + return typing.cast( + typing.List[ModelProviderConnection], + parse_obj_as( + type_=typing.List[ModelProviderConnection], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -474,14 +529,22 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.model_providers.create( - provider="OpenAI", - ) + + + async def main() -> None: + await client.model_providers.create( + provider="OpenAI", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/model-provider-connections/", @@ -492,8 +555,12 @@ async def create( "deployment_name": deployment_name, "endpoint": endpoint, "scope": scope, - "organization": organization, - "created_by": created_by, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" + ), + 
"created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=ModelProviderConnectionCreatedBy, direction="write" + ), "created_at": created_at, "updated_at": updated_at, "is_internal": is_internal, @@ -508,7 +575,13 @@ async def create( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ModelProviderConnection, _response.json()) # type: ignore + return typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -533,21 +606,37 @@ async def get(self, pk: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.model_providers.get( - pk=1, - ) + + + async def main() -> None: + await client.model_providers.get( + pk=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", method="GET", request_options=request_options + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ModelProviderConnection, _response.json()) # type: ignore + return typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -571,17 +660,27 @@ async def delete(self, pk: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.model_providers.delete( - pk=1, - ) + + + async def main() -> None: + await client.model_providers.delete( + pk=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", method="DELETE", request_options=request_options + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -666,15 +765,23 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.model_providers.update( - pk=1, - provider="OpenAI", - ) + + + async def main() -> None: + await client.model_providers.update( + pk=1, + provider="OpenAI", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/model-provider-connections/{jsonable_encoder(pk)}", @@ -685,8 +792,12 @@ async def update( "deployment_name": deployment_name, "endpoint": endpoint, "scope": scope, - "organization": organization, - "created_by": created_by, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" + ), + "created_by": 
convert_and_respect_annotation_metadata( + object_=created_by, annotation=ModelProviderConnectionCreatedBy, direction="write" + ), "created_at": created_at, "updated_at": updated_at, "is_internal": is_internal, @@ -701,7 +812,13 @@ async def update( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ModelProviderConnection, _response.json()) # type: ignore + return typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/predictions/client.py b/src/label_studio_sdk/predictions/client.py index 5ce13b391..f1f84734d 100644 --- a/src/label_studio_sdk/predictions/client.py +++ b/src/label_studio_sdk/predictions/client.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. import typing +from ..core.client_wrapper import SyncClientWrapper +from ..core.request_options import RequestOptions +from ..types.prediction import Prediction +from ..core.pydantic_utilities import parse_obj_as from json.decoder import JSONDecodeError - from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import pydantic_v1 -from ..core.request_options import RequestOptions -from ..types.prediction import Prediction +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -26,6 +26,7 @@ def list( request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[Prediction]: """ + Get a list of all predictions. You can optionally filter these by task or by project. If you want to filter, you will need the project ID and/or task ID. Both of these can be found in the Label Studio URL when viewing a task, or you can use [List all projects](../projects/list) and [Get tasks list](../tasks/list). The terms "predictions" and "pre-annotations" are used interchangeably.
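The new `list` docstring above describes filtering by task or by project. A minimal usage sketch against the updated top-level import path (the IDs are placeholders, not values from this diff):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# No filters: every prediction visible to this account.
all_predictions = client.predictions.list()

# Narrow by project and/or task; both map to query params on GET api/predictions/.
by_project = client.predictions.list(project=1)  # placeholder project ID
by_task = client.predictions.list(task=42)       # placeholder task ID
```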
@@ -52,7 +53,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -60,11 +61,23 @@ def list( client.predictions.list() """ _response = self._client_wrapper.httpx_client.request( - "api/predictions/", method="GET", params={"task": task, "project": project}, request_options=request_options + "api/predictions/", + method="GET", + params={ + "task": task, + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Prediction], _response.json()) # type: ignore + return typing.cast( + typing.List[Prediction], + parse_obj_as( + type_=typing.List[Prediction], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -74,22 +87,21 @@ def create( self, *, task: typing.Optional[int] = OMIT, - result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] = OMIT, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, score: typing.Optional[float] = OMIT, model_version: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> Prediction: """ + If you have predictions generated for your dataset from a model, either as pre-annotated tasks or pre-labeled tasks, you can import the predictions with your dataset into Label Studio for review and correction. To import predicted labels into Label Studio, you must use the [Basic Label Studio JSON format](https://labelstud.io/guide/tasks#Basic-Label-Studio-JSON-format) and set up your tasks with the predictions JSON key. The Label Studio ML backend also outputs tasks in this format. #### JSON format for predictions - Label Studio JSON format for pre-annotations must contain two sections: - - - A data object which references the source of the data that the pre-annotations apply to. This can be a URL to an audio file, a pre-signed cloud storage link to an image, plain text, a reference to a CSV file stored in Label Studio, or something else. - - A predictions array that contains the pre-annotation results for the different types of labeling. See how to add results to the predictions array. + * A data object which references the source of the data that the pre-annotations apply to. This can be a URL to an audio file, a pre-signed cloud storage link to an image, plain text, a reference to a CSV file stored in Label Studio, or something else. + * A predictions array that contains the pre-annotation results for the different types of labeling. See how to add results to the predictions array. For more information, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations) @@ -98,7 +110,7 @@ def create( task : typing.Optional[int] Task ID for which the prediction is created - result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] Prediction result in JSON format. 
Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) score : typing.Optional[float] @@ -117,7 +129,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -148,13 +160,27 @@ def create( _response = self._client_wrapper.httpx_client.request( "api/predictions/", method="POST", - json={"task": task, "result": result, "score": score, "model_version": model_version}, + json={ + "task": task, + "result": result, + "score": score, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prediction, _response.json()) # type: ignore + return typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -162,6 +188,7 @@ def create( def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Prediction: """ + Get details about a specific prediction by its ID. To find the prediction ID, use [List predictions](list). For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). @@ -181,7 +208,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -191,11 +218,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", method="GET", request_options=request_options + f"api/predictions/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prediction, _response.json()) # type: ignore + return typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -203,6 +238,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a prediction. To find the prediction ID, use [List predictions](list). 
Parameters @@ -219,7 +255,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -229,7 +265,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options + f"api/predictions/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -244,12 +282,13 @@ def update( id: int, *, task: typing.Optional[int] = OMIT, - result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] = OMIT, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, score: typing.Optional[float] = OMIT, model_version: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> Prediction: """ + Update a prediction. To find the prediction ID, use [List predictions](list). For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). @@ -262,7 +301,7 @@ def update( task : typing.Optional[int] Task ID for which the prediction is created - result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] Prediction result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) score : typing.Optional[float] @@ -281,7 +320,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -313,13 +352,27 @@ def update( _response = self._client_wrapper.httpx_client.request( f"api/predictions/{jsonable_encoder(id)}/", method="PATCH", - json={"task": task, "result": result, "score": score, "model_version": model_version}, + json={ + "task": task, + "result": result, + "score": score, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prediction, _response.json()) # type: ignore + return typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -338,6 +391,7 @@ async def list( request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[Prediction]: """ + Get a list of all predictions. You can optionally filter these by task or by project. If you want to filter, you will need the project ID and/or task ID. Both of these can be found in the Label Studio URL when viewing a task, or you can use [List all projects](../projects/list) and [Get tasks list](../tasks/list). The terms "predictions" and pre-annotations" are used interchangeably. 
@@ -364,19 +418,39 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.predictions.list() + + + async def main() -> None: + await client.predictions.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/predictions/", method="GET", params={"task": task, "project": project}, request_options=request_options + "api/predictions/", + method="GET", + params={ + "task": task, + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Prediction], _response.json()) # type: ignore + return typing.cast( + typing.List[Prediction], + parse_obj_as( + type_=typing.List[Prediction], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -386,22 +460,21 @@ async def create( self, *, task: typing.Optional[int] = OMIT, - result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] = OMIT, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, score: typing.Optional[float] = OMIT, model_version: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> Prediction: """ + If you have predictions generated for your dataset from a model, either as pre-annotated tasks or pre-labeled tasks, you can import the predictions with your dataset into Label Studio for review and correction. To import predicted labels into Label Studio, you must use the [Basic Label Studio JSON format](https://labelstud.io/guide/tasks#Basic-Label-Studio-JSON-format) and set up your tasks with the predictions JSON key. The Label Studio ML backend also outputs tasks in this format. #### JSON format for predictions - Label Studio JSON format for pre-annotations must contain two sections: - - - A data object which references the source of the data that the pre-annotations apply to. This can be a URL to an audio file, a pre-signed cloud storage link to an image, plain text, a reference to a CSV file stored in Label Studio, or something else. - - A predictions array that contains the pre-annotation results for the different types of labeling. See how to add results to the predictions array. + * A data object which references the source of the data that the pre-annotations apply to. This can be a URL to an audio file, a pre-signed cloud storage link to an image, plain text, a reference to a CSV file stored in Label Studio, or something else. + * A predictions array that contains the pre-annotation results for the different types of labeling. See how to add results to the predictions array. For more information, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations) @@ -410,7 +483,7 @@ async def create( task : typing.Optional[int] Task ID for which the prediction is created - result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] Prediction result in JSON format. 
Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) score : typing.Optional[float] @@ -429,44 +502,66 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.predictions.create( - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - score=0.95, - model_version="yolo-v8", - ) + + + async def main() -> None: + await client.predictions.create( + result=[ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + score=0.95, + model_version="yolo-v8", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/predictions/", method="POST", - json={"task": task, "result": result, "score": score, "model_version": model_version}, + json={ + "task": task, + "result": result, + "score": score, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prediction, _response.json()) # type: ignore + return typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -474,6 +569,7 @@ async def create( async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Prediction: """ + Get details about a specific prediction by its ID. To find the prediction ID, use [List predictions](list). For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). 
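Since every `AsyncLabelStudio` example is now wrapped in `asyncio.run(main())`, it may be worth noting that the client's coroutines also compose with `asyncio.gather` for concurrent requests. A sketch assuming two existing prediction IDs:

```python
import asyncio

from label_studio_sdk import AsyncLabelStudio

client = AsyncLabelStudio(api_key="YOUR_API_KEY")


async def main() -> None:
    # Issue both GETs concurrently instead of awaiting them one by one.
    first, second = await asyncio.gather(
        client.predictions.get(id=1),  # placeholder IDs
        client.predictions.get(id=2),
    )
    print(first, second)


asyncio.run(main())
```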
@@ -493,21 +589,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.predictions.get( - id=1, - ) + + + async def main() -> None: + await client.predictions.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", method="GET", request_options=request_options + f"api/predictions/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prediction, _response.json()) # type: ignore + return typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -515,6 +627,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a prediction. To find the prediction ID, use [List predictions](list). Parameters @@ -531,17 +644,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.predictions.delete( - id=1, - ) + + + async def main() -> None: + await client.predictions.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options + f"api/predictions/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -556,12 +679,13 @@ async def update( id: int, *, task: typing.Optional[int] = OMIT, - result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] = OMIT, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, score: typing.Optional[float] = OMIT, model_version: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> Prediction: """ + Update a prediction. To find the prediction ID, use [List predictions](list). For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). @@ -574,7 +698,7 @@ async def update( task : typing.Optional[int] Task ID for which the prediction is created - result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] Prediction result in JSON format. 
Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) score : typing.Optional[float] @@ -593,45 +717,67 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.predictions.update( - id=1, - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - score=0.95, - model_version="yolo-v8", - ) + + + async def main() -> None: + await client.predictions.update( + id=1, + result=[ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + score=0.95, + model_version="yolo-v8", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/predictions/{jsonable_encoder(id)}/", method="PATCH", - json={"task": task, "result": result, "score": score, "model_version": model_version}, + json={ + "task": task, + "result": result, + "score": score, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prediction, _response.json()) # type: ignore + return typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/projects/client.py b/src/label_studio_sdk/projects/client.py index 5bb99bc20..7e14ea33b 100644 --- a/src/label_studio_sdk/projects/client.py +++ b/src/label_studio_sdk/projects/client.py @@ -1,22 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
import typing +from ..core.client_wrapper import SyncClientWrapper +from .exports.client import ExportsClient +from ..core.request_options import RequestOptions +from ..core.pagination import SyncPager +from ..types.project import Project +from .types.projects_list_response import ProjectsListResponse +from ..core.pydantic_utilities import parse_obj_as from json.decoder import JSONDecodeError - from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from .types.projects_create_response import ProjectsCreateResponse from ..core.jsonable_encoder import jsonable_encoder -from ..core.pagination import AsyncPager, SyncPager -from ..core.pydantic_utilities import pydantic_v1 -from ..core.request_options import RequestOptions +from .types.projects_update_response import ProjectsUpdateResponse +from .types.projects_import_tasks_response import ProjectsImportTasksResponse from ..errors.bad_request_error import BadRequestError -from ..types.project import Project from ..types.project_label_config import ProjectLabelConfig -from .exports.client import AsyncExportsClient, ExportsClient -from .types.projects_create_response import ProjectsCreateResponse -from .types.projects_import_tasks_response import ProjectsImportTasksResponse -from .types.projects_list_response import ProjectsListResponse -from .types.projects_update_response import ProjectsUpdateResponse +from ..core.client_wrapper import AsyncClientWrapper +from .exports.client import AsyncExportsClient +from ..core.pagination import AsyncPager # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -38,13 +40,13 @@ def list( request_options: typing.Optional[RequestOptions] = None, ) -> SyncPager[Project]: """ + Return a list of the projects within your organization. To perform most tasks with the Label Studio API, you must specify the project ID, sometimes referred to as the `pk`. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using this API call. To retrieve a list of your Label Studio projects, update the following command to match your own environment. 
Replace the domain name, port, and authorization token, then run the following from the command line: - ```bash curl -X GET https://localhost:8080/api/projects/ -H 'Authorization: Token abc123' ``` @@ -76,7 +78,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -88,16 +90,28 @@ def list( for page in response.iter_pages(): yield page """ - page = page or 1 + page = page if page is not None else 1 _response = self._client_wrapper.httpx_client.request( "api/projects/", method="GET", - params={"ordering": ordering, "ids": ids, "title": title, "page": page, "page_size": page_size}, + params={ + "ordering": ordering, + "ids": ids, + "title": title, + "page": page, + "page_size": page_size, + }, request_options=request_options, ) try: if 200 <= _response.status_code < 300: - _parsed_response = pydantic_v1.parse_obj_as(ProjectsListResponse, _response.json()) # type: ignore + _parsed_response = typing.cast( + ProjectsListResponse, + parse_obj_as( + type_=ProjectsListResponse, # type: ignore + object_=_response.json(), + ), + ) _has_next = True _get_next = lambda: self.list( ordering=ordering, @@ -129,17 +143,17 @@ def create( show_collab_predictions: typing.Optional[bool] = OMIT, maximum_annotations: typing.Optional[int] = OMIT, color: typing.Optional[str] = OMIT, - control_weights: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, workspace: typing.Optional[int] = OMIT, model_version: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> ProjectsCreateResponse: """ - Create a project and set up the labeling interface. For more information about setting up projects, see the following: - - [Create and configure projects](https://labelstud.io/guide/setup_project) - - [Configure labeling interface](https://labelstud.io/guide/setup) - - [Project settings](https://labelstud.io/guide/project_settings) + Create a project and set up the labeling interface. For more information about setting up projects, see the following: + * [Create and configure projects](https://labelstud.io/guide/setup_project) + * [Configure labeling interface](https://labelstud.io/guide/setup) + * [Project settings](https://labelstud.io/guide/project_settings) ```bash curl -H Content-Type:application/json -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects' --data '{"label_config": "[...]"}' @@ -183,7 +197,7 @@ def create( color : typing.Optional[str] Project color in HEX format - control_weights : typing.Optional[typing.Dict[str, typing.Any]] + control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. 
For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} workspace : typing.Optional[int] @@ -202,7 +216,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -229,12 +243,21 @@ def create( "workspace": workspace, "model_version": model_version, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectsCreateResponse, _response.json()) # type: ignore + return typing.cast( + ProjectsCreateResponse, + parse_obj_as( + type_=ProjectsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -259,7 +282,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -269,11 +292,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", method="GET", request_options=request_options + f"api/projects/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Project, _response.json()) # type: ignore + return typing.cast( + Project, + parse_obj_as( + type_=Project, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -281,6 +312,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a project by specified project ID. Deleting a project permanently removes all tasks, annotations, and project data from Label Studio. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list).
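The reworked `list` returns a `SyncPager[Project]`, and `create` accepts the `control_weights` mapping documented above. A sketch combining both (the weights mirror the docstring's `my_bbox` example; reading `id`/`title` off the iterated items is assumed from the `Project` model):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# The pager fetches pages lazily; iterate items directly, or page-by-page
# via response.iter_pages() as in the docstring example.
for project in client.projects.list(page_size=50):
    print(project.id, project.title)

# control_weights per the docstring: per-label weights plus an overall
# weight used in agreement calculation.
created = client.projects.create(
    control_weights={
        "my_bbox": {
            "type": "RectangleLabels",
            "labels": {"Car": 1.0, "Airplane": 0.5},
            "overall": 0.33,
        }
    },
)
```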
@@ -299,7 +331,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -309,7 +341,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options + f"api/projects/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -335,17 +369,17 @@ def update( show_collab_predictions: typing.Optional[bool] = OMIT, maximum_annotations: typing.Optional[int] = OMIT, color: typing.Optional[str] = OMIT, - control_weights: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, workspace: typing.Optional[int] = OMIT, model_version: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> ProjectsUpdateResponse: """ - Update the project settings for a specific project. For more information, see the following: - - [Create and configure projects](https://labelstud.io/guide/setup_project) - - [Configure labeling interface](https://labelstud.io/guide/setup) - - [Project settings](https://labelstud.io/guide/project_settings) + Update the project settings for a specific project. For more information, see the following: + * [Create and configure projects](https://labelstud.io/guide/setup_project) + * [Configure labeling interface](https://labelstud.io/guide/setup) + * [Project settings](https://labelstud.io/guide/project_settings) The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). @@ -396,7 +430,7 @@ def update( color : typing.Optional[str] Project color in HEX format - control_weights : typing.Optional[typing.Dict[str, typing.Any]] + control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. 
For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} workspace : typing.Optional[int] @@ -415,7 +449,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -444,12 +478,21 @@ def update( "workspace": workspace, "model_version": model_version, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectsUpdateResponse, _response.json()) # type: ignore + return typing.cast( + ProjectsUpdateResponse, + parse_obj_as( + type_=ProjectsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -459,26 +502,26 @@ def import_tasks( self, id: int, *, - request: typing.Sequence[typing.Dict[str, typing.Any]], + request: typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]], commit_to_project: typing.Optional[bool] = None, return_task_ids: typing.Optional[bool] = None, preannotated_from_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, request_options: typing.Optional[RequestOptions] = None, ) -> ProjectsImportTasksResponse: """ + Use this API endpoint to import labeling tasks in bulk. Note that each POST request is limited to 250K tasks and 200 MB. - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - Imported data is verified against a project *label_config* and must include all variables that were used in the *label_config*. + Imported data is verified against a project *label_config* and must include all variables that were used in the *label_config*. - For example, if the label configuration has a _$text_ variable, then each item in a data object must include a `text` field. + For example, if the label configuration has a *$text* variable, then each item in a data object must include a `text` field. There are three possible ways to import tasks with this endpoint: #### 1\. **POST with data** - Send JSON tasks as POST data. Only JSON is supported for POSTing files directly. Update this example to specify your authorization token and Label Studio instance host, then run the following from @@ -490,7 +533,6 @@ def import_tasks( ``` #### 2\. **POST with files** - Send tasks as files. You can attach multiple files with different names. - **JSON**: text files in JavaScript object notation format @@ -507,7 +549,6 @@ def import_tasks( ``` #### 3\. **POST with URL** - You can also provide a URL to a file with labeling tasks. Supported file formats are the same as in option 2. ```bash @@ -523,7 +564,7 @@ def import_tasks( id : int A unique integer value identifying this project.
- request : typing.Sequence[typing.Dict[str, typing.Any]] + request : typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]] commit_to_project : typing.Optional[bool] Set to "true" to immediately commit tasks to the project. @@ -544,7 +585,7 @@ def import_tasks( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -568,9 +609,23 @@ def import_tasks( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectsImportTasksResponse, _response.json()) # type: ignore + return typing.cast( + ProjectsImportTasksResponse, + parse_obj_as( + type_=ProjectsImportTasksResponse, # type: ignore + object_=_response.json(), + ), + ) if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ) + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -580,6 +635,7 @@ def validate_config( self, id: int, *, label_config: str, request_options: typing.Optional[RequestOptions] = None ) -> ProjectLabelConfig: """ + Determine whether the label configuration for a specific project is valid. For more information about setting up labeling configs, see [Configure labeling interface](https://labelstud.io/guide/setup) and our [Tags reference](https://labelstud.io/tags/). The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). @@ -602,7 +658,7 @@ def validate_config( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -615,13 +671,21 @@ def validate_config( _response = self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/validate/", method="POST", - json={"label_config": label_config}, + json={ + "label_config": label_config, + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectLabelConfig, _response.json()) # type: ignore + return typing.cast( + ProjectLabelConfig, + parse_obj_as( + type_=ProjectLabelConfig, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -644,13 +708,13 @@ async def list( request_options: typing.Optional[RequestOptions] = None, ) -> AsyncPager[Project]: """ + Return a list of the projects within your organization. To perform most tasks with the Label Studio API, you must specify the project ID, sometimes referred to as the `pk`. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using this API call. To retrieve a list of your Label Studio projects, update the following command to match your own environment. 
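Taken together, `import_tasks` and `validate_config` cover the bulk-import path shown in the bash examples above. A Python sketch (the project ID and the `$text` label config are illustrative, not taken from this diff):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Tasks must carry every variable the label config references;
# a config using $text means each task dict needs a "text" key.
client.projects.import_tasks(
    id=1,
    request=[{"text": "Hello world"}, {"text": "Goodbye"}],
    commit_to_project=True,
    return_task_ids=True,
)

# Check a candidate config against the same project first if unsure.
client.projects.validate_config(
    id=1,
    label_config='<View><Text name="text" value="$text"/></View>',
)
```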
Replace the domain name, port, and authorization token, then run the following from the command line: - ```bash curl -X GET https://localhost:8080/api/projects/ -H 'Authorization: Token abc123' ``` @@ -682,28 +746,48 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - response = await client.projects.list() - async for item in response: - yield item - # alternatively, you can paginate page-by-page - async for page in response.iter_pages(): - yield page + + + async def main() -> None: + response = await client.projects.list() + async for item in response: + yield item + # alternatively, you can paginate page-by-page + async for page in response.iter_pages(): + yield page + + + asyncio.run(main()) """ - page = page or 1 + page = page if page is not None else 1 _response = await self._client_wrapper.httpx_client.request( "api/projects/", method="GET", - params={"ordering": ordering, "ids": ids, "title": title, "page": page, "page_size": page_size}, + params={ + "ordering": ordering, + "ids": ids, + "title": title, + "page": page, + "page_size": page_size, + }, request_options=request_options, ) try: if 200 <= _response.status_code < 300: - _parsed_response = pydantic_v1.parse_obj_as(ProjectsListResponse, _response.json()) # type: ignore + _parsed_response = typing.cast( + ProjectsListResponse, + parse_obj_as( + type_=ProjectsListResponse, # type: ignore + object_=_response.json(), + ), + ) _has_next = True _get_next = lambda: self.list( ordering=ordering, @@ -735,17 +819,17 @@ async def create( show_collab_predictions: typing.Optional[bool] = OMIT, maximum_annotations: typing.Optional[int] = OMIT, color: typing.Optional[str] = OMIT, - control_weights: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, workspace: typing.Optional[int] = OMIT, model_version: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> ProjectsCreateResponse: """ - Create a project and set up the labeling interface. For more information about setting up projects, see the following: - - [Create and configure projects](https://labelstud.io/guide/setup_project) - - [Configure labeling interface](https://labelstud.io/guide/setup) - - [Project settings](https://labelstud.io/guide/project_settings) + Create a project and set up the labeling interface. For more information about setting up projects, see the following: + * [Create and configure projects](https://labelstud.io/guide/setup_project) + * [Configure labeling interface](https://labelstud.io/guide/setup) + * [Project settings](https://labelstud.io/guide/project_settings) ```bash curl -H Content-Type:application/json -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects' --data '{"label_config": "[...]"}' @@ -789,7 +873,7 @@ async def create( color : typing.Optional[str] Project color in HEX format - control_weights : typing.Optional[typing.Dict[str, typing.Any]] + control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. 
For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} workspace : typing.Optional[int] @@ -808,12 +892,20 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.projects.create() + + + async def main() -> None: + await client.projects.create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/projects/", @@ -835,12 +927,21 @@ async def create( "workspace": workspace, "model_version": model_version, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectsCreateResponse, _response.json()) # type: ignore + return typing.cast( + ProjectsCreateResponse, + parse_obj_as( + type_=ProjectsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -865,21 +966,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.projects.get( - id=1, - ) + + + async def main() -> None: + await client.projects.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", method="GET", request_options=request_options + f"api/projects/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Project, _response.json()) # type: ignore + return typing.cast( + Project, + parse_obj_as( + type_=Project, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -887,6 +1004,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a project by specified project ID. Deleting a project permanently removes all tasks, annotations, and project data from Label Studio. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list).
@@ -905,17 +1023,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.projects.delete( - id=1, - ) + + + async def main() -> None: + await client.projects.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options + f"api/projects/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -941,17 +1069,17 @@ async def update( show_collab_predictions: typing.Optional[bool] = OMIT, maximum_annotations: typing.Optional[int] = OMIT, color: typing.Optional[str] = OMIT, - control_weights: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, workspace: typing.Optional[int] = OMIT, model_version: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> ProjectsUpdateResponse: """ - Update the project settings for a specific project. For more information, see the following: - - [Create and configure projects](https://labelstud.io/guide/setup_project) - - [Configure labeling interface](https://labelstud.io/guide/setup) - - [Project settings](https://labelstud.io/guide/project_settings) + Update the project settings for a specific project. For more information, see the following: + * [Create and configure projects](https://labelstud.io/guide/setup_project) + * [Configure labeling interface](https://labelstud.io/guide/setup) + * [Project settings](https://labelstud.io/guide/project_settings) The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). @@ -1002,7 +1130,7 @@ async def update( color : typing.Optional[str] Project color in HEX format - control_weights : typing.Optional[typing.Dict[str, typing.Any]] + control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. 
For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} workspace : typing.Optional[int] @@ -1021,14 +1149,22 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.projects.update( - id=1, - ) + + + async def main() -> None: + await client.projects.update( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/", @@ -1050,12 +1186,21 @@ async def update( "workspace": workspace, "model_version": model_version, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectsUpdateResponse, _response.json()) # type: ignore + return typing.cast( + ProjectsUpdateResponse, + parse_obj_as( + type_=ProjectsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -1065,26 +1210,26 @@ async def import_tasks( self, id: int, *, - request: typing.Sequence[typing.Dict[str, typing.Any]], + request: typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]], commit_to_project: typing.Optional[bool] = None, return_task_ids: typing.Optional[bool] = None, preannotated_from_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, request_options: typing.Optional[RequestOptions] = None, ) -> ProjectsImportTasksResponse: """ + Use this API endpoint to import labeling tasks in bulk. Note that each POST request is limited to 250K tasks and 200 MB. - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - Imported data is verified against a project *label_config* and must include all variables that were used in the *label_config*. + Imported data is verified against a project *label_config* and must include all variables that were used in the *label_config*. - For example, if the label configuration has a _$text_ variable, then each item in a data object must include a `text` field. + For example, if the label configuration has a *$text* variable, then each item in a data object must include a `text` field. There are three possible ways to import tasks with this endpoint: #### 1\. **POST with data** - Send JSON tasks as POST data. Only JSON is supported for POSTing files directly. Update this example to specify your authorization token and Label Studio instance host, then run the following from @@ -1096,7 +1241,6 @@ async def import_tasks( ``` #### 2\. **POST with files** - Send tasks as files. You can attach multiple files with different names. - **JSON**: text files in JavaScript object notation format @@ -1113,7 +1257,6 @@ async def import_tasks( ``` #### 3\. **POST with URL** - You can also provide a URL to a file with labeling tasks.
Supported file formats are the same as in option 2. ```bash @@ -1129,7 +1272,7 @@ async def import_tasks( id : int A unique integer value identifying this project. - request : typing.Sequence[typing.Dict[str, typing.Any]] + request : typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]] commit_to_project : typing.Optional[bool] Set to "true" to immediately commit tasks to the project. @@ -1150,15 +1293,23 @@ async def import_tasks( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.projects.import_tasks( - id=1, - request=[{"key": "value"}], - ) + + + async def main() -> None: + await client.projects.import_tasks( + id=1, + request=[{"key": "value"}], + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/import", @@ -1174,9 +1325,23 @@ async def import_tasks( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectsImportTasksResponse, _response.json()) # type: ignore + return typing.cast( + ProjectsImportTasksResponse, + parse_obj_as( + type_=ProjectsImportTasksResponse, # type: ignore + object_=_response.json(), + ), + ) if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ) + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -1186,6 +1351,7 @@ async def validate_config( self, id: int, *, label_config: str, request_options: typing.Optional[RequestOptions] = None ) -> ProjectLabelConfig: """ + Determine whether the label configuration for a specific project is valid. For more information about setting up labeling configs, see [Configure labeling interface](https://labelstud.io/guide/setup) and our [Tags reference](https://labelstud.io/tags/). The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). 
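Both `import_tasks` variants raise `BadRequestError` on a 400 and fall back to a bare `ApiError` when the body fails JSON decoding, per the handlers above. A sketch of catching them (import paths follow this diff's module layout; the `.status_code`/`.body` attributes are assumed from `ApiError`'s constructor):

```python
from label_studio_sdk import LabelStudio
from label_studio_sdk.core.api_error import ApiError
from label_studio_sdk.errors.bad_request_error import BadRequestError

client = LabelStudio(api_key="YOUR_API_KEY")

try:
    client.projects.import_tasks(id=1, request=[{"wrong_key": "value"}])
except BadRequestError:
    # 400: e.g. tasks that don't match the project's label_config.
    print("import rejected by validation")
except ApiError as exc:
    # Any other non-2xx status, or a body that failed JSON decoding.
    print(exc.status_code, exc.body)
```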
@@ -1208,26 +1374,42 @@ async def validate_config( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.projects.validate_config( - id=1, - label_config="label_config", - ) + + + async def main() -> None: + await client.projects.validate_config( + id=1, + label_config="label_config", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/validate/", method="POST", - json={"label_config": label_config}, + json={ + "label_config": label_config, + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectLabelConfig, _response.json()) # type: ignore + return typing.cast( + ProjectLabelConfig, + parse_obj_as( + type_=ProjectLabelConfig, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/projects/exports/client.py b/src/label_studio_sdk/projects/exports/client.py index 07a3f9e6f..7263d17e2 100644 --- a/src/label_studio_sdk/projects/exports/client.py +++ b/src/label_studio_sdk/projects/exports/client.py @@ -1,23 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing +from ...core.client_wrapper import SyncClientWrapper +from ...core.request_options import RequestOptions +from ...core.jsonable_encoder import jsonable_encoder from json.decoder import JSONDecodeError - from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 -from ...core.request_options import RequestOptions -from ...types.annotation_filter_options import AnnotationFilterOptions -from ...types.converted_format import ConvertedFormat +from ...core.pydantic_utilities import parse_obj_as from ...types.export import Export -from ...types.export_convert import ExportConvert -from ...types.export_create import ExportCreate +from ...types.user_simple import UserSimple +import datetime as dt from ...types.export_create_status import ExportCreateStatus -from ...types.serialization_options import SerializationOptions +from ...types.converted_format import ConvertedFormat from ...types.task_filter_options import TaskFilterOptions -from ...types.user_simple import UserSimple +from ...types.annotation_filter_options import AnnotationFilterOptions +from ...types.serialization_options import SerializationOptions +from ...types.export_create import ExportCreate +from ...core.serialization import convert_and_respect_annotation_metadata +from ...types.export_convert import ExportConvert +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -38,23 +39,19 @@ def create_export( request_options: typing.Optional[RequestOptions] = None, ) -> typing.Iterator[bytes]: """ + If you have a large project, it's recommended to use export snapshots; this easy export endpoint might have timeouts. Export annotated tasks as a file in a specific format.
For example, to export JSON annotations for a project to a file called `annotations.json`, run the following from the command line: - ```bash curl -X GET https://localhost:8080/api/projects/{id}/export?exportType=JSON -H 'Authorization: Token abc123' --output 'annotations.json' ``` - To export all tasks, including skipped tasks and others without annotations, run the following from the command line: - ```bash curl -X GET https://localhost:8080/api/projects/{id}/export?exportType=JSON&download_all_tasks=true -H 'Authorization: Token abc123' --output 'annotations.json' ``` - To export specific tasks with IDs of 123 and 345, run the following from the command line: - ```bash curl -X GET https://localhost:8080/api/projects/{id}/export?ids[]=123\&ids[]=345 -H 'Authorization: Token abc123' --output 'annotations.json' ``` @@ -70,36 +67,24 @@ def create_export( Selected export format (JSON by default) download_all_tasks : typing.Optional[str] + If true, download all tasks regardless of status. If false, download only annotated tasks. download_resources : typing.Optional[bool] + If true, download all resource files such as images, audio, and others relevant to the tasks. ids : typing.Optional[typing.Union[int, typing.Sequence[int]]] + Specify a list of task IDs to retrieve only the details for those tasks. request_options : typing.Optional[RequestOptions] - Request-specific configuration. + Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. Yields ------ typing.Iterator[bytes] Exported data - - Examples - -------- - from label_studio_sdk.client import LabelStudio - - client = LabelStudio( - api_key="YOUR_API_KEY", - ) - client.projects.exports.create_export( - id=1, - export_type="string", - download_all_tasks="string", - download_resources=True, - ids=1, - ) """ with self._client_wrapper.httpx_client.stream( f"api/projects/{jsonable_encoder(id)}/export", @@ -114,7 +99,8 @@ def create_export( ) as _response: try: if 200 <= _response.status_code < 300: - for _chunk in _response.iter_bytes(): + _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None + for _chunk in _response.iter_bytes(chunk_size=_chunk_size): yield _chunk return _response.read() @@ -125,6 +111,7 @@ def list_formats(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[str]: """ + Before exporting annotations, you can check which formats are supported by the specified project. For more information about export formats, see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list).
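The `chunk_size` plumbing added above is driven from `request_options`; a minimal sketch of streaming an export to disk under that assumption (the file name and chunk size are arbitrary):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# create_export yields raw bytes; the generated code reads "chunk_size"
# out of request_options and forwards it to httpx's iter_bytes().
with open("annotations.json", "wb") as f:
    for chunk in client.projects.exports.create_export(
        id=1,
        export_type="JSON",
        download_all_tasks="true",  # string-typed flag per the signature above
        request_options={"chunk_size": 1024 * 1024},  # 1 MiB chunks
    ):
        f.write(chunk)
```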
@@ -144,7 +131,7 @@ def list_formats(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -154,11 +141,19 @@ def list_formats(self, id: int, *, request_options: typing.Optional[RequestOptio ) """ _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/export/formats", method="GET", request_options=request_options + f"api/projects/{jsonable_encoder(id)}/export/formats", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[str], _response.json()) # type: ignore + return typing.cast( + typing.List[str], + parse_obj_as( + type_=typing.List[str], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -166,6 +161,7 @@ def list_formats(self, id: int, *, request_options: typing.Optional[RequestOptio def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Export]: """ + Returns a list of export files (snapshots) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). Included in the response is information about each snapshot, such as who created it and what format it is in. @@ -185,7 +181,7 @@ def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = No Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -195,11 +191,19 @@ def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = No ) """ _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/exports/", method="GET", request_options=request_options + f"api/projects/{jsonable_encoder(id)}/exports/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Export], _response.json()) # type: ignore + return typing.cast( + typing.List[Export], + parse_obj_as( + type_=typing.List[Export], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -216,7 +220,7 @@ def create( finished_at: typing.Optional[dt.datetime] = OMIT, status: typing.Optional[ExportCreateStatus] = OMIT, md5: typing.Optional[str] = OMIT, - counters: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + counters: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, converted_formats: typing.Optional[typing.Sequence[ConvertedFormat]] = OMIT, task_filter_options: typing.Optional[TaskFilterOptions] = OMIT, annotation_filter_options: typing.Optional[AnnotationFilterOptions] = OMIT, @@ -224,6 +228,7 @@ def create( request_options: typing.Optional[RequestOptions] = None, ) -> ExportCreate: """ + Create a new export request to start a background task and generate an export file (snapshot) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list).
A snapshot is a static export of your project's data and annotations at a specific point in time. It captures the current state of your tasks, annotations, and other relevant data, allowing you to download and review them later. Snapshots are particularly useful for large projects as they help avoid timeouts during export operations by processing the data asynchronously. @@ -251,7 +256,7 @@ def create( md5 : typing.Optional[str] - counters : typing.Optional[typing.Dict[str, typing.Any]] + counters : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] converted_formats : typing.Optional[typing.Sequence[ConvertedFormat]] @@ -271,7 +276,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -286,23 +291,39 @@ def create( json={ "title": title, "id": id, - "created_by": created_by, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=UserSimple, direction="write" + ), "created_at": created_at, "finished_at": finished_at, "status": status, "md5": md5, "counters": counters, - "converted_formats": converted_formats, - "task_filter_options": task_filter_options, - "annotation_filter_options": annotation_filter_options, - "serialization_options": serialization_options, + "converted_formats": convert_and_respect_annotation_metadata( + object_=converted_formats, annotation=typing.Sequence[ConvertedFormat], direction="write" + ), + "task_filter_options": convert_and_respect_annotation_metadata( + object_=task_filter_options, annotation=TaskFilterOptions, direction="write" + ), + "annotation_filter_options": convert_and_respect_annotation_metadata( + object_=annotation_filter_options, annotation=AnnotationFilterOptions, direction="write" + ), + "serialization_options": convert_and_respect_annotation_metadata( + object_=serialization_options, annotation=SerializationOptions, direction="write" + ), }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExportCreate, _response.json()) # type: ignore + return typing.cast( + ExportCreate, + parse_obj_as( + type_=ExportCreate, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -310,6 +331,7 @@ def create( def get(self, id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None) -> Export: """ + Retrieve information about a specific export file (snapshot). You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). 
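A speculative sketch of the snapshot lifecycle this docstring describes: create a snapshot, then fetch it again until the background job settles. The title and polling cadence are invented, and the status strings assume the `ExportCreateStatus` values shown elsewhere in this diff:

```python
import time

from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Kick off the asynchronous export; the path parameter is id_,
# since id is reserved for the optional body field.
snapshot = client.projects.exports.create(id_=1, title="weekly-backup")

# Poll until the background task leaves the in-progress states.
while snapshot.status in ("created", "in_progress"):
    time.sleep(2)
    snapshot = client.projects.exports.get(id=1, export_pk=str(snapshot.id))

print(snapshot.status, snapshot.counters)
```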
@@ -334,7 +356,7 @@ def get(self, id: int, export_pk: str, *, request_options: typing.Optional[Reque Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -351,7 +373,13 @@ def get(self, id: int, export_pk: str, *, request_options: typing.Optional[Reque ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Export, _response.json()) # type: ignore + return typing.cast( + Export, + parse_obj_as( + type_=Export, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -359,6 +387,7 @@ def get(self, id: int, export_pk: str, *, request_options: typing.Optional[Reque def delete(self, id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete an export file by specified export ID. You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). @@ -380,7 +409,7 @@ def delete(self, id: int, export_pk: str, *, request_options: typing.Optional[Re Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -407,6 +436,7 @@ def convert( self, id: int, export_pk: str, *, export_type: str, request_options: typing.Optional[RequestOptions] = None ) -> ExportConvert: """ + You can use this to convert an export snapshot into the selected format. To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). @@ -436,7 +466,7 @@ def convert( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -450,13 +480,21 @@ def convert( _response = self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/exports/{jsonable_encoder(export_pk)}/convert", method="POST", - json={"export_type": export_type}, + json={ + "export_type": export_type, + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExportConvert, _response.json()) # type: ignore + return typing.cast( + ExportConvert, + parse_obj_as( + type_=ExportConvert, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -471,6 +509,7 @@ def download( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Download an export snapshot as a file in a specified format. To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). 
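Chaining the surrounding endpoints gives a plausible convert-then-download flow; the `export_pk` value of "42" and the CSV format below are placeholders:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Check what the project can be exported as before converting.
formats = client.projects.exports.list_formats(id=1)
if "CSV" in formats:
    client.projects.exports.convert(id=1, export_pk="42", export_type="CSV")
    client.projects.exports.download(id=1, export_pk="42", export_type="CSV")
```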
@@ -497,7 +536,7 @@ def download( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -510,7 +549,9 @@ def download( _response = self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/exports/{jsonable_encoder(export_pk)}/download", method="GET", - params={"exportType": export_type}, + params={ + "exportType": export_type, + }, request_options=request_options, ) try: @@ -537,23 +578,19 @@ async def create_export( request_options: typing.Optional[RequestOptions] = None, ) -> typing.AsyncIterator[bytes]: """ + If you have a large project, it's recommended to use export snapshots; this easy export endpoint might have timeouts. Export annotated tasks as a file in a specific format. For example, to export JSON annotations for a project to a file called `annotations.json`, run the following from the command line: - ```bash curl -X GET https://localhost:8080/api/projects/{id}/export?exportType=JSON -H 'Authorization: Token abc123' --output 'annotations.json' ``` - To export all tasks, including skipped tasks and others without annotations, run the following from the command line: - ```bash curl -X GET https://localhost:8080/api/projects/{id}/export?exportType=JSON&download_all_tasks=true -H 'Authorization: Token abc123' --output 'annotations.json' ``` - To export specific tasks with IDs of 123 and 345, run the following from the command line: - ```bash curl -X GET https://localhost:8080/api/projects/{id}/export?ids[]=123\&ids[]=345 -H 'Authorization: Token abc123' --output 'annotations.json' ``` @@ -569,36 +606,24 @@ async def create_export( Selected export format (JSON by default) download_all_tasks : typing.Optional[str] + If true, download all tasks regardless of status. If false, download only annotated tasks. download_resources : typing.Optional[bool] + If true, download all resource files such as images, audio, and others relevant to the tasks. ids : typing.Optional[typing.Union[int, typing.Sequence[int]]] + Specify a list of task IDs to retrieve only the details for those tasks. request_options : typing.Optional[RequestOptions] - Request-specific configuration. + Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. Yields ------ typing.AsyncIterator[bytes] Exported data - - Examples - -------- - from label_studio_sdk.client import AsyncLabelStudio - - client = AsyncLabelStudio( - api_key="YOUR_API_KEY", - ) - await client.projects.exports.create_export( - id=1, - export_type="string", - download_all_tasks="string", - download_resources=True, - ids=1, - ) """ async with self._client_wrapper.httpx_client.stream( f"api/projects/{jsonable_encoder(id)}/export", @@ -613,7 +638,8 @@ async def create_export( ) as _response: try: if 200 <= _response.status_code < 300: - async for _chunk in _response.aiter_bytes(): + _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None + async for _chunk in _response.aiter_bytes(chunk_size=_chunk_size): yield _chunk return await _response.aread() @@ -626,6 +652,7 @@ async def list_formats( self, id: int, *, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[str]: """ + Before exporting annotations, you can check which formats are supported by the specified project.
For more information about export formats, see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). @@ -645,21 +672,37 @@ async def list_formats( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.projects.exports.list_formats( - id=1, - ) + + + async def main() -> None: + await client.projects.exports.list_formats( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/export/formats", method="GET", request_options=request_options + f"api/projects/{jsonable_encoder(id)}/export/formats", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[str], _response.json()) # type: ignore + return typing.cast( + typing.List[str], + parse_obj_as( + type_=typing.List[str], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -667,6 +710,7 @@ async def list_formats( async def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Export]: """ + Returns a list of export files (snapshots) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). Included in the response is information about each snapshot, such as who created it and what format it is in.
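Building on the async list endpoint, a small sketch that filters snapshots by status, using the `asyncio.run(main())` convention these examples now follow. It assumes `Export` carries the `id`, `status`, and `finished_at` fields used below, with string-valued statuses:

```python
import asyncio

from label_studio_sdk import AsyncLabelStudio

client = AsyncLabelStudio(api_key="YOUR_API_KEY")


async def main() -> None:
    snapshots = await client.projects.exports.list(id=1)
    for snapshot in snapshots:
        # Keep only snapshots whose background export has finished.
        if snapshot.status == "completed":
            print(snapshot.id, snapshot.finished_at)


asyncio.run(main())
```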
@@ -686,21 +730,37 @@ async def list(self, id: int, *, request_options: typing.Optional[RequestOptions Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.projects.exports.list( - id=1, - ) + + + async def main() -> None: + await client.projects.exports.list( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/exports/", method="GET", request_options=request_options + f"api/projects/{jsonable_encoder(id)}/exports/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Export], _response.json()) # type: ignore + return typing.cast( + typing.List[Export], + parse_obj_as( + type_=typing.List[Export], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -717,7 +777,7 @@ async def create( finished_at: typing.Optional[dt.datetime] = OMIT, status: typing.Optional[ExportCreateStatus] = OMIT, md5: typing.Optional[str] = OMIT, - counters: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + counters: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, converted_formats: typing.Optional[typing.Sequence[ConvertedFormat]] = OMIT, task_filter_options: typing.Optional[TaskFilterOptions] = OMIT, annotation_filter_options: typing.Optional[AnnotationFilterOptions] = OMIT, @@ -725,6 +785,7 @@ async def create( request_options: typing.Optional[RequestOptions] = None, ) -> ExportCreate: """ + Create a new export request to start a background task and generate an export file (snapshot) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). A snapshot is a static export of your project's data and annotations at a specific point in time. It captures the current state of your tasks, annotations, and other relevant data, allowing you to download and review them later. Snapshots are particularly useful for large projects as they help avoid timeouts during export operations by processing the data asynchronously. 
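The async create endpoint fits the same poll-until-done pattern as its sync twin; a hedged sketch, with an invented title and sleep interval:

```python
import asyncio

from label_studio_sdk import AsyncLabelStudio

client = AsyncLabelStudio(api_key="YOUR_API_KEY")


async def main() -> None:
    snapshot = await client.projects.exports.create(id_=1, title="nightly")
    # The snapshot is generated by a background task, so wait for it to
    # settle before converting or downloading it.
    while snapshot.status in ("created", "in_progress"):
        await asyncio.sleep(2)
        snapshot = await client.projects.exports.get(
            id=1, export_pk=str(snapshot.id)
        )
    print(snapshot.status)


asyncio.run(main())
```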
@@ -752,7 +813,7 @@ async def create( md5 : typing.Optional[str] - counters : typing.Optional[typing.Dict[str, typing.Any]] + counters : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] converted_formats : typing.Optional[typing.Sequence[ConvertedFormat]] @@ -772,14 +833,22 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.projects.exports.create( - id_=1, - ) + + + async def main() -> None: + await client.projects.exports.create( + id_=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id_)}/exports/", @@ -787,23 +856,39 @@ async def create( json={ "title": title, "id": id, - "created_by": created_by, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=UserSimple, direction="write" + ), "created_at": created_at, "finished_at": finished_at, "status": status, "md5": md5, "counters": counters, - "converted_formats": converted_formats, - "task_filter_options": task_filter_options, - "annotation_filter_options": annotation_filter_options, - "serialization_options": serialization_options, + "converted_formats": convert_and_respect_annotation_metadata( + object_=converted_formats, annotation=typing.Sequence[ConvertedFormat], direction="write" + ), + "task_filter_options": convert_and_respect_annotation_metadata( + object_=task_filter_options, annotation=TaskFilterOptions, direction="write" + ), + "annotation_filter_options": convert_and_respect_annotation_metadata( + object_=annotation_filter_options, annotation=AnnotationFilterOptions, direction="write" + ), + "serialization_options": convert_and_respect_annotation_metadata( + object_=serialization_options, annotation=SerializationOptions, direction="write" + ), }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExportCreate, _response.json()) # type: ignore + return typing.cast( + ExportCreate, + parse_obj_as( + type_=ExportCreate, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -811,6 +896,7 @@ async def create( async def get(self, id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None) -> Export: """ + Retrieve information about a specific export file (snapshot). You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). 
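All of these generated methods surface failures through `ApiError`, raised in the except branches above; a minimal sketch of catching it in user code, with a placeholder `export_pk`:

```python
from label_studio_sdk import LabelStudio
from label_studio_sdk.core.api_error import ApiError

client = LabelStudio(api_key="YOUR_API_KEY")

try:
    snapshot = client.projects.exports.get(id=1, export_pk="42")
except ApiError as err:
    # status_code and body come straight from the failed HTTP response.
    print(err.status_code, err.body)
```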
@@ -835,15 +921,23 @@ async def get(self, id: int, export_pk: str, *, request_options: typing.Optional Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.projects.exports.get( - id=1, - export_pk="export_pk", - ) + + + async def main() -> None: + await client.projects.exports.get( + id=1, + export_pk="export_pk", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/exports/{jsonable_encoder(export_pk)}", @@ -852,7 +946,13 @@ async def get(self, id: int, export_pk: str, *, request_options: typing.Optional ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Export, _response.json()) # type: ignore + return typing.cast( + Export, + parse_obj_as( + type_=Export, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -860,6 +960,7 @@ async def get(self, id: int, export_pk: str, *, request_options: typing.Optional async def delete(self, id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete an export file by specified export ID. You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). @@ -881,15 +982,23 @@ async def delete(self, id: int, export_pk: str, *, request_options: typing.Optio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.projects.exports.delete( - id=1, - export_pk="export_pk", - ) + + + async def main() -> None: + await client.projects.exports.delete( + id=1, + export_pk="export_pk", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/exports/{jsonable_encoder(export_pk)}", @@ -908,6 +1017,7 @@ async def convert( self, id: int, export_pk: str, *, export_type: str, request_options: typing.Optional[RequestOptions] = None ) -> ExportConvert: """ + You can use this to convert an export snapshot into the selected format. To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). 
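Combining the list and delete endpoints yields a simple retention sketch; the keep-only-the-newest policy is invented for illustration:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Sort snapshots newest-first by ID and drop everything but the latest.
snapshots = sorted(
    client.projects.exports.list(id=1),
    key=lambda s: s.id or 0,
    reverse=True,
)
for old in snapshots[1:]:
    client.projects.exports.delete(id=1, export_pk=str(old.id))
```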
@@ -937,27 +1047,43 @@ async def convert( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.projects.exports.convert( - id=1, - export_pk="export_pk", - export_type="export_type", - ) + + + async def main() -> None: + await client.projects.exports.convert( + id=1, + export_pk="export_pk", + export_type="export_type", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/exports/{jsonable_encoder(export_pk)}/convert", method="POST", - json={"export_type": export_type}, + json={ + "export_type": export_type, + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExportConvert, _response.json()) # type: ignore + return typing.cast( + ExportConvert, + parse_obj_as( + type_=ExportConvert, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -972,6 +1098,7 @@ async def download( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Download an export snapshot as a file in a specified format. To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). @@ -998,20 +1125,30 @@ async def download( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.projects.exports.download( - id=1, - export_pk="export_pk", - ) + + + async def main() -> None: + await client.projects.exports.download( + id=1, + export_pk="export_pk", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/exports/{jsonable_encoder(export_pk)}/download", method="GET", - params={"exportType": export_type}, + params={ + "exportType": export_type, + }, request_options=request_options, ) try: diff --git a/src/label_studio_sdk/projects/types/projects_create_response.py b/src/label_studio_sdk/projects/types/projects_create_response.py index 71352b8d0..647646e02 100644 --- a/src/label_studio_sdk/projects/types/projects_create_response.py +++ b/src/label_studio_sdk/projects/types/projects_create_response.py @@ -1,96 +1,86 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2 -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class ProjectsCreateResponse(pydantic_v1.BaseModel): +class ProjectsCreateResponse(UniversalBaseModel): """ Project """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Project title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Project description """ - label_config: typing.Optional[str] = pydantic_v1.Field(default=None) + label_config: typing.Optional[str] = pydantic.Field(default=None) """ Label config in XML format """ - expert_instruction: typing.Optional[str] = pydantic_v1.Field(default=None) + expert_instruction: typing.Optional[str] = pydantic.Field(default=None) """ Labeling instructions to show to the user """ - show_instruction: typing.Optional[bool] = pydantic_v1.Field(default=None) + show_instruction: typing.Optional[bool] = pydantic.Field(default=None) """ Show labeling instructions """ - show_skip_button: typing.Optional[bool] = pydantic_v1.Field(default=None) + show_skip_button: typing.Optional[bool] = pydantic.Field(default=None) """ Show skip button """ - enable_empty_annotation: typing.Optional[bool] = pydantic_v1.Field(default=None) + enable_empty_annotation: typing.Optional[bool] = pydantic.Field(default=None) """ Allow empty annotations """ - show_annotation_history: typing.Optional[bool] = pydantic_v1.Field(default=None) + show_annotation_history: typing.Optional[bool] = pydantic.Field(default=None) """ Show annotation history """ - reveal_preannotations_interactively: typing.Optional[bool] = pydantic_v1.Field(default=None) + reveal_preannotations_interactively: typing.Optional[bool] = pydantic.Field(default=None) """ Reveal preannotations interactively. If set to True, predictions will be shown to the user only after selecting the area of interest """ - show_collab_predictions: typing.Optional[bool] = pydantic_v1.Field(default=None) + show_collab_predictions: typing.Optional[bool] = pydantic.Field(default=None) """ Show predictions to annotators """ - maximum_annotations: typing.Optional[int] = pydantic_v1.Field(default=None) + maximum_annotations: typing.Optional[int] = pydantic.Field(default=None) """ Maximum annotations per task """ - color: typing.Optional[str] = pydantic_v1.Field(default=None) + color: typing.Optional[str] = pydantic.Field(default=None) """ Project color in HEX format """ - control_weights: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. 
For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/projects/types/projects_import_tasks_response.py b/src/label_studio_sdk/projects/types/projects_import_tasks_response.py index 672522004..66adc2d99 100644 --- a/src/label_studio_sdk/projects/types/projects_import_tasks_response.py +++ b/src/label_studio_sdk/projects/types/projects_import_tasks_response.py @@ -1,71 +1,61 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2 -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -class ProjectsImportTasksResponse(pydantic_v1.BaseModel): +class ProjectsImportTasksResponse(UniversalBaseModel): """ Task creation response """ - task_count: typing.Optional[int] = pydantic_v1.Field(default=None) + task_count: typing.Optional[int] = pydantic.Field(default=None) """ Number of tasks added """ - annotation_count: typing.Optional[int] = pydantic_v1.Field(default=None) + annotation_count: typing.Optional[int] = pydantic.Field(default=None) """ Number of annotations added """ - predictions_count: typing.Optional[int] = pydantic_v1.Field(default=None) + predictions_count: typing.Optional[int] = pydantic.Field(default=None) """ Number of predictions added """ - duration: typing.Optional[float] = pydantic_v1.Field(default=None) + duration: typing.Optional[float] = pydantic.Field(default=None) """ Time in seconds to create """ - file_upload_ids: typing.Optional[typing.List[int]] = pydantic_v1.Field(default=None) + file_upload_ids: typing.Optional[typing.List[int]] = pydantic.Field(default=None) """ Database IDs of uploaded files """ - could_be_tasks_list: typing.Optional[bool] = pydantic_v1.Field(default=None) + could_be_tasks_list: typing.Optional[bool] = pydantic.Field(default=None) """ Whether uploaded files can contain lists of tasks, like CSV/TSV files """ - found_formats: typing.Optional[typing.List[str]] = pydantic_v1.Field(default=None) + found_formats: typing.Optional[typing.List[str]] = pydantic.Field(default=None) """ The list of found file formats """ -
data_columns: typing.Optional[typing.List[str]] = pydantic_v1.Field(default=None) + data_columns: typing.Optional[typing.List[str]] = pydantic.Field(default=None) """ The list of found data columns """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/projects/types/projects_list_response.py b/src/label_studio_sdk/projects/types/projects_list_response.py index 2224432be..6d16d6b32 100644 --- a/src/label_studio_sdk/projects/types/projects_list_response.py +++ b/src/label_studio_sdk/projects/types/projects_list_response.py @@ -1,33 +1,23 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel import typing - -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from ...types.project import Project +from ...core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic -class ProjectsListResponse(pydantic_v1.BaseModel): +class ProjectsListResponse(UniversalBaseModel): count: int next: typing.Optional[str] = None previous: typing.Optional[str] = None results: typing.List[Project] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/projects/types/projects_update_response.py b/src/label_studio_sdk/projects/types/projects_update_response.py index 45bc70beb..fa27422b4 100644 --- a/src/label_studio_sdk/projects/types/projects_update_response.py +++ b/src/label_studio_sdk/projects/types/projects_update_response.py @@ -1,96 +1,86 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2 -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class ProjectsUpdateResponse(pydantic_v1.BaseModel): +class ProjectsUpdateResponse(UniversalBaseModel): """ Project """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Project title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Project description """ - label_config: typing.Optional[str] = pydantic_v1.Field(default=None) + label_config: typing.Optional[str] = pydantic.Field(default=None) """ Label config in XML format """ - expert_instruction: typing.Optional[str] = pydantic_v1.Field(default=None) + expert_instruction: typing.Optional[str] = pydantic.Field(default=None) """ Labeling instructions to show to the user """ - show_instruction: typing.Optional[bool] = pydantic_v1.Field(default=None) + show_instruction: typing.Optional[bool] = pydantic.Field(default=None) """ Show labeling instructions """ - show_skip_button: typing.Optional[bool] = pydantic_v1.Field(default=None) + show_skip_button: typing.Optional[bool] = pydantic.Field(default=None) """ Show skip button """ - enable_empty_annotation: typing.Optional[bool] = pydantic_v1.Field(default=None) + enable_empty_annotation: typing.Optional[bool] = pydantic.Field(default=None) """ Allow empty annotations """ - show_annotation_history: typing.Optional[bool] = pydantic_v1.Field(default=None) + show_annotation_history: typing.Optional[bool] = pydantic.Field(default=None) """ Show annotation history """ - reveal_preannotations_interactively: typing.Optional[bool] = pydantic_v1.Field(default=None) + reveal_preannotations_interactively: typing.Optional[bool] = pydantic.Field(default=None) """ Reveal preannotations interactively. If set to True, predictions will be shown to the user only after selecting the area of interest """ - show_collab_predictions: typing.Optional[bool] = pydantic_v1.Field(default=None) + show_collab_predictions: typing.Optional[bool] = pydantic.Field(default=None) """ Show predictions to annotators """ - maximum_annotations: typing.Optional[int] = pydantic_v1.Field(default=None) + maximum_annotations: typing.Optional[int] = pydantic.Field(default=None) """ Maximum annotations per task """ - color: typing.Optional[str] = pydantic_v1.Field(default=None) + color: typing.Optional[str] = pydantic.Field(default=None) """ Project color in HEX format """ - control_weights: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. 
For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/prompts/client.py b/src/label_studio_sdk/prompts/client.py index ba60a0fc8..6186436c5 100644 --- a/src/label_studio_sdk/prompts/client.py +++ b/src/label_studio_sdk/prompts/client.py @@ -1,26 +1,30 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import pydantic_v1 +from ..core.client_wrapper import SyncClientWrapper +from .versions.client import VersionsClient +from .runs.client import RunsClient +from .indicators.client import IndicatorsClient from ..core.request_options import RequestOptions from ..types.prompt import Prompt +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError from ..types.prompt_created_by import PromptCreatedBy +import datetime as dt from ..types.prompt_organization import PromptOrganization -from .indicators.client import AsyncIndicatorsClient, IndicatorsClient -from .runs.client import AsyncRunsClient, RunsClient +from ..core.serialization import convert_and_respect_annotation_metadata +from ..core.jsonable_encoder import jsonable_encoder +from .types.prompts_batch_predictions_request_results_item import PromptsBatchPredictionsRequestResultsItem +from .types.prompts_batch_predictions_response import PromptsBatchPredictionsResponse from .types.prompts_batch_failed_predictions_request_failed_predictions_item import ( PromptsBatchFailedPredictionsRequestFailedPredictionsItem, ) from .types.prompts_batch_failed_predictions_response import PromptsBatchFailedPredictionsResponse -from .types.prompts_batch_predictions_request_results_item import PromptsBatchPredictionsRequestResultsItem -from .types.prompts_batch_predictions_response import PromptsBatchPredictionsResponse -from .versions.client import AsyncVersionsClient, VersionsClient +from ..core.client_wrapper import AsyncClientWrapper +from .versions.client import AsyncVersionsClient +from .runs.client import 
AsyncRunsClient +from .indicators.client import AsyncIndicatorsClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -49,7 +53,7 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -57,11 +61,19 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty client.prompts.list() """ _response = self._client_wrapper.httpx_client.request( - "api/prompts/", method="GET", request_options=request_options + "api/prompts/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Prompt], _response.json()) # type: ignore + return typing.cast( + typing.List[Prompt], + parse_obj_as( + type_=typing.List[Prompt], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -127,7 +139,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -144,10 +156,14 @@ def create( json={ "title": title, "description": description, - "created_by": created_by, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptCreatedBy, direction="write" + ), "created_at": created_at, "updated_at": updated_at, - "organization": organization, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptOrganization, direction="write" + ), "input_fields": input_fields, "output_classes": output_classes, "associated_projects": associated_projects, @@ -158,7 +174,13 @@ def create( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore + return typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -183,7 +205,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -193,11 +215,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/prompts/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore + return typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -221,7 +251,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -231,7 +261,9 @@ def 
delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/prompts/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -305,7 +337,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -323,10 +355,14 @@ def update( json={ "title": title, "description": description, - "created_by": created_by, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptCreatedBy, direction="write" + ), "created_at": created_at, "updated_at": updated_at, - "organization": organization, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptOrganization, direction="write" + ), "input_fields": input_fields, "output_classes": output_classes, "associated_projects": associated_projects, @@ -337,7 +373,13 @@ def update( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore + return typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -370,7 +412,7 @@ def batch_predictions( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -380,13 +422,29 @@ def batch_predictions( _response = self._client_wrapper.httpx_client.request( "api/model-run/batch-predictions", method="POST", - json={"modelrun_id": modelrun_id, "results": results}, + json={ + "modelrun_id": modelrun_id, + "results": convert_and_respect_annotation_metadata( + object_=results, + annotation=typing.Sequence[PromptsBatchPredictionsRequestResultsItem], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PromptsBatchPredictionsResponse, _response.json()) # type: ignore + return typing.cast( + PromptsBatchPredictionsResponse, + parse_obj_as( + type_=PromptsBatchPredictionsResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -421,7 +479,7 @@ def batch_failed_predictions( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -431,13 +489,29 @@ def batch_failed_predictions( _response = self._client_wrapper.httpx_client.request( "api/model-run/batch-failed-predictions", method="POST", - json={"modelrun_id": modelrun_id, "failed_predictions": failed_predictions}, + json={ + "modelrun_id": modelrun_id, + "failed_predictions": convert_and_respect_annotation_metadata( + object_=failed_predictions, + annotation=typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(PromptsBatchFailedPredictionsResponse, _response.json()) # type: ignore + return typing.cast( + PromptsBatchFailedPredictionsResponse, + parse_obj_as( + type_=PromptsBatchFailedPredictionsResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -467,19 +541,35 @@ async def list(self, *, request_options: typing.Optional[RequestOptions] = None) Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.prompts.list() + + + async def main() -> None: + await client.prompts.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/prompts/", method="GET", request_options=request_options + "api/prompts/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Prompt], _response.json()) # type: ignore + return typing.cast( + typing.List[Prompt], + parse_obj_as( + type_=typing.List[Prompt], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -545,16 +635,24 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.prompts.create( - title="title", - input_fields=["input_fields"], - output_classes=["output_classes"], - ) + + + async def main() -> None: + await client.prompts.create( + title="title", + input_fields=["input_fields"], + output_classes=["output_classes"], + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/prompts/", @@ -562,10 +660,14 @@ async def create( json={ "title": title, "description": description, - "created_by": created_by, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptCreatedBy, direction="write" + ), "created_at": created_at, "updated_at": updated_at, - "organization": organization, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptOrganization, direction="write" + ), "input_fields": input_fields, "output_classes": output_classes, "associated_projects": associated_projects, @@ -576,7 +678,13 @@ async def create( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore + return typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -601,21 +709,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.prompts.get( - id=1, - ) + + + async def main() -> None: + await client.prompts.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - 
f"api/prompts/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/prompts/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore + return typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -639,17 +763,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.prompts.delete( - id=1, - ) + + + async def main() -> None: + await client.prompts.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/prompts/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -723,17 +857,25 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.prompts.update( - id=1, - title="title", - input_fields=["input_fields"], - output_classes=["output_classes"], - ) + + + async def main() -> None: + await client.prompts.update( + id=1, + title="title", + input_fields=["input_fields"], + output_classes=["output_classes"], + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/prompts/{jsonable_encoder(id)}", @@ -741,10 +883,14 @@ async def update( json={ "title": title, "description": description, - "created_by": created_by, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptCreatedBy, direction="write" + ), "created_at": created_at, "updated_at": updated_at, - "organization": organization, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptOrganization, direction="write" + ), "input_fields": input_fields, "output_classes": output_classes, "associated_projects": associated_projects, @@ -755,7 +901,13 @@ async def update( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore + return typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -788,23 +940,47 @@ async def batch_predictions( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.prompts.batch_predictions() + + + async def main() -> None: + await client.prompts.batch_predictions() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/model-run/batch-predictions", method="POST", - json={"modelrun_id": modelrun_id, "results": results}, + json={ + "modelrun_id": modelrun_id, + "results": 
convert_and_respect_annotation_metadata( + object_=results, + annotation=typing.Sequence[PromptsBatchPredictionsRequestResultsItem], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PromptsBatchPredictionsResponse, _response.json()) # type: ignore + return typing.cast( + PromptsBatchPredictionsResponse, + parse_obj_as( + type_=PromptsBatchPredictionsResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -839,23 +1015,47 @@ async def batch_failed_predictions( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.prompts.batch_failed_predictions() + + + async def main() -> None: + await client.prompts.batch_failed_predictions() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/model-run/batch-failed-predictions", method="POST", - json={"modelrun_id": modelrun_id, "failed_predictions": failed_predictions}, + json={ + "modelrun_id": modelrun_id, + "failed_predictions": convert_and_respect_annotation_metadata( + object_=failed_predictions, + annotation=typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PromptsBatchFailedPredictionsResponse, _response.json()) # type: ignore + return typing.cast( + PromptsBatchFailedPredictionsResponse, + parse_obj_as( + type_=PromptsBatchFailedPredictionsResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/prompts/indicators/client.py b/src/label_studio_sdk/prompts/indicators/client.py index 86de2aa9a..684dbe469 100644 --- a/src/label_studio_sdk/prompts/indicators/client.py +++ b/src/label_studio_sdk/prompts/indicators/client.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. 
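Editorial note: across every client in this diff, response parsing moves from `pydantic_v1.parse_obj_as(Model, _response.json())` to `typing.cast(Model, parse_obj_as(type_=Model, object_=...))`, as in the batch-predictions hunks above. A minimal sketch of that idiom in isolation; the `parse_obj_as` helper here is a hypothetical stand-in for the one the clients import from `...core.pydantic_utilities`, stubbed with pydantic v2's `TypeAdapter` so the snippet runs on its own.

```python
import typing

import pydantic  # this sketch assumes pydantic v2 is installed

T = typing.TypeVar("T")


class Prompt(pydantic.BaseModel):
    """Trimmed-down stand-in for the SDK's Prompt model (illustrative only)."""

    title: str
    input_fields: typing.List[str] = []


def parse_obj_as(*, type_: typing.Type[T], object_: typing.Any) -> T:
    # Hypothetical stand-in for the helper imported from
    # ...core.pydantic_utilities; under pydantic v2 the equivalent
    # operation is a TypeAdapter validation.
    return pydantic.TypeAdapter(type_).validate_python(object_)


raw = {"title": "Sentiment", "input_fields": ["text"]}

# The generated clients wrap the call in typing.cast so static checkers see
# a concrete Prompt rather than the helper's generic return type.
prompt = typing.cast(Prompt, parse_obj_as(type_=Prompt, object_=raw))
print(prompt.title)  # -> Sentiment
```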
+from ...core.client_wrapper import SyncClientWrapper import typing +from ...core.request_options import RequestOptions +from ...types.key_indicators import KeyIndicators +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as from json.decoder import JSONDecodeError - from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 -from ...core.request_options import RequestOptions from ...types.key_indicator_value import KeyIndicatorValue -from ...types.key_indicators import KeyIndicators +from ...core.client_wrapper import AsyncClientWrapper class IndicatorsClient: @@ -35,7 +35,7 @@ def list(self, pk: int, *, request_options: typing.Optional[RequestOptions] = No Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -45,11 +45,19 @@ def list(self, pk: int, *, request_options: typing.Optional[RequestOptions] = No ) """ _response = self._client_wrapper.httpx_client.request( - f"api/inference-runs/{jsonable_encoder(pk)}/indicators", method="GET", request_options=request_options + f"api/inference-runs/{jsonable_encoder(pk)}/indicators", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(KeyIndicators, _response.json()) # type: ignore + return typing.cast( + KeyIndicators, + parse_obj_as( + type_=KeyIndicators, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -79,7 +87,7 @@ def get( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -96,7 +104,13 @@ def get( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(KeyIndicatorValue, _response.json()) # type: ignore + return typing.cast( + KeyIndicatorValue, + parse_obj_as( + type_=KeyIndicatorValue, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -126,21 +140,37 @@ async def list(self, pk: int, *, request_options: typing.Optional[RequestOptions Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.prompts.indicators.list( - pk=1, - ) + + + async def main() -> None: + await client.prompts.indicators.list( + pk=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/inference-runs/{jsonable_encoder(pk)}/indicators", method="GET", request_options=request_options + f"api/inference-runs/{jsonable_encoder(pk)}/indicators", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(KeyIndicators, _response.json()) # type: ignore + return typing.cast( + KeyIndicators, + parse_obj_as( + type_=KeyIndicators, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -170,15 
+200,23 @@ async def get( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.prompts.indicators.get( - indicator_key="indicator_key", - pk=1, - ) + + + async def main() -> None: + await client.prompts.indicators.get( + indicator_key="indicator_key", + pk=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/inference-runs/{jsonable_encoder(pk)}/indicators/{jsonable_encoder(indicator_key)}", @@ -187,7 +225,13 @@ async def get( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(KeyIndicatorValue, _response.json()) # type: ignore + return typing.cast( + KeyIndicatorValue, + parse_obj_as( + type_=KeyIndicatorValue, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/prompts/runs/client.py b/src/label_studio_sdk/prompts/runs/client.py index 463d8db6c..385506f8c 100644 --- a/src/label_studio_sdk/prompts/runs/client.py +++ b/src/label_studio_sdk/prompts/runs/client.py @@ -1,20 +1,21 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 +from ...core.client_wrapper import SyncClientWrapper +from .types.runs_list_request_project_subset import RunsListRequestProjectSubset from ...core.request_options import RequestOptions from ...types.inference_run import InferenceRun -from ...types.inference_run_created_by import InferenceRunCreatedBy -from ...types.inference_run_organization import InferenceRunOrganization +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from ...types.inference_run_project_subset import InferenceRunProjectSubset +from ...types.inference_run_organization import InferenceRunOrganization +from ...types.inference_run_created_by import InferenceRunCreatedBy from ...types.inference_run_status import InferenceRunStatus -from .types.runs_list_request_project_subset import RunsListRequestProjectSubset +import datetime as dt +from ...core.serialization import convert_and_respect_annotation_metadata +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
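Editorial note: the `OMIT = typing.cast(typing.Any, ...)` line above is the sentinel the generated clients use for optional body fields. It lets the serializer distinguish "caller never passed this parameter" from an explicit `None`. A minimal, self-contained sketch of the pattern; `build_body` is a hypothetical illustration, not SDK code.

```python
import typing

# Ellipsis cast to Any, exactly as in the module above: a sentinel no
# caller would plausibly pass on purpose.
OMIT = typing.cast(typing.Any, ...)


def build_body(
    *,
    project: typing.Optional[int] = OMIT,
    model_version: typing.Optional[str] = OMIT,
) -> typing.Dict[str, typing.Any]:
    # Drop keys still set to the sentinel, but keep explicit None values,
    # which a plain `if value is None` check would wrongly discard.
    candidates = {"project": project, "model_version": model_version}
    return {k: v for k, v in candidates.items() if v is not OMIT}


print(build_body(project=1))           # {'project': 1}
print(build_body(model_version=None))  # {'model_version': None}
```

In the generated clients this filtering is delegated to the request wrapper through the `omit=OMIT` keyword visible in each hunk.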
@@ -60,7 +61,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -75,12 +76,21 @@ def list( _response = self._client_wrapper.httpx_client.request( f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", method="GET", - params={"project": project, "project_subset": project_subset}, + params={ + "project": project, + "project_subset": project_subset, + }, request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(InferenceRun, _response.json()) # type: ignore + return typing.cast( + InferenceRun, + parse_obj_as( + type_=InferenceRun, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -147,7 +157,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -163,10 +173,14 @@ def create( f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", method="POST", json={ - "organization": organization, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=InferenceRunOrganization, direction="write" + ), "project": project, "model_version": model_version, - "created_by": created_by, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=InferenceRunCreatedBy, direction="write" + ), "project_subset": project_subset, "status": status, "job_id": job_id, @@ -180,7 +194,13 @@ def create( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(InferenceRun, _response.json()) # type: ignore + return typing.cast( + InferenceRun, + parse_obj_as( + type_=InferenceRun, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -227,27 +247,44 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.prompts.runs.list( - id=1, - version_id=1, - project=1, - project_subset="All", - ) + + + async def main() -> None: + await client.prompts.runs.list( + id=1, + version_id=1, + project=1, + project_subset="All", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", method="GET", - params={"project": project, "project_subset": project_subset}, + params={ + "project": project, + "project_subset": project_subset, + }, request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(InferenceRun, _response.json()) # type: ignore + return typing.cast( + InferenceRun, + parse_obj_as( + type_=InferenceRun, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -314,26 +351,38 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client 
= AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.prompts.runs.create( - id=1, - version_id=1, - project=1, - project_subset="All", - ) + + + async def main() -> None: + await client.prompts.runs.create( + id=1, + version_id=1, + project=1, + project_subset="All", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", method="POST", json={ - "organization": organization, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=InferenceRunOrganization, direction="write" + ), "project": project, "model_version": model_version, - "created_by": created_by, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=InferenceRunCreatedBy, direction="write" + ), "project_subset": project_subset, "status": status, "job_id": job_id, @@ -347,7 +396,13 @@ async def create( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(InferenceRun, _response.json()) # type: ignore + return typing.cast( + InferenceRun, + parse_obj_as( + type_=InferenceRun, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py b/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py index 7b21aab6d..f58cf15b8 100644 --- a/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py +++ b/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py @@ -1,42 +1,32 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2 -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class PromptsBatchFailedPredictionsRequestFailedPredictionsItem(pydantic_v1.BaseModel): - task_id: typing.Optional[int] = pydantic_v1.Field(default=None) +class PromptsBatchFailedPredictionsRequestFailedPredictionsItem(UniversalBaseModel): + task_id: typing.Optional[int] = pydantic.Field(default=None) """ Task ID to associate the prediction with """ - error_type: typing.Optional[str] = pydantic_v1.Field(default=None) + error_type: typing.Optional[str] = pydantic.Field(default=None) """ Type of error (e.g. 
"Timeout", "Rate Limit", etc) """ - message: typing.Optional[str] = pydantic_v1.Field(default=None) + message: typing.Optional[str] = pydantic.Field(default=None) """ Error message details """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_response.py b/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_response.py index 03217cddb..210085456 100644 --- a/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_response.py +++ b/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_response.py @@ -1,29 +1,19 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel import typing +from ...core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class PromptsBatchFailedPredictionsResponse(pydantic_v1.BaseModel): +class PromptsBatchFailedPredictionsResponse(UniversalBaseModel): detail: typing.Optional[str] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py b/src/label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py index 661d943aa..d46f78c4d 100644 --- a/src/label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py +++ b/src/label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py @@ -1,62 +1,59 @@ # This file was 
auto-generated by Fern from our API Definition. -import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2 -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class PromptsBatchPredictionsRequestResultsItem(pydantic_v1.BaseModel): - task_id: typing.Optional[int] = pydantic_v1.Field(default=None) +class PromptsBatchPredictionsRequestResultsItem(UniversalBaseModel): + task_id: typing.Optional[int] = pydantic.Field(default=None) """ Task ID to associate the prediction with """ - output: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + output: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ - Prediction output that contains keys from labeling config. Each key must be a valid control tag name from the labeling config. For example, given the output: `json {"sentiment": "positive"} ` it will be converted to the internal LS annotation format: `json { "value": { "choices": ["positive"] }, "from_name": "label", "to_name": "", ... } ` + Prediction output that contains keys from labeling config. Each key must be a valid control tag name from the labeling config. For example, given the output: ```json {"sentiment": "positive"} ``` it will be converted to the internal LS annotation format: ```json { + "value": { + "choices": ["positive"] + }, + "from_name": "label", + "to_name": "", + ... + } ``` """ - prompt_tokens: typing.Optional[int] = pydantic_v1.Field(default=None) + prompt_tokens: typing.Optional[int] = pydantic.Field(default=None) """ Number of tokens in the prompt """ - completion_tokens: typing.Optional[int] = pydantic_v1.Field(default=None) + completion_tokens: typing.Optional[int] = pydantic.Field(default=None) """ Number of tokens in the completion """ - prompt_cost_usd: typing.Optional[float] = pydantic_v1.Field(default=None) + prompt_cost_usd: typing.Optional[float] = pydantic.Field(default=None) """ Cost of the prompt (in USD) """ - completion_cost_usd: typing.Optional[float] = pydantic_v1.Field(default=None) + completion_cost_usd: typing.Optional[float] = pydantic.Field(default=None) """ Cost of the completion (in USD) """ - total_cost_usd: typing.Optional[float] = pydantic_v1.Field(default=None) + total_cost_usd: typing.Optional[float] = pydantic.Field(default=None) """ Total cost of the inference (in USD) """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git 
a/src/label_studio_sdk/prompts/types/prompts_batch_predictions_response.py b/src/label_studio_sdk/prompts/types/prompts_batch_predictions_response.py index 432b25b29..befabdace 100644 --- a/src/label_studio_sdk/prompts/types/prompts_batch_predictions_response.py +++ b/src/label_studio_sdk/prompts/types/prompts_batch_predictions_response.py @@ -1,29 +1,19 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel import typing +from ...core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class PromptsBatchPredictionsResponse(pydantic_v1.BaseModel): +class PromptsBatchPredictionsResponse(UniversalBaseModel): detail: typing.Optional[str] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/prompts/versions/client.py b/src/label_studio_sdk/prompts/versions/client.py index dfea4d441..6c17c2424 100644 --- a/src/label_studio_sdk/prompts/versions/client.py +++ b/src/label_studio_sdk/prompts/versions/client.py @@ -1,20 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
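Editorial note: the model diffs above collapse the hand-written `json()`/`dict()` overrides into a single version switch: under pydantic v2 a model carries `model_config = ConfigDict(extra="allow", frozen=True)`, under v1 it keeps the classic inner `Config`. A reduced sketch of that branching; the `IS_PYDANTIC_V2` flag is re-derived here from the installed version as an assumption, whereas the SDK imports its own flag from `core.pydantic_utilities`.

```python
import typing

import pydantic

# Assumption: the flag is equivalent to a major-version check; the SDK
# imports IS_PYDANTIC_V2 from its core.pydantic_utilities module.
IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2")


class BatchPredictionsResponse(pydantic.BaseModel):
    """Stand-in for the generated response models (illustrative only)."""

    detail: typing.Optional[str] = None

    if IS_PYDANTIC_V2:
        # Pydantic v2 spelling: configuration as a ConfigDict class attribute.
        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(
            extra="allow", frozen=True
        )  # type: ignore
    else:
        # Pydantic v1 spelling: configuration as an inner Config class.
        class Config:
            frozen = True
            smart_union = True
            extra = pydantic.Extra.allow


resp = BatchPredictionsResponse(detail="ok", extra_field="kept")  # extra="allow"
print(resp.detail)
```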
-import datetime as dt import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions -from ...types.inference_run_cost_estimate import InferenceRunCostEstimate from ...types.prompt_version import PromptVersion +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError +from ...types.prompt_version_provider import PromptVersionProvider from ...types.prompt_version_created_by import PromptVersionCreatedBy +import datetime as dt from ...types.prompt_version_organization import PromptVersionOrganization -from ...types.prompt_version_provider import PromptVersionProvider +from ...core.serialization import convert_and_respect_annotation_metadata +from ...types.inference_run_cost_estimate import InferenceRunCostEstimate from ...types.refined_prompt_response import RefinedPromptResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -43,7 +44,7 @@ def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = No Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -53,11 +54,19 @@ def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = No ) """ _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions", method="GET", request_options=request_options + f"api/prompts/{jsonable_encoder(id)}/versions", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[PromptVersion], _response.json()) # type: ignore + return typing.cast( + typing.List[PromptVersion], + parse_obj_as( + type_=typing.List[PromptVersion], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -117,7 +126,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -136,17 +145,27 @@ def create( "prompt": prompt, "provider": provider, "provider_model_id": provider_model_id, - "created_by": created_by, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptVersionCreatedBy, direction="write" + ), "created_at": created_at, "updated_at": updated_at, - "organization": organization, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptVersionOrganization, direction="write" + ), }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PromptVersion, _response.json()) # type: ignore + return typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, 
body=_response.text) @@ -176,7 +195,7 @@ def get( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -193,7 +212,13 @@ def get( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PromptVersion, _response.json()) # type: ignore + return typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -220,7 +245,7 @@ def delete(self, id: int, version_id: int, *, request_options: typing.Optional[R Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -301,7 +326,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -321,17 +346,27 @@ def update( "prompt": prompt, "provider": provider, "provider_model_id": provider_model_id, - "created_by": created_by, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptVersionCreatedBy, direction="write" + ), "created_at": created_at, "updated_at": updated_at, - "organization": organization, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptVersionOrganization, direction="write" + ), }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PromptVersion, _response.json()) # type: ignore + return typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -373,7 +408,7 @@ def cost_estimate( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -388,12 +423,21 @@ def cost_estimate( _response = self._client_wrapper.httpx_client.request( f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/cost-estimate", method="POST", - params={"project_id": project_id, "project_subset": project_subset}, + params={ + "project_id": project_id, + "project_subset": project_subset, + }, request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(InferenceRunCostEstimate, _response.json()) # type: ignore + return typing.cast( + InferenceRunCostEstimate, + parse_obj_as( + type_=InferenceRunCostEstimate, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -431,7 +475,7 @@ def get_refined_prompt( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -445,12 +489,20 @@ def get_refined_prompt( _response = self._client_wrapper.httpx_client.request( f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", method="GET", - params={"refinement_job_id": refinement_job_id}, + params={ + "refinement_job_id": refinement_job_id, + }, 
request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RefinedPromptResponse, _response.json()) # type: ignore + return typing.cast( + RefinedPromptResponse, + parse_obj_as( + type_=RefinedPromptResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -500,7 +552,7 @@ def refine_prompt( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -513,18 +565,29 @@ def refine_prompt( _response = self._client_wrapper.httpx_client.request( f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", method="POST", - params={"async": async_}, + params={ + "async": async_, + }, json={ "teacher_model_provider_connection_id": teacher_model_provider_connection_id, "teacher_model_name": teacher_model_name, "project_id": project_id, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RefinedPromptResponse, _response.json()) # type: ignore + return typing.cast( + RefinedPromptResponse, + parse_obj_as( + type_=RefinedPromptResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -556,21 +619,37 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.prompts.versions.list( - id=1, - ) + + + async def main() -> None: + await client.prompts.versions.list( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions", method="GET", request_options=request_options + f"api/prompts/{jsonable_encoder(id)}/versions", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[PromptVersion], _response.json()) # type: ignore + return typing.cast( + typing.List[PromptVersion], + parse_obj_as( + type_=typing.List[PromptVersion], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -630,14 +709,22 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.prompts.versions.create( - id=1, - ) + + + async def main() -> None: + await client.prompts.versions.create( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/prompts/{jsonable_encoder(id)}/versions", @@ -649,17 +736,27 @@ async def create( "prompt": prompt, "provider": provider, "provider_model_id": provider_model_id, - "created_by": created_by, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptVersionCreatedBy, direction="write" + ), "created_at": created_at, "updated_at": updated_at, - "organization": organization, + "organization": 
convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptVersionOrganization, direction="write" + ), }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PromptVersion, _response.json()) # type: ignore + return typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -689,15 +786,23 @@ async def get( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.prompts.versions.get( - id=1, - version_id=1, - ) + + + async def main() -> None: + await client.prompts.versions.get( + id=1, + version_id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", @@ -706,7 +811,13 @@ async def get( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PromptVersion, _response.json()) # type: ignore + return typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -735,15 +846,23 @@ async def delete( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.prompts.versions.delete( - id=1, - version_id=1, - ) + + + async def main() -> None: + await client.prompts.versions.delete( + id=1, + version_id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", @@ -816,15 +935,23 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.prompts.versions.update( - id=1, - version_id=1, - ) + + + async def main() -> None: + await client.prompts.versions.update( + id=1, + version_id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", @@ -836,17 +963,27 @@ async def update( "prompt": prompt, "provider": provider, "provider_model_id": provider_model_id, - "created_by": created_by, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptVersionCreatedBy, direction="write" + ), "created_at": created_at, "updated_at": updated_at, - "organization": organization, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptVersionOrganization, direction="write" + ), }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PromptVersion, _response.json()) # type: ignore + return typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except 
JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -888,27 +1025,44 @@ async def cost_estimate( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.prompts.versions.cost_estimate( - prompt_id=1, - version_id=1, - project_id=1, - project_subset=1, - ) + + + async def main() -> None: + await client.prompts.versions.cost_estimate( + prompt_id=1, + version_id=1, + project_id=1, + project_subset=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/cost-estimate", method="POST", - params={"project_id": project_id, "project_subset": project_subset}, + params={ + "project_id": project_id, + "project_subset": project_subset, + }, request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(InferenceRunCostEstimate, _response.json()) # type: ignore + return typing.cast( + InferenceRunCostEstimate, + parse_obj_as( + type_=InferenceRunCostEstimate, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -946,26 +1100,42 @@ async def get_refined_prompt( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.prompts.versions.get_refined_prompt( - prompt_id=1, - version_id=1, - refinement_job_id="refinement_job_id", - ) + + + async def main() -> None: + await client.prompts.versions.get_refined_prompt( + prompt_id=1, + version_id=1, + refinement_job_id="refinement_job_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", method="GET", - params={"refinement_job_id": refinement_job_id}, + params={ + "refinement_job_id": refinement_job_id, + }, request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RefinedPromptResponse, _response.json()) # type: ignore + return typing.cast( + RefinedPromptResponse, + parse_obj_as( + type_=RefinedPromptResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -1015,31 +1185,50 @@ async def refine_prompt( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.prompts.versions.refine_prompt( - prompt_id=1, - version_id=1, - ) + + + async def main() -> None: + await client.prompts.versions.refine_prompt( + prompt_id=1, + version_id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", method="POST", - params={"async": async_}, + params={ + "async": async_, + }, json={ "teacher_model_provider_connection_id": teacher_model_provider_connection_id, "teacher_model_name": teacher_model_name, "project_id": project_id, }, + 
headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RefinedPromptResponse, _response.json()) # type: ignore + return typing.cast( + RefinedPromptResponse, + parse_obj_as( + type_=RefinedPromptResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/tasks/client.py b/src/label_studio_sdk/tasks/client.py index d5e0b58e4..09b44df5f 100644 --- a/src/label_studio_sdk/tasks/client.py +++ b/src/label_studio_sdk/tasks/client.py @@ -1,20 +1,21 @@ # This file was auto-generated by Fern from our API Definition. import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pagination import AsyncPager, SyncPager -from ..core.pydantic_utilities import pydantic_v1 +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions -from ..types.base_task import BaseTask -from ..types.data_manager_task_serializer import DataManagerTaskSerializer from ..types.project_import import ProjectImport -from ..types.task import Task +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError from .types.tasks_list_request_fields import TasksListRequestFields +from ..core.pagination import SyncPager +from ..types.task import Task from .types.tasks_list_response import TasksListResponse +from ..types.base_task import BaseTask +from ..types.data_manager_task_serializer import DataManagerTaskSerializer +from ..core.client_wrapper import AsyncClientWrapper +from ..core.pagination import AsyncPager # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -28,6 +29,7 @@ def create_many_status( self, id: int, import_pk: str, *, request_options: typing.Optional[RequestOptions] = None ) -> ProjectImport: """ + Get information about an async project import operation. This can be especially useful to monitor status, as large import jobs can take time. You will need the project ID and the unique ID of the import operation. @@ -53,7 +55,7 @@ def create_many_status( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -70,7 +72,13 @@ def create_many_status( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectImport, _response.json()) # type: ignore + return typing.cast( + ProjectImport, + parse_obj_as( + type_=ProjectImport, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -78,6 +86,7 @@ def create_many_status( def delete_all_tasks(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete all tasks from a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). 
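Editorial note: `create_many_status`, documented a few hunks above, exists precisely so callers can monitor long-running imports, which suggests a simple polling loop. A minimal sketch with illustrative IDs, assuming the returned `ProjectImport` exposes a `status` field that settles on values such as `completed` or `failed` (the states used by the status enums elsewhere in this API definition).

```python
import time

from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

project_id = 1           # illustrative project ID
import_pk = "import_pk"  # illustrative ID of the async import operation

# Poll the async import until it leaves the in-progress state.
while True:
    project_import = client.tasks.create_many_status(
        id=project_id,
        import_pk=import_pk,
    )
    if project_import.status in ("completed", "failed"):
        break
    time.sleep(2)  # large imports can take time, per the docstring above

print(project_import.status)
```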
@@ -96,7 +105,7 @@ def delete_all_tasks(self, id: int, *, request_options: typing.Optional[RequestO Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -106,7 +115,9 @@ def delete_all_tasks(self, id: int, *, request_options: typing.Optional[RequestO ) """ _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/tasks/", method="DELETE", request_options=request_options + f"api/projects/{jsonable_encoder(id)}/tasks/", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -131,6 +142,7 @@ def list( request_options: typing.Optional[RequestOptions] = None, ) -> SyncPager[Task]: """ + Retrieve a list of tasks. You can use the query parameters to filter the list by project and/or view (a tab within the Data Manager). You can also optionally add pagination to make the response easier to parse. @@ -166,10 +178,10 @@ def list( query : typing.Optional[str] Additional query to filter tasks. It must be JSON encoded string of dict containing one of the following parameters: `{"filters": ..., "selectedItems": ..., "ordering": ...}`. Check [Data Manager > Create View > see `data` field](#tag/Data-Manager/operation/api_dm_views_create) for more details about filters, selectedItems and ordering. - - **filters**: dict with `"conjunction"` string (`"or"` or `"and"`) and list of filters in `"items"` array. Each filter is a dictionary with keys: `"filter"`, `"operator"`, `"type"`, `"value"`. [Read more about available filters](https://labelstud.io/sdk/data_manager.html)
    Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` - - **selectedItems**: dictionary with keys: `"all"`, `"included"`, `"excluded"`. If "all" is `false`, `"included"` must be used. If "all" is `true`, `"excluded"` must be used.
    Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` - - **ordering**: list of fields to order by. Currently, ordering is supported by only one parameter.
    - Example: `["completed_at"]` + * **filters**: dict with `"conjunction"` string (`"or"` or `"and"`) and list of filters in `"items"` array. Each filter is a dictionary with keys: `"filter"`, `"operator"`, `"type"`, `"value"`. [Read more about available filters](https://labelstud.io/sdk/data_manager.html)
    Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` + * **selectedItems**: dictionary with keys: `"all"`, `"included"`, `"excluded"`. If "all" is `false`, `"included"` must be used. If "all" is `true`, `"excluded"` must be used.
    Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` + * **ordering**: list of fields to order by. Currently, ordering is supported by only one parameter.
    + Example: `["completed_at"]` request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -181,7 +193,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -193,7 +205,7 @@ def list( for page in response.iter_pages(): yield page """ - page = page or 1 + page = page if page is not None else 1 _response = self._client_wrapper.httpx_client.request( "api/tasks/", method="GET", @@ -212,7 +224,13 @@ def list( ) try: if 200 <= _response.status_code < 300: - _parsed_response = pydantic_v1.parse_obj_as(TasksListResponse, _response.json()) # type: ignore + _parsed_response = typing.cast( + TasksListResponse, + parse_obj_as( + type_=TasksListResponse, # type: ignore + object_=_response.json(), + ), + ) _has_next = True _get_next = lambda: self.list( page=page + 1, @@ -236,11 +254,12 @@ def list( def create( self, *, - data: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + data: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, project: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> BaseTask: """ + Create a new labeling task in Label Studio. The data you provide depends on your labeling config and data type. @@ -249,7 +268,7 @@ def create( Parameters ---------- - data : typing.Optional[typing.Dict[str, typing.Any]] + data : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] Task data dictionary with arbitrary keys and values project : typing.Optional[int] @@ -265,7 +284,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -278,13 +297,25 @@ def create( _response = self._client_wrapper.httpx_client.request( "api/tasks/", method="POST", - json={"data": data, "project": project}, + json={ + "data": data, + "project": project, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseTask, _response.json()) # type: ignore + return typing.cast( + BaseTask, + parse_obj_as( + type_=BaseTask, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -292,6 +323,7 @@ def create( def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> DataManagerTaskSerializer: """ + Get task data, metadata, annotations and other attributes for a specific labeling task by task ID. The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). 
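Editorial note: tying the `query` parameter documented above to the pager shown in the `list` example — the filter dict must be JSON-encoded before it is passed, and the returned `SyncPager` is iterated here directly (iterating `iter_pages()` page by page, as in the docstring, also works). Direct item iteration and the `task.id` attribute are assumed pager/model behavior; the project ID is illustrative.

```python
import json

from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# The query must be a JSON-encoded string; this reuses the Datetime filter
# example from the docstring above.
query = json.dumps(
    {
        "filters": {
            "conjunction": "or",
            "items": [
                {
                    "filter": "filter:tasks:completed_at",
                    "operator": "greater",
                    "type": "Datetime",
                    "value": "2021-01-01T00:00:00.000Z",
                }
            ],
        }
    }
)

response = client.tasks.list(project=1, query=query)
for task in response:  # the pager fetches subsequent pages transparently
    print(task.id)
```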
@@ -310,7 +342,7 @@ def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -320,11 +352,19 @@ def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", method="GET", request_options=request_options + f"api/tasks/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DataManagerTaskSerializer, _response.json()) # type: ignore + return typing.cast( + DataManagerTaskSerializer, + parse_obj_as( + type_=DataManagerTaskSerializer, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -332,6 +372,7 @@ def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a task in Label Studio. You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). @@ -352,7 +393,7 @@ def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -362,7 +403,9 @@ def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options + f"api/tasks/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -376,11 +419,12 @@ def update( self, id: str, *, - data: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + data: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, project: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> BaseTask: """ + Update the attributes of an existing labeling task. You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). 
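Editorial note: the `create` and `get` endpoints above pair naturally — the `data` dict must use the keys your labeling config reads its input from, and `get` returns the task with metadata and annotations attached. A brief sketch assuming a config whose input variable is `$text`; the project ID and the `id`/`data` attribute access on the returned models are illustrative assumptions.

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Keys in `data` are arbitrary but must match the labeling config,
# e.g. a config that reads its input from $text.
task = client.tasks.create(
    data={"text": "The food was great, the service less so."},
    project=1,  # illustrative project ID
)

# Fetch it back with data, metadata, and any annotations attached.
fetched = client.tasks.get(id=str(task.id))
print(fetched.data)
```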
@@ -390,7 +434,7 @@ def update( id : str Task ID - data : typing.Optional[typing.Dict[str, typing.Any]] + data : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] Task data dictionary with arbitrary keys and values project : typing.Optional[int] @@ -406,7 +450,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -420,13 +464,25 @@ def update( _response = self._client_wrapper.httpx_client.request( f"api/tasks/{jsonable_encoder(id)}/", method="PATCH", - json={"data": data, "project": project}, + json={ + "data": data, + "project": project, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseTask, _response.json()) # type: ignore + return typing.cast( + BaseTask, + parse_obj_as( + type_=BaseTask, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -441,6 +497,7 @@ async def create_many_status( self, id: int, import_pk: str, *, request_options: typing.Optional[RequestOptions] = None ) -> ProjectImport: """ + Get information about an async project import operation. This can be especially useful to monitor status, as large import jobs can take time. You will need the project ID and the unique ID of the import operation. @@ -466,15 +523,23 @@ async def create_many_status( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.tasks.create_many_status( - id=1, - import_pk="import_pk", - ) + + + async def main() -> None: + await client.tasks.create_many_status( + id=1, + import_pk="import_pk", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/imports/{jsonable_encoder(import_pk)}/", @@ -483,7 +548,13 @@ async def create_many_status( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectImport, _response.json()) # type: ignore + return typing.cast( + ProjectImport, + parse_obj_as( + type_=ProjectImport, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -491,6 +562,7 @@ async def create_many_status( async def delete_all_tasks(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete all tasks from a specific project. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). 
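# --- Illustrative usage (not part of the patch) --------------------------------
# The async examples above are now wrapped in `async def main()` driven by
# asyncio.run(), since `await` is invalid at module level. The same pattern,
# hedged and runnable; id=1 and "import_pk" are the docstrings' placeholders.
import asyncio

from label_studio_sdk import AsyncLabelStudio

client = AsyncLabelStudio(api_key="YOUR_API_KEY")


async def main() -> None:
    status = await client.tasks.create_many_status(id=1, import_pk="import_pk")
    print(status)  # a ProjectImport record describing the async import job
    await client.tasks.delete_all_tasks(id=1)


asyncio.run(main())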
@@ -509,17 +581,27 @@ async def delete_all_tasks(self, id: int, *, request_options: typing.Optional[Re Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.tasks.delete_all_tasks( - id=1, - ) + + + async def main() -> None: + await client.tasks.delete_all_tasks( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/tasks/", method="DELETE", request_options=request_options + f"api/projects/{jsonable_encoder(id)}/tasks/", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -544,6 +626,7 @@ async def list( request_options: typing.Optional[RequestOptions] = None, ) -> AsyncPager[Task]: """ + Retrieve a list of tasks. You can use the query parameters to filter the list by project and/or view (a tab within the Data Manager). You can also optionally add pagination to make the response easier to parse. @@ -579,10 +662,10 @@ async def list( query : typing.Optional[str] Additional query to filter tasks. It must be JSON encoded string of dict containing one of the following parameters: `{"filters": ..., "selectedItems": ..., "ordering": ...}`. Check [Data Manager > Create View > see `data` field](#tag/Data-Manager/operation/api_dm_views_create) for more details about filters, selectedItems and ordering. - - **filters**: dict with `"conjunction"` string (`"or"` or `"and"`) and list of filters in `"items"` array. Each filter is a dictionary with keys: `"filter"`, `"operator"`, `"type"`, `"value"`. [Read more about available filters](https://labelstud.io/sdk/data_manager.html)
    Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` - - **selectedItems**: dictionary with keys: `"all"`, `"included"`, `"excluded"`. If "all" is `false`, `"included"` must be used. If "all" is `true`, `"excluded"` must be used.
    Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` - - **ordering**: list of fields to order by. Currently, ordering is supported by only one parameter.
    - Example: `["completed_at"]` + * **filters**: dict with `"conjunction"` string (`"or"` or `"and"`) and list of filters in `"items"` array. Each filter is a dictionary with keys: `"filter"`, `"operator"`, `"type"`, `"value"`. [Read more about available filters](https://labelstud.io/sdk/data_manager.html)
    Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` + * **selectedItems**: dictionary with keys: `"all"`, `"included"`, `"excluded"`. If "all" is `false`, `"included"` must be used. If "all" is `true`, `"excluded"` must be used.
    Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` + * **ordering**: list of fields to order by. Currently, ordering is supported by only one parameter.
    + Example: `["completed_at"]` request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -594,19 +677,27 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - response = await client.tasks.list() - async for item in response: - yield item - # alternatively, you can paginate page-by-page - async for page in response.iter_pages(): - yield page + + + async def main() -> None: + response = await client.tasks.list() + async for item in response: + yield item + # alternatively, you can paginate page-by-page + async for page in response.iter_pages(): + yield page + + + asyncio.run(main()) """ - page = page or 1 + page = page if page is not None else 1 _response = await self._client_wrapper.httpx_client.request( "api/tasks/", method="GET", @@ -625,7 +716,13 @@ async def list( ) try: if 200 <= _response.status_code < 300: - _parsed_response = pydantic_v1.parse_obj_as(TasksListResponse, _response.json()) # type: ignore + _parsed_response = typing.cast( + TasksListResponse, + parse_obj_as( + type_=TasksListResponse, # type: ignore + object_=_response.json(), + ), + ) _has_next = True _get_next = lambda: self.list( page=page + 1, @@ -649,11 +746,12 @@ async def list( async def create( self, *, - data: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + data: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, project: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> BaseTask: """ + Create a new labeling task in Label Studio. The data you provide depends on your labeling config and data type. @@ -662,7 +760,7 @@ async def create( Parameters ---------- - data : typing.Optional[typing.Dict[str, typing.Any]] + data : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] Task data dictionary with arbitrary keys and values project : typing.Optional[int] @@ -678,26 +776,49 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.tasks.create( - data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, - project=1, - ) + + + async def main() -> None: + await client.tasks.create( + data={ + "image": "https://example.com/image.jpg", + "text": "Hello, world!", + }, + project=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/tasks/", method="POST", - json={"data": data, "project": project}, + json={ + "data": data, + "project": project, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseTask, _response.json()) # type: ignore + return typing.cast( + BaseTask, + parse_obj_as( + type_=BaseTask, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -707,6 +828,7 @@ async def get( self, id: str, *, request_options: typing.Optional[RequestOptions] = None ) -> DataManagerTaskSerializer: """ + Get task data, metadata, annotations and other attributes for a specific labeling task by task ID. 
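# --- Illustrative usage (not part of the patch) --------------------------------
# Two notes on the async `list` hunk above. First, `query` must be a
# JSON-encoded string combining "filters", "selectedItems" and "ordering",
# exactly as the bullets describe. Second, the generated example keeps `yield`
# inside main(), which turns it into an async generator that asyncio.run()
# cannot execute; this hedged variant prints instead. IDs and the API key are
# placeholders.
import asyncio
import json

from label_studio_sdk import AsyncLabelStudio

client = AsyncLabelStudio(api_key="YOUR_API_KEY")

query = json.dumps(
    {
        "filters": {
            "conjunction": "or",
            "items": [
                {
                    "filter": "filter:tasks:completed_at",
                    "operator": "greater",
                    "type": "Datetime",
                    "value": "2021-01-01T00:00:00.000Z",
                }
            ],
        },
        "selectedItems": {"all": False, "included": [1, 2, 3]},
        "ordering": ["completed_at"],
    }
)


async def main() -> None:
    response = await client.tasks.list(query=query)
    async for task in response:
        print(task.id)  # print, not yield: main() must stay a plain coroutine


asyncio.run(main())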
The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). @@ -725,21 +847,37 @@ async def get( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.tasks.get( - id="id", - ) + + + async def main() -> None: + await client.tasks.get( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", method="GET", request_options=request_options + f"api/tasks/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DataManagerTaskSerializer, _response.json()) # type: ignore + return typing.cast( + DataManagerTaskSerializer, + parse_obj_as( + type_=DataManagerTaskSerializer, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -747,6 +885,7 @@ async def get( async def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a task in Label Studio. You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). @@ -767,17 +906,27 @@ async def delete(self, id: str, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.tasks.delete( - id="id", - ) + + + async def main() -> None: + await client.tasks.delete( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options + f"api/tasks/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -791,11 +940,12 @@ async def update( self, id: str, *, - data: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + data: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, project: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> BaseTask: """ + Update the attributes of an existing labeling task. You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). 
@@ -805,7 +955,7 @@ async def update( id : str Task ID - data : typing.Optional[typing.Dict[str, typing.Any]] + data : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] Task data dictionary with arbitrary keys and values project : typing.Optional[int] @@ -821,27 +971,50 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.tasks.update( - id="id", - data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, - project=1, - ) + + + async def main() -> None: + await client.tasks.update( + id="id", + data={ + "image": "https://example.com/image.jpg", + "text": "Hello, world!", + }, + project=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/tasks/{jsonable_encoder(id)}/", method="PATCH", - json={"data": data, "project": project}, + json={ + "data": data, + "project": project, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseTask, _response.json()) # type: ignore + return typing.cast( + BaseTask, + parse_obj_as( + type_=BaseTask, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/tasks/types/tasks_list_response.py b/src/label_studio_sdk/tasks/types/tasks_list_response.py index a791548bf..c8d9e0240 100644 --- a/src/label_studio_sdk/tasks/types/tasks_list_response.py +++ b/src/label_studio_sdk/tasks/types/tasks_list_response.py @@ -1,48 +1,38 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel import typing - -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from ...types.task import Task +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2 -class TasksListResponse(pydantic_v1.BaseModel): - tasks: typing.Optional[typing.List[Task]] = pydantic_v1.Field(default=None) +class TasksListResponse(UniversalBaseModel): + tasks: typing.Optional[typing.List[Task]] = pydantic.Field(default=None) """ List of tasks, each task contains predictions and annotations if `fields` query parameter is set to `all` """ - total: typing.Optional[int] = pydantic_v1.Field(default=None) + total: typing.Optional[int] = pydantic.Field(default=None) """ Total number of tasks """ - total_annotations: typing.Optional[int] = pydantic_v1.Field(default=None) + total_annotations: typing.Optional[int] = pydantic.Field(default=None) """ Total number of annotations """ - total_predictions: typing.Optional[int] = pydantic_v1.Field(default=None) + total_predictions: typing.Optional[int] = pydantic.Field(default=None) """ Total number of predictions """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/annotation.py b/src/label_studio_sdk/types/annotation.py index fb8edfe20..273f60426 100644 --- a/src/label_studio_sdk/types/annotation.py +++ b/src/label_studio_sdk/types/annotation.py @@ -1,116 +1,107 @@ # This file was auto-generated by Fern from our API Definition. 
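# --- Illustrative sketch (not part of the patch) --------------------------------
# TasksListResponse above swaps the hand-written json()/dict() overrides for
# UniversalBaseModel plus a version-gated config. What callers observe, hedged:
# all fields stay optional, unknown keys are preserved (extra="allow"), and
# instances stay frozen under pydantic v1 and v2 alike.
from label_studio_sdk.tasks.types.tasks_list_response import TasksListResponse

resp = TasksListResponse(total=1, tasks=None, server_extra="kept")  # extra key allowed
print(resp.total)  # plain attribute access, unchanged
try:
    resp.total = 2  # frozen=True rejects mutation
except Exception as exc:  # TypeError on v1, ValidationError on v2
    print(type(exc).__name__)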
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import datetime as dt from .annotation_last_action import AnnotationLastAction +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class Annotation(pydantic_v1.BaseModel): +class Annotation(UniversalBaseModel): id: typing.Optional[int] = None - result: typing.Optional[typing.List[typing.Dict[str, typing.Any]]] = pydantic_v1.Field(default=None) + result: typing.Optional[typing.List[typing.Dict[str, typing.Optional[typing.Any]]]] = pydantic.Field(default=None) """ List of annotation results for the task """ - created_username: typing.Optional[str] = pydantic_v1.Field(default=None) + created_username: typing.Optional[str] = pydantic.Field(default=None) """ Username string """ - created_ago: typing.Optional[str] = pydantic_v1.Field(default=None) + created_ago: typing.Optional[str] = pydantic.Field(default=None) """ Time delta from creation time """ completed_by: typing.Optional[int] = None unique_id: typing.Optional[str] = None - was_cancelled: typing.Optional[bool] = pydantic_v1.Field(default=None) + was_cancelled: typing.Optional[bool] = pydantic.Field(default=None) """ User skipped the task """ - ground_truth: typing.Optional[bool] = pydantic_v1.Field(default=None) + ground_truth: typing.Optional[bool] = pydantic.Field(default=None) """ This annotation is a Ground Truth (ground_truth) """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time """ - updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + updated_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last updated time """ - draft_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + draft_created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Draft creation time """ - lead_time: typing.Optional[float] = pydantic_v1.Field(default=None) + lead_time: typing.Optional[float] = pydantic.Field(default=None) """ How much time it took to annotate the task """ - import_id: typing.Optional[int] = pydantic_v1.Field(default=None) + import_id: typing.Optional[int] = pydantic.Field(default=None) """ Original annotation ID that was at the import step or NULL if this annotation wasn't imported """ - last_action: typing.Optional[AnnotationLastAction] = pydantic_v1.Field(default=None) + last_action: typing.Optional[AnnotationLastAction] = pydantic.Field(default=None) """ Action which was performed in the last annotation history item """ - task: typing.Optional[int] = pydantic_v1.Field(default=None) + task: typing.Optional[int] = pydantic.Field(default=None) """ Corresponding task for this annotation """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID for this annotation """ - updated_by: typing.Optional[int] = pydantic_v1.Field(default=None) + updated_by: typing.Optional[int] = pydantic.Field(default=None) """ Last user who updated this annotation """ - parent_prediction: typing.Optional[int] = pydantic_v1.Field(default=None) + parent_prediction: typing.Optional[int] = pydantic.Field(default=None) """ Points to the prediction from which this annotation was created """ - parent_annotation: typing.Optional[int] = 
pydantic_v1.Field(default=None) + parent_annotation: typing.Optional[int] = pydantic.Field(default=None) """ Points to the parent annotation from which this annotation was created """ - last_created_by: typing.Optional[int] = pydantic_v1.Field(default=None) + last_created_by: typing.Optional[int] = pydantic.Field(default=None) """ User who created the last annotation history item """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/annotation_filter_options.py b/src/label_studio_sdk/types/annotation_filter_options.py index 39b27aabc..3f00e64ba 100644 --- a/src/label_studio_sdk/types/annotation_filter_options.py +++ b/src/label_studio_sdk/types/annotation_filter_options.py @@ -1,42 +1,32 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class AnnotationFilterOptions(pydantic_v1.BaseModel): - usual: typing.Optional[bool] = pydantic_v1.Field(default=None) +class AnnotationFilterOptions(UniversalBaseModel): + usual: typing.Optional[bool] = pydantic.Field(default=None) """ Include not skipped and not ground truth annotations """ - ground_truth: typing.Optional[bool] = pydantic_v1.Field(default=None) + ground_truth: typing.Optional[bool] = pydantic.Field(default=None) """ Include ground truth annotations """ - skipped: typing.Optional[bool] = pydantic_v1.Field(default=None) + skipped: typing.Optional[bool] = pydantic.Field(default=None) """ Include skipped annotations """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - 
json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/annotations_dm_field.py b/src/label_studio_sdk/types/annotations_dm_field.py index 206bf1427..114de210d 100644 --- a/src/label_studio_sdk/types/annotations_dm_field.py +++ b/src/label_studio_sdk/types/annotations_dm_field.py @@ -1,120 +1,111 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import datetime as dt from .annotations_dm_field_last_action import AnnotationsDmFieldLastAction +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class AnnotationsDmField(pydantic_v1.BaseModel): +class AnnotationsDmField(UniversalBaseModel): id: typing.Optional[int] = None - result: typing.Optional[typing.List[typing.Dict[str, typing.Any]]] = pydantic_v1.Field(default=None) + result: typing.Optional[typing.List[typing.Dict[str, typing.Optional[typing.Any]]]] = pydantic.Field(default=None) """ List of annotation results for the task """ - created_username: typing.Optional[str] = pydantic_v1.Field(default=None) + created_username: typing.Optional[str] = pydantic.Field(default=None) """ Username string """ - created_ago: typing.Optional[str] = pydantic_v1.Field(default=None) + created_ago: typing.Optional[str] = pydantic.Field(default=None) """ Time delta from creation time """ - completed_by: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + completed_by: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ User details who completed this annotation. 
""" unique_id: typing.Optional[str] = None - was_cancelled: typing.Optional[bool] = pydantic_v1.Field(default=None) + was_cancelled: typing.Optional[bool] = pydantic.Field(default=None) """ User skipped the task """ - ground_truth: typing.Optional[bool] = pydantic_v1.Field(default=None) + ground_truth: typing.Optional[bool] = pydantic.Field(default=None) """ This annotation is a Ground Truth (ground_truth) """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time """ - updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + updated_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last updated time """ - draft_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + draft_created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Draft creation time """ - lead_time: typing.Optional[float] = pydantic_v1.Field(default=None) + lead_time: typing.Optional[float] = pydantic.Field(default=None) """ How much time it took to annotate the task """ - import_id: typing.Optional[int] = pydantic_v1.Field(default=None) + import_id: typing.Optional[int] = pydantic.Field(default=None) """ Original annotation ID that was at the import step or NULL if this annotation wasn't imported """ - last_action: typing.Optional[AnnotationsDmFieldLastAction] = pydantic_v1.Field(default=None) + last_action: typing.Optional[AnnotationsDmFieldLastAction] = pydantic.Field(default=None) """ Action which was performed in the last annotation history item """ - task: typing.Optional[int] = pydantic_v1.Field(default=None) + task: typing.Optional[int] = pydantic.Field(default=None) """ Corresponding task for this annotation """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID for this annotation """ - updated_by: typing.Optional[int] = pydantic_v1.Field(default=None) + updated_by: typing.Optional[int] = pydantic.Field(default=None) """ Last user who updated this annotation """ - parent_prediction: typing.Optional[int] = pydantic_v1.Field(default=None) + parent_prediction: typing.Optional[int] = pydantic.Field(default=None) """ Points to the prediction from which this annotation was created """ - parent_annotation: typing.Optional[int] = pydantic_v1.Field(default=None) + parent_annotation: typing.Optional[int] = pydantic.Field(default=None) """ Points to the parent annotation from which this annotation was created """ - last_created_by: typing.Optional[int] = pydantic_v1.Field(default=None) + last_created_by: typing.Optional[int] = pydantic.Field(default=None) """ User who created the last annotation history item """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # 
Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/azure_blob_export_storage.py b/src/label_studio_sdk/types/azure_blob_export_storage.py index 2215bcbf1..83394ea4d 100644 --- a/src/label_studio_sdk/types/azure_blob_export_storage.py +++ b/src/label_studio_sdk/types/azure_blob_export_storage.py @@ -1,112 +1,103 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import datetime as dt from .azure_blob_export_storage_status import AzureBlobExportStorageStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class AzureBlobExportStorage(pydantic_v1.BaseModel): +class AzureBlobExportStorage(UniversalBaseModel): id: typing.Optional[int] = None type: typing.Optional[str] = None synchronizable: typing.Optional[bool] = None - container: typing.Optional[str] = pydantic_v1.Field(default=None) + container: typing.Optional[str] = pydantic.Field(default=None) """ Azure blob container """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ Azure blob prefix name """ - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage regex for filtering objects """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs """ - account_name: typing.Optional[str] = pydantic_v1.Field(default=None) + account_name: typing.Optional[str] = pydantic.Field(default=None) """ Azure Blob account name """ - account_key: typing.Optional[str] = pydantic_v1.Field(default=None) + account_key: typing.Optional[str] = pydantic.Field(default=None) """ Azure Blob account key """ - last_sync: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + last_sync: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last sync finished time """ - last_sync_count: typing.Optional[int] = pydantic_v1.Field(default=None) + last_sync_count: typing.Optional[int] = pydantic.Field(default=None) """ Count of tasks synced last time """ - last_sync_job: typing.Optional[str] = pydantic_v1.Field(default=None) + last_sync_job: typing.Optional[str] = pydantic.Field(default=None) """ Last sync job ID """ status: typing.Optional[AzureBlobExportStorageStatus] = None - traceback: typing.Optional[str] = pydantic_v1.Field(default=None) + traceback: typing.Optional[str] = pydantic.Field(default=None) """ Traceback report for the last failed sync """ - meta: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + meta: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Meta and debug information about storage processes """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] 
= pydantic.Field(default=None) """ Cloud storage description """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time """ - can_delete_objects: typing.Optional[bool] = pydantic_v1.Field(default=None) + can_delete_objects: typing.Optional[bool] = pydantic.Field(default=None) """ Deletion from storage enabled """ - project: int = pydantic_v1.Field() + project: int = pydantic.Field() """ A unique integer value identifying this project. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/azure_blob_import_storage.py b/src/label_studio_sdk/types/azure_blob_import_storage.py index 8c46f9998..3de9b873b 100644 --- a/src/label_studio_sdk/types/azure_blob_import_storage.py +++ b/src/label_studio_sdk/types/azure_blob_import_storage.py @@ -1,113 +1,104 @@ # This file was auto-generated by Fern from our API Definition. 
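# --- Illustrative sketch (not part of the patch) --------------------------------
# In the rewritten AzureBlobExportStorage above, `project` is the only field
# declared with a bare pydantic.Field() and therefore the only required one;
# everything else defaults to None. Hedged, with placeholder values.
from label_studio_sdk.types.azure_blob_export_storage import AzureBlobExportStorage

storage = AzureBlobExportStorage(project=1, container="my-container", prefix="exports/")
print(storage.project, storage.container, storage.last_sync)  # last_sync -> None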
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import datetime as dt from .azure_blob_import_storage_status import AzureBlobImportStorageStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class AzureBlobImportStorage(pydantic_v1.BaseModel): +class AzureBlobImportStorage(UniversalBaseModel): id: typing.Optional[int] = None type: typing.Optional[str] = None synchronizable: typing.Optional[bool] = None presign: typing.Optional[bool] = None - container: typing.Optional[str] = pydantic_v1.Field(default=None) + container: typing.Optional[str] = pydantic.Field(default=None) """ Azure blob container """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ Azure blob prefix name """ - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage regex for filtering objects """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs """ - account_name: typing.Optional[str] = pydantic_v1.Field(default=None) + account_name: typing.Optional[str] = pydantic.Field(default=None) """ Azure Blob account name """ - account_key: typing.Optional[str] = pydantic_v1.Field(default=None) + account_key: typing.Optional[str] = pydantic.Field(default=None) """ Azure Blob account key """ - last_sync: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + last_sync: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last sync finished time """ - last_sync_count: typing.Optional[int] = pydantic_v1.Field(default=None) + last_sync_count: typing.Optional[int] = pydantic.Field(default=None) """ Count of tasks synced last time """ - last_sync_job: typing.Optional[str] = pydantic_v1.Field(default=None) + last_sync_job: typing.Optional[str] = pydantic.Field(default=None) """ Last sync job ID """ status: typing.Optional[AzureBlobImportStorageStatus] = None - traceback: typing.Optional[str] = pydantic_v1.Field(default=None) + traceback: typing.Optional[str] = pydantic.Field(default=None) """ Traceback report for the last failed sync """ - meta: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + meta: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Meta and debug information about storage processes """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage description """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time """ - presign_ttl: typing.Optional[int] = pydantic_v1.Field(default=None) + presign_ttl: typing.Optional[int] = pydantic.Field(default=None) """ Presigned URLs TTL (in minutes) """ - project: int = pydantic_v1.Field() + project: int = pydantic.Field() """ A unique integer value identifying this project. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/base_task.py b/src/label_studio_sdk/types/base_task.py index d49c4cac6..3e8251862 100644 --- a/src/label_studio_sdk/types/base_task.py +++ b/src/label_studio_sdk/types/base_task.py @@ -1,115 +1,106 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .base_task_file_upload import BaseTaskFileUpload +import pydantic +import datetime as dt from .base_task_updated_by import BaseTaskUpdatedBy +from .base_task_file_upload import BaseTaskFileUpload +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class BaseTask(pydantic_v1.BaseModel): +class BaseTask(UniversalBaseModel): id: typing.Optional[int] = None - data: typing.Dict[str, typing.Any] = pydantic_v1.Field() + data: typing.Dict[str, typing.Optional[typing.Any]] = pydantic.Field() """ User imported or uploaded data for a task. Data is formatted according to the project label config. You can find examples of data for your project on the Import page in the Label Studio Data Manager UI. """ - meta: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + meta: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Meta is user imported (uploaded) data and can be useful as input for an ML Backend for embeddings, advanced vectors, and other info. It is passed to ML during training/predicting steps. 
""" - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Time a task was created """ - updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + updated_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last time a task was updated """ - is_labeled: typing.Optional[bool] = pydantic_v1.Field(default=None) + is_labeled: typing.Optional[bool] = pydantic.Field(default=None) """ True if the number of annotations for this task is greater than or equal to the number of maximum_completions for the project """ - overlap: typing.Optional[int] = pydantic_v1.Field(default=None) + overlap: typing.Optional[int] = pydantic.Field(default=None) """ Number of distinct annotators that processed the current task """ - inner_id: typing.Optional[int] = pydantic_v1.Field(default=None) + inner_id: typing.Optional[int] = pydantic.Field(default=None) """ Internal task ID in the project, starts with 1 """ - total_annotations: typing.Optional[int] = pydantic_v1.Field(default=None) + total_annotations: typing.Optional[int] = pydantic.Field(default=None) """ Number of total annotations for the current task except cancelled annotations """ - cancelled_annotations: typing.Optional[int] = pydantic_v1.Field(default=None) + cancelled_annotations: typing.Optional[int] = pydantic.Field(default=None) """ Number of total cancelled annotations for the current task """ - total_predictions: typing.Optional[int] = pydantic_v1.Field(default=None) + total_predictions: typing.Optional[int] = pydantic.Field(default=None) """ Number of total predictions for the current task """ - comment_count: typing.Optional[int] = pydantic_v1.Field(default=None) + comment_count: typing.Optional[int] = pydantic.Field(default=None) """ Number of comments in the task including all annotations """ - unresolved_comment_count: typing.Optional[int] = pydantic_v1.Field(default=None) + unresolved_comment_count: typing.Optional[int] = pydantic.Field(default=None) """ Number of unresolved comments in the task including all annotations """ - last_comment_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + last_comment_updated_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ When the last comment was updated """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID for this task """ - updated_by: typing.Optional[BaseTaskUpdatedBy] = pydantic_v1.Field(default=None) + updated_by: typing.Optional[BaseTaskUpdatedBy] = pydantic.Field(default=None) """ Last annotator or reviewer who updated this task """ - file_upload: typing.Optional[BaseTaskFileUpload] = pydantic_v1.Field(default=None) + file_upload: typing.Optional[BaseTaskFileUpload] = pydantic.Field(default=None) """ Uploaded file used as data source for this task """ - comment_authors: typing.Optional[typing.List[int]] = pydantic_v1.Field(default=None) + comment_authors: typing.Optional[typing.List[int]] = pydantic.Field(default=None) """ Users who wrote comments """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - 
kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/base_task_updated_by.py b/src/label_studio_sdk/types/base_task_updated_by.py index de48b058c..4a9de7471 100644 --- a/src/label_studio_sdk/types/base_task_updated_by.py +++ b/src/label_studio_sdk/types/base_task_updated_by.py @@ -2,4 +2,6 @@ import typing -BaseTaskUpdatedBy = typing.Union[typing.Optional[int], typing.Optional[typing.List[typing.Dict[str, typing.Any]]]] +BaseTaskUpdatedBy = typing.Union[ + typing.Optional[int], typing.Optional[typing.List[typing.Dict[str, typing.Optional[typing.Any]]]] +] diff --git a/src/label_studio_sdk/types/base_user.py b/src/label_studio_sdk/types/base_user.py index d6bea91c4..6edb03619 100644 --- a/src/label_studio_sdk/types/base_user.py +++ b/src/label_studio_sdk/types/base_user.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import datetime as dt +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class BaseUser(pydantic_v1.BaseModel): +class BaseUser(UniversalBaseModel): id: typing.Optional[int] = None first_name: typing.Optional[str] = None last_name: typing.Optional[str] = None @@ -18,27 +18,18 @@ class BaseUser(pydantic_v1.BaseModel): initials: typing.Optional[str] = None phone: typing.Optional[str] = None active_organization: typing.Optional[int] = None - allow_newsletters: typing.Optional[bool] = pydantic_v1.Field(default=None) + allow_newsletters: typing.Optional[bool] = pydantic.Field(default=None) """ Allow sending newsletters to user """ date_joined: typing.Optional[dt.datetime] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/comment.py b/src/label_studio_sdk/types/comment.py index b1bb6d5a9..5f48c133b 100644 --- 
a/src/label_studio_sdk/types/comment.py +++ b/src/label_studio_sdk/types/comment.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. +from ..core.pydantic_utilities import UniversalBaseModel +from .comment_created_by import CommentCreatedBy import datetime as dt import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .comment_created_by import CommentCreatedBy +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic -class Comment(pydantic_v1.BaseModel): +class Comment(UniversalBaseModel): id: int text: str project: int @@ -20,20 +20,11 @@ class Comment(pydantic_v1.BaseModel): is_resolved: typing.Optional[bool] = None resolved_at: typing.Optional[dt.datetime] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/comment_created_by.py b/src/label_studio_sdk/types/comment_created_by.py index ef1433043..e39b6e93f 100644 --- a/src/label_studio_sdk/types/comment_created_by.py +++ b/src/label_studio_sdk/types/comment_created_by.py @@ -2,4 +2,4 @@ import typing -CommentCreatedBy = typing.Union[int, typing.Dict[str, typing.Any]] +CommentCreatedBy = typing.Union[int, typing.Dict[str, typing.Optional[typing.Any]]] diff --git a/src/label_studio_sdk/types/converted_format.py b/src/label_studio_sdk/types/converted_format.py index d6cbb49ce..bc0bf56aa 100644 --- a/src/label_studio_sdk/types/converted_format.py +++ b/src/label_studio_sdk/types/converted_format.py @@ -1,36 +1,26 @@ # This file was auto-generated by Fern from our API Definition. 
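# --- Illustrative sketch (not part of the patch) --------------------------------
# comment_created_by.py above widens the dict side of the union to
# Dict[str, Optional[Any]]. The alias still reads "either a user ID or a user
# dict"; placeholder values below.
from label_studio_sdk.types.comment_created_by import CommentCreatedBy

by_id: CommentCreatedBy = 42
by_dict: CommentCreatedBy = {"id": 42, "email": None}  # None values now type-check
print(by_id, by_dict)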
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .converted_format_status import ConvertedFormatStatus +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class ConvertedFormat(pydantic_v1.BaseModel): +class ConvertedFormat(UniversalBaseModel): id: typing.Optional[int] = None status: typing.Optional[ConvertedFormatStatus] = None export_type: str - traceback: typing.Optional[str] = pydantic_v1.Field(default=None) + traceback: typing.Optional[str] = pydantic.Field(default=None) """ Traceback report in case of errors """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/data_manager_task_serializer.py b/src/label_studio_sdk/types/data_manager_task_serializer.py index cdb3fb62c..ca9f9c7ea 100644 --- a/src/label_studio_sdk/types/data_manager_task_serializer.py +++ b/src/label_studio_sdk/types/data_manager_task_serializer.py @@ -1,32 +1,30 @@ # This file was auto-generated by Fern from our API Definition. 
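# --- Illustrative sketch (not part of the patch) --------------------------------
# ConvertedFormat above keeps export_type as its only required field; status
# and traceback stay optional. "JSON" is a placeholder export type.
from label_studio_sdk.types.converted_format import ConvertedFormat

fmt = ConvertedFormat(export_type="JSON")
print(fmt.status, fmt.traceback)  # both default to None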
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .data_manager_task_serializer_predictions_item import DataManagerTaskSerializerPredictionsItem +import pydantic from .annotations_dm_field import AnnotationsDmField -from .data_manager_task_serializer_annotators_item import DataManagerTaskSerializerAnnotatorsItem from .data_manager_task_serializer_drafts_item import DataManagerTaskSerializerDraftsItem -from .data_manager_task_serializer_predictions_item import DataManagerTaskSerializerPredictionsItem +from .data_manager_task_serializer_annotators_item import DataManagerTaskSerializerAnnotatorsItem +import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class DataManagerTaskSerializer(pydantic_v1.BaseModel): +class DataManagerTaskSerializer(UniversalBaseModel): id: typing.Optional[int] = None - predictions: typing.Optional[typing.List[DataManagerTaskSerializerPredictionsItem]] = pydantic_v1.Field( - default=None - ) + predictions: typing.Optional[typing.List[DataManagerTaskSerializerPredictionsItem]] = pydantic.Field(default=None) """ Predictions for this task """ annotations: typing.Optional[typing.List[AnnotationsDmField]] = None - drafts: typing.Optional[typing.List[DataManagerTaskSerializerDraftsItem]] = pydantic_v1.Field(default=None) + drafts: typing.Optional[typing.List[DataManagerTaskSerializerDraftsItem]] = pydantic.Field(default=None) """ Drafts for this task """ - annotators: typing.Optional[typing.List[DataManagerTaskSerializerAnnotatorsItem]] = pydantic_v1.Field(default=None) + annotators: typing.Optional[typing.List[DataManagerTaskSerializerAnnotatorsItem]] = pydantic.Field(default=None) """ Annotators who annotated this task """ @@ -45,80 +43,73 @@ class DataManagerTaskSerializer(pydantic_v1.BaseModel): predictions_model_versions: typing.Optional[str] = None avg_lead_time: typing.Optional[float] = None draft_exists: typing.Optional[bool] = None - updated_by: typing.Optional[typing.List[typing.Dict[str, typing.Any]]] = pydantic_v1.Field(default=None) + updated_by: typing.Optional[typing.List[typing.Dict[str, typing.Optional[typing.Any]]]] = pydantic.Field( + default=None + ) """ User IDs who updated this task """ - data: typing.Dict[str, typing.Any] = pydantic_v1.Field() + data: typing.Dict[str, typing.Optional[typing.Any]] = pydantic.Field() """ User imported or uploaded data for a task. Data is formatted according to the project label config. You can find examples of data for your project on the Import page in the Label Studio Data Manager UI. """ - meta: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + meta: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Meta is user imported (uploaded) data and can be useful as input for an ML Backend for embeddings, advanced vectors, and other info. It is passed to ML during training/predicting steps. 
""" - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Time a task was created """ - updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + updated_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last time a task was updated """ - is_labeled: typing.Optional[bool] = pydantic_v1.Field(default=None) + is_labeled: typing.Optional[bool] = pydantic.Field(default=None) """ True if the number of annotations for this task is greater than or equal to the number of maximum_completions for the project """ - overlap: typing.Optional[int] = pydantic_v1.Field(default=None) + overlap: typing.Optional[int] = pydantic.Field(default=None) """ Number of distinct annotators that processed the current task """ - comment_count: typing.Optional[int] = pydantic_v1.Field(default=None) + comment_count: typing.Optional[int] = pydantic.Field(default=None) """ Number of comments in the task including all annotations """ - unresolved_comment_count: typing.Optional[int] = pydantic_v1.Field(default=None) + unresolved_comment_count: typing.Optional[int] = pydantic.Field(default=None) """ Number of unresolved comments in the task including all annotations """ - last_comment_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + last_comment_updated_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ When the last comment was updated """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID for this task """ - comment_authors: typing.Optional[typing.List[int]] = pydantic_v1.Field(default=None) + comment_authors: typing.Optional[typing.List[int]] = pydantic.Field(default=None) """ Users who wrote comments """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/data_manager_task_serializer_annotators_item.py b/src/label_studio_sdk/types/data_manager_task_serializer_annotators_item.py index 9cbff072e..62cffd181 100644 --- a/src/label_studio_sdk/types/data_manager_task_serializer_annotators_item.py +++ b/src/label_studio_sdk/types/data_manager_task_serializer_annotators_item.py @@ -2,4 +2,4 @@ import typing -DataManagerTaskSerializerAnnotatorsItem = typing.Union[int, typing.Dict[str, typing.Any]] +DataManagerTaskSerializerAnnotatorsItem = typing.Union[int, typing.Dict[str, typing.Optional[typing.Any]]] diff --git a/src/label_studio_sdk/types/data_manager_task_serializer_drafts_item.py 
b/src/label_studio_sdk/types/data_manager_task_serializer_drafts_item.py index 081db79c9..792c89c46 100644 --- a/src/label_studio_sdk/types/data_manager_task_serializer_drafts_item.py +++ b/src/label_studio_sdk/types/data_manager_task_serializer_drafts_item.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic -class DataManagerTaskSerializerDraftsItem(pydantic_v1.BaseModel): - result: typing.Optional[typing.List[typing.Dict[str, typing.Any]]] = None +class DataManagerTaskSerializerDraftsItem(UniversalBaseModel): + result: typing.Optional[typing.List[typing.Dict[str, typing.Optional[typing.Any]]]] = None created_at: typing.Optional[dt.datetime] = None updated_at: typing.Optional[dt.datetime] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/data_manager_task_serializer_predictions_item.py b/src/label_studio_sdk/types/data_manager_task_serializer_predictions_item.py index f664cff77..8b01227e4 100644 --- a/src/label_studio_sdk/types/data_manager_task_serializer_predictions_item.py +++ b/src/label_studio_sdk/types/data_manager_task_serializer_predictions_item.py @@ -1,37 +1,28 @@ # This file was auto-generated by Fern from our API Definition. 
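Every file in this patch repeats the same swap: the per-model json()/dict() overrides and the v1-only Config class give way to a shared UniversalBaseModel plus an IS_PYDANTIC_V2 gate, both imported from ..core.pydantic_utilities. That core module is not part of this diff, so the following is only a minimal sketch of what the two names could look like, under the assumption that the base class re-applies the by_alias/exclude_unset defaults the removed overrides hard-coded (the old code additionally merged the exclude_unset and exclude_none dumps via deep_union_pydantic_dicts, omitted here for brevity):

import typing

import pydantic

# Assumption: the gate keys off the installed pydantic major version.
IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")


class UniversalBaseModel(pydantic.BaseModel):
    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
        # Re-apply the defaults the deleted per-model overrides hard-coded.
        kwargs.setdefault("by_alias", True)
        kwargs.setdefault("exclude_unset", True)
        if IS_PYDANTIC_V2:
            return self.model_dump(**kwargs)  # v2 API
        return super().dict(**kwargs)  # v1 API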
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic -class DataManagerTaskSerializerPredictionsItem(pydantic_v1.BaseModel): - result: typing.Optional[typing.List[typing.Dict[str, typing.Any]]] = None +class DataManagerTaskSerializerPredictionsItem(UniversalBaseModel): + result: typing.Optional[typing.List[typing.Dict[str, typing.Optional[typing.Any]]]] = None score: typing.Optional[float] = None model_version: typing.Optional[str] = None - model: typing.Optional[typing.Dict[str, typing.Any]] = None - model_run: typing.Optional[typing.Dict[str, typing.Any]] = None + model: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None + model_run: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None task: typing.Optional[int] = None project: typing.Optional[float] = None created_at: typing.Optional[dt.datetime] = None updated_at: typing.Optional[dt.datetime] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/export.py b/src/label_studio_sdk/types/export.py index 218b6c661..1d7f45038 100644 --- a/src/label_studio_sdk/types/export.py +++ b/src/label_studio_sdk/types/export.py @@ -1,48 +1,39 @@ # This file was auto-generated by Fern from our API Definition. 
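The one-line union aliases in this patch (DataManagerTaskSerializerAnnotatorsItem just above, and the InferenceRun*/ModelProviderConnection* aliases further down) change only their dict value type, from Any to Optional[Any], which makes explicit that the expanded-record shape may carry JSON nulls. Both members of the union still validate; illustrative values only:

import typing

DataManagerTaskSerializerAnnotatorsItem = typing.Union[int, typing.Dict[str, typing.Optional[typing.Any]]]

by_id: DataManagerTaskSerializerAnnotatorsItem = 42  # bare user ID
expanded: DataManagerTaskSerializerAnnotatorsItem = {"id": 42, "avatar": None}  # expanded record with a null field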
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .converted_format import ConvertedFormat -from .export_status import ExportStatus from .user_simple import UserSimple +import datetime as dt +import pydantic +from .export_status import ExportStatus +from .converted_format import ConvertedFormat +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class Export(pydantic_v1.BaseModel): +class Export(UniversalBaseModel): title: typing.Optional[str] = None id: typing.Optional[int] = None created_by: typing.Optional[UserSimple] = None - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time """ - finished_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + finished_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Complete or fail time """ status: typing.Optional[ExportStatus] = None md5: typing.Optional[str] = None - counters: typing.Optional[typing.Dict[str, typing.Any]] = None + counters: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None converted_formats: typing.Optional[typing.List[ConvertedFormat]] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/export_convert.py b/src/label_studio_sdk/types/export_convert.py index 711c19189..2670d3fd4 100644 --- a/src/label_studio_sdk/types/export_convert.py +++ b/src/label_studio_sdk/types/export_convert.py @@ -1,32 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 import typing -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class ExportConvert(pydantic_v1.BaseModel): - export_type: str = pydantic_v1.Field() +class ExportConvert(UniversalBaseModel): + export_type: str = pydantic.Field() """ Export file format. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/export_create.py b/src/label_studio_sdk/types/export_create.py index 18e4dae1a..3a691423b 100644 --- a/src/label_studio_sdk/types/export_create.py +++ b/src/label_studio_sdk/types/export_create.py @@ -1,54 +1,45 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .annotation_filter_options import AnnotationFilterOptions -from .converted_format import ConvertedFormat +from .user_simple import UserSimple +import datetime as dt +import pydantic from .export_create_status import ExportCreateStatus -from .serialization_options import SerializationOptions +from .converted_format import ConvertedFormat from .task_filter_options import TaskFilterOptions -from .user_simple import UserSimple +from .annotation_filter_options import AnnotationFilterOptions +from .serialization_options import SerializationOptions +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class ExportCreate(pydantic_v1.BaseModel): +class ExportCreate(UniversalBaseModel): title: typing.Optional[str] = None id: typing.Optional[int] = None created_by: typing.Optional[UserSimple] = None - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time """ - finished_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + finished_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Complete or fail time """ status: typing.Optional[ExportCreateStatus] = None md5: typing.Optional[str] = None - counters: typing.Optional[typing.Dict[str, typing.Any]] = None + counters: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None converted_formats: typing.Optional[typing.List[ConvertedFormat]] = None task_filter_options: typing.Optional[TaskFilterOptions] = None annotation_filter_options: typing.Optional[AnnotationFilterOptions] = None serialization_options: typing.Optional[SerializationOptions] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, 
"exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/file_upload.py b/src/label_studio_sdk/types/file_upload.py index 8e73f9dd2..8fcd31f62 100644 --- a/src/label_studio_sdk/types/file_upload.py +++ b/src/label_studio_sdk/types/file_upload.py @@ -1,30 +1,20 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class FileUpload(pydantic_v1.BaseModel): +class FileUpload(UniversalBaseModel): id: typing.Optional[int] = None file: typing.Optional[str] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/filter.py b/src/label_studio_sdk/types/filter.py index cd7815acb..c5e37fa4d 100644 --- a/src/label_studio_sdk/types/filter.py +++ b/src/label_studio_sdk/types/filter.py @@ -1,53 +1,43 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class Filter(pydantic_v1.BaseModel): +class Filter(UniversalBaseModel): id: typing.Optional[int] = None - index: typing.Optional[int] = pydantic_v1.Field(default=None) + index: typing.Optional[int] = pydantic.Field(default=None) """ To keep filter order """ - column: str = pydantic_v1.Field() + column: str = pydantic.Field() """ Field name """ - type: str = pydantic_v1.Field() + type: str = pydantic.Field() """ Field type """ - operator: str = pydantic_v1.Field() + operator: str = pydantic.Field() """ Filter operator """ - value: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + value: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Filter value """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/filter_group.py b/src/label_studio_sdk/types/filter_group.py index 79907b683..626b8a439 100644 --- a/src/label_studio_sdk/types/filter_group.py +++ b/src/label_studio_sdk/types/filter_group.py @@ -1,35 +1,25 @@ # This file was auto-generated by Fern from our API Definition. 
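From a caller's side, the contract of a model like Filter above should be unchanged by this patch: the required fields (column, type, operator) are passed by name, and .dict() is assumed to keep producing aliased, unset-excluded output now that those defaults live in UniversalBaseModel rather than in per-model overrides. A usage sketch with made-up filter values:

from label_studio_sdk.types.filter import Filter

f = Filter(column="data.text", type="String", operator="contains", value={"items": None})
payload = f.dict()  # unset optionals (id, index) expected to be omitted, per the assumed base-class defaults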
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .filter import Filter +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class FilterGroup(pydantic_v1.BaseModel): +class FilterGroup(UniversalBaseModel): id: typing.Optional[int] = None filters: typing.List[Filter] - conjunction: str = pydantic_v1.Field() + conjunction: str = pydantic.Field() """ Type of conjunction """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/gcs_export_storage.py b/src/label_studio_sdk/types/gcs_export_storage.py index ca4d398bd..399102266 100644 --- a/src/label_studio_sdk/types/gcs_export_storage.py +++ b/src/label_studio_sdk/types/gcs_export_storage.py @@ -1,112 +1,103 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import datetime as dt from .gcs_export_storage_status import GcsExportStorageStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class GcsExportStorage(pydantic_v1.BaseModel): +class GcsExportStorage(UniversalBaseModel): id: typing.Optional[int] = None type: typing.Optional[str] = None synchronizable: typing.Optional[bool] = None - bucket: typing.Optional[str] = pydantic_v1.Field(default=None) + bucket: typing.Optional[str] = pydantic.Field(default=None) """ GCS bucket name """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ GCS bucket prefix """ - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage regex for filtering objects """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs """ - google_application_credentials: typing.Optional[str] = pydantic_v1.Field(default=None) + google_application_credentials: typing.Optional[str] = pydantic.Field(default=None) """ The content of GOOGLE_APPLICATION_CREDENTIALS json file """ - google_project_id: typing.Optional[str] = pydantic_v1.Field(default=None) + google_project_id: typing.Optional[str] = pydantic.Field(default=None) """ Google project ID """ - last_sync: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + last_sync: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last sync finished time """ - last_sync_count: typing.Optional[int] = pydantic_v1.Field(default=None) + last_sync_count: typing.Optional[int] = pydantic.Field(default=None) """ Count of tasks synced last time """ - last_sync_job: typing.Optional[str] = pydantic_v1.Field(default=None) + last_sync_job: typing.Optional[str] = pydantic.Field(default=None) """ Last sync job ID """ status: typing.Optional[GcsExportStorageStatus] = None - traceback: typing.Optional[str] = pydantic_v1.Field(default=None) + traceback: typing.Optional[str] = pydantic.Field(default=None) """ Traceback report for the last failed sync """ - meta: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + meta: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Meta and debug information about storage processes """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage description """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time """ - can_delete_objects: typing.Optional[bool] = pydantic_v1.Field(default=None) + can_delete_objects: typing.Optional[bool] = pydantic.Field(default=None) """ Deletion from storage enabled """ - project: int = pydantic_v1.Field() + project: int = pydantic.Field() """ A unique integer value identifying this project. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/gcs_import_storage.py b/src/label_studio_sdk/types/gcs_import_storage.py index 4d5099205..ee406e985 100644 --- a/src/label_studio_sdk/types/gcs_import_storage.py +++ b/src/label_studio_sdk/types/gcs_import_storage.py @@ -1,113 +1,104 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import datetime as dt from .gcs_import_storage_status import GcsImportStorageStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class GcsImportStorage(pydantic_v1.BaseModel): +class GcsImportStorage(UniversalBaseModel): id: typing.Optional[int] = None type: typing.Optional[str] = None synchronizable: typing.Optional[bool] = None presign: typing.Optional[bool] = None - bucket: typing.Optional[str] = pydantic_v1.Field(default=None) + bucket: typing.Optional[str] = pydantic.Field(default=None) """ GCS bucket name """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ GCS bucket prefix """ - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage regex for filtering objects """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs """ - google_application_credentials: typing.Optional[str] = pydantic_v1.Field(default=None) + google_application_credentials: typing.Optional[str] = pydantic.Field(default=None) """ The content of GOOGLE_APPLICATION_CREDENTIALS json file """ - google_project_id: typing.Optional[str] = pydantic_v1.Field(default=None) + google_project_id: typing.Optional[str] = pydantic.Field(default=None) """ Google project ID """ - last_sync: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + last_sync: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last sync finished time """ - last_sync_count: typing.Optional[int] = pydantic_v1.Field(default=None) + last_sync_count: typing.Optional[int] = pydantic.Field(default=None) """ Count of tasks synced last time """ - last_sync_job: typing.Optional[str] = pydantic_v1.Field(default=None) + last_sync_job: typing.Optional[str] = 
pydantic.Field(default=None) """ Last sync job ID """ status: typing.Optional[GcsImportStorageStatus] = None - traceback: typing.Optional[str] = pydantic_v1.Field(default=None) + traceback: typing.Optional[str] = pydantic.Field(default=None) """ Traceback report for the last failed sync """ - meta: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + meta: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Meta and debug information about storage processes """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage description """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time """ - presign_ttl: typing.Optional[int] = pydantic_v1.Field(default=None) + presign_ttl: typing.Optional[int] = pydantic.Field(default=None) """ Presigned URLs TTL (in minutes) """ - project: int = pydantic_v1.Field() + project: int = pydantic.Field() """ A unique integer value identifying this project. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/inference_run.py b/src/label_studio_sdk/types/inference_run.py index e5c9daaee..ccca3fe3f 100644 --- a/src/label_studio_sdk/types/inference_run.py +++ b/src/label_studio_sdk/types/inference_run.py @@ -1,17 +1,17 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .inference_run_created_by import InferenceRunCreatedBy from .inference_run_organization import InferenceRunOrganization +from .inference_run_created_by import InferenceRunCreatedBy from .inference_run_project_subset import InferenceRunProjectSubset from .inference_run_status import InferenceRunStatus +import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic -class InferenceRun(pydantic_v1.BaseModel): +class InferenceRun(UniversalBaseModel): organization: typing.Optional[InferenceRunOrganization] = None project: int model_version: typing.Optional[str] = None @@ -24,20 +24,11 @@ class InferenceRun(pydantic_v1.BaseModel): predictions_updated_at: typing.Optional[dt.datetime] = None completed_at: typing.Optional[dt.datetime] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/inference_run_cost_estimate.py b/src/label_studio_sdk/types/inference_run_cost_estimate.py index 76187d717..103f975c3 100644 --- a/src/label_studio_sdk/types/inference_run_cost_estimate.py +++ b/src/label_studio_sdk/types/inference_run_cost_estimate.py @@ -1,57 +1,47 @@ # This file was auto-generated by Fern from our API Definition. 
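Every v1 Config in this patch also drops its json_encoders = {dt.datetime: serialize_datetime} entry, so datetime encoding presumably moves into the shared core alongside the base class. Under Pydantic v2 the equivalent can be declared once with an annotated serializer; a sketch, assuming serialize_datetime returns an ISO-8601 string (Fern's actual helper is not shown in this diff):

import datetime as dt
import typing

import pydantic
from pydantic import PlainSerializer


def serialize_datetime(v: dt.datetime) -> str:
    # Assumed behavior of the removed ..core.datetime_utils helper.
    return v.isoformat()


# Applied on JSON dumps only; model fields would use this alias instead of dt.datetime.
Datetime = typing.Annotated[dt.datetime, PlainSerializer(serialize_datetime, when_used="json")]


class Example(pydantic.BaseModel):
    created_at: typing.Optional[Datetime] = None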
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class InferenceRunCostEstimate(pydantic_v1.BaseModel): - prompt_cost_usd: typing.Optional[str] = pydantic_v1.Field(default=None) +class InferenceRunCostEstimate(UniversalBaseModel): + prompt_cost_usd: typing.Optional[str] = pydantic.Field(default=None) """ Cost of the prompt (in USD) """ - completion_cost_usd: typing.Optional[str] = pydantic_v1.Field(default=None) + completion_cost_usd: typing.Optional[str] = pydantic.Field(default=None) """ Cost of the completion (in USD) """ - total_cost_usd: typing.Optional[str] = pydantic_v1.Field(default=None) + total_cost_usd: typing.Optional[str] = pydantic.Field(default=None) """ Total cost of the inference (in USD) """ - is_error: typing.Optional[bool] = pydantic_v1.Field(default=None) + is_error: typing.Optional[bool] = pydantic.Field(default=None) """ Whether an error occurred or not """ - error_type: typing.Optional[str] = pydantic_v1.Field(default=None) + error_type: typing.Optional[str] = pydantic.Field(default=None) """ Type of error (e.g. "Timeout", "Rate Limit", etc) """ - error_message: typing.Optional[str] = pydantic_v1.Field(default=None) + error_message: typing.Optional[str] = pydantic.Field(default=None) """ Error message details """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/inference_run_created_by.py b/src/label_studio_sdk/types/inference_run_created_by.py index 2da9ece87..933313496 100644 --- a/src/label_studio_sdk/types/inference_run_created_by.py +++ b/src/label_studio_sdk/types/inference_run_created_by.py @@ -2,4 +2,4 @@ import typing -InferenceRunCreatedBy = typing.Union[int, typing.Dict[str, typing.Any]] +InferenceRunCreatedBy = typing.Union[int, typing.Dict[str, typing.Optional[typing.Any]]] diff --git a/src/label_studio_sdk/types/inference_run_organization.py b/src/label_studio_sdk/types/inference_run_organization.py index d430254f0..71c60b1d8 100644 --- a/src/label_studio_sdk/types/inference_run_organization.py +++ b/src/label_studio_sdk/types/inference_run_organization.py @@ -2,4 +2,4 @@ import typing -InferenceRunOrganization = typing.Union[int, typing.Dict[str, typing.Any]] +InferenceRunOrganization = typing.Union[int, typing.Dict[str, typing.Optional[typing.Any]]] diff --git a/src/label_studio_sdk/types/key_indicator_value.py 
b/src/label_studio_sdk/types/key_indicator_value.py index 0994efa32..291358e18 100644 --- a/src/label_studio_sdk/types/key_indicator_value.py +++ b/src/label_studio_sdk/types/key_indicator_value.py @@ -1,30 +1,20 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class KeyIndicatorValue(pydantic_v1.BaseModel): +class KeyIndicatorValue(UniversalBaseModel): title: typing.Optional[str] = None - values: typing.Optional[typing.Dict[str, typing.Any]] = None - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + values: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/key_indicators.py b/src/label_studio_sdk/types/key_indicators.py index d03c3cd18..23e5b71de 100644 --- a/src/label_studio_sdk/types/key_indicators.py +++ b/src/label_studio_sdk/types/key_indicators.py @@ -1,7 +1,6 @@ # This file was auto-generated by Fern from our API Definition. import typing - from .key_indicators_item import KeyIndicatorsItem KeyIndicators = typing.List[KeyIndicatorsItem] diff --git a/src/label_studio_sdk/types/key_indicators_item.py b/src/label_studio_sdk/types/key_indicators_item.py index f92a60427..2f6a26c70 100644 --- a/src/label_studio_sdk/types/key_indicators_item.py +++ b/src/label_studio_sdk/types/key_indicators_item.py @@ -1,51 +1,41 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel +import pydantic import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .key_indicators_item_additional_kpis_item import KeyIndicatorsItemAdditionalKpisItem from .key_indicators_item_extra_kpis_item import KeyIndicatorsItemExtraKpisItem +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class KeyIndicatorsItem(pydantic_v1.BaseModel): - key: str = pydantic_v1.Field() +class KeyIndicatorsItem(UniversalBaseModel): + key: str = pydantic.Field() """ The key for this KPI, where you can find the value from inside main_kpi """ - title: str = pydantic_v1.Field() + title: str = pydantic.Field() """ The title for this metric, to be displayed to the user """ main_kpi: str secondary_kpi: typing.Optional[str] = None - additional_kpis: typing.Optional[typing.List[KeyIndicatorsItemAdditionalKpisItem]] = pydantic_v1.Field(default=None) + additional_kpis: typing.Optional[typing.List[KeyIndicatorsItemAdditionalKpisItem]] = pydantic.Field(default=None) """ Additional KPIs to be displayed at the bottom of the box """ - extra_kpis: typing.Optional[typing.List[KeyIndicatorsItemExtraKpisItem]] = pydantic_v1.Field(default=None) + extra_kpis: typing.Optional[typing.List[KeyIndicatorsItemExtraKpisItem]] = pydantic.Field(default=None) """ Extra KPIs to be displayed in the hover-tooltip for that indicator """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/key_indicators_item_additional_kpis_item.py b/src/label_studio_sdk/types/key_indicators_item_additional_kpis_item.py index 5ee1816bf..a0e1b06ca 100644 --- a/src/label_studio_sdk/types/key_indicators_item_additional_kpis_item.py +++ b/src/label_studio_sdk/types/key_indicators_item_additional_kpis_item.py @@ -1,37 +1,27 @@ # This file was auto-generated by Fern from our API Definition.
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class KeyIndicatorsItemAdditionalKpisItem(pydantic_v1.BaseModel): - key: typing.Optional[str] = pydantic_v1.Field(default=None) +class KeyIndicatorsItemAdditionalKpisItem(UniversalBaseModel): + key: typing.Optional[str] = pydantic.Field(default=None) """ The key for this KPI, where you can find the value from inside main_kpi """ - label: typing.Optional[str] = pydantic_v1.Field(default=None) + label: typing.Optional[str] = pydantic.Field(default=None) """ The label for this KPI, to be displayed to the user """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/key_indicators_item_extra_kpis_item.py b/src/label_studio_sdk/types/key_indicators_item_extra_kpis_item.py index f6628f50f..9e539bc1d 100644 --- a/src/label_studio_sdk/types/key_indicators_item_extra_kpis_item.py +++ b/src/label_studio_sdk/types/key_indicators_item_extra_kpis_item.py @@ -1,37 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
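The v2 branch of every model here sets ConfigDict(extra="allow", frozen=True), mirroring the old v1 Config (Extra.allow, frozen = True); smart_union needs no v2 counterpart because v2 unions are smart by default. In practice this means fields the server adds later are retained instead of rejected, and instances are immutable. A stand-in model demonstrating both behaviors (requires Pydantic v2):

import typing

import pydantic


class Demo(pydantic.BaseModel):
    model_config = pydantic.ConfigDict(extra="allow", frozen=True)
    id: typing.Optional[int] = None


d = Demo(id=1, unknown_server_field="kept")  # extra="allow": unexpected keys are kept
assert d.unknown_server_field == "kept"

try:
    d.id = 2  # frozen=True: assignment raises a ValidationError
except pydantic.ValidationError:
    pass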
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class KeyIndicatorsItemExtraKpisItem(pydantic_v1.BaseModel): - key: typing.Optional[str] = pydantic_v1.Field(default=None) +class KeyIndicatorsItemExtraKpisItem(UniversalBaseModel): + key: typing.Optional[str] = pydantic.Field(default=None) """ The key for this KPI, where you can find the value from inside main_kpi """ - label: typing.Optional[str] = pydantic_v1.Field(default=None) + label: typing.Optional[str] = pydantic.Field(default=None) """ The label for this KPI, to be displayed to the user """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/local_files_export_storage.py b/src/label_studio_sdk/types/local_files_export_storage.py index 0edc7b1dc..fffaaaa84 100644 --- a/src/label_studio_sdk/types/local_files_export_storage.py +++ b/src/label_studio_sdk/types/local_files_export_storage.py @@ -1,97 +1,88 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import datetime as dt from .local_files_export_storage_status import LocalFilesExportStorageStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class LocalFilesExportStorage(pydantic_v1.BaseModel): +class LocalFilesExportStorage(UniversalBaseModel): id: typing.Optional[int] = None type: typing.Optional[str] = None synchronizable: typing.Optional[bool] = None - path: typing.Optional[str] = pydantic_v1.Field(default=None) + path: typing.Optional[str] = pydantic.Field(default=None) """ Local path """ - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Regex for filtering objects """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs """ - last_sync: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + last_sync: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last sync finished time """ - last_sync_count: typing.Optional[int] = pydantic_v1.Field(default=None) + last_sync_count: typing.Optional[int] = pydantic.Field(default=None) """ Count of tasks synced last time """ - last_sync_job: typing.Optional[str] = pydantic_v1.Field(default=None) + last_sync_job: typing.Optional[str] = pydantic.Field(default=None) """ Last sync job ID """ status: typing.Optional[LocalFilesExportStorageStatus] = None - traceback: typing.Optional[str] = pydantic_v1.Field(default=None) + traceback: typing.Optional[str] = pydantic.Field(default=None) """ Traceback report for the last failed sync """ - meta: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + meta: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Meta and debug information about storage processes """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage description """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time """ - can_delete_objects: typing.Optional[bool] = pydantic_v1.Field(default=None) + can_delete_objects: typing.Optional[bool] = pydantic.Field(default=None) """ Deletion from storage enabled """ - project: int = pydantic_v1.Field() + project: int = pydantic.Field() """ A unique integer value identifying this project. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/local_files_import_storage.py b/src/label_studio_sdk/types/local_files_import_storage.py index 53a2b406d..57240a844 100644 --- a/src/label_studio_sdk/types/local_files_import_storage.py +++ b/src/label_studio_sdk/types/local_files_import_storage.py @@ -1,92 +1,83 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import datetime as dt from .local_files_import_storage_status import LocalFilesImportStorageStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class LocalFilesImportStorage(pydantic_v1.BaseModel): +class LocalFilesImportStorage(UniversalBaseModel): id: typing.Optional[int] = None type: typing.Optional[str] = None synchronizable: typing.Optional[bool] = None - path: typing.Optional[str] = pydantic_v1.Field(default=None) + path: typing.Optional[str] = pydantic.Field(default=None) """ Local path """ - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Regex for filtering objects """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs """ - last_sync: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + last_sync: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last sync finished time """ - last_sync_count: typing.Optional[int] = pydantic_v1.Field(default=None) + last_sync_count: typing.Optional[int] = pydantic.Field(default=None) """ Count of tasks synced last time """ - last_sync_job: typing.Optional[str] = pydantic_v1.Field(default=None) + last_sync_job: typing.Optional[str] = pydantic.Field(default=None) """ Last sync job ID """ status: typing.Optional[LocalFilesImportStorageStatus] = None - traceback: typing.Optional[str] = pydantic_v1.Field(default=None) + traceback: typing.Optional[str] = pydantic.Field(default=None) """ Traceback report for the last failed sync """ - meta: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + meta: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Meta and debug information about storage processes """ - title: typing.Optional[str] = 
pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage description """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time """ - project: int = pydantic_v1.Field() + project: int = pydantic.Field() """ A unique integer value identifying this project. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/ml_backend.py b/src/label_studio_sdk/types/ml_backend.py index 2dad822e2..21fd41e90 100644 --- a/src/label_studio_sdk/types/ml_backend.py +++ b/src/label_studio_sdk/types/ml_backend.py @@ -1,89 +1,80 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .ml_backend_auth_method import MlBackendAuthMethod from .ml_backend_state import MlBackendState +import pydantic +from .ml_backend_auth_method import MlBackendAuthMethod +import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class MlBackend(pydantic_v1.BaseModel): +class MlBackend(UniversalBaseModel): id: typing.Optional[int] = None state: typing.Optional[MlBackendState] = None readable_state: typing.Optional[str] = None - is_interactive: typing.Optional[bool] = pydantic_v1.Field(default=None) + is_interactive: typing.Optional[bool] = pydantic.Field(default=None) """ Used to interactively annotate tasks. 
If true, model returns one list with results """ - url: str = pydantic_v1.Field() + url: str = pydantic.Field() """ URL for the machine learning model server """ - error_message: typing.Optional[str] = pydantic_v1.Field(default=None) + error_message: typing.Optional[str] = pydantic.Field(default=None) """ Error message in error state """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Name of the machine learning backend """ auth_method: typing.Optional[MlBackendAuthMethod] = None - basic_auth_user: typing.Optional[str] = pydantic_v1.Field(default=None) + basic_auth_user: typing.Optional[str] = pydantic.Field(default=None) """ HTTP Basic Auth user """ basic_auth_pass: typing.Optional[str] = None basic_auth_pass_is_set: typing.Optional[str] = None - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Description for the machine learning backend """ - extra_params: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + extra_params: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Any extra parameters passed to the ML Backend during the setup """ - model_version: typing.Optional[str] = pydantic_v1.Field(default=None) + model_version: typing.Optional[str] = pydantic.Field(default=None) """ Current model version associated with this machine learning backend """ - timeout: typing.Optional[float] = pydantic_v1.Field(default=None) + timeout: typing.Optional[float] = pydantic.Field(default=None) """ Response model timeout """ created_at: typing.Optional[dt.datetime] = None updated_at: typing.Optional[dt.datetime] = None - auto_update: typing.Optional[bool] = pydantic_v1.Field(default=None) + auto_update: typing.Optional[bool] = pydantic.Field(default=None) """ If false, model version is set by the user, if true - getting latest version from backend. """ project: int - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/model_provider_connection.py b/src/label_studio_sdk/types/model_provider_connection.py index 55ea91e59..517877bde 100644 --- a/src/label_studio_sdk/types/model_provider_connection.py +++ b/src/label_studio_sdk/types/model_provider_connection.py @@ -1,18 +1,18 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt -import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .model_provider_connection_budget_reset_period import ModelProviderConnectionBudgetResetPeriod -from .model_provider_connection_created_by import ModelProviderConnectionCreatedBy -from .model_provider_connection_organization import ModelProviderConnectionOrganization +from ..core.pydantic_utilities import UniversalBaseModel from .model_provider_connection_provider import ModelProviderConnectionProvider +import typing from .model_provider_connection_scope import ModelProviderConnectionScope +from .model_provider_connection_organization import ModelProviderConnectionOrganization +from .model_provider_connection_created_by import ModelProviderConnectionCreatedBy +import datetime as dt +import pydantic +from .model_provider_connection_budget_reset_period import ModelProviderConnectionBudgetResetPeriod +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class ModelProviderConnection(pydantic_v1.BaseModel): +class ModelProviderConnection(UniversalBaseModel): provider: ModelProviderConnectionProvider api_key: typing.Optional[str] = None deployment_name: typing.Optional[str] = None @@ -22,50 +22,41 @@ class ModelProviderConnection(pydantic_v1.BaseModel): created_by: typing.Optional[ModelProviderConnectionCreatedBy] = None created_at: typing.Optional[dt.datetime] = None updated_at: typing.Optional[dt.datetime] = None - is_internal: typing.Optional[bool] = pydantic_v1.Field(default=None) + is_internal: typing.Optional[bool] = pydantic.Field(default=None) """ Whether the model provider connection is internal, not visible to the user. """ - budget_limit: typing.Optional[float] = pydantic_v1.Field(default=None) + budget_limit: typing.Optional[float] = pydantic.Field(default=None) """ Budget limit for the model provider connection (null if unlimited) """ - budget_last_reset_date: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + budget_last_reset_date: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Date and time the budget was last reset """ - budget_reset_period: typing.Optional[ModelProviderConnectionBudgetResetPeriod] = pydantic_v1.Field(default=None) + budget_reset_period: typing.Optional[ModelProviderConnectionBudgetResetPeriod] = pydantic.Field(default=None) """ Budget reset period for the model provider connection (null if not reset) """ - budget_total_spent: typing.Optional[float] = pydantic_v1.Field(default=None) + budget_total_spent: typing.Optional[float] = pydantic.Field(default=None) """ Tracked total budget spent for the given provider connection within the current budget period """ - budget_alert_threshold: typing.Optional[float] = pydantic_v1.Field(default=None) + budget_alert_threshold: typing.Optional[float] = pydantic.Field(default=None) """ Budget alert threshold for the given provider connection """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), 
super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/model_provider_connection_created_by.py b/src/label_studio_sdk/types/model_provider_connection_created_by.py index 9ec9d319d..90bb941d6 100644 --- a/src/label_studio_sdk/types/model_provider_connection_created_by.py +++ b/src/label_studio_sdk/types/model_provider_connection_created_by.py @@ -2,4 +2,4 @@ import typing -ModelProviderConnectionCreatedBy = typing.Union[int, typing.Dict[str, typing.Any]] +ModelProviderConnectionCreatedBy = typing.Union[int, typing.Dict[str, typing.Optional[typing.Any]]] diff --git a/src/label_studio_sdk/types/model_provider_connection_organization.py b/src/label_studio_sdk/types/model_provider_connection_organization.py index 0ce796632..7da1b33ab 100644 --- a/src/label_studio_sdk/types/model_provider_connection_organization.py +++ b/src/label_studio_sdk/types/model_provider_connection_organization.py @@ -2,4 +2,4 @@ import typing -ModelProviderConnectionOrganization = typing.Union[int, typing.Dict[str, typing.Any]] +ModelProviderConnectionOrganization = typing.Union[int, typing.Dict[str, typing.Optional[typing.Any]]] diff --git a/src/label_studio_sdk/types/prediction.py b/src/label_studio_sdk/types/prediction.py index cd1bbefdf..efb00f16b 100644 --- a/src/label_studio_sdk/types/prediction.py +++ b/src/label_studio_sdk/types/prediction.py @@ -1,57 +1,57 @@ # This file was auto-generated by Fern from our API Definition. 
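Alongside the base-class swap, every free-form JSON field and alias in this diff is retyped from typing.Dict[str, typing.Any] to typing.Dict[str, typing.Optional[typing.Any]]. Semantically Any already admits None; the Optional spelling makes explicit that JSON null is an expected value rather than an accident. Using the alias just changed above, with a hypothetical payload:

import typing

ModelProviderConnectionCreatedBy = typing.Union[int, typing.Dict[str, typing.Optional[typing.Any]]]

# Either a bare user ID or a JSON object whose values may legitimately be null.
created_by: ModelProviderConnectionCreatedBy = {"id": 42, "avatar": None}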
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class Prediction(pydantic_v1.BaseModel): +class Prediction(UniversalBaseModel): id: typing.Optional[int] = None - result: typing.List[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + result: typing.List[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field() """ List of prediction results for the task """ - model_version: typing.Optional[str] = pydantic_v1.Field(default=None) + model_version: typing.Optional[str] = pydantic.Field(default=None) """ Model version - tag for predictions that can be used to filter tasks in Data Manager, as well as select specific model version for showing preannotations in the labeling interface """ - created_ago: typing.Optional[str] = pydantic_v1.Field(default=None) + created_ago: typing.Optional[str] = pydantic.Field(default=None) """ Delta time from creation time """ - score: typing.Optional[float] = pydantic_v1.Field(default=None) + score: typing.Optional[float] = pydantic.Field(default=None) """ Prediction score """ - cluster: typing.Optional[int] = pydantic_v1.Field(default=None) + cluster: typing.Optional[int] = pydantic.Field(default=None) """ Cluster for the current prediction """ - neighbors: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + neighbors: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Array of task IDs of the closest neighbors """ - mislabeling: typing.Optional[float] = pydantic_v1.Field(default=None) + mislabeling: typing.Optional[float] = pydantic.Field(default=None) """ Related task mislabeling score """ created_at: typing.Optional[dt.datetime] = None updated_at: typing.Optional[dt.datetime] = None - model: typing.Optional[int] = pydantic_v1.Field(default=None) + model: typing.Optional[int] = pydantic.Field(default=None) """ An ML Backend instance that created the prediction. """ - model_run: typing.Optional[int] = pydantic_v1.Field(default=None) + model_run: typing.Optional[int] = pydantic.Field(default=None) """ A run of a ModelVersion that created the prediction. 
""" @@ -59,20 +59,11 @@ class Prediction(pydantic_v1.BaseModel): task: int project: typing.Optional[int] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/project.py b/src/label_studio_sdk/types/project.py index 538dbae9a..3180e3caf 100644 --- a/src/label_studio_sdk/types/project.py +++ b/src/label_studio_sdk/types/project.py @@ -1,127 +1,129 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from .prompt import Prompt +from .user_simple import UserSimple +import datetime as dt from .project_sampling import ProjectSampling from .project_skip_queue import ProjectSkipQueue -from .user_simple import UserSimple +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class Project(pydantic_v1.BaseModel): +class Project(UniversalBaseModel): id: typing.Optional[int] = None - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Project name. Must be between 3 and 50 characters long. """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Project description """ - label_config: typing.Optional[str] = pydantic_v1.Field(default=None) + label_config: typing.Optional[str] = pydantic.Field(default=None) """ Label config in XML format. 
See more about it in documentation """ - expert_instruction: typing.Optional[str] = pydantic_v1.Field(default=None) + expert_instruction: typing.Optional[str] = pydantic.Field(default=None) """ Labeling instructions in HTML format """ - show_instruction: typing.Optional[bool] = pydantic_v1.Field(default=None) + show_instruction: typing.Optional[bool] = pydantic.Field(default=None) """ Show instructions to the annotator before they start """ - show_skip_button: typing.Optional[bool] = pydantic_v1.Field(default=None) + show_skip_button: typing.Optional[bool] = pydantic.Field(default=None) """ Show a skip button in interface and allow annotators to skip the task """ - enable_empty_annotation: typing.Optional[bool] = pydantic_v1.Field(default=None) + enable_empty_annotation: typing.Optional[bool] = pydantic.Field(default=None) """ Allow annotators to submit empty annotations """ - show_annotation_history: typing.Optional[bool] = pydantic_v1.Field(default=None) + show_annotation_history: typing.Optional[bool] = pydantic.Field(default=None) """ Show annotation history to annotator """ organization: typing.Optional[int] = None + prompts: typing.Optional[typing.List[Prompt]] = None color: typing.Optional[str] = None - maximum_annotations: typing.Optional[int] = pydantic_v1.Field(default=None) + maximum_annotations: typing.Optional[int] = pydantic.Field(default=None) """ Maximum number of annotations for one task. If the number of annotations per task is equal or greater to this value, the task is completed (is_labeled=True) """ - is_published: typing.Optional[bool] = pydantic_v1.Field(default=None) + is_published: typing.Optional[bool] = pydantic.Field(default=None) """ Whether or not the project is published to annotators """ - model_version: typing.Optional[str] = pydantic_v1.Field(default=None) + model_version: typing.Optional[str] = pydantic.Field(default=None) """ Machine learning model version """ - is_draft: typing.Optional[bool] = pydantic_v1.Field(default=None) + is_draft: typing.Optional[bool] = pydantic.Field(default=None) """ Whether or not the project is in the middle of being created """ created_by: typing.Optional[UserSimple] = None created_at: typing.Optional[dt.datetime] = None - min_annotations_to_start_training: typing.Optional[int] = pydantic_v1.Field(default=None) + min_annotations_to_start_training: typing.Optional[int] = pydantic.Field(default=None) """ Minimum number of completed tasks after which model training is started """ - start_training_on_annotation_update: typing.Optional[str] = pydantic_v1.Field(default=None) + start_training_on_annotation_update: typing.Optional[str] = pydantic.Field(default=None) """ Start model training after any annotations are submitted or updated """ - show_collab_predictions: typing.Optional[bool] = pydantic_v1.Field(default=None) + show_collab_predictions: typing.Optional[bool] = pydantic.Field(default=None) """ If set, the annotator can view model predictions """ - num_tasks_with_annotations: typing.Optional[int] = pydantic_v1.Field(default=None) + num_tasks_with_annotations: typing.Optional[int] = pydantic.Field(default=None) """ Tasks with annotations count """ - task_number: typing.Optional[int] = pydantic_v1.Field(default=None) + task_number: typing.Optional[int] = pydantic.Field(default=None) """ Total task number in project """ - useful_annotation_number: typing.Optional[int] = pydantic_v1.Field(default=None) + useful_annotation_number: typing.Optional[int] = pydantic.Field(default=None) """ Useful annotation number in project not 
including skipped_annotations_number and ground_truth_number. Total annotations = annotation_number + skipped_annotations_number + ground_truth_number """ - ground_truth_number: typing.Optional[int] = pydantic_v1.Field(default=None) + ground_truth_number: typing.Optional[int] = pydantic.Field(default=None) """ Honeypot annotation number in project """ - skipped_annotations_number: typing.Optional[int] = pydantic_v1.Field(default=None) + skipped_annotations_number: typing.Optional[int] = pydantic.Field(default=None) """ Skipped by collaborators annotation number in project """ - total_annotations_number: typing.Optional[int] = pydantic_v1.Field(default=None) + total_annotations_number: typing.Optional[int] = pydantic.Field(default=None) """ Total annotations number in project including skipped_annotations_number and ground_truth_number. """ - total_predictions_number: typing.Optional[int] = pydantic_v1.Field(default=None) + total_predictions_number: typing.Optional[int] = pydantic.Field(default=None) """ Total predictions number in project including skipped_annotations_number, ground_truth_number, and useful_annotation_number. """ @@ -130,48 +132,48 @@ class Project(pydantic_v1.BaseModel): show_ground_truth_first: typing.Optional[bool] = None show_overlap_first: typing.Optional[bool] = None overlap_cohort_percentage: typing.Optional[int] = None - task_data_login: typing.Optional[str] = pydantic_v1.Field(default=None) + task_data_login: typing.Optional[str] = pydantic.Field(default=None) """ Task data credentials: login """ - task_data_password: typing.Optional[str] = pydantic_v1.Field(default=None) + task_data_password: typing.Optional[str] = pydantic.Field(default=None) """ Task data credentials: password """ - control_weights: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Dict of weights for each control tag in metric calculation. Each control tag (e.g. 
label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} """ - parsed_label_config: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + parsed_label_config: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ JSON-formatted labeling configuration """ - evaluate_predictions_automatically: typing.Optional[bool] = pydantic_v1.Field(default=None) + evaluate_predictions_automatically: typing.Optional[bool] = pydantic.Field(default=None) """ Retrieve and display predictions when loading a task """ - config_has_control_tags: typing.Optional[str] = pydantic_v1.Field(default=None) + config_has_control_tags: typing.Optional[str] = pydantic.Field(default=None) """ Flag to detect whether the project is ready for labeling """ skip_queue: typing.Optional[ProjectSkipQueue] = None - reveal_preannotations_interactively: typing.Optional[bool] = pydantic_v1.Field(default=None) + reveal_preannotations_interactively: typing.Optional[bool] = pydantic.Field(default=None) """ Reveal pre-annotations interactively """ - pinned_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + pinned_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Pinned date and time """ - finished_task_number: typing.Optional[int] = pydantic_v1.Field(default=None) + finished_task_number: typing.Optional[int] = pydantic.Field(default=None) """ Finished tasks """ @@ -179,20 +181,11 @@ class Project(pydantic_v1.BaseModel): queue_total: typing.Optional[str] = None queue_done: typing.Optional[str] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/project_import.py b/src/label_studio_sdk/types/project_import.py index 8bfa66f92..331fd485a 100644 --- a/src/label_studio_sdk/types/project_import.py +++ b/src/label_studio_sdk/types/project_import.py @@ -1,33 +1,33 @@ # This file was auto-generated by Fern from our API Definition.
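Every model in this diff also deletes the same pair of hand-written json() and dict() overrides. Their job was to dump the model twice, once with exclude_unset and once with exclude_none (both by alias), and deep-merge the two dicts so explicitly set fields survive while never-set noise is dropped; that policy now lives once in UniversalBaseModel instead of being stamped into every generated class. A simplified sketch of the merge the deleted deep_union_pydantic_dicts helper performed (the SDK's actual implementation may differ in detail):

import typing


def deep_union(left: typing.Dict[str, typing.Any], right: typing.Dict[str, typing.Any]) -> typing.Dict[str, typing.Any]:
    # Recursive dict merge: values from right win, nested dicts are merged.
    merged = dict(left)
    for key, value in right.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = deep_union(merged[key], value)
        else:
            merged[key] = value
    return merged


# The deleted dict() override effectively returned:
#   deep_union(model.dict(exclude_unset=True), model.dict(exclude_none=True))
print(deep_union({"a": {"x": 1}}, {"a": {"y": 2}, "b": 3}))  # {'a': {'x': 1, 'y': 2}, 'b': 3}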
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .project_import_status import ProjectImportStatus +import datetime as dt +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class ProjectImport(pydantic_v1.BaseModel): +class ProjectImport(UniversalBaseModel): id: typing.Optional[int] = None - preannotated_from_fields: typing.Optional[typing.Dict[str, typing.Any]] = None + preannotated_from_fields: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None commit_to_project: typing.Optional[bool] = None return_task_ids: typing.Optional[bool] = None status: typing.Optional[ProjectImportStatus] = None url: typing.Optional[str] = None traceback: typing.Optional[str] = None error: typing.Optional[str] = None - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time """ - updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + updated_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Updated time """ - finished_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + finished_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Complete or fail time """ @@ -36,28 +36,19 @@ class ProjectImport(pydantic_v1.BaseModel): annotation_count: typing.Optional[int] = None prediction_count: typing.Optional[int] = None duration: typing.Optional[int] = None - file_upload_ids: typing.Optional[typing.Dict[str, typing.Any]] = None + file_upload_ids: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None could_be_tasks_list: typing.Optional[bool] = None - found_formats: typing.Optional[typing.Dict[str, typing.Any]] = None - data_columns: typing.Optional[typing.Dict[str, typing.Any]] = None - tasks: typing.Optional[typing.Dict[str, typing.Any]] = None - task_ids: typing.Optional[typing.Dict[str, typing.Any]] = None + found_formats: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None + data_columns: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None + tasks: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None + task_ids: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None project: typing.Optional[int] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/project_label_config.py 
b/src/label_studio_sdk/types/project_label_config.py index 3b8e2b0a4..443fbb86e 100644 --- a/src/label_studio_sdk/types/project_label_config.py +++ b/src/label_studio_sdk/types/project_label_config.py @@ -1,32 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 import typing -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class ProjectLabelConfig(pydantic_v1.BaseModel): - label_config: str = pydantic_v1.Field() +class ProjectLabelConfig(UniversalBaseModel): + label_config: str = pydantic.Field() """ Label config in XML format. See more about it in documentation """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/prompt.py b/src/label_studio_sdk/types/prompt.py index c732a57b4..a981b9cd4 100644 --- a/src/label_studio_sdk/types/prompt.py +++ b/src/label_studio_sdk/types/prompt.py @@ -1,79 +1,70 @@ # This file was auto-generated by Fern from our API Definition. 
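The Prompt model defined in the hunk that follows is new in this diff and backs the prompts list added to Project above. Per the generated schema, title, input_fields, and output_classes are required and everything else is optional; a hypothetical construction (all field values here are made up for illustration):

from label_studio_sdk.types.prompt import Prompt

prompt = Prompt(
    title="Sentiment classifier",
    input_fields=["text"],
    output_classes=["positive", "negative", "neutral"],
)
print(prompt.skill_name)  # optional fields default to None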
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel +import pydantic import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .prompt_created_by import PromptCreatedBy +import datetime as dt from .prompt_organization import PromptOrganization +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class Prompt(pydantic_v1.BaseModel): - title: str = pydantic_v1.Field() +class Prompt(UniversalBaseModel): + title: str = pydantic.Field() """ Title of the prompt """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Description of the prompt """ - created_by: typing.Optional[PromptCreatedBy] = pydantic_v1.Field(default=None) + created_by: typing.Optional[PromptCreatedBy] = pydantic.Field(default=None) """ User ID of the creator of the prompt """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Date and time the prompt was created """ - updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + updated_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Date and time the prompt was last updated """ - organization: typing.Optional[PromptOrganization] = pydantic_v1.Field(default=None) + organization: typing.Optional[PromptOrganization] = pydantic.Field(default=None) """ Organization ID of the prompt """ - input_fields: typing.List[str] = pydantic_v1.Field() + input_fields: typing.List[str] = pydantic.Field() """ List of input fields """ - output_classes: typing.List[str] = pydantic_v1.Field() + output_classes: typing.List[str] = pydantic.Field() """ List of output classes """ - associated_projects: typing.Optional[typing.List[int]] = pydantic_v1.Field(default=None) + associated_projects: typing.Optional[typing.List[int]] = pydantic.Field(default=None) """ List of associated project IDs """ - skill_name: typing.Optional[str] = pydantic_v1.Field(default=None) + skill_name: typing.Optional[str] = pydantic.Field(default=None) """ Name of the skill """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/prompt_created_by.py b/src/label_studio_sdk/types/prompt_created_by.py index efe14c6c3..530d00670 100644 --- a/src/label_studio_sdk/types/prompt_created_by.py +++ b/src/label_studio_sdk/types/prompt_created_by.py @@ -2,4 +2,4 @@ import typing -PromptCreatedBy = typing.Union[int, typing.Dict[str,
typing.Any]] +PromptCreatedBy = typing.Union[int, typing.Dict[str, typing.Optional[typing.Any]]] diff --git a/src/label_studio_sdk/types/prompt_organization.py b/src/label_studio_sdk/types/prompt_organization.py index 1f1a1158c..92b7299b4 100644 --- a/src/label_studio_sdk/types/prompt_organization.py +++ b/src/label_studio_sdk/types/prompt_organization.py @@ -2,4 +2,4 @@ import typing -PromptOrganization = typing.Union[int, typing.Dict[str, typing.Any]] +PromptOrganization = typing.Union[int, typing.Dict[str, typing.Optional[typing.Any]]] diff --git a/src/label_studio_sdk/types/prompt_version.py b/src/label_studio_sdk/types/prompt_version.py index b9b5ea5ab..38f317b13 100644 --- a/src/label_studio_sdk/types/prompt_version.py +++ b/src/label_studio_sdk/types/prompt_version.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .prompt_version_provider import PromptVersionProvider from .prompt_version_created_by import PromptVersionCreatedBy +import datetime as dt from .prompt_version_organization import PromptVersionOrganization -from .prompt_version_provider import PromptVersionProvider +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic -class PromptVersion(pydantic_v1.BaseModel): +class PromptVersion(UniversalBaseModel): title: typing.Optional[str] = None parent_model: typing.Optional[int] = None model_provider_connection: typing.Optional[int] = None @@ -22,20 +22,11 @@ class PromptVersion(pydantic_v1.BaseModel): updated_at: typing.Optional[dt.datetime] = None organization: typing.Optional[PromptVersionOrganization] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/prompt_version_created_by.py b/src/label_studio_sdk/types/prompt_version_created_by.py index a0e0d8668..3f7d2550b 100644 --- a/src/label_studio_sdk/types/prompt_version_created_by.py +++ b/src/label_studio_sdk/types/prompt_version_created_by.py @@ -2,4 +2,4 @@ import typing -PromptVersionCreatedBy = typing.Union[int, typing.Dict[str, typing.Any]] +PromptVersionCreatedBy = typing.Union[int, typing.Dict[str, typing.Optional[typing.Any]]] diff --git a/src/label_studio_sdk/types/prompt_version_organization.py b/src/label_studio_sdk/types/prompt_version_organization.py index 28c02e65d..fd7c6ddb9 100644 --- a/src/label_studio_sdk/types/prompt_version_organization.py +++ 
b/src/label_studio_sdk/types/prompt_version_organization.py @@ -2,4 +2,4 @@ import typing -PromptVersionOrganization = typing.Union[int, typing.Dict[str, typing.Any]] +PromptVersionOrganization = typing.Union[int, typing.Dict[str, typing.Optional[typing.Any]]] diff --git a/src/label_studio_sdk/types/redis_export_storage.py b/src/label_studio_sdk/types/redis_export_storage.py index 5deb64f0c..49d816584 100644 --- a/src/label_studio_sdk/types/redis_export_storage.py +++ b/src/label_studio_sdk/types/redis_export_storage.py @@ -1,117 +1,108 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import datetime as dt from .redis_export_storage_status import RedisExportStorageStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class RedisExportStorage(pydantic_v1.BaseModel): +class RedisExportStorage(UniversalBaseModel): id: typing.Optional[int] = None type: typing.Optional[str] = None synchronizable: typing.Optional[bool] = None - path: typing.Optional[str] = pydantic_v1.Field(default=None) + path: typing.Optional[str] = pydantic.Field(default=None) """ Storage prefix (optional) """ - host: typing.Optional[str] = pydantic_v1.Field(default=None) + host: typing.Optional[str] = pydantic.Field(default=None) """ Server Host IP (optional) """ - port: typing.Optional[str] = pydantic_v1.Field(default=None) + port: typing.Optional[str] = pydantic.Field(default=None) """ Server Port (optional) """ - password: typing.Optional[str] = pydantic_v1.Field(default=None) + password: typing.Optional[str] = pydantic.Field(default=None) """ Server Password (optional) """ - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage regex for filtering objects """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs """ - last_sync: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + last_sync: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last sync finished time """ - last_sync_count: typing.Optional[int] = pydantic_v1.Field(default=None) + last_sync_count: typing.Optional[int] = pydantic.Field(default=None) """ Count of tasks synced last time """ - last_sync_job: typing.Optional[str] = pydantic_v1.Field(default=None) + last_sync_job: typing.Optional[str] = pydantic.Field(default=None) """ Last sync job ID """ status: typing.Optional[RedisExportStorageStatus] = None - traceback: typing.Optional[str] = pydantic_v1.Field(default=None) + traceback: typing.Optional[str] = pydantic.Field(default=None) """ Traceback report for the last failed sync """ - meta: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + meta: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Meta and debug information about storage processes """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage 
description """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time """ - can_delete_objects: typing.Optional[bool] = pydantic_v1.Field(default=None) + can_delete_objects: typing.Optional[bool] = pydantic.Field(default=None) """ Deletion from storage enabled """ - db: typing.Optional[int] = pydantic_v1.Field(default=None) + db: typing.Optional[int] = pydantic.Field(default=None) """ Server Database """ - project: int = pydantic_v1.Field() + project: int = pydantic.Field() """ A unique integer value identifying this project. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/redis_import_storage.py b/src/label_studio_sdk/types/redis_import_storage.py index c08252ec8..0790e279c 100644 --- a/src/label_studio_sdk/types/redis_import_storage.py +++ b/src/label_studio_sdk/types/redis_import_storage.py @@ -1,112 +1,103 @@ # This file was auto-generated by Fern from our API Definition. 
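Note what the rewritten configuration preserves on both pydantic versions: models stay frozen (immutable and hashable) while extra keys from the server are allowed and retained. A quick check using RedisExportStorage from the hunk above (the keyword values are hypothetical):

from label_studio_sdk.types.redis_export_storage import RedisExportStorage

storage = RedisExportStorage(project=1, some_future_field="kept")  # extra="allow" keeps unknown keys

try:
    storage.project = 2  # frozen: assignment raises on both pydantic versions
except Exception as exc:  # TypeError on v1, ValidationError on v2
    print(type(exc).__name__)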
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import datetime as dt from .redis_import_storage_status import RedisImportStorageStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class RedisImportStorage(pydantic_v1.BaseModel): +class RedisImportStorage(UniversalBaseModel): id: typing.Optional[int] = None type: typing.Optional[str] = None synchronizable: typing.Optional[bool] = None - path: typing.Optional[str] = pydantic_v1.Field(default=None) + path: typing.Optional[str] = pydantic.Field(default=None) """ Storage prefix (optional) """ - host: typing.Optional[str] = pydantic_v1.Field(default=None) + host: typing.Optional[str] = pydantic.Field(default=None) """ Server Host IP (optional) """ - port: typing.Optional[str] = pydantic_v1.Field(default=None) + port: typing.Optional[str] = pydantic.Field(default=None) """ Server Port (optional) """ - password: typing.Optional[str] = pydantic_v1.Field(default=None) + password: typing.Optional[str] = pydantic.Field(default=None) """ Server Password (optional) """ - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage regex for filtering objects """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs """ - last_sync: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + last_sync: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last sync finished time """ - last_sync_count: typing.Optional[int] = pydantic_v1.Field(default=None) + last_sync_count: typing.Optional[int] = pydantic.Field(default=None) """ Count of tasks synced last time """ - last_sync_job: typing.Optional[str] = pydantic_v1.Field(default=None) + last_sync_job: typing.Optional[str] = pydantic.Field(default=None) """ Last sync job ID """ status: typing.Optional[RedisImportStorageStatus] = None - traceback: typing.Optional[str] = pydantic_v1.Field(default=None) + traceback: typing.Optional[str] = pydantic.Field(default=None) """ Traceback report for the last failed sync """ - meta: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + meta: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Meta and debug information about storage processes """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage description """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time """ - db: typing.Optional[int] = pydantic_v1.Field(default=None) + db: typing.Optional[int] = pydantic.Field(default=None) """ Server Database """ - project: int = pydantic_v1.Field() + project: int = pydantic.Field() """ A unique integer value identifying this project. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/refined_prompt_response.py b/src/label_studio_sdk/types/refined_prompt_response.py index 5c53a9494..c4b20989f 100644 --- a/src/label_studio_sdk/types/refined_prompt_response.py +++ b/src/label_studio_sdk/types/refined_prompt_response.py @@ -1,64 +1,54 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .prompt_version import PromptVersion +import pydantic from .refined_prompt_response_refinement_status import RefinedPromptResponseRefinementStatus +from .prompt_version import PromptVersion +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class RefinedPromptResponse(pydantic_v1.BaseModel): - title: typing.Optional[str] = pydantic_v1.Field(default=None) +class RefinedPromptResponse(UniversalBaseModel): + title: typing.Optional[str] = pydantic.Field(default=None) """ Title of the refined prompt """ - reasoning: typing.Optional[str] = pydantic_v1.Field(default=None) + reasoning: typing.Optional[str] = pydantic.Field(default=None) """ Reasoning behind the refinement """ - prompt: str = pydantic_v1.Field() + prompt: str = pydantic.Field() """ The refined prompt text """ - refinement_job_id: typing.Optional[str] = pydantic_v1.Field(default=None) + refinement_job_id: typing.Optional[str] = pydantic.Field(default=None) """ Unique identifier for the refinement job """ - refinement_status: typing.Optional[RefinedPromptResponseRefinementStatus] = pydantic_v1.Field(default=None) + refinement_status: typing.Optional[RefinedPromptResponseRefinementStatus] = pydantic.Field(default=None) """ Status of the refinement job """ - total_cost: typing.Optional[str] = pydantic_v1.Field(default=None) + total_cost: typing.Optional[str] = pydantic.Field(default=None) """ Total cost of the refinement job (in USD) """ - previous_version: typing.Optional[PromptVersion] = pydantic_v1.Field(default=None) + previous_version: typing.Optional[PromptVersion] = pydantic.Field(default=None) """ Previous version of the prompt """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, 
"exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/s3export_storage.py b/src/label_studio_sdk/types/s3export_storage.py index 7c171844b..ed4e36389 100644 --- a/src/label_studio_sdk/types/s3export_storage.py +++ b/src/label_studio_sdk/types/s3export_storage.py @@ -1,134 +1,127 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import datetime as dt +import pydantic from .s3export_storage_status import S3ExportStorageStatus +import typing_extensions +from ..core.serialization import FieldMetadata +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class S3ExportStorage(pydantic_v1.BaseModel): +class S3ExportStorage(UniversalBaseModel): id: typing.Optional[int] = None type: typing.Optional[str] = None synchronizable: typing.Optional[bool] = None - last_sync: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + last_sync: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last sync finished time """ - last_sync_count: typing.Optional[int] = pydantic_v1.Field(default=None) + last_sync_count: typing.Optional[int] = pydantic.Field(default=None) """ Count of tasks synced last time """ - last_sync_job: typing.Optional[str] = pydantic_v1.Field(default=None) + last_sync_job: typing.Optional[str] = pydantic.Field(default=None) """ Last sync job ID """ status: typing.Optional[S3ExportStorageStatus] = None - traceback: typing.Optional[str] = pydantic_v1.Field(default=None) + traceback: typing.Optional[str] = pydantic.Field(default=None) """ Traceback report for the last failed sync """ - meta: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + meta: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Meta and debug information about storage processes """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage description """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time """ - can_delete_objects: typing.Optional[bool] = pydantic_v1.Field(default=None) + can_delete_objects: typing.Optional[bool] = pydantic.Field(default=None) """ Deletion from storage enabled """ - bucket: typing.Optional[str] = pydantic_v1.Field(default=None) + bucket: typing.Optional[str] = pydantic.Field(default=None) """ S3 bucket name """ - prefix: typing.Optional[str] = 
pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ S3 bucket prefix """ - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage regex for filtering objects """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs """ - aws_access_key_id: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_access_key_id: typing.Optional[str] = pydantic.Field(default=None) """ AWS_ACCESS_KEY_ID """ - aws_secret_access_key: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_secret_access_key: typing.Optional[str] = pydantic.Field(default=None) """ AWS_SECRET_ACCESS_KEY """ - aws_session_token: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_session_token: typing.Optional[str] = pydantic.Field(default=None) """ AWS_SESSION_TOKEN """ - aws_sse_kms_key_id: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_sse_kms_key_id: typing.Optional[str] = pydantic.Field(default=None) """ AWS SSE KMS Key ID """ - region_name: typing.Optional[str] = pydantic_v1.Field(default=None) + region_name: typing.Optional[str] = pydantic.Field(default=None) """ AWS Region """ - s3endpoint: typing.Optional[str] = pydantic_v1.Field(alias="s3_endpoint", default=None) + s3endpoint: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="s3_endpoint")] = pydantic.Field( + default=None + ) """ S3 Endpoint """ - project: int = pydantic_v1.Field() + project: int = pydantic.Field() """ A unique integer value identifying this project. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/s3import_storage.py b/src/label_studio_sdk/types/s3import_storage.py index 6b201c307..dc713fc85 100644 --- a/src/label_studio_sdk/types/s3import_storage.py +++ b/src/label_studio_sdk/types/s3import_storage.py @@ -1,140 +1,133 @@ # This file was auto-generated by Fern from our API Definition. 
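The one structural change beyond the base-class swap shows up in the S3 models above and below: the old pydantic_v1.Field(alias="s3_endpoint") becomes a typing_extensions.Annotated type carrying FieldMetadata(alias="s3_endpoint") from the SDK's core.serialization, so the Python attribute stays s3endpoint while the wire name stays s3_endpoint without leaning on version-specific alias plumbing. What that alias accomplishes, sketched with plain pydantic v2 as an illustrative equivalent (not the SDK's FieldMetadata mechanism; the URL is made up):

import typing

import pydantic
import typing_extensions


class S3Example(pydantic.BaseModel):
    model_config = pydantic.ConfigDict(populate_by_name=True)

    # Python attribute s3endpoint, wire name s3_endpoint.
    s3endpoint: typing_extensions.Annotated[typing.Optional[str], pydantic.Field(alias="s3_endpoint")] = None


parsed = S3Example.model_validate({"s3_endpoint": "https://minio.local:9000"})
print(parsed.s3endpoint)                 # https://minio.local:9000
print(parsed.model_dump(by_alias=True))  # {'s3_endpoint': 'https://minio.local:9000'}

The retained v1 Config in these S3 models also drops allow_population_by_field_name and populate_by_name, presumably because alias resolution now flows through FieldMetadata in the SDK's serialization layer rather than through pydantic's own config.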
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import datetime as dt +import pydantic from .s3import_storage_status import S3ImportStorageStatus +import typing_extensions +from ..core.serialization import FieldMetadata +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class S3ImportStorage(pydantic_v1.BaseModel): +class S3ImportStorage(UniversalBaseModel): id: typing.Optional[int] = None type: typing.Optional[str] = None synchronizable: typing.Optional[bool] = None presign: typing.Optional[bool] = None - last_sync: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + last_sync: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last sync finished time """ - last_sync_count: typing.Optional[int] = pydantic_v1.Field(default=None) + last_sync_count: typing.Optional[int] = pydantic.Field(default=None) """ Count of tasks synced last time """ - last_sync_job: typing.Optional[str] = pydantic_v1.Field(default=None) + last_sync_job: typing.Optional[str] = pydantic.Field(default=None) """ Last sync job ID """ status: typing.Optional[S3ImportStorageStatus] = None - traceback: typing.Optional[str] = pydantic_v1.Field(default=None) + traceback: typing.Optional[str] = pydantic.Field(default=None) """ Traceback report for the last failed sync """ - meta: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + meta: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Meta and debug information about storage processes """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage description """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time """ - bucket: typing.Optional[str] = pydantic_v1.Field(default=None) + bucket: typing.Optional[str] = pydantic.Field(default=None) """ S3 bucket name """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ S3 bucket prefix """ - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage regex for filtering objects """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs """ - aws_access_key_id: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_access_key_id: typing.Optional[str] = pydantic.Field(default=None) """ AWS_ACCESS_KEY_ID """ - aws_secret_access_key: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_secret_access_key: typing.Optional[str] = pydantic.Field(default=None) """ AWS_SECRET_ACCESS_KEY """ - aws_session_token: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_session_token: typing.Optional[str] = pydantic.Field(default=None) """ AWS_SESSION_TOKEN """ - aws_sse_kms_key_id: typing.Optional[str] = pydantic_v1.Field(default=None) + aws_sse_kms_key_id: typing.Optional[str] 
= pydantic.Field(default=None) """ AWS SSE KMS Key ID """ - region_name: typing.Optional[str] = pydantic_v1.Field(default=None) + region_name: typing.Optional[str] = pydantic.Field(default=None) """ AWS Region """ - s3endpoint: typing.Optional[str] = pydantic_v1.Field(alias="s3_endpoint", default=None) + s3endpoint: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="s3_endpoint")] = pydantic.Field( + default=None + ) """ S3 Endpoint """ - presign_ttl: typing.Optional[int] = pydantic_v1.Field(default=None) + presign_ttl: typing.Optional[int] = pydantic.Field(default=None) """ Presigned URLs TTL (in minutes) """ - recursive_scan: typing.Optional[bool] = pydantic_v1.Field(default=None) + recursive_scan: typing.Optional[bool] = pydantic.Field(default=None) """ Perform recursive scan over the bucket content """ - project: int = pydantic_v1.Field() + project: int = pydantic.Field() """ A unique integer value identifying this project. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/s3s_export_storage.py b/src/label_studio_sdk/types/s3s_export_storage.py index 89579d331..d7e1d616f 100644 --- a/src/label_studio_sdk/types/s3s_export_storage.py +++ b/src/label_studio_sdk/types/s3s_export_storage.py @@ -1,80 +1,73 @@ # This file was auto-generated by Fern from our API Definition. 
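Another line every deleted v1 Config carried was json_encoders = {dt.datetime: serialize_datetime}; per-class datetime formatting is gone because the shared base model now owns serialization. Under pydantic v2 the equivalent ISO-8601 output is the default, as a quick sketch shows (pydantic v2 assumed; the model and field names are hypothetical):

import datetime as dt

import pydantic


class Stamp(pydantic.BaseModel):
    created_at: dt.datetime


stamp = Stamp(created_at=dt.datetime(2024, 7, 1, 12, 0, tzinfo=dt.timezone.utc))
print(stamp.model_dump_json())  # {"created_at":"2024-07-01T12:00:00Z"}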
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import datetime as dt +import typing_extensions +from ..core.serialization import FieldMetadata +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class S3SExportStorage(pydantic_v1.BaseModel): +class S3SExportStorage(UniversalBaseModel): id: typing.Optional[int] = None - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage description """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time """ - bucket: typing.Optional[str] = pydantic_v1.Field(default=None) + bucket: typing.Optional[str] = pydantic.Field(default=None) """ S3 bucket name """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ S3 bucket prefix """ - external_id: typing.Optional[str] = pydantic_v1.Field(default=None) + external_id: typing.Optional[str] = pydantic.Field(default=None) """ AWS External ID """ - role_arn: typing.Optional[str] = pydantic_v1.Field(default=None) + role_arn: typing.Optional[str] = pydantic.Field(default=None) """ AWS Role ARN """ - region_name: typing.Optional[str] = pydantic_v1.Field(default=None) + region_name: typing.Optional[str] = pydantic.Field(default=None) """ AWS Region """ - s3endpoint: typing.Optional[str] = pydantic_v1.Field(alias="s3_endpoint", default=None) + s3endpoint: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="s3_endpoint")] = pydantic.Field( + default=None + ) """ S3 Endpoint """ - project: int = pydantic_v1.Field() + project: int = pydantic.Field() """ A unique integer value identifying this project. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/s3s_import_storage.py b/src/label_studio_sdk/types/s3s_import_storage.py index 4b7f7b099..1362a231c 100644 --- a/src/label_studio_sdk/types/s3s_import_storage.py +++ b/src/label_studio_sdk/types/s3s_import_storage.py @@ -1,129 +1,122 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import datetime as dt +import pydantic from .s3s_import_storage_status import S3SImportStorageStatus +import typing_extensions +from ..core.serialization import FieldMetadata +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class S3SImportStorage(pydantic_v1.BaseModel): +class S3SImportStorage(UniversalBaseModel): id: typing.Optional[int] = None synchronizable: typing.Optional[bool] = None presign: typing.Optional[bool] = None - last_sync: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + last_sync: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last sync finished time """ - last_sync_count: typing.Optional[int] = pydantic_v1.Field(default=None) + last_sync_count: typing.Optional[int] = pydantic.Field(default=None) """ Count of tasks synced last time """ - last_sync_job: typing.Optional[str] = pydantic_v1.Field(default=None) + last_sync_job: typing.Optional[str] = pydantic.Field(default=None) """ Last sync job ID """ status: typing.Optional[S3SImportStorageStatus] = None - traceback: typing.Optional[str] = pydantic_v1.Field(default=None) + traceback: typing.Optional[str] = pydantic.Field(default=None) """ Traceback report for the last failed sync """ - meta: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + meta: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Meta and debug information about storage processes """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage description """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = 
pydantic.Field(default=None) """ Creation time """ - bucket: typing.Optional[str] = pydantic_v1.Field(default=None) + bucket: typing.Optional[str] = pydantic.Field(default=None) """ S3 bucket name """ - prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + prefix: typing.Optional[str] = pydantic.Field(default=None) """ S3 bucket prefix """ - regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) + regex_filter: typing.Optional[str] = pydantic.Field(default=None) """ Cloud storage regex for filtering objects """ - use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) """ Interpret objects as BLOBs and generate URLs """ - region_name: typing.Optional[str] = pydantic_v1.Field(default=None) + region_name: typing.Optional[str] = pydantic.Field(default=None) """ AWS Region """ - external_id: typing.Optional[str] = pydantic_v1.Field(default=None) + external_id: typing.Optional[str] = pydantic.Field(default=None) """ AWS External ID """ - role_arn: typing.Optional[str] = pydantic_v1.Field(default=None) + role_arn: typing.Optional[str] = pydantic.Field(default=None) """ AWS Role ARN """ - s3endpoint: typing.Optional[str] = pydantic_v1.Field(alias="s3_endpoint", default=None) + s3endpoint: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="s3_endpoint")] = pydantic.Field( + default=None + ) """ S3 Endpoint """ - presign_ttl: typing.Optional[int] = pydantic_v1.Field(default=None) + presign_ttl: typing.Optional[int] = pydantic.Field(default=None) """ Presigned URLs TTL (in minutes) """ - recursive_scan: typing.Optional[bool] = pydantic_v1.Field(default=None) + recursive_scan: typing.Optional[bool] = pydantic.Field(default=None) """ Perform recursive scan over the bucket content """ - project: int = pydantic_v1.Field() + project: int = pydantic.Field() """ A unique integer value identifying this project. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/serialization_option.py b/src/label_studio_sdk/types/serialization_option.py index b39495a23..347950cba 100644 --- a/src/label_studio_sdk/types/serialization_option.py +++ b/src/label_studio_sdk/types/serialization_option.py @@ -1,36 +1,26 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class SerializationOption(pydantic_v1.BaseModel): +class SerializationOption(UniversalBaseModel): """ JSON dict with parameters """ - only_id: typing.Optional[bool] = pydantic_v1.Field(default=None) + only_id: typing.Optional[bool] = pydantic.Field(default=None) """ Include a full json body or IDs only """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/serialization_options.py b/src/label_studio_sdk/types/serialization_options.py index ca33471a7..164de5ce3 100644 --- a/src/label_studio_sdk/types/serialization_options.py +++ b/src/label_studio_sdk/types/serialization_options.py @@ -1,45 +1,35 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .serialization_option import SerializationOption +import pydantic +import typing_extensions +from ..core.serialization import FieldMetadata +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class SerializationOptions(pydantic_v1.BaseModel): +class SerializationOptions(UniversalBaseModel): drafts: typing.Optional[SerializationOption] = None predictions: typing.Optional[SerializationOption] = None - include_annotation_history: typing.Optional[bool] = pydantic_v1.Field(default=None) + include_annotation_history: typing.Optional[bool] = pydantic.Field(default=None) """ Include annotation history """ - annotations_completed_by: typing.Optional[SerializationOption] = pydantic_v1.Field( - alias="annotations__completed_by", default=None - ) - interpolate_key_frames: typing.Optional[bool] = pydantic_v1.Field(default=None) + annotations_completed_by: typing_extensions.Annotated[ + typing.Optional[SerializationOption], FieldMetadata(alias="annotations__completed_by") + ] = None + interpolate_key_frames: typing.Optional[bool] = pydantic.Field(default=None) """ Interpolate video key frames """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/task.py b/src/label_studio_sdk/types/task.py index 171823666..a0965e4fc 100644 --- a/src/label_studio_sdk/types/task.py +++ b/src/label_studio_sdk/types/task.py @@ -1,159 +1,156 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic from .task_annotators_item import TaskAnnotatorsItem +import datetime as dt from .task_comment_authors_item import TaskCommentAuthorsItem +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class Task(pydantic_v1.BaseModel): - id: typing.Optional[int] = pydantic_v1.Field(default=None) +class Task(UniversalBaseModel): + id: typing.Optional[int] = pydantic.Field(default=None) """ Unique ID of the task """ - predictions: typing.Optional[typing.List[typing.Dict[str, typing.Any]]] = pydantic_v1.Field(default=None) + predictions: typing.Optional[typing.List[typing.Dict[str, typing.Optional[typing.Any]]]] = pydantic.Field( + default=None + ) """ Predictions for this task """ - annotations: typing.Optional[typing.List[typing.Dict[str, typing.Any]]] = pydantic_v1.Field(default=None) + annotations: typing.Optional[typing.List[typing.Dict[str, typing.Optional[typing.Any]]]] = pydantic.Field( + default=None + ) """ Annotations for this task """ - drafts: typing.Optional[typing.List[typing.Dict[str, typing.Any]]] = pydantic_v1.Field(default=None) + drafts: typing.Optional[typing.List[typing.Dict[str, typing.Optional[typing.Any]]]] = pydantic.Field(default=None) """ Drafts for this task """ - annotators: typing.Optional[typing.List[TaskAnnotatorsItem]] = pydantic_v1.Field(default=None) + annotators: typing.Optional[typing.List[TaskAnnotatorsItem]] = pydantic.Field(default=None) """ List of annotators for this task """ - inner_id: typing.Optional[int] = pydantic_v1.Field(default=None) + inner_id: typing.Optional[int] = pydantic.Field(default=None) """ Inner ID of the task """ - cancelled_annotations: typing.Optional[int] = pydantic_v1.Field(default=None) + cancelled_annotations: typing.Optional[int] = pydantic.Field(default=None) """ Number of cancelled annotations for this task """ - total_annotations: typing.Optional[int] = pydantic_v1.Field(default=None) + total_annotations: typing.Optional[int] = pydantic.Field(default=None) """ Total number of annotations for this task """ - total_predictions: typing.Optional[int] = pydantic_v1.Field(default=None) + total_predictions: typing.Optional[int] = pydantic.Field(default=None) """ Total number of predictions for this task """ - completed_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + completed_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Completion time of the task """ - file_upload: typing.Optional[str] = pydantic_v1.Field(default=None) + file_upload: typing.Optional[str] = pydantic.Field(default=None) """ File upload ID for this task """ - storage_filename: typing.Optional[str] = pydantic_v1.Field(default=None) + storage_filename: typing.Optional[str] = pydantic.Field(default=None) """ Storage filename for this task """ - avg_lead_time: typing.Optional[float] = pydantic_v1.Field(default=None) + avg_lead_time: typing.Optional[float] = pydantic.Field(default=None) """ Average lead time for this task """ - draft_exists: typing.Optional[bool] = pydantic_v1.Field(default=None) + draft_exists: typing.Optional[bool] = pydantic.Field(default=None) """ Whether a draft exists for this task or not """ - updated_by: typing.Optional[typing.List[typing.Dict[str, typing.Any]]] = pydantic_v1.Field(default=None) + updated_by: typing.Optional[typing.List[typing.Dict[str, 
typing.Optional[typing.Any]]]] = pydantic.Field( + default=None + ) """ List of annotators' info who updated this task """ - data: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + data: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Data of the task """ - meta: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + meta: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Meta information of the task """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time of the task """ - updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + updated_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last updated time of the task """ - is_labeled: typing.Optional[bool] = pydantic_v1.Field(default=None) + is_labeled: typing.Optional[bool] = pydantic.Field(default=None) """ Whether the task is labeled or not """ - overlap: typing.Optional[float] = pydantic_v1.Field(default=None) + overlap: typing.Optional[float] = pydantic.Field(default=None) """ Overlap for the task """ - comment_count: typing.Optional[int] = pydantic_v1.Field(default=None) + comment_count: typing.Optional[int] = pydantic.Field(default=None) """ Number of comments for this task """ - unresolved_comment_count: typing.Optional[int] = pydantic_v1.Field(default=None) + unresolved_comment_count: typing.Optional[int] = pydantic.Field(default=None) """ Number of unresolved comments for this task """ - last_comment_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + last_comment_updated_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last updated time of the comments for this task """ - project: typing.Optional[int] = pydantic_v1.Field(default=None) + project: typing.Optional[int] = pydantic.Field(default=None) """ Project ID for this task """ - comment_authors: typing.Optional[typing.List[TaskCommentAuthorsItem]] = pydantic_v1.Field(default=None) + comment_authors: typing.Optional[typing.List[TaskCommentAuthorsItem]] = pydantic.Field(default=None) """ List of comment authors for this task """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/task_annotators_item.py b/src/label_studio_sdk/types/task_annotators_item.py index 5dddd4cdf..d08390625 100644 --- a/src/label_studio_sdk/types/task_annotators_item.py +++ 
b/src/label_studio_sdk/types/task_annotators_item.py @@ -2,4 +2,4 @@ import typing -TaskAnnotatorsItem = typing.Union[int, typing.Dict[str, typing.Any]] +TaskAnnotatorsItem = typing.Union[int, typing.Dict[str, typing.Optional[typing.Any]]] diff --git a/src/label_studio_sdk/types/task_comment_authors_item.py b/src/label_studio_sdk/types/task_comment_authors_item.py index 220dcd309..861c7ed89 100644 --- a/src/label_studio_sdk/types/task_comment_authors_item.py +++ b/src/label_studio_sdk/types/task_comment_authors_item.py @@ -2,4 +2,4 @@ import typing -TaskCommentAuthorsItem = typing.Union[int, typing.Dict[str, typing.Any]] +TaskCommentAuthorsItem = typing.Union[int, typing.Dict[str, typing.Optional[typing.Any]]] diff --git a/src/label_studio_sdk/types/task_filter_options.py b/src/label_studio_sdk/types/task_filter_options.py index f328532d1..04b817727 100644 --- a/src/label_studio_sdk/types/task_filter_options.py +++ b/src/label_studio_sdk/types/task_filter_options.py @@ -1,49 +1,39 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class TaskFilterOptions(pydantic_v1.BaseModel): - view: typing.Optional[int] = pydantic_v1.Field(default=None) +class TaskFilterOptions(UniversalBaseModel): + view: typing.Optional[int] = pydantic.Field(default=None) """ Apply filters from the view ID (a tab from the Data Manager) """ - skipped: typing.Optional[str] = pydantic_v1.Field(default=None) + skipped: typing.Optional[str] = pydantic.Field(default=None) """ `only` - include all tasks with skipped annotations
    `exclude` - exclude all tasks with skipped annotations """ - finished: typing.Optional[str] = pydantic_v1.Field(default=None) + finished: typing.Optional[str] = pydantic.Field(default=None) """ `only` - include all finished tasks (is_labeled = true)
    `exclude` - exclude all finished tasks """ - annotated: typing.Optional[str] = pydantic_v1.Field(default=None) + annotated: typing.Optional[str] = pydantic.Field(default=None) """ `only` - include all tasks with at least one not skipped annotation
    `exclude` - exclude all tasks with at least one not skipped annotation """ only_with_annotations: typing.Optional[bool] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/user_simple.py b/src/label_studio_sdk/types/user_simple.py index 9f105fded..ff7b3e16c 100644 --- a/src/label_studio_sdk/types/user_simple.py +++ b/src/label_studio_sdk/types/user_simple.py @@ -1,13 +1,12 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class UserSimple(pydantic_v1.BaseModel): +class UserSimple(UniversalBaseModel): """ Project owner """ @@ -18,20 +17,11 @@ class UserSimple(pydantic_v1.BaseModel): email: typing.Optional[str] = None avatar: typing.Optional[str] = None - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/view.py b/src/label_studio_sdk/types/view.py index badd52018..1713377c4 100644 --- a/src/label_studio_sdk/types/view.py +++ b/src/label_studio_sdk/types/view.py @@ -1,55 +1,45 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .filter_group import FilterGroup +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class View(pydantic_v1.BaseModel): +class View(UniversalBaseModel): id: typing.Optional[int] = None filter_group: typing.Optional[FilterGroup] = None - data: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + data: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Custom view data """ - ordering: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + ordering: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Ordering parameters """ - selected_items: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + selected_items: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Selected items """ - user: typing.Optional[int] = pydantic_v1.Field(default=None) + user: typing.Optional[int] = pydantic.Field(default=None) """ User who made this view """ - project: int = pydantic_v1.Field() + project: int = pydantic.Field() """ Project ID """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/webhook.py b/src/label_studio_sdk/types/webhook.py index 93e8f0a1d..569486be6 100644 --- a/src/label_studio_sdk/types/webhook.py +++ b/src/label_studio_sdk/types/webhook.py @@ -1,67 +1,58 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic from .webhook_actions_item import WebhookActionsItem +import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class Webhook(pydantic_v1.BaseModel): +class Webhook(UniversalBaseModel): id: typing.Optional[int] = None organization: typing.Optional[int] = None project: typing.Optional[int] = None - url: str = pydantic_v1.Field() + url: str = pydantic.Field() """ URL of webhook """ - send_payload: typing.Optional[bool] = pydantic_v1.Field(default=None) + send_payload: typing.Optional[bool] = pydantic.Field(default=None) """ If value is False send only action """ - send_for_all_actions: typing.Optional[bool] = pydantic_v1.Field(default=None) + send_for_all_actions: typing.Optional[bool] = pydantic.Field(default=None) """ If value is False - used only for actions from WebhookAction """ - headers: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + headers: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Key Value Json of headers """ - is_active: typing.Optional[bool] = pydantic_v1.Field(default=None) + is_active: typing.Optional[bool] = pydantic.Field(default=None) """ If value is False the webhook is disabled """ actions: typing.Optional[typing.List[WebhookActionsItem]] = None - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time """ - updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + updated_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last update time """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/webhook_serializer_for_update.py b/src/label_studio_sdk/types/webhook_serializer_for_update.py index 7e0b13258..b257c3910 100644 --- a/src/label_studio_sdk/types/webhook_serializer_for_update.py +++ b/src/label_studio_sdk/types/webhook_serializer_for_update.py @@ -1,67 +1,58 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic from .webhook_serializer_for_update_actions_item import WebhookSerializerForUpdateActionsItem +import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class WebhookSerializerForUpdate(pydantic_v1.BaseModel): +class WebhookSerializerForUpdate(UniversalBaseModel): id: typing.Optional[int] = None organization: typing.Optional[int] = None project: typing.Optional[int] = None - url: str = pydantic_v1.Field() + url: str = pydantic.Field() """ URL of webhook """ - send_payload: typing.Optional[bool] = pydantic_v1.Field(default=None) + send_payload: typing.Optional[bool] = pydantic.Field(default=None) """ If value is False send only action """ - send_for_all_actions: typing.Optional[bool] = pydantic_v1.Field(default=None) + send_for_all_actions: typing.Optional[bool] = pydantic.Field(default=None) """ If value is False - used only for actions from WebhookAction """ - headers: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + headers: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ Key Value Json of headers """ - is_active: typing.Optional[bool] = pydantic_v1.Field(default=None) + is_active: typing.Optional[bool] = pydantic.Field(default=None) """ If value is False the webhook is disabled """ actions: typing.Optional[typing.List[WebhookSerializerForUpdateActionsItem]] = None - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time """ - updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + updated_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last update time """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/workspace.py b/src/label_studio_sdk/types/workspace.py index 029f32691..e0adc186c 100644 --- a/src/label_studio_sdk/types/workspace.py +++ b/src/label_studio_sdk/types/workspace.py @@ -1,77 +1,68 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2 -class Workspace(pydantic_v1.BaseModel): - id: typing.Optional[int] = pydantic_v1.Field(default=None) +class Workspace(UniversalBaseModel): + id: typing.Optional[int] = pydantic.Field(default=None) """ Unique ID of the workspace """ - title: typing.Optional[str] = pydantic_v1.Field(default=None) + title: typing.Optional[str] = pydantic.Field(default=None) """ Workspace title """ - description: typing.Optional[str] = pydantic_v1.Field(default=None) + description: typing.Optional[str] = pydantic.Field(default=None) """ Workspace description """ - is_public: typing.Optional[bool] = pydantic_v1.Field(default=None) + is_public: typing.Optional[bool] = pydantic.Field(default=None) """ Whether the workspace is public or not """ - is_personal: typing.Optional[bool] = pydantic_v1.Field(default=None) + is_personal: typing.Optional[bool] = pydantic.Field(default=None) """ Whether the workspace is personal or not """ - is_archived: typing.Optional[bool] = pydantic_v1.Field(default=None) + is_archived: typing.Optional[bool] = pydantic.Field(default=None) """ Whether the workspace is archived or not """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Creation time of the workspace """ - updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + updated_at: typing.Optional[dt.datetime] = pydantic.Field(default=None) """ Last updated time of the workspace """ - created_by: typing.Optional[int] = pydantic_v1.Field(default=None) + created_by: typing.Optional[int] = pydantic.Field(default=None) """ User ID of the workspace creator """ - color: typing.Optional[str] = pydantic_v1.Field(default=None) + color: typing.Optional[str] = pydantic.Field(default=None) """ Workspace color """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/users/client.py b/src/label_studio_sdk/users/client.py index 2b9b21999..62618163a 100644 --- a/src/label_studio_sdk/users/client.py +++ b/src/label_studio_sdk/users/client.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. 
import typing +from ..core.client_wrapper import SyncClientWrapper +from ..core.request_options import RequestOptions +from .types.users_reset_token_response import UsersResetTokenResponse +from ..core.pydantic_utilities import parse_obj_as from json.decoder import JSONDecodeError - from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import pydantic_v1 -from ..core.request_options import RequestOptions -from ..types.base_user import BaseUser from .types.users_get_token_response import UsersGetTokenResponse -from .types.users_reset_token_response import UsersResetTokenResponse +from ..types.base_user import BaseUser +from ..core.jsonable_encoder import jsonable_encoder +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -36,7 +36,7 @@ def reset_token(self, *, request_options: typing.Optional[RequestOptions] = None Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -44,11 +44,19 @@ def reset_token(self, *, request_options: typing.Optional[RequestOptions] = None client.users.reset_token() """ _response = self._client_wrapper.httpx_client.request( - "api/current-user/reset-token/", method="POST", request_options=request_options + "api/current-user/reset-token/", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(UsersResetTokenResponse, _response.json()) # type: ignore + return typing.cast( + UsersResetTokenResponse, + parse_obj_as( + type_=UsersResetTokenResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -70,7 +78,7 @@ def get_token(self, *, request_options: typing.Optional[RequestOptions] = None) Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -78,11 +86,19 @@ def get_token(self, *, request_options: typing.Optional[RequestOptions] = None) client.users.get_token() """ _response = self._client_wrapper.httpx_client.request( - "api/current-user/token", method="GET", request_options=request_options + "api/current-user/token", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(UsersGetTokenResponse, _response.json()) # type: ignore + return typing.cast( + UsersGetTokenResponse, + parse_obj_as( + type_=UsersGetTokenResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -104,7 +120,7 @@ def whoami(self, *, request_options: typing.Optional[RequestOptions] = None) -> Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -112,11 +128,19 @@ def whoami(self, *, request_options: typing.Optional[RequestOptions] = None) -> client.users.whoami() """ _response = self._client_wrapper.httpx_client.request( - "api/current-user/whoami", method="GET", request_options=request_options + "api/current-user/whoami", + 
method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore + return typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -124,6 +148,7 @@ def whoami(self, *, request_options: typing.Optional[RequestOptions] = None) -> def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[BaseUser]: """ + List all users in your Label Studio organization. Parameters @@ -138,7 +163,7 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -146,11 +171,19 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty client.users.list() """ _response = self._client_wrapper.httpx_client.request( - "api/users/", method="GET", request_options=request_options + "api/users/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[BaseUser], _response.json()) # type: ignore + return typing.cast( + typing.List[BaseUser], + parse_obj_as( + type_=typing.List[BaseUser], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -171,6 +204,7 @@ def create( request_options: typing.Optional[RequestOptions] = None, ) -> BaseUser: """ + Create a user in Label Studio. Parameters @@ -212,7 +246,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -233,12 +267,21 @@ def create( "phone": phone, "allow_newsletters": allow_newsletters, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore + return typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -246,6 +289,7 @@ def create( def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> BaseUser: """ + Get info about a specific Label Studio user. You will need to provide their user ID. You can find a list of all user IDs using [List users](list). 
@@ -264,7 +308,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -274,11 +318,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", method="GET", request_options=request_options + f"api/users/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore + return typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -286,6 +338,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific Label Studio user. You will need to provide their user ID. You can find a list of all user IDs using [List users](list). @@ -306,7 +359,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -316,7 +369,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options + f"api/users/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -342,6 +397,7 @@ def update( request_options: typing.Optional[RequestOptions] = None, ) -> BaseUser: """ + Update details for a specific Label Studio user, such as their name or contact information. You will need to provide their user ID. You can find a list of all user IDs using [List users](list). 
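The docstring examples above all change their import path: the client now comes from the package root rather than label_studio_sdk.client. Collected into one runnable snippet (the API key is a placeholder):

from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",  # placeholder credential
)
client.users.update(
    id=1,  # user ID, e.g. from client.users.list()
)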
@@ -388,7 +444,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -411,12 +467,21 @@ def update( "phone": phone, "allow_newsletters": allow_newsletters, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore + return typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -443,19 +508,35 @@ async def reset_token(self, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.users.reset_token() + + + async def main() -> None: + await client.users.reset_token() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/current-user/reset-token/", method="POST", request_options=request_options + "api/current-user/reset-token/", + method="POST", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(UsersResetTokenResponse, _response.json()) # type: ignore + return typing.cast( + UsersResetTokenResponse, + parse_obj_as( + type_=UsersResetTokenResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -477,19 +558,35 @@ async def get_token(self, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.users.get_token() + + + async def main() -> None: + await client.users.get_token() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/current-user/token", method="GET", request_options=request_options + "api/current-user/token", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(UsersGetTokenResponse, _response.json()) # type: ignore + return typing.cast( + UsersGetTokenResponse, + parse_obj_as( + type_=UsersGetTokenResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -511,19 +608,35 @@ async def whoami(self, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.users.whoami() + + + async def main() -> None: + await client.users.whoami() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/current-user/whoami", method="GET", request_options=request_options + "api/current-user/whoami", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: 
- return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore + return typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -531,6 +644,7 @@ async def whoami(self, *, request_options: typing.Optional[RequestOptions] = Non async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[BaseUser]: """ + List all users in your Label Studio organization. Parameters @@ -545,19 +659,35 @@ async def list(self, *, request_options: typing.Optional[RequestOptions] = None) Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.users.list() + + + async def main() -> None: + await client.users.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/users/", method="GET", request_options=request_options + "api/users/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[BaseUser], _response.json()) # type: ignore + return typing.cast( + typing.List[BaseUser], + parse_obj_as( + type_=typing.List[BaseUser], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -578,6 +708,7 @@ async def create( request_options: typing.Optional[RequestOptions] = None, ) -> BaseUser: """ + Create a user in Label Studio. Parameters @@ -619,12 +750,20 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.users.create() + + + async def main() -> None: + await client.users.create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/users/", @@ -640,12 +779,21 @@ async def create( "phone": phone, "allow_newsletters": allow_newsletters, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore + return typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -653,6 +801,7 @@ async def create( async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> BaseUser: """ + Get info about a specific Label Studio user. You will need to provide their user ID. You can find a list of all user IDs using [List users](list). 
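The async docstring examples change shape as well: instead of a bare await, they now wrap the call in an explicit main() coroutine driven by asyncio.run, as in this snippet assembled from the whoami example in this diff:

import asyncio

from label_studio_sdk import AsyncLabelStudio

client = AsyncLabelStudio(
    api_key="YOUR_API_KEY",  # placeholder credential
)


async def main() -> None:
    await client.users.whoami()


asyncio.run(main())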
@@ -671,21 +820,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.users.get( - id=1, - ) + + + async def main() -> None: + await client.users.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", method="GET", request_options=request_options + f"api/users/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore + return typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -693,6 +858,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific Label Studio user. You will need to provide their user ID. You can find a list of all user IDs using [List users](list). @@ -713,17 +879,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.users.delete( - id=1, - ) + + + async def main() -> None: + await client.users.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options + f"api/users/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -749,6 +925,7 @@ async def update( request_options: typing.Optional[RequestOptions] = None, ) -> BaseUser: """ + Update details for a specific Label Studio user, such as their name or contact information. You will need to provide their user ID. You can find a list of all user IDs using [List users](list). 
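Stepping back to the model files earlier in this diff: every generated model drops its hand-rolled json/dict overrides in favor of a version-conditional config. A minimal sketch of that pattern, with ExampleModel as a hypothetical name; the config bodies are copied from this diff:

import typing

import pydantic

from label_studio_sdk.core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel


class ExampleModel(UniversalBaseModel):  # hypothetical model for illustration
    title: typing.Optional[str] = pydantic.Field(default=None)

    if IS_PYDANTIC_V2:
        # Pydantic v2: configuration via ConfigDict
        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore
    else:

        class Config:  # Pydantic v1: legacy nested Config class
            frozen = True
            smart_union = True
            extra = pydantic.Extra.allow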
@@ -795,14 +972,22 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.users.update( - id=1, - ) + + + async def main() -> None: + await client.users.update( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/users/{jsonable_encoder(id)}/", @@ -818,12 +1003,21 @@ async def update( "phone": phone, "allow_newsletters": allow_newsletters, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore + return typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/users/types/users_get_token_response.py b/src/label_studio_sdk/users/types/users_get_token_response.py index ad629f0af..815096b07 100644 --- a/src/label_studio_sdk/users/types/users_get_token_response.py +++ b/src/label_studio_sdk/users/types/users_get_token_response.py @@ -1,36 +1,26 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2 -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class UsersGetTokenResponse(pydantic_v1.BaseModel): +class UsersGetTokenResponse(UniversalBaseModel): """ User token """ - detail: typing.Optional[str] = pydantic_v1.Field(default=None) + detail: typing.Optional[str] = pydantic.Field(default=None) """ Token """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/users/types/users_reset_token_response.py b/src/label_studio_sdk/users/types/users_reset_token_response.py index d588c5756..0bdf5a8fe 100644 --- a/src/label_studio_sdk/users/types/users_reset_token_response.py +++ b/src/label_studio_sdk/users/types/users_reset_token_response.py @@ -1,36 +1,26 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2 -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class UsersResetTokenResponse(pydantic_v1.BaseModel): +class UsersResetTokenResponse(UniversalBaseModel): """ User token """ - token: typing.Optional[str] = pydantic_v1.Field(default=None) + token: typing.Optional[str] = pydantic.Field(default=None) """ Token """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/version.py b/src/label_studio_sdk/version.py index 05c70bb97..c62478388 100644 --- a/src/label_studio_sdk/version.py +++ b/src/label_studio_sdk/version.py @@ -1,4 +1,3 @@ - from importlib import metadata __version__ = metadata.version("label-studio-sdk") diff --git a/src/label_studio_sdk/views/client.py b/src/label_studio_sdk/views/client.py index 15ed4cd14..a544068de 100644 --- a/src/label_studio_sdk/views/client.py +++ b/src/label_studio_sdk/views/client.py @@ -1,16 +1,17 @@ # This file was auto-generated by Fern from our API Definition. import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import pydantic_v1 +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions from ..types.view import View +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError from .types.views_create_request_data import ViewsCreateRequestData +from ..core.serialization import convert_and_respect_annotation_metadata +from ..core.jsonable_encoder import jsonable_encoder from .types.views_update_request_data import ViewsUpdateRequestData +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -24,6 +25,7 @@ def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[View]: """ + List all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize which tasks and information appears. You will need to provide the project ID. 
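Related background for the convert_and_respect_annotation_metadata import above: aliased fields, such as s3endpoint/s3_endpoint earlier in this diff, now carry their wire name in an Annotated marker instead of Field(alias=...). A sketch under the same imports, with EndpointExample as a hypothetical model name:

import typing

import pydantic
import typing_extensions

from label_studio_sdk.core.pydantic_utilities import UniversalBaseModel
from label_studio_sdk.core.serialization import FieldMetadata


class EndpointExample(UniversalBaseModel):  # hypothetical model for illustration
    # Serialized as "s3_endpoint" on the wire, per the FieldMetadata alias.
    s3endpoint: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="s3_endpoint")] = pydantic.Field(
        default=None
    )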
You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). @@ -43,7 +45,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -51,11 +53,22 @@ def list( client.views.list() """ _response = self._client_wrapper.httpx_client.request( - "api/dm/views/", method="GET", params={"project": project}, request_options=request_options + "api/dm/views/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[View], _response.json()) # type: ignore + return typing.cast( + typing.List[View], + parse_obj_as( + type_=typing.List[View], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -69,6 +82,7 @@ def create( request_options: typing.Optional[RequestOptions] = None, ) -> View: """ + Create a new Data Manager view for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks and information appears. You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). @@ -91,7 +105,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -101,13 +115,27 @@ def create( _response = self._client_wrapper.httpx_client.request( "api/dm/views/", method="POST", - json={"data": data, "project": project}, + json={ + "data": convert_and_respect_annotation_metadata( + object_=data, annotation=ViewsCreateRequestData, direction="write" + ), + "project": project, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(View, _response.json()) # type: ignore + return typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -115,6 +143,7 @@ def create( def delete_all(self, *, project: int, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks appear. You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). 
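For readers tracking the import change: the calls themselves are unchanged; only the top-level import moves. A minimal sync sketch, assuming a reachable Label Studio instance, a placeholder project ID, and that `data` accepts a plain dict in the documented serializer shape:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Enumerate the existing Data Manager tabs for a project...
views = client.views.list(project=1)

# ...then add a tab that keeps only tasks with an ID above 100.
view = client.views.create(
    project=1,
    data={
        "filters": {
            "conjunction": "and",
            "items": [
                {
                    "filter": "filter:tasks:id",
                    "operator": "greater",
                    "type": "Number",
                    "value": 100,
                }
            ],
        }
    },
)
```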
@@ -132,7 +161,7 @@ def delete_all(self, *, project: int, request_options: typing.Optional[RequestOp Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -144,7 +173,12 @@ def delete_all(self, *, project: int, request_options: typing.Optional[RequestOp _response = self._client_wrapper.httpx_client.request( "api/dm/views/reset/", method="DELETE", - json={"project": project}, + json={ + "project": project, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -158,6 +192,7 @@ def delete_all(self, *, project: int, request_options: typing.Optional[RequestOp def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> View: """ + Get the details about a specific Data Manager view (tab). You will need to supply the view ID. You can find this using [List views](list). Parameters @@ -175,7 +210,7 @@ def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -185,11 +220,19 @@ def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", method="GET", request_options=request_options + f"api/dm/views/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(View, _response.json()) # type: ignore + return typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -213,7 +256,7 @@ def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -223,7 +266,9 @@ def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options + f"api/dm/views/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -242,6 +287,7 @@ def update( request_options: typing.Optional[RequestOptions] = None, ) -> View: """ + You can update a specific Data Manager view (tab) with additional filters and other customizations. You will need to supply the view ID. You can find this using [List views](list). 
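Correspondingly, `get` and `update` key off the view ID returned by `list`. A short sketch; the ID is a placeholder, and the ordering field name is an assumption following the docstring's `tasks:created_at` convention:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

view = client.views.get(id="123")  # view IDs are passed as strings in this client

# PATCH the same tab: order by newest tasks first (a leading minus reverses the sort).
client.views.update(
    id="123",
    data={"ordering": ["-tasks:created_at"]},
)
```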
Parameters @@ -265,7 +311,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -277,13 +323,27 @@ def update( _response = self._client_wrapper.httpx_client.request( f"api/dm/views/{jsonable_encoder(id)}/", method="PATCH", - json={"data": data, "project": project}, + json={ + "data": convert_and_respect_annotation_metadata( + object_=data, annotation=ViewsUpdateRequestData, direction="write" + ), + "project": project, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(View, _response.json()) # type: ignore + return typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -298,6 +358,7 @@ async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[View]: """ + List all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize which tasks and information appears. You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). @@ -317,19 +378,38 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.views.list() + + + async def main() -> None: + await client.views.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/dm/views/", method="GET", params={"project": project}, request_options=request_options + "api/dm/views/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[View], _response.json()) # type: ignore + return typing.cast( + typing.List[View], + parse_obj_as( + type_=typing.List[View], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -343,6 +423,7 @@ async def create( request_options: typing.Optional[RequestOptions] = None, ) -> View: """ + Create a new Data Manager view for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks and information appears. You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). 
@@ -365,23 +446,45 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.views.create() + + + async def main() -> None: + await client.views.create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/dm/views/", method="POST", - json={"data": data, "project": project}, + json={ + "data": convert_and_respect_annotation_metadata( + object_=data, annotation=ViewsCreateRequestData, direction="write" + ), + "project": project, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(View, _response.json()) # type: ignore + return typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -389,6 +492,7 @@ async def create( async def delete_all(self, *, project: int, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks appear. You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). @@ -406,19 +510,32 @@ async def delete_all(self, *, project: int, request_options: typing.Optional[Req Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.views.delete_all( - project=1, - ) + + + async def main() -> None: + await client.views.delete_all( + project=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/dm/views/reset/", method="DELETE", - json={"project": project}, + json={ + "project": project, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -432,6 +549,7 @@ async def delete_all(self, *, project: int, request_options: typing.Optional[Req async def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> View: """ + Get the details about a specific Data Manager view (tab). You will need to supply the view ID. You can find this using [List views](list). 
Parameters @@ -449,21 +567,37 @@ async def get(self, id: str, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.views.get( - id="id", - ) + + + async def main() -> None: + await client.views.get( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", method="GET", request_options=request_options + f"api/dm/views/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(View, _response.json()) # type: ignore + return typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -487,17 +621,27 @@ async def delete(self, id: str, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.views.delete( - id="id", - ) + + + async def main() -> None: + await client.views.delete( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options + f"api/dm/views/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -516,6 +660,7 @@ async def update( request_options: typing.Optional[RequestOptions] = None, ) -> View: """ + You can update a specific Data Manager view (tab) with additional filters and other customizations. You will need to supply the view ID. You can find this using [List views](list). 
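All of the regenerated async examples share one shape: the `await` calls move inside a coroutine that `asyncio.run` drives, rather than a bare top-level `await`. One coroutine can of course batch several calls; a sketch, assuming `View` exposes an `id` attribute:

```python
import asyncio

from label_studio_sdk import AsyncLabelStudio

client = AsyncLabelStudio(api_key="YOUR_API_KEY")


async def main() -> None:
    # Placeholder project ID; both the list call and the prints run on one event loop.
    views = await client.views.list(project=1)
    for view in views:
        print(view.id)


asyncio.run(main())
```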
Parameters @@ -539,25 +684,47 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.views.update( - id="id", - ) + + + async def main() -> None: + await client.views.update( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/dm/views/{jsonable_encoder(id)}/", method="PATCH", - json={"data": data, "project": project}, + json={ + "data": convert_and_respect_annotation_metadata( + object_=data, annotation=ViewsUpdateRequestData, direction="write" + ), + "project": project, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(View, _response.json()) # type: ignore + return typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/views/types/views_create_request_data.py b/src/label_studio_sdk/views/types/views_create_request_data.py index fcc986c17..e5a689a01 100644 --- a/src/label_studio_sdk/views/types/views_create_request_data.py +++ b/src/label_studio_sdk/views/types/views_create_request_data.py @@ -1,43 +1,33 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel import typing - -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .views_create_request_data_filters import ViewsCreateRequestDataFilters +import pydantic from .views_create_request_data_ordering_item import ViewsCreateRequestDataOrderingItem +from ...core.pydantic_utilities import IS_PYDANTIC_V2 -class ViewsCreateRequestData(pydantic_v1.BaseModel): +class ViewsCreateRequestData(UniversalBaseModel): """ Custom view data """ - filters: typing.Optional[ViewsCreateRequestDataFilters] = pydantic_v1.Field(default=None) + filters: typing.Optional[ViewsCreateRequestDataFilters] = pydantic.Field(default=None) """ Filters to apply on tasks. You can use [the helper class `Filters` from this page](https://labelstud.io/sdk/data_manager.html) to create Data Manager Filters.
    Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` """ - ordering: typing.Optional[typing.List[ViewsCreateRequestDataOrderingItem]] = pydantic_v1.Field(default=None) + ordering: typing.Optional[typing.List[ViewsCreateRequestDataOrderingItem]] = pydantic.Field(default=None) """ List of fields to order by. Fields are similar to filters but without the `filter:` prefix. To reverse the order, add a minus sign before the field name, e.g. `-tasks:created_at`. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/views/types/views_create_request_data_filters.py b/src/label_studio_sdk/views/types/views_create_request_data_filters.py index 3777b2185..db91d708f 100644 --- a/src/label_studio_sdk/views/types/views_create_request_data_filters.py +++ b/src/label_studio_sdk/views/types/views_create_request_data_filters.py @@ -1,43 +1,33 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt -import typing - -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from ...core.pydantic_utilities import UniversalBaseModel from .views_create_request_data_filters_conjunction import ViewsCreateRequestDataFiltersConjunction +import pydantic +import typing from .views_create_request_data_filters_items_item import ViewsCreateRequestDataFiltersItemsItem +from ...core.pydantic_utilities import IS_PYDANTIC_V2 -class ViewsCreateRequestDataFilters(pydantic_v1.BaseModel): +class ViewsCreateRequestDataFilters(UniversalBaseModel): """ Filters to apply on tasks. You can use [the helper class `Filters` from this page](https://labelstud.io/sdk/data_manager.html) to create Data Manager Filters.
    Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` """ - conjunction: ViewsCreateRequestDataFiltersConjunction = pydantic_v1.Field() + conjunction: ViewsCreateRequestDataFiltersConjunction = pydantic.Field() """ Logical conjunction for the filters. This conjunction (either "or" or "and") will be applied to all items in the filters list. It is not possible to combine "or" and "and" within one list of filters. All filters will be either combined with "or" or with "and", but not a mix of both. """ - items: typing.List[ViewsCreateRequestDataFiltersItemsItem] = pydantic_v1.Field() + items: typing.List[ViewsCreateRequestDataFiltersItemsItem] = pydantic.Field() """ List of filter items """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item.py b/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item.py index d88d6065a..2bbf8022f 100644 --- a/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item.py +++ b/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item.py @@ -1,50 +1,40 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt -import typing - -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from ...core.pydantic_utilities import UniversalBaseModel from .views_create_request_data_filters_items_item_filter import ViewsCreateRequestDataFiltersItemsItemFilter +import pydantic from .views_create_request_data_filters_items_item_operator import ViewsCreateRequestDataFiltersItemsItemOperator from .views_create_request_data_filters_items_item_value import ViewsCreateRequestDataFiltersItemsItemValue +from ...core.pydantic_utilities import IS_PYDANTIC_V2 +import typing -class ViewsCreateRequestDataFiltersItemsItem(pydantic_v1.BaseModel): - filter: ViewsCreateRequestDataFiltersItemsItemFilter = pydantic_v1.Field() +class ViewsCreateRequestDataFiltersItemsItem(UniversalBaseModel): + filter: ViewsCreateRequestDataFiltersItemsItemFilter = pydantic.Field() """ Filter identifier, it should start with `filter:tasks:` prefix, e.g. `filter:tasks:agreement`. For `task.data` fields it may look like `filter:tasks:data.field_name`. If you need more info about columns, check the [Get data manager columns](#tag/Data-Manager/operation/api_dm_columns_list) API endpoint. Possible values:
  • `filter:tasks:agreement`
    (Number) Agreement for annotation results for a specific task (Enterprise only)

  • `filter:tasks:annotations_results`
    (String) Annotation results for the tasks

  • `filter:tasks:annotators`
    (List) Annotators that completed the task (Community). Can include assigned annotators (Enterprise only). Important note: the filter `type` should be List, but the filter `value` is integer

  • `filter:tasks:cancelled_annotations`
    (Number) Number of cancelled or skipped annotations for the task

  • `filter:tasks:comments`
    (Number) Number of comments in a task

  • `filter:tasks:completed_at`
    (Datetime) Time when a task was fully annotated

  • `filter:tasks:created_at`
    (Datetime) Time the task was created at

  • `filter:tasks:file_upload`
    (String) Name of the file uploaded to create the tasks

  • `filter:tasks:ground_truth`
    (Boolean) Ground truth status of the tasks

  • `filter:tasks:id`
    (Number) Task ID

  • `filter:tasks:inner_id`
    (Number) Task Inner ID, it starts from 1 for all projects

  • `filter:tasks:predictions_model_versions`
    (String) Model version used for the predictions

  • `filter:tasks:predictions_results`
    (String) Prediction results for the tasks

  • `filter:tasks:predictions_score`
    (Number) Prediction score for the task

  • `filter:tasks:reviewed`
    (Boolean) Whether the tasks have been reviewed (Enterprise only)

  • `filter:tasks:reviewers`
    (String) Reviewers that reviewed the task, or assigned reviewers (Enterprise only). Important note: the filter `type` should be List, but the filter `value` is integer

  • `filter:tasks:reviews_accepted`
    (Number) Number of annotations accepted for a task in review (Enterprise only)

  • `filter:tasks:reviews_rejected`
    (Number) Number of annotations rejected for a task in review (Enterprise only)

  • `filter:tasks:total_annotations`
    (Number) Total number of annotations on a task

  • `filter:tasks:total_predictions`
    (Number) Total number of predictions for the task

  • `filter:tasks:unresolved_comment_count`
    (Number) Number of unresolved comments in a task

  • `filter:tasks:updated_at`
    (Datetime) Time the task was updated at (e.g. new annotation was created, review added, etc)
  """ - operator: ViewsCreateRequestDataFiltersItemsItemOperator = pydantic_v1.Field() + operator: ViewsCreateRequestDataFiltersItemsItemOperator = pydantic.Field() """ Filter operator. Possible values:
  • `contains`
    Contains

  • `ends_with`
    Ends with

  • `equal`
    Equal to

  • `exists`
    Exists

  • `greater`
    Greater than

  • `greater_or_equal`
    Greater than or equal to

  • `in`
    Is between min and max values, so the filter `value` should be e.g. `{"min": 1, "max": 7}`

  • `less`
    Less than

  • `less_or_equal`
    Less than or equal to

  • `not_contains`
    Does not contain

  • `not_equal`
    Not equal to

  • `not_exists`
    Does not exist

  • `not_in`
    Is not between min and max values, so the filter `value` should be e.g. `{"min": 1, "max": 7}`

  • `starts_with`
    Starts with
  """ - type: str = pydantic_v1.Field() + type: str = pydantic.Field() """ Type of the filter value. Possible values:
  • `Boolean`
    Boolean

  • `Datetime`
    Datetime string in `strftime('%Y-%m-%dT%H:%M:%S.%fZ')` format

  • `List`
    List of items

  • `Number`
    Float or Integer

  • `String`
    String

  • `Unknown`
    Unknown is explicitly converted to string format
  • """ - value: ViewsCreateRequestDataFiltersItemsItemValue = pydantic_v1.Field() + value: ViewsCreateRequestDataFiltersItemsItemValue = pydantic.Field() """ Value to filter by """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item_value.py b/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item_value.py index 079eb6a7f..9175b3dd1 100644 --- a/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item_value.py +++ b/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item_value.py @@ -2,4 +2,6 @@ import typing -ViewsCreateRequestDataFiltersItemsItemValue = typing.Union[str, int, float, bool, typing.Dict[str, typing.Any]] +ViewsCreateRequestDataFiltersItemsItemValue = typing.Union[ + str, int, float, bool, typing.Dict[str, typing.Optional[typing.Any]] +] diff --git a/src/label_studio_sdk/views/types/views_update_request_data.py b/src/label_studio_sdk/views/types/views_update_request_data.py index 7769d74e0..1a0ceda3c 100644 --- a/src/label_studio_sdk/views/types/views_update_request_data.py +++ b/src/label_studio_sdk/views/types/views_update_request_data.py @@ -1,43 +1,33 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ...core.pydantic_utilities import UniversalBaseModel import typing - -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .views_update_request_data_filters import ViewsUpdateRequestDataFilters +import pydantic from .views_update_request_data_ordering_item import ViewsUpdateRequestDataOrderingItem +from ...core.pydantic_utilities import IS_PYDANTIC_V2 -class ViewsUpdateRequestData(pydantic_v1.BaseModel): +class ViewsUpdateRequestData(UniversalBaseModel): """ Custom view data """ - filters: typing.Optional[ViewsUpdateRequestDataFilters] = pydantic_v1.Field(default=None) + filters: typing.Optional[ViewsUpdateRequestDataFilters] = pydantic.Field(default=None) """ Filters to apply on tasks. You can use [the helper class `Filters` from this page](https://labelstud.io/sdk/data_manager.html) to create Data Manager Filters.
    Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` """ - ordering: typing.Optional[typing.List[ViewsUpdateRequestDataOrderingItem]] = pydantic_v1.Field(default=None) + ordering: typing.Optional[typing.List[ViewsUpdateRequestDataOrderingItem]] = pydantic.Field(default=None) """ List of fields to order by. Fields are similar to filters but without the `filter:` prefix. To reverse the order, add a minus sign before the field name, e.g. `-tasks:created_at`. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/views/types/views_update_request_data_filters.py b/src/label_studio_sdk/views/types/views_update_request_data_filters.py index 674d8eb3e..5d35fe732 100644 --- a/src/label_studio_sdk/views/types/views_update_request_data_filters.py +++ b/src/label_studio_sdk/views/types/views_update_request_data_filters.py @@ -1,43 +1,33 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt -import typing - -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from ...core.pydantic_utilities import UniversalBaseModel from .views_update_request_data_filters_conjunction import ViewsUpdateRequestDataFiltersConjunction +import pydantic +import typing from .views_update_request_data_filters_items_item import ViewsUpdateRequestDataFiltersItemsItem +from ...core.pydantic_utilities import IS_PYDANTIC_V2 -class ViewsUpdateRequestDataFilters(pydantic_v1.BaseModel): +class ViewsUpdateRequestDataFilters(UniversalBaseModel): """ Filters to apply on tasks. You can use [the helper class `Filters` from this page](https://labelstud.io/sdk/data_manager.html) to create Data Manager Filters.
    Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` """ - conjunction: ViewsUpdateRequestDataFiltersConjunction = pydantic_v1.Field() + conjunction: ViewsUpdateRequestDataFiltersConjunction = pydantic.Field() """ Logical conjunction for the filters. This conjunction (either "or" or "and") will be applied to all items in the filters list. It is not possible to combine "or" and "and" within one list of filters. All filters will be either combined with "or" or with "and", but not a mix of both. """ - items: typing.List[ViewsUpdateRequestDataFiltersItemsItem] = pydantic_v1.Field() + items: typing.List[ViewsUpdateRequestDataFiltersItemsItem] = pydantic.Field() """ List of filter items """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item.py b/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item.py index b491e85d5..88f2e1dcb 100644 --- a/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item.py +++ b/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item.py @@ -1,50 +1,40 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt -import typing - -from ...core.datetime_utils import serialize_datetime -from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from ...core.pydantic_utilities import UniversalBaseModel from .views_update_request_data_filters_items_item_filter import ViewsUpdateRequestDataFiltersItemsItemFilter +import pydantic from .views_update_request_data_filters_items_item_operator import ViewsUpdateRequestDataFiltersItemsItemOperator from .views_update_request_data_filters_items_item_value import ViewsUpdateRequestDataFiltersItemsItemValue +from ...core.pydantic_utilities import IS_PYDANTIC_V2 +import typing -class ViewsUpdateRequestDataFiltersItemsItem(pydantic_v1.BaseModel): - filter: ViewsUpdateRequestDataFiltersItemsItemFilter = pydantic_v1.Field() +class ViewsUpdateRequestDataFiltersItemsItem(UniversalBaseModel): + filter: ViewsUpdateRequestDataFiltersItemsItemFilter = pydantic.Field() """ Filter identifier, it should start with `filter:tasks:` prefix, e.g. `filter:tasks:agreement`. For `task.data` fields it may look like `filter:tasks:data.field_name`. If you need more info about columns, check the [Get data manager columns](#tag/Data-Manager/operation/api_dm_columns_list) API endpoint. Possible values:
  • `filter:tasks:agreement`
    (Number) Agreement for annotation results for a specific task (Enterprise only)

  • `filter:tasks:annotations_results`
    (String) Annotation results for the tasks

  • `filter:tasks:annotators`
    (List) Annotators that completed the task (Community). Can include assigned annotators (Enterprise only). Important note: the filter `type` should be List, but the filter `value` is integer

  • `filter:tasks:cancelled_annotations`
    (Number) Number of cancelled or skipped annotations for the task

  • `filter:tasks:comments`
    (Number) Number of comments in a task

  • `filter:tasks:completed_at`
    (Datetime) Time when a task was fully annotated

  • `filter:tasks:created_at`
    (Datetime) Time the task was created at

  • `filter:tasks:file_upload`
    (String) Name of the file uploaded to create the tasks

  • `filter:tasks:ground_truth`
    (Boolean) Ground truth status of the tasks

  • `filter:tasks:id`
    (Number) Task ID

  • `filter:tasks:inner_id`
    (Number) Task Inner ID, it starts from 1 for all projects

  • `filter:tasks:predictions_model_versions`
    (String) Model version used for the predictions

  • `filter:tasks:predictions_results`
    (String) Prediction results for the tasks

  • `filter:tasks:predictions_score`
    (Number) Prediction score for the task

  • `filter:tasks:reviewed`
    (Boolean) Whether the tasks have been reviewed (Enterprise only)

  • `filter:tasks:reviewers`
    (String) Reviewers that reviewed the task, or assigned reviewers (Enterprise only). Important note: the filter `type` should be List, but the filter `value` is integer

  • `filter:tasks:reviews_accepted`
    (Number) Number of annotations accepted for a task in review (Enterprise only)

  • `filter:tasks:reviews_rejected`
    (Number) Number of annotations rejected for a task in review (Enterprise only)

  • `filter:tasks:total_annotations`
    (Number) Total number of annotations on a task

  • `filter:tasks:total_predictions`
    (Number) Total number of predictions for the task

  • `filter:tasks:unresolved_comment_count`
    (Number) Number of unresolved comments in a task

  • `filter:tasks:updated_at`
    (Datetime) Time the task was updated at (e.g. new annotation was created, review added, etc)
  """ - operator: ViewsUpdateRequestDataFiltersItemsItemOperator = pydantic_v1.Field() + operator: ViewsUpdateRequestDataFiltersItemsItemOperator = pydantic.Field() """ Filter operator. Possible values:
  • `contains`
    Contains

  • `ends_with`
    Ends with

  • `equal`
    Equal to

  • `exists`
    Exists

  • `greater`
    Greater than

  • `greater_or_equal`
    Greater than or equal to

  • `in`
    Is between min and max values, so the filter `value` should be e.g. `{"min": 1, "max": 7}`

  • `less`
    Less than

  • `less_or_equal`
    Less than or equal to

  • `not_contains`
    Does not contain

  • `not_equal`
    Not equal to

  • `not_exists`
    Does not exist

  • `not_in`
    Is not between min and max values, so the filter `value` should be e.g. `{"min": 1, "max": 7}`

  • `starts_with`
    Starts with
  """ - type: str = pydantic_v1.Field() + type: str = pydantic.Field() """ Type of the filter value. Possible values:
  • `Boolean`
    Boolean

  • `Datetime`
    Datetime string in `strftime('%Y-%m-%dT%H:%M:%S.%fZ')` format

  • `List`
    List of items

  • `Number`
    Float or Integer

  • `String`
    String

  • `Unknown`
    Unknown is explicitly converted to string format
  • """ - value: ViewsUpdateRequestDataFiltersItemsItemValue = pydantic_v1.Field() + value: ViewsUpdateRequestDataFiltersItemsItemValue = pydantic.Field() """ Value to filter by """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item_value.py b/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item_value.py index 2031450d2..b5cb5d17a 100644 --- a/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item_value.py +++ b/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item_value.py @@ -2,4 +2,6 @@ import typing -ViewsUpdateRequestDataFiltersItemsItemValue = typing.Union[str, int, float, bool, typing.Dict[str, typing.Any]] +ViewsUpdateRequestDataFiltersItemsItemValue = typing.Union[ + str, int, float, bool, typing.Dict[str, typing.Optional[typing.Any]] +] diff --git a/src/label_studio_sdk/webhooks/client.py b/src/label_studio_sdk/webhooks/client.py index 2fa681c5f..12359cfc6 100644 --- a/src/label_studio_sdk/webhooks/client.py +++ b/src/label_studio_sdk/webhooks/client.py @@ -1,19 +1,19 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import pydantic_v1 +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions from ..types.webhook import Webhook +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError from ..types.webhook_actions_item import WebhookActionsItem -from ..types.webhook_serializer_for_update import WebhookSerializerForUpdate -from ..types.webhook_serializer_for_update_actions_item import WebhookSerializerForUpdateActionsItem +import datetime as dt +from ..core.jsonable_encoder import jsonable_encoder from .types.webhooks_update_request_actions_item import WebhooksUpdateRequestActionsItem +from ..types.webhook_serializer_for_update_actions_item import WebhookSerializerForUpdateActionsItem +from ..types.webhook_serializer_for_update import WebhookSerializerForUpdate +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -27,6 +27,7 @@ def list( self, *, project: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[Webhook]: """ + List all webhooks set up for your organization. Webhooks in Label Studio let you set up integrations that subscribe to certain events that occur inside Label Studio. When an event is triggered, Label Studio sends an HTTP POST request to the configured webhook URL. @@ -48,7 +49,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -56,11 +57,22 @@ def list( client.webhooks.list() """ _response = self._client_wrapper.httpx_client.request( - "api/webhooks/", method="GET", params={"project": project}, request_options=request_options + "api/webhooks/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Webhook], _response.json()) # type: ignore + return typing.cast( + typing.List[Webhook], + parse_obj_as( + type_=typing.List[Webhook], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -75,7 +87,7 @@ def create( project: typing.Optional[int] = OMIT, send_payload: typing.Optional[bool] = OMIT, send_for_all_actions: typing.Optional[bool] = OMIT, - headers: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + headers: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, is_active: typing.Optional[bool] = OMIT, actions: typing.Optional[typing.Sequence[WebhookActionsItem]] = OMIT, created_at: typing.Optional[dt.datetime] = OMIT, @@ -83,6 +95,7 @@ def create( request_options: typing.Optional[RequestOptions] = None, ) -> Webhook: """ + Create a webhook. Label Studio provides several out-of-the box webhook events, which you can find listed here: [Available Label Studio webhooks](https://labelstud.io/guide/webhooks#Available-Label-Studio-webhooks). @@ -107,7 +120,7 @@ def create( send_for_all_actions : typing.Optional[bool] If value is False - used only for actions from WebhookAction - headers : typing.Optional[typing.Dict[str, typing.Any]] + headers : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] Key Value Json of headers is_active : typing.Optional[bool] @@ -131,7 +144,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -161,7 +174,13 @@ def create( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Webhook, _response.json()) # type: ignore + return typing.cast( + Webhook, + parse_obj_as( + type_=Webhook, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -174,6 +193,7 @@ def info( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Get descriptions of all available webhook actions to set up webhooks. For more information, see the [Webhook event reference](https://labelstud.io/guide/webhook_reference). 
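Note the type widening in `create`: `headers` is now `Dict[str, Optional[Any]]`, so header values may be `None`. A registration sketch with a placeholder URL and header value:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

webhook = client.webhooks.create(
    url="https://example.com/hooks/label-studio",  # placeholder receiver URL
    send_payload=True,
    headers={"X-Api-Token": "secret"},  # values may now also be None under the widened type
)
```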
Parameters @@ -190,7 +210,7 @@ def info( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -200,7 +220,9 @@ def info( _response = self._client_wrapper.httpx_client.request( "api/webhooks/info/", method="GET", - params={"organization-only": organization_only}, + params={ + "organization-only": organization_only, + }, request_options=request_options, ) try: @@ -213,6 +235,7 @@ def info( def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Webhook: """ + Get information about a specific webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). @@ -232,7 +255,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -242,11 +265,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id)}/", method="GET", request_options=request_options + f"api/webhooks/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Webhook, _response.json()) # type: ignore + return typing.cast( + Webhook, + parse_obj_as( + type_=Webhook, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -254,6 +285,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). 
@@ -272,7 +304,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -282,7 +314,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options + f"api/webhooks/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -310,7 +344,7 @@ def update( project: typing.Optional[int] = OMIT, webhook_serializer_for_update_send_payload: typing.Optional[bool] = OMIT, webhook_serializer_for_update_send_for_all_actions: typing.Optional[bool] = OMIT, - webhook_serializer_for_update_headers: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + webhook_serializer_for_update_headers: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, webhook_serializer_for_update_is_active: typing.Optional[bool] = OMIT, webhook_serializer_for_update_actions: typing.Optional[ typing.Sequence[WebhookSerializerForUpdateActionsItem] @@ -320,6 +354,7 @@ def update( request_options: typing.Optional[RequestOptions] = None, ) -> WebhookSerializerForUpdate: """ + Update a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). @@ -361,7 +396,7 @@ def update( webhook_serializer_for_update_send_for_all_actions : typing.Optional[bool] If value is False - used only for actions from WebhookAction - webhook_serializer_for_update_headers : typing.Optional[typing.Dict[str, typing.Any]] + webhook_serializer_for_update_headers : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] Key Value Json of headers webhook_serializer_for_update_is_active : typing.Optional[bool] @@ -385,7 +420,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -425,7 +460,13 @@ def update( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(WebhookSerializerForUpdate, _response.json()) # type: ignore + return typing.cast( + WebhookSerializerForUpdate, + parse_obj_as( + type_=WebhookSerializerForUpdate, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -440,6 +481,7 @@ async def list( self, *, project: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[Webhook]: """ + List all webhooks set up for your organization. Webhooks in Label Studio let you set up integrations that subscribe to certain events that occur inside Label Studio. When an event is triggered, Label Studio sends an HTTP POST request to the configured webhook URL. 
@@ -461,19 +503,38 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.webhooks.list() + + + async def main() -> None: + await client.webhooks.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/webhooks/", method="GET", params={"project": project}, request_options=request_options + "api/webhooks/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Webhook], _response.json()) # type: ignore + return typing.cast( + typing.List[Webhook], + parse_obj_as( + type_=typing.List[Webhook], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -488,7 +549,7 @@ async def create( project: typing.Optional[int] = OMIT, send_payload: typing.Optional[bool] = OMIT, send_for_all_actions: typing.Optional[bool] = OMIT, - headers: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + headers: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, is_active: typing.Optional[bool] = OMIT, actions: typing.Optional[typing.Sequence[WebhookActionsItem]] = OMIT, created_at: typing.Optional[dt.datetime] = OMIT, @@ -496,6 +557,7 @@ async def create( request_options: typing.Optional[RequestOptions] = None, ) -> Webhook: """ + Create a webhook. Label Studio provides several out-of-the box webhook events, which you can find listed here: [Available Label Studio webhooks](https://labelstud.io/guide/webhooks#Available-Label-Studio-webhooks). @@ -520,7 +582,7 @@ async def create( send_for_all_actions : typing.Optional[bool] If value is False - used only for actions from WebhookAction - headers : typing.Optional[typing.Dict[str, typing.Any]] + headers : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] Key Value Json of headers is_active : typing.Optional[bool] @@ -544,14 +606,22 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.webhooks.create( - url="url", - ) + + + async def main() -> None: + await client.webhooks.create( + url="url", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/webhooks/", @@ -574,7 +644,13 @@ async def create( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Webhook, _response.json()) # type: ignore + return typing.cast( + Webhook, + parse_obj_as( + type_=Webhook, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -587,6 +663,7 @@ async def info( request_options: typing.Optional[RequestOptions] = None, ) -> None: """ + Get descriptions of all available webhook actions to set up webhooks. For more information, see the [Webhook event reference](https://labelstud.io/guide/webhook_reference). 
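On the receiving side nothing SDK-specific is required: Label Studio simply POSTs JSON to the configured URL. A stdlib-only receiver sketch, not part of this SDK; the `action` key is an assumption based on the payloads described in the linked event reference:

```python
import json
from http.server import BaseHTTPRequestHandler, HTTPServer


class WebhookHandler(BaseHTTPRequestHandler):
    def do_POST(self) -> None:
        # Read the JSON body Label Studio sends and acknowledge with 200.
        length = int(self.headers.get("Content-Length", 0))
        event = json.loads(self.rfile.read(length) or b"{}")
        print("received action:", event.get("action"))
        self.send_response(200)
        self.end_headers()


HTTPServer(("0.0.0.0", 8000), WebhookHandler).serve_forever()
```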
Parameters @@ -603,17 +680,27 @@ async def info( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.webhooks.info() + + + async def main() -> None: + await client.webhooks.info() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/webhooks/info/", method="GET", - params={"organization-only": organization_only}, + params={ + "organization-only": organization_only, + }, request_options=request_options, ) try: @@ -626,6 +713,7 @@ async def info( async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Webhook: """ + Get information about a specific webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). @@ -645,21 +733,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.webhooks.get( - id=1, - ) + + + async def main() -> None: + await client.webhooks.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id)}/", method="GET", request_options=request_options + f"api/webhooks/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Webhook, _response.json()) # type: ignore + return typing.cast( + Webhook, + parse_obj_as( + type_=Webhook, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -667,6 +771,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). 
@@ -685,17 +790,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.webhooks.delete( - id=1, - ) + + + async def main() -> None: + await client.webhooks.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options + f"api/webhooks/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -723,7 +838,7 @@ async def update( project: typing.Optional[int] = OMIT, webhook_serializer_for_update_send_payload: typing.Optional[bool] = OMIT, webhook_serializer_for_update_send_for_all_actions: typing.Optional[bool] = OMIT, - webhook_serializer_for_update_headers: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + webhook_serializer_for_update_headers: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, webhook_serializer_for_update_is_active: typing.Optional[bool] = OMIT, webhook_serializer_for_update_actions: typing.Optional[ typing.Sequence[WebhookSerializerForUpdateActionsItem] @@ -733,6 +848,7 @@ async def update( request_options: typing.Optional[RequestOptions] = None, ) -> WebhookSerializerForUpdate: """ + Update a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). 
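The headers parameters widen from Dict[str, Any] to Dict[str, Optional[Any]], spelling out that JSON null header values are accepted. A hedged sketch against the sync client, assuming it mirrors the async signature shown here; the header names are invented, and the id_/url/webhook_serializer_for_update_url arguments mirror the docstring example:

from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# "X-Signature" and "X-Trace-Id" are hypothetical header keys; the explicit
# None is what the widened Dict[str, Optional[Any]] annotation now admits.
webhook = client.webhooks.update(
    id_=1,
    url="https://example.com/hook",
    webhook_serializer_for_update_url="https://example.com/hook",
    webhook_serializer_for_update_headers={
        "X-Signature": "secret",
        "X-Trace-Id": None,
    },
)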
@@ -774,7 +890,7 @@ async def update( webhook_serializer_for_update_send_for_all_actions : typing.Optional[bool] If value is False - used only for actions from WebhookAction - webhook_serializer_for_update_headers : typing.Optional[typing.Dict[str, typing.Any]] + webhook_serializer_for_update_headers : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] Key Value Json of headers webhook_serializer_for_update_is_active : typing.Optional[bool] @@ -798,16 +914,24 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.webhooks.update( - id_=1, - url="url", - webhook_serializer_for_update_url="url", - ) + + + async def main() -> None: + await client.webhooks.update( + id_=1, + url="url", + webhook_serializer_for_update_url="url", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/webhooks/{jsonable_encoder(id_)}/", @@ -838,7 +962,13 @@ async def update( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(WebhookSerializerForUpdate, _response.json()) # type: ignore + return typing.cast( + WebhookSerializerForUpdate, + parse_obj_as( + type_=WebhookSerializerForUpdate, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/workspaces/client.py b/src/label_studio_sdk/workspaces/client.py index 1a5a64bac..413ac6b3a 100644 --- a/src/label_studio_sdk/workspaces/client.py +++ b/src/label_studio_sdk/workspaces/client.py @@ -1,15 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing +from ..core.client_wrapper import SyncClientWrapper +from .members.client import MembersClient +from ..core.request_options import RequestOptions +from ..types.workspace import Workspace +from ..core.pydantic_utilities import parse_obj_as from json.decoder import JSONDecodeError - from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import pydantic_v1 -from ..core.request_options import RequestOptions -from ..types.workspace import Workspace -from .members.client import AsyncMembersClient, MembersClient +from ..core.client_wrapper import AsyncClientWrapper +from .members.client import AsyncMembersClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -22,6 +23,7 @@ def __init__(self, *, client_wrapper: SyncClientWrapper): def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Workspace]: """ + List all workspaces for your organization. Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. 
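Every method continues to accept request_options for per-call overrides, and the regenerated request bodies now pass it as an explicit keyword argument. A hedged sketch of per-call overrides; the RequestOptions field names (timeout_in_seconds, max_retries) are assumed from the Fern-generated core module and should be checked against core/request_options.py, and the Workspace fields printed are assumptions:

from label_studio_sdk import LabelStudio
from label_studio_sdk.core.request_options import RequestOptions

client = LabelStudio(api_key="YOUR_API_KEY")

# RequestOptions is a TypedDict in Fern-generated SDKs, so plain keyword
# construction works; both field names here are assumptions.
workspaces = client.workspaces.list(
    request_options=RequestOptions(timeout_in_seconds=30, max_retries=2),
)
for workspace in workspaces:
    print(workspace.id, workspace.title)  # assumed Workspace fields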
@@ -40,7 +42,7 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -48,11 +50,19 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty client.workspaces.list() """ _response = self._client_wrapper.httpx_client.request( - "api/workspaces", method="GET", request_options=request_options + "api/workspaces", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Workspace], _response.json()) # type: ignore + return typing.cast( + typing.List[Workspace], + parse_obj_as( + type_=typing.List[Workspace], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -70,6 +80,7 @@ def create( request_options: typing.Optional[RequestOptions] = None, ) -> Workspace: """ + Create a new workspace. Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. @@ -106,7 +117,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -124,12 +135,21 @@ def create( "color": color, "is_archived": is_archived, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Workspace, _response.json()) # type: ignore + return typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -137,6 +157,7 @@ def create( def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Workspace: """ + Get information about a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). Parameters @@ -154,7 +175,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -164,11 +185,19 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/workspaces/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Workspace, _response.json()) # type: ignore + return typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -176,6 +205,7 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific workspace. 
You will need to provide the workspace ID. You can find this using [List workspaces](list). Parameters @@ -192,7 +222,7 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -202,7 +232,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = ) """ _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/workspaces/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -225,6 +257,7 @@ def update( request_options: typing.Optional[RequestOptions] = None, ) -> Workspace: """ + Update a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). Parameters @@ -260,7 +293,7 @@ def update( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -280,12 +313,21 @@ def update( "color": color, "is_archived": is_archived, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Workspace, _response.json()) # type: ignore + return typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -299,6 +341,7 @@ def __init__(self, *, client_wrapper: AsyncClientWrapper): async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Workspace]: """ + List all workspaces for your organization. Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. @@ -317,19 +360,35 @@ async def list(self, *, request_options: typing.Optional[RequestOptions] = None) Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.workspaces.list() + + + async def main() -> None: + await client.workspaces.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/workspaces", method="GET", request_options=request_options + "api/workspaces", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Workspace], _response.json()) # type: ignore + return typing.cast( + typing.List[Workspace], + parse_obj_as( + type_=typing.List[Workspace], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -347,6 +406,7 @@ async def create( request_options: typing.Optional[RequestOptions] = None, ) -> Workspace: """ + Create a new workspace. Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. 
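Workspace create and update requests now attach an explicit content-type: application/json header alongside the JSON body. This happens inside the generated client, so call sites are unchanged; a sketch using only the body keys visible in this hunk (color, is_archived):

from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# The content-type header is set by the generated client itself, so the
# call looks the same as before the regeneration.
workspace = client.workspaces.create(
    color="#FF6900",
    is_archived=False,
)
print(workspace.id)  # assumed Workspace field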
@@ -383,12 +443,20 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.workspaces.create() + + + async def main() -> None: + await client.workspaces.create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "api/workspaces", @@ -401,12 +469,21 @@ async def create( "color": color, "is_archived": is_archived, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Workspace, _response.json()) # type: ignore + return typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -414,6 +491,7 @@ async def create( async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Workspace: """ + Get information about a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). Parameters @@ -431,21 +509,37 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.workspaces.get( - id=1, - ) + + + async def main() -> None: + await client.workspaces.get( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/workspaces/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Workspace, _response.json()) # type: ignore + return typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -453,6 +547,7 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ + Delete a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). 
Parameters @@ -469,17 +564,27 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.workspaces.delete( - id=1, - ) + + + async def main() -> None: + await client.workspaces.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + f"api/workspaces/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -502,6 +607,7 @@ async def update( request_options: typing.Optional[RequestOptions] = None, ) -> Workspace: """ + Update a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). Parameters @@ -537,14 +643,22 @@ async def update( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.workspaces.update( - id=1, - ) + + + async def main() -> None: + await client.workspaces.update( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/workspaces/{jsonable_encoder(id)}", @@ -557,12 +671,21 @@ async def update( "color": color, "is_archived": is_archived, }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Workspace, _response.json()) # type: ignore + return typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/workspaces/members/client.py b/src/label_studio_sdk/workspaces/members/client.py index db168e10b..22b0ec5c5 100644 --- a/src/label_studio_sdk/workspaces/members/client.py +++ b/src/label_studio_sdk/workspaces/members/client.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. import typing +from ...core.client_wrapper import SyncClientWrapper +from ...core.request_options import RequestOptions +from .types.members_list_response_item import MembersListResponseItem +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as from json.decoder import JSONDecodeError - from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import pydantic_v1 -from ...core.request_options import RequestOptions from .types.members_create_response import MembersCreateResponse -from .types.members_list_response_item import MembersListResponseItem +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -23,6 +23,7 @@ def list( self, id: int, *, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[MembersListResponseItem]: """ + List all workspace memberships for a specific workspace. You will need to provide the workspace ID. 
You can find this using [List workspaces](list). Parameters @@ -40,7 +41,7 @@ def list( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -50,11 +51,19 @@ def list( ) """ _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", method="GET", request_options=request_options + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[MembersListResponseItem], _response.json()) # type: ignore + return typing.cast( + typing.List[MembersListResponseItem], + parse_obj_as( + type_=typing.List[MembersListResponseItem], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -64,6 +73,7 @@ def create( self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None ) -> MembersCreateResponse: """ + Create a new workspace membership. You will need to provide the workspace ID. You can find this using [List workspaces](list). Parameters @@ -84,7 +94,7 @@ def create( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -96,13 +106,24 @@ def create( _response = self._client_wrapper.httpx_client.request( f"api/workspaces/{jsonable_encoder(id)}/memberships", method="POST", - json={"user": user}, + json={ + "user": user, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MembersCreateResponse, _response.json()) # type: ignore + return typing.cast( + MembersCreateResponse, + parse_obj_as( + type_=MembersCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -112,6 +133,7 @@ def delete( self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None ) -> None: """ + Delete a specific workspace membership. You will need to provide the workspace ID and the user ID. You can find this using [List workspace memberships](list). Parameters @@ -131,7 +153,7 @@ def delete( Examples -------- - from label_studio_sdk.client import LabelStudio + from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", @@ -143,7 +165,12 @@ def delete( _response = self._client_wrapper.httpx_client.request( f"api/workspaces/{jsonable_encoder(id)}/memberships", method="DELETE", - json={"user": user}, + json={ + "user": user, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) @@ -164,6 +191,7 @@ async def list( self, id: int, *, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[MembersListResponseItem]: """ + List all workspace memberships for a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). 
Parameters @@ -181,21 +209,37 @@ async def list( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.workspaces.members.list( - id=1, - ) + + + async def main() -> None: + await client.workspaces.members.list( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", method="GET", request_options=request_options + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[MembersListResponseItem], _response.json()) # type: ignore + return typing.cast( + typing.List[MembersListResponseItem], + parse_obj_as( + type_=typing.List[MembersListResponseItem], # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -205,6 +249,7 @@ async def create( self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None ) -> MembersCreateResponse: """ + Create a new workspace membership. You will need to provide the workspace ID. You can find this using [List workspaces](list). Parameters @@ -225,25 +270,44 @@ async def create( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.workspaces.members.create( - id=1, - ) + + + async def main() -> None: + await client.workspaces.members.create( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/workspaces/{jsonable_encoder(id)}/memberships", method="POST", - json={"user": user}, + json={ + "user": user, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MembersCreateResponse, _response.json()) # type: ignore + return typing.cast( + MembersCreateResponse, + parse_obj_as( + type_=MembersCreateResponse, # type: ignore + object_=_response.json(), + ), + ) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -253,6 +317,7 @@ async def delete( self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None ) -> None: """ + Delete a specific workspace membership. You will need to provide the workspace ID and the user ID. You can find this using [List workspace memberships](list). 
Parameters @@ -272,19 +337,32 @@ async def delete( Examples -------- - from label_studio_sdk.client import AsyncLabelStudio + import asyncio + + from label_studio_sdk import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.workspaces.members.delete( - id=1, - ) + + + async def main() -> None: + await client.workspaces.members.delete( + id=1, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"api/workspaces/{jsonable_encoder(id)}/memberships", method="DELETE", - json={"user": user}, + json={ + "user": user, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, omit=OMIT, ) diff --git a/src/label_studio_sdk/workspaces/members/types/members_create_response.py b/src/label_studio_sdk/workspaces/members/types/members_create_response.py index 3c0f08a0c..13ed6d9ba 100644 --- a/src/label_studio_sdk/workspaces/members/types/members_create_response.py +++ b/src/label_studio_sdk/workspaces/members/types/members_create_response.py @@ -1,32 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class MembersCreateResponse(pydantic_v1.BaseModel): - user: typing.Optional[int] = pydantic_v1.Field(default=None) +class MembersCreateResponse(UniversalBaseModel): + user: typing.Optional[int] = pydantic.Field(default=None) """ User ID of the workspace member """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/workspaces/members/types/members_list_response_item.py b/src/label_studio_sdk/workspaces/members/types/members_list_response_item.py index 69ccff3ee..d436b704e 100644 --- a/src/label_studio_sdk/workspaces/members/types/members_list_response_item.py +++ b/src/label_studio_sdk/workspaces/members/types/members_list_response_item.py @@ -1,32 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
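The regenerated response models drop the hand-rolled json()/dict() overrides in favor of UniversalBaseModel plus an IS_PYDANTIC_V2 branch, so one class body configures both pydantic majors. A standalone sketch of the same branching pattern, with a version check and pydantic.BaseModel standing in for the SDK's core.pydantic_utilities helpers:

import typing

import pydantic

# Stand-in for core.pydantic_utilities.IS_PYDANTIC_V2.
IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")


class MemberLike(pydantic.BaseModel):  # UniversalBaseModel stand-in
    user: typing.Optional[int] = pydantic.Field(default=None)

    # Only one branch executes at class-creation time, so the v1-only Config
    # class and the v2-only ConfigDict never collide.
    if IS_PYDANTIC_V2:
        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(
            extra="allow", frozen=True
        )  # type: ignore
    else:

        class Config:
            frozen = True
            smart_union = True
            extra = pydantic.Extra.allow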
-import datetime as dt +from ....core.pydantic_utilities import UniversalBaseModel import typing +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 - -class MembersListResponseItem(pydantic_v1.BaseModel): - user: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) +class MembersListResponseItem(UniversalBaseModel): + user: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ User ID of the workspace member """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/tests/conftest.py b/tests/conftest.py index b08641925..595f0074c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from label_studio_sdk import LabelStudio import os - import pytest -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio +from label_studio_sdk import AsyncLabelStudio @pytest.fixture diff --git a/tests/custom/test_client.py b/tests/custom/test_client.py index 60a58e64c..73f811f5e 100644 --- a/tests/custom/test_client.py +++ b/tests/custom/test_client.py @@ -1,5 +1,6 @@ import pytest + # Get started with writing tests with pytest at https://docs.pytest.org @pytest.mark.skip(reason="Unimplemented") def test_client() -> None: diff --git a/tests/export_storage/test_azure.py b/tests/export_storage/test_azure.py index 7e68f94b0..e1c3496d4 100644 --- a/tests/export_storage/test_azure.py +++ b/tests/export_storage/test_azure.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. 
+from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from ..utilities import validate_response @@ -32,7 +31,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "project": 1, } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -96,9 +95,15 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.export_storage.azure.validate() is None # type: ignore[func-returns-value] + assert ( + client.export_storage.azure.validate() # type: ignore[func-returns-value] + is None + ) - assert await async_client.export_storage.azure.validate() is None # type: ignore[func-returns-value] + assert ( + await async_client.export_storage.azure.validate() # type: ignore[func-returns-value] + is None + ) async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: @@ -155,9 +160,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.export_storage.azure.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.export_storage.azure.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.export_storage.azure.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.export_storage.azure.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/export_storage/test_gcs.py b/tests/export_storage/test_gcs.py index 64cfe2914..730af853a 100644 --- a/tests/export_storage/test_gcs.py +++ b/tests/export_storage/test_gcs.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. 
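The test churn repeated across the storage suites is mechanical: expected_types tightens from typing.Any to typing.Tuple[typing.Any, typing.Any], and each assert ... is None is split across a parenthesized expression so the narrow "# type: ignore[func-returns-value]" comment stays pinned to the call rather than the whole assert line. The same shape in isolation (validate is a hypothetical None-returning stand-in):

def validate() -> None:
    """Hypothetical stand-in for a None-returning SDK call."""


# Parenthesizing keeps the type-ignore comment on the line containing the
# call expression, which is where mypy applies it.
assert (
    validate()  # type: ignore[func-returns-value]
    is None
)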
+from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from ..utilities import validate_response @@ -32,7 +31,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "project": 1, } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -96,9 +95,15 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.export_storage.gcs.validate() is None # type: ignore[func-returns-value] + assert ( + client.export_storage.gcs.validate() # type: ignore[func-returns-value] + is None + ) - assert await async_client.export_storage.gcs.validate() is None # type: ignore[func-returns-value] + assert ( + await async_client.export_storage.gcs.validate() # type: ignore[func-returns-value] + is None + ) async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: @@ -155,9 +160,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.export_storage.gcs.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.export_storage.gcs.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.export_storage.gcs.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.export_storage.gcs.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/export_storage/test_local.py b/tests/export_storage/test_local.py index 6447b40e6..52e08012e 100644 --- a/tests/export_storage/test_local.py +++ b/tests/export_storage/test_local.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. 
+from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from ..utilities import validate_response @@ -29,7 +28,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "project": 1, } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -86,9 +85,15 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.export_storage.local.validate() is None # type: ignore[func-returns-value] + assert ( + client.export_storage.local.validate() # type: ignore[func-returns-value] + is None + ) - assert await async_client.export_storage.local.validate() is None # type: ignore[func-returns-value] + assert ( + await async_client.export_storage.local.validate() # type: ignore[func-returns-value] + is None + ) async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: @@ -139,9 +144,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.export_storage.local.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.export_storage.local.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.export_storage.local.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.export_storage.local.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/export_storage/test_redis.py b/tests/export_storage/test_redis.py index ef92fea2b..ca2251571 100644 --- a/tests/export_storage/test_redis.py +++ b/tests/export_storage/test_redis.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. 
+from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from ..utilities import validate_response @@ -33,7 +32,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "project": 1, } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -100,9 +99,15 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.export_storage.redis.validate() is None # type: ignore[func-returns-value] + assert ( + client.export_storage.redis.validate() # type: ignore[func-returns-value] + is None + ) - assert await async_client.export_storage.redis.validate() is None # type: ignore[func-returns-value] + assert ( + await async_client.export_storage.redis.validate() # type: ignore[func-returns-value] + is None + ) async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: @@ -161,9 +166,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.export_storage.redis.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.export_storage.redis.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.export_storage.redis.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.export_storage.redis.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/export_storage/test_s3.py b/tests/export_storage/test_s3.py index d70820e97..a26e70641 100644 --- a/tests/export_storage/test_s3.py +++ b/tests/export_storage/test_s3.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. 
+from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from ..utilities import validate_response @@ -36,7 +35,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "project": 1, } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -112,9 +111,15 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.export_storage.s3.validate() is None # type: ignore[func-returns-value] + assert ( + client.export_storage.s3.validate() # type: ignore[func-returns-value] + is None + ) - assert await async_client.export_storage.s3.validate() is None # type: ignore[func-returns-value] + assert ( + await async_client.export_storage.s3.validate() # type: ignore[func-returns-value] + is None + ) async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: @@ -179,9 +184,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.export_storage.s3.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.export_storage.s3.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.export_storage.s3.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.export_storage.s3.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/export_storage/test_s3s.py b/tests/export_storage/test_s3s.py index 2fcba66a7..f9efabd90 100644 --- a/tests/export_storage/test_s3s.py +++ b/tests/export_storage/test_s3s.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. 
+from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from ..utilities import validate_response @@ -23,7 +22,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "project": 1, } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -118,9 +117,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.export_storage.s3s.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.export_storage.s3s.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.export_storage.s3s.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.export_storage.s3s.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: @@ -159,6 +164,12 @@ async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> No async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.export_storage.s3s.validate() is None # type: ignore[func-returns-value] + assert ( + client.export_storage.s3s.validate() # type: ignore[func-returns-value] + is None + ) - assert await async_client.export_storage.s3s.validate() is None # type: ignore[func-returns-value] + assert ( + await async_client.export_storage.s3s.validate() # type: ignore[func-returns-value] + is None + ) diff --git a/tests/import_storage/test_azure.py b/tests/import_storage/test_azure.py index 36de34ffb..da52ad2c2 100644 --- a/tests/import_storage/test_azure.py +++ b/tests/import_storage/test_azure.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. 
+from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from ..utilities import validate_response @@ -33,7 +32,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "project": 1, } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -104,9 +103,15 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.import_storage.azure.validate() is None # type: ignore[func-returns-value] + assert ( + client.import_storage.azure.validate() # type: ignore[func-returns-value] + is None + ) - assert await async_client.import_storage.azure.validate() is None # type: ignore[func-returns-value] + assert ( + await async_client.import_storage.azure.validate() # type: ignore[func-returns-value] + is None + ) async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: @@ -165,9 +170,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.import_storage.azure.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.import_storage.azure.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.import_storage.azure.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.import_storage.azure.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/import_storage/test_gcs.py b/tests/import_storage/test_gcs.py index 74a7445c7..e44d38d91 100644 --- a/tests/import_storage/test_gcs.py +++ b/tests/import_storage/test_gcs.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. 
+from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from ..utilities import validate_response @@ -33,7 +32,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "project": 1, } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -104,9 +103,15 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.import_storage.gcs.validate() is None # type: ignore[func-returns-value] + assert ( + client.import_storage.gcs.validate() # type: ignore[func-returns-value] + is None + ) - assert await async_client.import_storage.gcs.validate() is None # type: ignore[func-returns-value] + assert ( + await async_client.import_storage.gcs.validate() # type: ignore[func-returns-value] + is None + ) async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: @@ -165,9 +170,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.import_storage.gcs.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.import_storage.gcs.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.import_storage.gcs.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.import_storage.gcs.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/import_storage/test_local.py b/tests/import_storage/test_local.py index 9a1b1f3c4..75f0bff65 100644 --- a/tests/import_storage/test_local.py +++ b/tests/import_storage/test_local.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. 
+from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from ..utilities import validate_response @@ -28,7 +27,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "project": 1, } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -84,9 +83,15 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.import_storage.local.validate() is None # type: ignore[func-returns-value] + assert ( + client.import_storage.local.validate() # type: ignore[func-returns-value] + is None + ) - assert await async_client.import_storage.local.validate() is None # type: ignore[func-returns-value] + assert ( + await async_client.import_storage.local.validate() # type: ignore[func-returns-value] + is None + ) async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: @@ -135,9 +140,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.import_storage.local.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.import_storage.local.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.import_storage.local.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.import_storage.local.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/import_storage/test_redis.py b/tests/import_storage/test_redis.py index 1a94c5531..01cd0696e 100644 --- a/tests/import_storage/test_redis.py +++ b/tests/import_storage/test_redis.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. 
+from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from ..utilities import validate_response @@ -32,7 +31,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "project": 1, } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -98,9 +97,15 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.import_storage.redis.validate() is None # type: ignore[func-returns-value] + assert ( + client.import_storage.redis.validate() # type: ignore[func-returns-value] + is None + ) - assert await async_client.import_storage.redis.validate() is None # type: ignore[func-returns-value] + assert ( + await async_client.import_storage.redis.validate() # type: ignore[func-returns-value] + is None + ) async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: @@ -157,9 +162,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.import_storage.redis.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.import_storage.redis.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.import_storage.redis.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.import_storage.redis.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/import_storage/test_s3.py b/tests/import_storage/test_s3.py index 474f01128..3b706703d 100644 --- a/tests/import_storage/test_s3.py +++ b/tests/import_storage/test_s3.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. 
+from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from ..utilities import validate_response @@ -38,7 +37,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "project": 1, } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -124,9 +123,15 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.import_storage.s3.validate() is None # type: ignore[func-returns-value] + assert ( + client.import_storage.s3.validate() # type: ignore[func-returns-value] + is None + ) - assert await async_client.import_storage.s3.validate() is None # type: ignore[func-returns-value] + assert ( + await async_client.import_storage.s3.validate() # type: ignore[func-returns-value] + is None + ) async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: @@ -195,9 +200,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.import_storage.s3.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.import_storage.s3.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.import_storage.s3.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.import_storage.s3.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/import_storage/test_s3s.py b/tests/import_storage/test_s3s.py index e804fc2cd..96ba46e7a 100644 --- a/tests/import_storage/test_s3s.py +++ b/tests/import_storage/test_s3s.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. 
+from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from ..utilities import validate_response @@ -35,7 +34,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "project": 1, } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -190,9 +189,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.import_storage.s3s.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.import_storage.s3s.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.import_storage.s3s.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.import_storage.s3s.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: @@ -255,9 +260,15 @@ async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> No async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.import_storage.s3s.validate() is None # type: ignore[func-returns-value] + assert ( + client.import_storage.s3s.validate() # type: ignore[func-returns-value] + is None + ) - assert await async_client.import_storage.s3s.validate() is None # type: ignore[func-returns-value] + assert ( + await async_client.import_storage.s3s.validate() # type: ignore[func-returns-value] + is None + ) async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/projects/test_exports.py b/tests/projects/test_exports.py index c17042c1e..1a053f7e3 100644 --- a/tests/projects/test_exports.py +++ b/tests/projects/test_exports.py @@ -1,15 +1,14 @@ # This file was auto-generated by Fern from our API Definition. 
+from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from ..utilities import validate_response async def test_list_formats(client: LabelStudio, async_client: AsyncLabelStudio) -> None: expected_response: typing.Any = ["string"] - expected_types: typing.Any = ("list", {0: None}) + expected_types: typing.Tuple[typing.Any, typing.Any] = ("list", {0: None}) response = client.projects.exports.list_formats(id=1) validate_response(response, expected_response, expected_types) @@ -37,7 +36,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "converted_formats": [{"export_type": "export_type"}], } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -164,9 +163,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.projects.exports.delete(id=1, export_pk="export_pk") is None # type: ignore[func-returns-value] + assert ( + client.projects.exports.delete(id=1, export_pk="export_pk") # type: ignore[func-returns-value] + is None + ) - assert await async_client.projects.exports.delete(id=1, export_pk="export_pk") is None # type: ignore[func-returns-value] + assert ( + await async_client.projects.exports.delete(id=1, export_pk="export_pk") # type: ignore[func-returns-value] + is None + ) async def test_convert(client: LabelStudio, async_client: AsyncLabelStudio) -> None: @@ -181,6 +186,12 @@ async def test_convert(client: LabelStudio, async_client: AsyncLabelStudio) -> N async def test_download(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.projects.exports.download(id=1, export_pk="export_pk") is None # type: ignore[func-returns-value] + assert ( + client.projects.exports.download(id=1, export_pk="export_pk") # type: ignore[func-returns-value] + is None + ) - assert await async_client.projects.exports.download(id=1, export_pk="export_pk") is None # type: ignore[func-returns-value] + assert ( + await async_client.projects.exports.download(id=1, export_pk="export_pk") # type: ignore[func-returns-value] + is None + ) diff --git a/tests/prompts/test_indicators.py b/tests/prompts/test_indicators.py index 0f6131295..8167c72e1 100644 --- a/tests/prompts/test_indicators.py +++ b/tests/prompts/test_indicators.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from ..utilities import validate_response @@ -18,7 +17,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "extra_kpis": [{}], } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { diff --git a/tests/prompts/test_runs.py b/tests/prompts/test_runs.py index fe8a78d73..0d2a0eec0 100644 --- a/tests/prompts/test_runs.py +++ b/tests/prompts/test_runs.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. 
+from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from ..utilities import validate_response diff --git a/tests/prompts/test_versions.py b/tests/prompts/test_versions.py index 1af9e4d35..584a55e84 100644 --- a/tests/prompts/test_versions.py +++ b/tests/prompts/test_versions.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from ..utilities import validate_response @@ -22,7 +21,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "organization": 1, } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -112,9 +111,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.prompts.versions.delete(id=1, version_id=1) is None # type: ignore[func-returns-value] + assert ( + client.prompts.versions.delete(id=1, version_id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.prompts.versions.delete(id=1, version_id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.prompts.versions.delete(id=1, version_id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/test_actions.py b/tests/test_actions.py index 11d6e00db..5d0be4480 100644 --- a/tests/test_actions.py +++ b/tests/test_actions.py @@ -1,22 +1,59 @@ # This file was auto-generated by Fern from our API Definition. 
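The hunks below move the Actions request models from the package root to the resource module. A short sketch of building the same create() payload with the new imports; every name and value here is copied from the test itself, nothing is new API:

from label_studio_sdk.actions import (
    ActionsCreateRequestFilters,
    ActionsCreateRequestFiltersItemsItem,
    ActionsCreateRequestSelectedItemsExcluded,
)

# The filter tree sent to client.actions.create(...) in the test below.
filters = ActionsCreateRequestFilters(
    conjunction="or",
    items=[
        ActionsCreateRequestFiltersItemsItem(
            filter="filter:tasks:id", operator="greater", type="Number", value=123
        )
    ],
)
selected = ActionsCreateRequestSelectedItemsExcluded(all_=True, excluded=[124, 125, 126])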
-from label_studio_sdk import ( - ActionsCreateRequestFilters, - ActionsCreateRequestFiltersItemsItem, - ActionsCreateRequestSelectedItemsExcluded, -) -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +from label_studio_sdk.actions import ActionsCreateRequestFilters +from label_studio_sdk.actions import ActionsCreateRequestFiltersItemsItem +from label_studio_sdk.actions import ActionsCreateRequestSelectedItemsExcluded async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.actions.list() is None # type: ignore[func-returns-value] + assert ( + client.actions.list() # type: ignore[func-returns-value] + is None + ) - assert await async_client.actions.list() is None # type: ignore[func-returns-value] + assert ( + await async_client.actions.list() # type: ignore[func-returns-value] + is None + ) async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.actions.create(id="retrieve_tasks_predictions", project=1, filters=ActionsCreateRequestFilters(conjunction="or", items=[ActionsCreateRequestFiltersItemsItem(filter="filter:tasks:id", operator="greater", type="Number", value=123)]), selected_items=ActionsCreateRequestSelectedItemsExcluded(all_=True, excluded=[124, 125, 126]), ordering=["tasks:total_annotations"]) is None # type: ignore[func-returns-value] + assert ( + client.actions.create( + id="retrieve_tasks_predictions", + project=1, + filters=ActionsCreateRequestFilters( + conjunction="or", + items=[ + ActionsCreateRequestFiltersItemsItem( + filter="filter:tasks:id", operator="greater", type="Number", value=123 + ) + ], + ), + selected_items=ActionsCreateRequestSelectedItemsExcluded(all_=True, excluded=[124, 125, 126]), + ordering=["tasks:total_annotations"], + ) # type: ignore[func-returns-value] + is None + ) - assert await async_client.actions.create(id="retrieve_tasks_predictions", project=1, filters=ActionsCreateRequestFilters(conjunction="or", items=[ActionsCreateRequestFiltersItemsItem(filter="filter:tasks:id", operator="greater", type="Number", value=123)]), selected_items=ActionsCreateRequestSelectedItemsExcluded(all_=True, excluded=[124, 125, 126]), ordering=["tasks:total_annotations"]) is None # type: ignore[func-returns-value] + assert ( + await async_client.actions.create( + id="retrieve_tasks_predictions", + project=1, + filters=ActionsCreateRequestFilters( + conjunction="or", + items=[ + ActionsCreateRequestFiltersItemsItem( + filter="filter:tasks:id", operator="greater", type="Number", value=123 + ) + ], + ), + selected_items=ActionsCreateRequestSelectedItemsExcluded(all_=True, excluded=[124, 125, 126]), + ordering=["tasks:total_annotations"], + ) # type: ignore[func-returns-value] + is None + ) diff --git a/tests/test_annotations.py b/tests/test_annotations.py index 398b7aa31..1b8cd9161 100644 --- a/tests/test_annotations.py +++ b/tests/test_annotations.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. 
+from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from .utilities import validate_response @@ -94,9 +93,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.annotations.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.annotations.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.annotations.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.annotations.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: @@ -270,7 +275,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "last_created_by": 1, } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -451,7 +456,7 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No async def test_create_bulk(client: LabelStudio, async_client: AsyncLabelStudio) -> None: expected_response: typing.Any = [{"id": 1}] - expected_types: typing.Any = ("list", {0: {"id": "integer"}}) + expected_types: typing.Tuple[typing.Any, typing.Any] = ("list", {0: {"id": "integer"}}) response = client.annotations.create_bulk() validate_response(response, expected_response, expected_types) diff --git a/tests/test_comments.py b/tests/test_comments.py index a168df7d1..509abf4ee 100644 --- a/tests/test_comments.py +++ b/tests/test_comments.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from .utilities import validate_response @@ -22,7 +21,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "resolved_at": "2024-01-15T09:30:00Z", } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -112,9 +111,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.comments.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.comments.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.comments.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.comments.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/test_export_storage.py b/tests/test_export_storage.py index 13682fe94..65d22af47 100644 --- a/tests/test_export_storage.py +++ b/tests/test_export_storage.py @@ -1,15 +1,14 @@ # This file was auto-generated by Fern from our API Definition. 
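A note on the reflowed asserts in this and the surrounding files: the endpoints under test return None, so each test asserts is None while silencing mypy's func-returns-value check. The rewrap keeps the ignore comment attached to the call expression, so it still applies once the assert is split across lines. A side-by-side sketch, assuming a client: LabelStudio fixture as in these tests:

# Before: the ignore comment trails the whole one-line assert.
assert client.files.delete(id=1) is None  # type: ignore[func-returns-value]

# After: the ignore stays on the line containing the offending call,
# which is the line mypy actually reports.
assert (
    client.files.delete(id=1)  # type: ignore[func-returns-value]
    is None
)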
+from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from .utilities import validate_response async def test_list_types(client: LabelStudio, async_client: AsyncLabelStudio) -> None: expected_response: typing.Any = [{"name": "name", "title": "title"}] - expected_types: typing.Any = ("list", {0: {"name": None, "title": None}}) + expected_types: typing.Tuple[typing.Any, typing.Any] = ("list", {0: {"name": None, "title": None}}) response = client.export_storage.list_types() validate_response(response, expected_response, expected_types) diff --git a/tests/test_files.py b/tests/test_files.py index 4dd052667..5637a8a6b 100644 --- a/tests/test_files.py +++ b/tests/test_files.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from .utilities import validate_response @@ -19,9 +18,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.files.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.files.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.files.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.files.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: @@ -36,7 +41,7 @@ async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> No async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: expected_response: typing.Any = [{"id": 1, "file": "file"}] - expected_types: typing.Any = ("list", {0: {"id": "integer", "file": None}}) + expected_types: typing.Tuple[typing.Any, typing.Any] = ("list", {0: {"id": "integer", "file": None}}) response = client.files.list(id=1) validate_response(response, expected_response, expected_types) @@ -46,13 +51,25 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non async def test_delete_many(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.files.delete_many(id=1) is None # type: ignore[func-returns-value] + assert ( + client.files.delete_many(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.files.delete_many(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.files.delete_many(id=1) # type: ignore[func-returns-value] + is None + ) async def test_download(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.files.download(filename="filename") is None # type: ignore[func-returns-value] - - assert await async_client.files.download(filename="filename") is None # type: ignore[func-returns-value] + assert ( + client.files.download(filename="filename") # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.files.download(filename="filename") # 
type: ignore[func-returns-value] + is None + ) diff --git a/tests/test_import_storage.py b/tests/test_import_storage.py index dc70062eb..00b2a428b 100644 --- a/tests/test_import_storage.py +++ b/tests/test_import_storage.py @@ -1,15 +1,14 @@ # This file was auto-generated by Fern from our API Definition. +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from .utilities import validate_response async def test_list_types(client: LabelStudio, async_client: AsyncLabelStudio) -> None: expected_response: typing.Any = [{"name": "name", "title": "title"}] - expected_types: typing.Any = ("list", {0: {"name": None, "title": None}}) + expected_types: typing.Tuple[typing.Any, typing.Any] = ("list", {0: {"name": None, "title": None}}) response = client.import_storage.list_types() validate_response(response, expected_response, expected_types) diff --git a/tests/test_ml.py b/tests/test_ml.py index 73205c541..0a5badc22 100644 --- a/tests/test_ml.py +++ b/tests/test_ml.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from .utilities import validate_response @@ -31,7 +30,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "project": 1, } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -148,9 +147,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.ml.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.ml.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.ml.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.ml.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: @@ -187,20 +192,38 @@ async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> No async def test_predict_interactive(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.ml.predict_interactive(id=1, task=1) is None # type: ignore[func-returns-value] + assert ( + client.ml.predict_interactive(id=1, task=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.ml.predict_interactive(id=1, task=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.ml.predict_interactive(id=1, task=1) # type: ignore[func-returns-value] + is None + ) async def test_train(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.ml.train(id=1) is None # type: ignore[func-returns-value] + assert ( + client.ml.train(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.ml.train(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.ml.train(id=1) # type: ignore[func-returns-value] + is None + 
) async def test_list_model_versions(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.ml.list_model_versions(id="id") is None # type: ignore[func-returns-value] + assert ( + client.ml.list_model_versions(id="id") # type: ignore[func-returns-value] + is None + ) - assert await async_client.ml.list_model_versions(id="id") is None # type: ignore[func-returns-value] + assert ( + await async_client.ml.list_model_versions(id="id") # type: ignore[func-returns-value] + is None + ) diff --git a/tests/test_model_providers.py b/tests/test_model_providers.py index e9eea6ea3..353ad2339 100644 --- a/tests/test_model_providers.py +++ b/tests/test_model_providers.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from .utilities import validate_response @@ -27,7 +26,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "budget_alert_threshold": 1.1, } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -142,9 +141,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.model_providers.delete(pk=1) is None # type: ignore[func-returns-value] + assert ( + client.model_providers.delete(pk=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.model_providers.delete(pk=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.model_providers.delete(pk=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/test_predictions.py b/tests/test_predictions.py index 8edcefb8f..f8559d6b3 100644 --- a/tests/test_predictions.py +++ b/tests/test_predictions.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. 
+from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from .utilities import validate_response @@ -43,7 +42,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "project": 1, } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -277,9 +276,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.predictions.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.predictions.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.predictions.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.predictions.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/test_projects.py b/tests/test_projects.py index 61376a911..1254788ea 100644 --- a/tests/test_projects.py +++ b/tests/test_projects.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from .utilities import validate_response @@ -59,6 +58,20 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: "enable_empty_annotation": True, "show_annotation_history": True, "organization": 1, + "prompts": [ + { + "title": "title", + "description": "description", + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "organization": 1, + "input_fields": ["input_fields"], + "output_classes": ["output_classes"], + "associated_projects": [1], + "skill_name": "skill_name", + } + ], "color": "#FF0000", "maximum_annotations": 1, "is_published": True, @@ -110,6 +123,23 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: "enable_empty_annotation": None, "show_annotation_history": None, "organization": "integer", + "prompts": ( + "list", + { + 0: { + "title": None, + "description": None, + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "organization": "integer", + "input_fields": ("list", {0: None}), + "output_classes": ("list", {0: None}), + "associated_projects": ("list", {0: "integer"}), + "skill_name": None, + } + }, + ), "color": None, "maximum_annotations": "integer", "is_published": None, @@ -153,9 +183,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.projects.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.projects.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.projects.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.projects.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: 
AsyncLabelStudio) -> None: diff --git a/tests/test_prompts.py b/tests/test_prompts.py index 547a37c50..d0e1f0ca1 100644 --- a/tests/test_prompts.py +++ b/tests/test_prompts.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from .utilities import validate_response @@ -22,7 +21,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "skill_name": "skill_name", } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -114,9 +113,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.prompts.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.prompts.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.prompts.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.prompts.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/test_tasks.py b/tests/test_tasks.py index 87ff968a5..dea5844c0 100644 --- a/tests/test_tasks.py +++ b/tests/test_tasks.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from .utilities import validate_response @@ -65,9 +64,15 @@ async def test_create_many_status(client: LabelStudio, async_client: AsyncLabelS async def test_delete_all_tasks(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.tasks.delete_all_tasks(id=1) is None # type: ignore[func-returns-value] + assert ( + client.tasks.delete_all_tasks(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.tasks.delete_all_tasks(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.tasks.delete_all_tasks(id=1) # type: ignore[func-returns-value] + is None + ) async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: @@ -283,9 +288,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.tasks.delete(id="id") is None # type: ignore[func-returns-value] + assert ( + client.tasks.delete(id="id") # type: ignore[func-returns-value] + is None + ) - assert await async_client.tasks.delete(id="id") is None # type: ignore[func-returns-value] + assert ( + await async_client.tasks.delete(id="id") # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/test_users.py b/tests/test_users.py index 2e5be9bb4..e6e5582b0 100644 --- a/tests/test_users.py +++ b/tests/test_users.py @@ -1,9 +1,8 @@ # This file was 
auto-generated by Fern from our API Definition. +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from .utilities import validate_response @@ -80,7 +79,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "date_joined": "2024-01-15T09:30:00Z", } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -180,9 +179,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.users.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.users.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.users.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.users.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/test_views.py b/tests/test_views.py index 3a49135f1..f9943d8a6 100644 --- a/tests/test_views.py +++ b/tests/test_views.py @@ -1,9 +1,8 @@ # This file was auto-generated by Fern from our API Definition. +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from .utilities import validate_response @@ -23,7 +22,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "project": 1, } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -84,9 +83,15 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No async def test_delete_all(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.views.delete_all(project=1) is None # type: ignore[func-returns-value] + assert ( + client.views.delete_all(project=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.views.delete_all(project=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.views.delete_all(project=1) # type: ignore[func-returns-value] + is None + ) async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: @@ -125,9 +130,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.views.delete(id="id") is None # type: ignore[func-returns-value] + assert ( + client.views.delete(id="id") # type: ignore[func-returns-value] + is None + ) - assert await async_client.views.delete(id="id") is None # type: ignore[func-returns-value] + assert ( + await async_client.views.delete(id="id") # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/test_workspaces.py b/tests/test_workspaces.py index 521b6be78..dc43be593 100644 --- a/tests/test_workspaces.py +++ b/tests/test_workspaces.py @@ -1,9 +1,8 @@ # 
This file was auto-generated by Fern from our API Definition. +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from .utilities import validate_response @@ -22,7 +21,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "color": "color", } ] - expected_types: typing.Any = ( + expected_types: typing.Tuple[typing.Any, typing.Any] = ( "list", { 0: { @@ -112,9 +111,15 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.workspaces.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.workspaces.delete(id=1) # type: ignore[func-returns-value] + is None + ) - assert await async_client.workspaces.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + await async_client.workspaces.delete(id=1) # type: ignore[func-returns-value] + is None + ) async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: diff --git a/tests/utilities.py b/tests/utilities.py index 402ee790b..3d228806a 100644 --- a/tests/utilities.py +++ b/tests/utilities.py @@ -3,15 +3,9 @@ import typing import uuid -import pydantic from dateutil import parser -IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.") - -if IS_PYDANTIC_V2: - import pydantic.v1 as pydantic_v1 # type: ignore # nopycln: import -else: - import pydantic as pydantic_v1 # type: ignore # nopycln: import +import pydantic def cast_field(json_expectation: typing.Any, type_expectation: typing.Any) -> typing.Any: @@ -51,7 +45,9 @@ def validate_field(response: typing.Any, json_expectation: typing.Any, type_expe if isinstance(entry_expectation, dict): is_container_of_complex_type = True validate_response( - response=response[idx], json_expectation=ex, type_expectations=entry_expectation + response=response[idx], + json_expectation=ex, + type_expectations=entry_expectation, ) else: cast_json_expectation.append(cast_field(ex, entry_expectation)) @@ -63,7 +59,10 @@ def validate_field(response: typing.Any, json_expectation: typing.Any, type_expe # if any of the values of the set have a type_expectation of a dict, we're assuming it's a pydantic # model and keeping it a list. 
if container_expectation != "set" or not any( - map(lambda value: isinstance(value, dict), list(contents_expectation.values())) + map( + lambda value: isinstance(value, dict), + list(contents_expectation.values()), + ) ): json_expectation = cast_field(json_expectation, container_expectation) elif isinstance(type_expectation, tuple): @@ -72,9 +71,15 @@ def validate_field(response: typing.Any, json_expectation: typing.Any, type_expe if isinstance(contents_expectation, dict): json_expectation = { cast_field( - key, contents_expectation.get(idx)[0] if contents_expectation.get(idx) is not None else None # type: ignore + key, + contents_expectation.get(idx)[0] # type: ignore + if contents_expectation.get(idx) is not None + else None, ): cast_field( - value, contents_expectation.get(idx)[1] if contents_expectation.get(idx) is not None else None # type: ignore + value, + contents_expectation.get(idx)[1] # type: ignore + if contents_expectation.get(idx) is not None + else None, ) for idx, (key, value) in enumerate(json_expectation.items()) } @@ -85,8 +90,10 @@ def validate_field(response: typing.Any, json_expectation: typing.Any, type_expe # When dealing with containers of models, etc. we're validating them implicitly, so no need to check the resultant list if not is_container_of_complex_type: - assert json_expectation == response, "Primitives found, expected: {0}, Actual: {1}".format( - json_expectation, response + assert ( + json_expectation == response + ), "Primitives found, expected: {0} (type: {1}), Actual: {2} (type: {3})".format( + json_expectation, type(json_expectation), response, type(response) ) @@ -96,28 +103,60 @@ def validate_response(response: typing.Any, json_expectation: typing.Any, type_e if type_expectations == "no_validate": return - if not isinstance(response, dict) and not issubclass(type(response), pydantic_v1.BaseModel): - validate_field(response=response, json_expectation=json_expectation, type_expectation=type_expectations) + if ( + not isinstance(response, list) + and not isinstance(response, dict) + and not issubclass(type(response), pydantic.BaseModel) + ): + validate_field( + response=response, + json_expectation=json_expectation, + type_expectation=type_expectations, + ) return - response_json = response - if issubclass(type(response), pydantic_v1.BaseModel): - response_json = response.dict(by_alias=True) - - for key, value in json_expectation.items(): - assert key in response_json, "Field {0} not found within the response object: {1}".format(key, response_json) - - type_expectation = None - if type_expectations is not None and isinstance(type_expectations, dict): - type_expectation = type_expectations.get(key) - - # If your type_expectation is a tuple then you have a container field, process it as such - # Otherwise, we're just validating a single field that's a pydantic model. 
-        if isinstance(value, dict) and not isinstance(type_expectation, tuple):
-            validate_response(response=response_json[key], json_expectation=value, type_expectations=type_expectation)
-        else:
-            validate_field(response=response_json[key], json_expectation=value, type_expectation=type_expectation)
+    if isinstance(response, list):
+        assert len(response) == len(json_expectation), "Length mismatch, expected: {0}, Actual: {1}".format(
+            len(json_expectation), len(response)
+        )
+        content_expectation = type_expectations
+        if isinstance(type_expectations, tuple):
+            content_expectation = type_expectations[1]
+        for idx, item in enumerate(response):
+            validate_response(
+                response=item,
+                json_expectation=json_expectation[idx],
+                type_expectations=content_expectation[idx],
+            )
+    else:
+        response_json = response
+        if issubclass(type(response), pydantic.BaseModel):
+            response_json = response.dict(by_alias=True)
+
+        for key, value in json_expectation.items():
+            assert key in response_json, "Field {0} not found within the response object: {1}".format(
+                key, response_json
+            )
+
+            type_expectation = None
+            if type_expectations is not None and isinstance(type_expectations, dict):
+                type_expectation = type_expectations.get(key)
+
+            # If your type_expectation is a tuple then you have a container field, process it as such
+            # Otherwise, we're just validating a single field that's a pydantic model.
+            if isinstance(value, dict) and not isinstance(type_expectation, tuple):
+                validate_response(
+                    response=response_json[key],
+                    json_expectation=value,
+                    type_expectations=type_expectation,
+                )
+            else:
+                validate_field(
+                    response=response_json[key],
+                    json_expectation=value,
+                    type_expectation=type_expectation,
+                )
-        # Ensure there are no additional fields here either
-        del response_json[key]
-    assert len(response_json) == 0, "Additional fields found, expected None: {0}".format(response_json)
+            # Ensure there are no additional fields here either
+            del response_json[key]
+        assert len(response_json) == 0, "Additional fields found, expected None: {0}".format(response_json)
diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py
new file mode 100644
index 000000000..f3ea2659b
--- /dev/null
+++ b/tests/utils/__init__.py
@@ -0,0 +1,2 @@
+# This file was auto-generated by Fern from our API Definition.
+
diff --git a/tests/utils/assets/models/__init__.py b/tests/utils/assets/models/__init__.py
new file mode 100644
index 000000000..3a1c852e7
--- /dev/null
+++ b/tests/utils/assets/models/__init__.py
@@ -0,0 +1,19 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from .circle import CircleParams
+from .object_with_defaults import ObjectWithDefaultsParams
+from .object_with_optional_field import ObjectWithOptionalFieldParams
+from .shape import ShapeParams, Shape_CircleParams, Shape_SquareParams
+from .square import SquareParams
+from .undiscriminated_shape import UndiscriminatedShapeParams
+
+__all__ = [
+    "CircleParams",
+    "ObjectWithDefaultsParams",
+    "ObjectWithOptionalFieldParams",
+    "ShapeParams",
+    "Shape_CircleParams",
+    "Shape_SquareParams",
+    "SquareParams",
+    "UndiscriminatedShapeParams",
+]
diff --git a/tests/utils/assets/models/circle.py b/tests/utils/assets/models/circle.py
new file mode 100644
index 000000000..09b8e0647
--- /dev/null
+++ b/tests/utils/assets/models/circle.py
@@ -0,0 +1,8 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing_extensions
+from label_studio_sdk.core.serialization import FieldMetadata
+
+
+class CircleParams(typing_extensions.TypedDict):
+    radius_measurement: typing_extensions.Annotated[float, FieldMetadata(alias="radiusMeasurement")]
diff --git a/tests/utils/assets/models/color.py b/tests/utils/assets/models/color.py
new file mode 100644
index 000000000..2aa2c4c52
--- /dev/null
+++ b/tests/utils/assets/models/color.py
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+Color = typing.Union[typing.Literal["red", "blue"], typing.Any]
diff --git a/tests/utils/assets/models/object_with_defaults.py b/tests/utils/assets/models/object_with_defaults.py
new file mode 100644
index 000000000..ef14f7b2c
--- /dev/null
+++ b/tests/utils/assets/models/object_with_defaults.py
@@ -0,0 +1,13 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing_extensions
+
+
+class ObjectWithDefaultsParams(typing_extensions.TypedDict):
+    """
+    Defines properties with default values and validation rules.
+    """
+
+    decimal: typing_extensions.NotRequired[float]
+    string: typing_extensions.NotRequired[str]
+    required_string: str
diff --git a/tests/utils/assets/models/object_with_optional_field.py b/tests/utils/assets/models/object_with_optional_field.py
new file mode 100644
index 000000000..ee4f9dbfe
--- /dev/null
+++ b/tests/utils/assets/models/object_with_optional_field.py
@@ -0,0 +1,31 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing_extensions
+import typing
+from label_studio_sdk.core.serialization import FieldMetadata
+import datetime as dt
+import uuid
+from .color import Color
+from .shape import ShapeParams
+from .undiscriminated_shape import UndiscriminatedShapeParams
+
+
+class ObjectWithOptionalFieldParams(typing_extensions.TypedDict):
+    literal: typing.Literal["lit_one"]
+    string: typing_extensions.NotRequired[str]
+    integer: typing_extensions.NotRequired[int]
+    long_: typing_extensions.NotRequired[typing_extensions.Annotated[int, FieldMetadata(alias="long")]]
+    double: typing_extensions.NotRequired[float]
+    bool_: typing_extensions.NotRequired[typing_extensions.Annotated[bool, FieldMetadata(alias="bool")]]
+    datetime: typing_extensions.NotRequired[dt.datetime]
+    date: typing_extensions.NotRequired[dt.date]
+    uuid_: typing_extensions.NotRequired[typing_extensions.Annotated[uuid.UUID, FieldMetadata(alias="uuid")]]
+    base_64: typing_extensions.NotRequired[typing_extensions.Annotated[str, FieldMetadata(alias="base64")]]
+    list_: typing_extensions.NotRequired[typing_extensions.Annotated[typing.Sequence[str], FieldMetadata(alias="list")]]
+    set_: typing_extensions.NotRequired[typing_extensions.Annotated[typing.Set[str], FieldMetadata(alias="set")]]
+    map_: typing_extensions.NotRequired[typing_extensions.Annotated[typing.Dict[int, str], FieldMetadata(alias="map")]]
+    enum: typing_extensions.NotRequired[Color]
+    union: typing_extensions.NotRequired[ShapeParams]
+    second_union: typing_extensions.NotRequired[ShapeParams]
+    undiscriminated_union: typing_extensions.NotRequired[UndiscriminatedShapeParams]
+    any: typing.Optional[typing.Any]
diff --git a/tests/utils/assets/models/shape.py b/tests/utils/assets/models/shape.py
new file mode 100644
index 000000000..820dec7a6
--- /dev/null
+++ b/tests/utils/assets/models/shape.py
@@ -0,0 +1,23 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+import typing_extensions
+import typing
+from label_studio_sdk.core.serialization import FieldMetadata
+
+
+class Base(typing_extensions.TypedDict):
+    id: str
+
+
+class Shape_CircleParams(Base):
+    shape_type: typing_extensions.Annotated[typing.Literal["circle"], FieldMetadata(alias="shapeType")]
+    radius_measurement: typing_extensions.Annotated[float, FieldMetadata(alias="radiusMeasurement")]
+
+
+class Shape_SquareParams(Base):
+    shape_type: typing_extensions.Annotated[typing.Literal["square"], FieldMetadata(alias="shapeType")]
+    length_measurement: typing_extensions.Annotated[float, FieldMetadata(alias="lengthMeasurement")]
+
+
+ShapeParams = typing.Union[Shape_CircleParams, Shape_SquareParams]
diff --git a/tests/utils/assets/models/square.py b/tests/utils/assets/models/square.py
new file mode 100644
index 000000000..b70897be3
--- /dev/null
+++ b/tests/utils/assets/models/square.py
@@ -0,0 +1,8 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing_extensions
+from label_studio_sdk.core.serialization import FieldMetadata
+
+
+class SquareParams(typing_extensions.TypedDict):
+    length_measurement: typing_extensions.Annotated[float, FieldMetadata(alias="lengthMeasurement")]
diff --git a/tests/utils/assets/models/undiscriminated_shape.py b/tests/utils/assets/models/undiscriminated_shape.py
new file mode 100644
index 000000000..68876a23c
--- /dev/null
+++ b/tests/utils/assets/models/undiscriminated_shape.py
@@ -0,0 +1,7 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+from .circle import CircleParams
+from .square import SquareParams
+
+UndiscriminatedShapeParams = typing.Union[CircleParams, SquareParams]
diff --git a/tests/utils/test_http_client.py b/tests/utils/test_http_client.py
index 01bb6055f..1a2d02245 100644
--- a/tests/utils/test_http_client.py
+++ b/tests/utils/test_http_client.py
@@ -45,3 +45,17 @@
     assert json_body_extras == {"see you": "later"}
     assert data_body_extras is None
+
+
+def test_get_empty_json_request_body() -> None:
+    unrelated_request_options: RequestOptions = {"max_retries": 3}
+    json_body, data_body = get_request_body(json=None, data=None, request_options=unrelated_request_options, omit=None)
+    assert json_body is None
+    assert data_body is None
+
+    json_body_extras, data_body_extras = get_request_body(
+        json={}, data=None, request_options=unrelated_request_options, omit=None
+    )
+
+    assert json_body_extras is None
+    assert data_body_extras is None
diff --git a/tests/utils/test_query_encoding.py b/tests/utils/test_query_encoding.py
index dd8c6a886..ff439eb34 100644
--- a/tests/utils/test_query_encoding.py
+++ b/tests/utils/test_query_encoding.py
@@ -1,13 +1,37 @@
 # This file was auto-generated by Fern from our API Definition.
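Context for the next hunks: encode_query now returns a list of (key, value) pairs instead of a dict, so repeated keys produced by arrays of deep objects are preserved. A sketch of that behavior, condensed from the tested cases (the second assertion's exact output is inferred from them, not quoted from a test):

from label_studio_sdk.core.query_encoder import encode_query

# Nested objects flatten to bracketed keys, now as ordered pairs...
assert encode_query({"hello_world": {"hello": "world"}}) == [("hello_world[hello]", "world")]
# ...and each element of a list repeats the key, which a dict could not represent.
assert encode_query({"objects": [{"key": "hello"}, {"key": "foo"}]}) == [
    ("objects[key]", "hello"),
    ("objects[key]", "foo"),
]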
+
 from label_studio_sdk.core.query_encoder import encode_query
-def test_query_encoding() -> None:
-    assert encode_query({"hello world": "hello world"}) == {"hello world": "hello world"}
-    assert encode_query({"hello_world": {"hello": "world"}}) == {"hello_world[hello]": "world"}
-    assert encode_query({"hello_world": {"hello": {"world": "today"}, "test": "this"}, "hi": "there"}) == {
-        "hello_world[hello][world]": "today",
-        "hello_world[test]": "this",
-        "hi": "there",
-    }
+def test_query_encoding_deep_objects() -> None:
+    assert encode_query({"hello world": "hello world"}) == [("hello world", "hello world")]
+    assert encode_query({"hello_world": {"hello": "world"}}) == [("hello_world[hello]", "world")]
+    assert encode_query({"hello_world": {"hello": {"world": "today"}, "test": "this"}, "hi": "there"}) == [
+        ("hello_world[hello][world]", "today"),
+        ("hello_world[test]", "this"),
+        ("hi", "there"),
+    ]
+
+
+def test_query_encoding_deep_object_arrays() -> None:
+    assert encode_query({"objects": [{"key": "hello", "value": "world"}, {"key": "foo", "value": "bar"}]}) == [
+        ("objects[key]", "hello"),
+        ("objects[value]", "world"),
+        ("objects[key]", "foo"),
+        ("objects[value]", "bar"),
+    ]
+    assert encode_query(
+        {"users": [{"name": "string", "tags": ["string"]}, {"name": "string2", "tags": ["string2", "string3"]}]}
+    ) == [
+        ("users[name]", "string"),
+        ("users[tags]", "string"),
+        ("users[name]", "string2"),
+        ("users[tags]", "string2"),
+        ("users[tags]", "string3"),
+    ]
+
+
+def test_encode_query_with_none() -> None:
+    encoded = encode_query(None)
+    assert encoded is None
diff --git a/tests/utils/test_serialization.py b/tests/utils/test_serialization.py
new file mode 100644
index 000000000..d085f645f
--- /dev/null
+++ b/tests/utils/test_serialization.py
@@ -0,0 +1,72 @@
+# This file was auto-generated by Fern from our API Definition.
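Before the new serialization tests: convert_and_respect_annotation_metadata rewrites TypedDict keys to the wire aliases declared via FieldMetadata on the asset models added above. A minimal sketch against CircleParams; the top-level import path is an assumption (the suite itself uses relative imports):

from label_studio_sdk.core.serialization import convert_and_respect_annotation_metadata
from tests.utils.assets.models import CircleParams  # assumed path when run from the repo root

# "write" direction applies the alias on the way out:
# radius_measurement -> radiusMeasurement.
wire = convert_and_respect_annotation_metadata(
    object_={"radius_measurement": 1.0}, annotation=CircleParams, direction="write"
)
assert wire == {"radiusMeasurement": 1.0}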
+ +from typing import List, Any + +from label_studio_sdk.core.serialization import convert_and_respect_annotation_metadata +from .assets.models import ShapeParams, ObjectWithOptionalFieldParams + + +UNION_TEST: ShapeParams = {"radius_measurement": 1.0, "shape_type": "circle", "id": "1"} +UNION_TEST_CONVERTED = {"shapeType": "circle", "radiusMeasurement": 1.0, "id": "1"} + + +def test_convert_and_respect_annotation_metadata() -> None: + data: ObjectWithOptionalFieldParams = { + "string": "string", + "long_": 12345, + "bool_": True, + "literal": "lit_one", + "any": "any", + } + converted = convert_and_respect_annotation_metadata( + object_=data, annotation=ObjectWithOptionalFieldParams, direction="write" + ) + assert converted == {"string": "string", "long": 12345, "bool": True, "literal": "lit_one", "any": "any"} + + +def test_convert_and_respect_annotation_metadata_in_list() -> None: + data: List[ObjectWithOptionalFieldParams] = [ + {"string": "string", "long_": 12345, "bool_": True, "literal": "lit_one", "any": "any"}, + {"string": "another string", "long_": 67890, "list_": [], "literal": "lit_one", "any": "any"}, + ] + converted = convert_and_respect_annotation_metadata( + object_=data, annotation=List[ObjectWithOptionalFieldParams], direction="write" + ) + + assert converted == [ + {"string": "string", "long": 12345, "bool": True, "literal": "lit_one", "any": "any"}, + {"string": "another string", "long": 67890, "list": [], "literal": "lit_one", "any": "any"}, + ] + + +def test_convert_and_respect_annotation_metadata_in_nested_object() -> None: + data: ObjectWithOptionalFieldParams = { + "string": "string", + "long_": 12345, + "union": UNION_TEST, + "literal": "lit_one", + "any": "any", + } + converted = convert_and_respect_annotation_metadata( + object_=data, annotation=ObjectWithOptionalFieldParams, direction="write" + ) + + assert converted == { + "string": "string", + "long": 12345, + "union": UNION_TEST_CONVERTED, + "literal": "lit_one", + "any": "any", + } + + +def test_convert_and_respect_annotation_metadata_in_union() -> None: + converted = convert_and_respect_annotation_metadata(object_=UNION_TEST, annotation=ShapeParams, direction="write") + + assert converted == UNION_TEST_CONVERTED + + +def test_convert_and_respect_annotation_metadata_with_empty_object() -> None: + data: Any = {} + converted = convert_and_respect_annotation_metadata(object_=data, annotation=ShapeParams, direction="write") + assert converted == data diff --git a/tests/workspaces/test_members.py b/tests/workspaces/test_members.py index 5ba8a26df..3724adc0d 100644 --- a/tests/workspaces/test_members.py +++ b/tests/workspaces/test_members.py @@ -1,15 +1,14 @@ # This file was auto-generated by Fern from our API Definition. 
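One more expectation shape appears in the hunk below: dict-valued fields. There the contents mapping pairs a positional index with a (key_type, value_type) tuple, and (None, None) means both key and value are compared uncast. A sketch using the exact fixture from the test that follows:

# A list of members, each carrying an untyped "user" mapping.
expected_response = [{"user": {"key": "value"}}]
expected_types = ("list", {0: {"user": ("dict", {0: (None, None)})}})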
+from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio import typing - -from label_studio_sdk.client import AsyncLabelStudio, LabelStudio - from ..utilities import validate_response async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: expected_response: typing.Any = [{"user": {"key": "value"}}] - expected_types: typing.Any = ("list", {0: {"user": ("dict", {0: (None, None)})}}) + expected_types: typing.Tuple[typing.Any, typing.Any] = ("list", {0: {"user": ("dict", {0: (None, None)})}}) response = client.workspaces.members.list(id=1) validate_response(response, expected_response, expected_types) @@ -29,6 +28,12 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert client.workspaces.members.delete(id=1) is None # type: ignore[func-returns-value] - - assert await async_client.workspaces.members.delete(id=1) is None # type: ignore[func-returns-value] + assert ( + client.workspaces.members.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.workspaces.members.delete(id=1) # type: ignore[func-returns-value] + is None + )
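Taken together, the test-side changes standardize on the flat import style. For reference, a minimal sketch of constructing the two clients these test fixtures provide; the URL and API key are placeholders, not values from this diff:

from label_studio_sdk import AsyncLabelStudio, LabelStudio

client = LabelStudio(base_url="http://localhost:8080", api_key="<api-token>")
async_client = AsyncLabelStudio(base_url="http://localhost:8080", api_key="<api-token>")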