From 87d329e49a48f5d86dd8690a817523499fd92484 Mon Sep 17 00:00:00 2001 From: niklub Date: Sat, 13 Jul 2024 01:24:01 +0100 Subject: [PATCH 1/5] feat: RND-103: Add support for new version of LabelInterface (#259) * feat: RND-103: Add support for new version of LabelInterface * Fix data model issues * Add config property * making .create work with Tag classes * :herb: Fern Regeneration -- July 12, 2024 (#261) * SDK regeneration * SDK regeneration * SDK regeneration --------- Co-authored-by: fern-api <115122769+fern-api[bot]@users.noreply.github.com> --------- Co-authored-by: nik Co-authored-by: Michael Malyuk Co-authored-by: fern-api[bot] <115122769+fern-api[bot]@users.noreply.github.com> --- .mock/definition/__package__.yml | 141 + .mock/definition/annotations.yml | 12 +- .mock/definition/dataManager.yml | 6 +- .mock/definition/exportStorage/s3S.yml | 290 + .mock/definition/importStorage/s3.yml | 2 +- .mock/definition/importStorage/s3S.yml | 478 + .mock/definition/predictions.yml | 16 +- .mock/definition/projects.yml | 6 + .mock/definition/tasks.yml | 10 +- .mock/definition/workspaces.yml | 202 + .mock/definition/workspaces/members.yml | 101 + .mock/fern.config.json | 2 +- examples/getting_started.ipynb | 266 + .../import_preannotations.ipynb | 181 +- poetry.lock | 352 +- pyproject.toml | 2 +- reference.md | 15741 ++++++++++++++++ src/label_studio_sdk/__init__.py | 10 + src/label_studio_sdk/actions/client.py | 16 +- src/label_studio_sdk/annotations/client.py | 48 +- src/label_studio_sdk/base_client.py | 3 + src/label_studio_sdk/core/client_wrapper.py | 2 +- src/label_studio_sdk/core/http_client.py | 44 +- src/label_studio_sdk/core/request_options.py | 4 +- .../export_storage/__init__.py | 3 +- .../export_storage/azure/client.py | 56 +- src/label_studio_sdk/export_storage/client.py | 11 +- .../export_storage/gcs/client.py | 56 +- .../export_storage/local/client.py | 56 +- .../export_storage/redis/client.py | 56 +- .../export_storage/s3/client.py | 56 +- .../export_storage/s3s/__init__.py | 2 + .../export_storage/s3s/client.py | 836 + src/label_studio_sdk/files/client.py | 48 +- .../import_storage/__init__.py | 3 +- .../import_storage/azure/client.py | 56 +- src/label_studio_sdk/import_storage/client.py | 11 +- .../import_storage/gcs/client.py | 56 +- .../import_storage/local/client.py | 56 +- .../import_storage/redis/client.py | 56 +- .../import_storage/s3/client.py | 56 +- .../import_storage/s3s/__init__.py | 2 + .../import_storage/s3s/client.py | 1054 ++ .../label_interface/control_tags.py | 50 +- .../label_interface/create.py | 10 +- .../label_interface/interface.py | 5 + .../label_interface/object_tags.py | 36 +- .../label_interface/objects.py | 10 +- src/label_studio_sdk/ml/client.py | 72 +- src/label_studio_sdk/predictions/client.py | 48 +- src/label_studio_sdk/projects/client.py | 142 +- src/label_studio_sdk/projects/client_ext.py | 17 +- .../projects/exports/client.py | 76 +- src/label_studio_sdk/tasks/client.py | 130 +- src/label_studio_sdk/tasks/client_ext.py | 4 + src/label_studio_sdk/types/__init__.py | 8 + .../types/s3s_export_storage.py | 80 + .../types/s3s_import_storage.py | 129 + .../types/s3s_import_storage_status.py | 7 + src/label_studio_sdk/types/workspace.py | 77 + src/label_studio_sdk/users/client.py | 64 +- src/label_studio_sdk/views/client.py | 48 +- src/label_studio_sdk/webhooks/client.py | 48 +- src/label_studio_sdk/workspaces/__init__.py | 6 + src/label_studio_sdk/workspaces/client.py | 549 + .../workspaces/members/__init__.py | 5 + .../workspaces/members/client.py 
| 297 + .../workspaces/members/types/__init__.py | 6 + .../members/types/members_create_response.py | 32 + .../types/members_list_response_item.py | 32 + tests/custom/test_interface/test_create.py | 38 + tests/export_storage/test_s3s.py | 164 + tests/import_storage/test_s3s.py | 318 + tests/test_annotations.py | 16 +- tests/test_predictions.py | 16 +- tests/test_workspaces.py | 149 + tests/utils/test_http_client.py | 47 + tests/utils/test_query_encoding.py | 13 + tests/workspaces/__init__.py | 2 + tests/workspaces/test_members.py | 34 + 80 files changed, 22292 insertions(+), 958 deletions(-) create mode 100644 .mock/definition/exportStorage/s3S.yml create mode 100644 .mock/definition/importStorage/s3S.yml create mode 100644 .mock/definition/workspaces.yml create mode 100644 .mock/definition/workspaces/members.yml create mode 100644 examples/getting_started.ipynb create mode 100644 reference.md create mode 100644 src/label_studio_sdk/export_storage/s3s/__init__.py create mode 100644 src/label_studio_sdk/export_storage/s3s/client.py create mode 100644 src/label_studio_sdk/import_storage/s3s/__init__.py create mode 100644 src/label_studio_sdk/import_storage/s3s/client.py create mode 100644 src/label_studio_sdk/types/s3s_export_storage.py create mode 100644 src/label_studio_sdk/types/s3s_import_storage.py create mode 100644 src/label_studio_sdk/types/s3s_import_storage_status.py create mode 100644 src/label_studio_sdk/types/workspace.py create mode 100644 src/label_studio_sdk/workspaces/__init__.py create mode 100644 src/label_studio_sdk/workspaces/client.py create mode 100644 src/label_studio_sdk/workspaces/members/__init__.py create mode 100644 src/label_studio_sdk/workspaces/members/client.py create mode 100644 src/label_studio_sdk/workspaces/members/types/__init__.py create mode 100644 src/label_studio_sdk/workspaces/members/types/members_create_response.py create mode 100644 src/label_studio_sdk/workspaces/members/types/members_list_response_item.py create mode 100644 tests/export_storage/test_s3s.py create mode 100644 tests/import_storage/test_s3s.py create mode 100644 tests/test_workspaces.py create mode 100644 tests/utils/test_http_client.py create mode 100644 tests/utils/test_query_encoding.py create mode 100644 tests/workspaces/__init__.py create mode 100644 tests/workspaces/test_members.py diff --git a/.mock/definition/__package__.yml b/.mock/definition/__package__.yml index a8547599a..9ac18f709 100644 --- a/.mock/definition/__package__.yml +++ b/.mock/definition/__package__.yml @@ -1925,3 +1925,144 @@ types: comment_authors: type: optional docs: List of comment authors' IDs for this task + Workspace: + properties: + id: + type: optional + docs: Unique ID of the workspace + title: + type: optional + docs: Workspace title + description: + type: optional + docs: Workspace description + is_public: + type: optional + docs: Whether the workspace is public or not + is_personal: + type: optional + docs: Whether the workspace is personal or not + is_archived: + type: optional + docs: Whether the workspace is archived or not + created_at: + type: optional + docs: Creation time of the workspace + updated_at: + type: optional + docs: Last updated time of the workspace + created_by: + type: optional + docs: User ID of the workspace creator + color: + type: optional + docs: Workspace color + S3SImportStorageStatus: + enum: + - initialized + - queued + - in_progress + - failed + - completed + S3SImportStorage: + properties: + id: optional + synchronizable: optional + presign: optional +
last_sync: + type: optional + docs: Last sync finished time + last_sync_count: + type: optional + docs: Count of tasks synced last time + last_sync_job: + type: optional + docs: Last sync job ID + validation: + maxLength: 256 + status: optional + traceback: + type: optional + docs: Traceback report for the last failed sync + meta: + type: optional> + docs: Meta and debug information about storage processes + title: + type: optional + docs: Cloud storage title + validation: + maxLength: 256 + description: + type: optional + docs: Cloud storage description + created_at: + type: optional + docs: Creation time + bucket: + type: optional + docs: S3 bucket name + prefix: + type: optional + docs: S3 bucket prefix + regex_filter: + type: optional + docs: Cloud storage regex for filtering objects + use_blob_urls: + type: optional + docs: Interpret objects as BLOBs and generate URLs + region_name: + type: optional + docs: AWS Region + external_id: + type: optional + docs: AWS External ID + role_arn: + type: optional + docs: AWS Role ARN + s3_endpoint: + type: optional + docs: S3 Endpoint + presign_ttl: + type: optional + docs: Presigned URLs TTL (in minutes) + recursive_scan: + type: optional + docs: Perform recursive scan over the bucket content + project: + type: integer + docs: A unique integer value identifying this project. + S3SExportStorage: + properties: + id: optional + title: + type: optional + docs: Cloud storage title + validation: + maxLength: 256 + description: + type: optional + docs: Cloud storage description + created_at: + type: optional + docs: Creation time + bucket: + type: optional + docs: S3 bucket name + prefix: + type: optional + docs: S3 bucket prefix + external_id: + type: optional + docs: AWS External ID + role_arn: + type: optional + docs: AWS Role ARN + region_name: + type: optional + docs: AWS Region + s3_endpoint: + type: optional + docs: S3 Endpoint + project: + type: integer + docs: A unique integer value identifying this project. 
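The `Workspace`, `S3SImportStorage`, and `S3SExportStorage` types defined above are exposed through the `workspaces`, `import_storage.s3s`, and `export_storage.s3s` clients generated in this PR (see the diffstat). A minimal usage sketch, assuming the Fern-generated client methods mirror the endpoint names in the YAML definitions below (`create`, `sync`); the API key and AWS identifiers are placeholders:

    from label_studio_sdk.client import LabelStudio

    ls = LabelStudio(base_url='http://localhost:8080', api_key='<your-api-key>')

    # Group projects into a workspace (new Workspace type)
    ws = ls.workspaces.create(title='Team A', description='Team A projects')
    project = ls.projects.create(title='Transactions', workspace=ws.id)

    # Source storage with IAM-role access (new S3SImportStorage type);
    # role_arn and external_id are placeholders from your AWS IAM role setup
    storage = ls.import_storage.s3s.create(
        project=project.id,
        bucket='my-bucket',
        prefix='tasks/',
        regex_filter=r'.*\.json',
        role_arn='arn:aws:iam::123456789012:role/LabelStudioRole',
        external_id='my-external-id',
        region_name='us-east-1',
    )
    ls.import_storage.s3s.sync(id=storage.id)  # status advances through S3SImportStorageStatus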
diff --git a/.mock/definition/annotations.yml b/.mock/definition/annotations.yml index 9fcb54c0a..d5dd51922 100644 --- a/.mock/definition/annotations.yml +++ b/.mock/definition/annotations.yml @@ -48,7 +48,7 @@ service: rotation: 0 values: rectanglelabels: - '0': Person + - Person created_username: created_username created_ago: created_ago completed_by: 1 @@ -174,7 +174,7 @@ service: rotation: 0 values: rectanglelabels: - '0': Person + - Person was_cancelled: false ground_truth: true response: @@ -195,7 +195,7 @@ service: rotation: 0 values: rectanglelabels: - '0': Person + - Person created_username: created_username created_ago: created_ago completed_by: 1 @@ -258,7 +258,7 @@ service: rotation: 0 values: rectanglelabels: - '0': Person + - Person created_username: created_username created_ago: created_ago completed_by: 1 @@ -379,7 +379,7 @@ service: rotation: 0 values: rectanglelabels: - '0': Person + - Person was_cancelled: false ground_truth: true response: @@ -400,7 +400,7 @@ service: rotation: 0 values: rectanglelabels: - '0': Person + - Person created_username: created_username created_ago: created_ago completed_by: 1 diff --git a/.mock/definition/dataManager.yml b/.mock/definition/dataManager.yml index a119deb7c..9d00d41bc 100644 --- a/.mock/definition/dataManager.yml +++ b/.mock/definition/dataManager.yml @@ -32,8 +32,7 @@ service: response: body: columns: - '0': - id: id + - id: id title: ID type: Number help: Task ID @@ -42,8 +41,7 @@ service: explore: true labeling: false project_defined: false - '1': - id: completed_at + - id: completed_at title: Completed type: Datetime target: tasks diff --git a/.mock/definition/exportStorage/s3S.yml b/.mock/definition/exportStorage/s3S.yml new file mode 100644 index 000000000..d0685fe20 --- /dev/null +++ b/.mock/definition/exportStorage/s3S.yml @@ -0,0 +1,290 @@ +imports: + root: ../__package__.yml +service: + auth: false + base-path: '' + endpoints: + list: + path: /api/storages/export/s3s + method: GET + auth: true + docs: > + + You can connect your S3 bucket to Label Studio as a source storage or + target storage. Use this API request to get a list of all S3 export + (target) storage connections for a specific project. + + + The project ID can be found in the URL when viewing the project in Label + Studio, or you can retrieve all project IDs using [List all + projects](../projects/list). + + + For more information about working with external storage, see [Sync data + from external storage](https://labelstud.io/guide/storage). + display-name: List export storages + request: + name: S3SListRequest + query-parameters: + project: + type: optional + docs: Project ID + response: + docs: '' + type: list + examples: + - response: + body: + - id: 1 + title: title + description: description + created_at: '2024-01-15T09:30:00Z' + bucket: bucket + prefix: prefix + external_id: external_id + role_arn: role_arn + region_name: region_name + s3_endpoint: s3_endpoint + project: 1 + audiences: + - public + create: + path: /api/storages/export/s3s + method: POST + auth: true + docs: > + + Create a new target storage connection to an S3 bucket with IAM role + access. + + + For information about the required fields and prerequisites, see [Amazon + S3](https://docs.humansignal.com/guide/storage#Set-up-an-S3-connection-with-IAM-role-access) + in the Label Studio documentation. + display-name: Create export storage + request: + name: S3SCreateRequest + body: + properties: + can_delete_objects: + type: optional + docs: Deletion from storage enabled.
+ title: + type: optional + docs: Storage title + description: + type: optional + docs: Storage description + project: + type: optional + docs: Project ID + bucket: + type: optional + docs: S3 bucket name + prefix: + type: optional + docs: S3 bucket prefix + external_id: + type: optional + docs: AWS External ID + role_arn: + type: optional + docs: AWS Role ARN + region_name: + type: optional + docs: AWS Region + s3_endpoint: + type: optional + docs: S3 Endpoint + response: + docs: '' + type: root.S3SExportStorage + examples: + - request: {} + response: + body: + id: 1 + title: title + description: description + created_at: '2024-01-15T09:30:00Z' + bucket: bucket + prefix: prefix + external_id: external_id + role_arn: role_arn + region_name: region_name + s3_endpoint: s3_endpoint + project: 1 + audiences: + - public + get: + path: /api/storages/export/s3s/{id} + method: GET + auth: true + docs: > + + Get a specific S3 export storage connection. You will need to provide + the export storage ID. You can find this using [List export + storages](list). + path-parameters: + id: + type: integer + docs: Export storage ID + display-name: Get export storage + response: + docs: '' + type: root.S3SExportStorage + examples: + - path-parameters: + id: 1 + response: + body: + id: 1 + title: title + description: description + created_at: '2024-01-15T09:30:00Z' + bucket: bucket + prefix: prefix + external_id: external_id + role_arn: role_arn + region_name: region_name + s3_endpoint: s3_endpoint + project: 1 + audiences: + - public + delete: + path: /api/storages/export/s3s/{id} + method: DELETE + auth: true + docs: > + + Delete a specific S3 export storage connection. You will need to provide + the export storage ID. You can find this using [List export + storages](list). + path-parameters: + id: + type: integer + docs: Export storage ID + display-name: Delete export storage + examples: + - path-parameters: + id: 1 + audiences: + - public + update: + path: /api/storages/export/s3s/{id} + method: PATCH + auth: true + docs: > + + Update a specific S3 export storage connection. You will need to provide + the export storage ID. You can find this using [List export + storages](list). + path-parameters: + id: + type: integer + docs: Export storage ID + display-name: Update export storage + request: + name: S3SUpdateRequest + body: + properties: + can_delete_objects: + type: optional + docs: Deletion from storage enabled. + title: + type: optional + docs: Storage title + description: + type: optional + docs: Storage description + project: + type: optional + docs: Project ID + bucket: + type: optional + docs: S3 bucket name + prefix: + type: optional + docs: S3 bucket prefix + external_id: + type: optional + docs: AWS External ID + role_arn: + type: optional + docs: AWS Role ARN + region_name: + type: optional + docs: AWS Region + s3_endpoint: + type: optional + docs: S3 Endpoint + response: + docs: '' + type: root.S3SExportStorage + examples: + - path-parameters: + id: 1 + request: {} + response: + body: + id: 1 + title: title + description: description + created_at: '2024-01-15T09:30:00Z' + bucket: bucket + prefix: prefix + external_id: external_id + role_arn: role_arn + region_name: region_name + s3_endpoint: s3_endpoint + project: 1 + audiences: + - public + validate: + path: /api/storages/export/s3s/validate + method: POST + auth: true + docs: > + + Validate a specific S3 export storage connection. 
This is useful to + ensure that the storage configuration settings are correct and + operational before attempting to export data. + display-name: Validate export storage + request: + name: S3SValidateRequest + body: + properties: + can_delete_objects: + type: optional + docs: Deletion from storage enabled. + title: + type: optional + docs: Storage title + description: + type: optional + docs: Storage description + project: + type: optional + docs: Project ID + bucket: + type: optional + docs: S3 bucket name + prefix: + type: optional + docs: S3 bucket prefix + external_id: + type: optional + docs: AWS External ID + role_arn: + type: optional + docs: AWS Role ARN + region_name: + type: optional + docs: AWS Region + s3_endpoint: + type: optional + docs: S3 Endpoint + examples: + - request: {} + audiences: + - public diff --git a/.mock/definition/importStorage/s3.yml b/.mock/definition/importStorage/s3.yml index 83e372c83..1d1ec9cd2 100644 --- a/.mock/definition/importStorage/s3.yml +++ b/.mock/definition/importStorage/s3.yml @@ -459,7 +459,7 @@ service: Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to - ensure that your data remains secure and private. + ensure that your data remains secure and private. path-parameters: id: type: integer diff --git a/.mock/definition/importStorage/s3S.yml b/.mock/definition/importStorage/s3S.yml new file mode 100644 index 000000000..cda06c7ba --- /dev/null +++ b/.mock/definition/importStorage/s3S.yml @@ -0,0 +1,478 @@ +imports: + root: ../__package__.yml +service: + auth: false + base-path: '' + endpoints: + list: + path: /api/storages/s3s + method: GET + auth: true + docs: > + + You can connect your S3 bucket to Label Studio as a source storage or + target storage. Use this API request to get a list of all S3 import + (source) storage connections for a specific project. + + + The project ID can be found in the URL when viewing the project in Label + Studio, or you can retrieve all project IDs using [List all + projects](../projects/list). + + + For more information about working with external storage, see [Sync data + from external storage](https://labelstud.io/guide/storage). + display-name: List import storages + request: + name: S3SListRequest + query-parameters: + project: + type: optional + docs: Project ID + response: + docs: '' + type: list + examples: + - response: + body: + - id: 1 + synchronizable: true + presign: true + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + status: initialized + traceback: traceback + meta: + meta: + key: value + title: title + description: description + created_at: '2024-01-15T09:30:00Z' + bucket: bucket + prefix: prefix + regex_filter: regex_filter + use_blob_urls: true + region_name: region_name + external_id: external_id + role_arn: role_arn + s3_endpoint: s3_endpoint + presign_ttl: 1 + recursive_scan: true + project: 1 + audiences: + - public + create: + path: /api/storages/s3s + method: POST + auth: true + docs: > + + Create a new source storage connection to an S3 bucket. + + + For information about the required fields and prerequisites, see [Amazon + S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio + documentation. + + + Ensure you configure CORS before adding cloud storage. This + ensures you will be able to see the content of the data rather than just + a link.
+ + + After you add the storage, you should validate the connection + before attempting to sync your data. Your data will not be imported + until you [sync your connection](sync). + display-name: Create import storage + request: + name: S3SCreateRequest + body: + properties: + regex_filter: + type: optional + docs: >- + Cloud storage regex for filtering objects. You must specify it + otherwise no objects will be imported. + use_blob_urls: + type: optional + docs: >- + Interpret objects as BLOBs and generate URLs. For example, if + your bucket contains images, you can use this option to generate + URLs for these images. If set to False, it will read the content + of the file and load it into Label Studio. + presign: + type: optional + docs: Presign URLs for download + presign_ttl: + type: optional + docs: Presign TTL in minutes + default: 1 + recursive_scan: + type: optional + docs: Scan recursively + title: + type: optional + docs: Storage title + description: + type: optional + docs: Storage description + project: + type: optional + docs: Project ID + bucket: + type: optional + docs: S3 bucket name + prefix: + type: optional + docs: S3 bucket prefix + external_id: + type: optional + docs: AWS External ID + role_arn: + type: optional + docs: AWS Role ARN + region_name: + type: optional + docs: AWS Region + s3_endpoint: + type: optional + docs: S3 Endpoint + response: + docs: '' + type: root.S3SImportStorage + examples: + - request: {} + response: + body: + id: 1 + synchronizable: true + presign: true + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + status: initialized + traceback: traceback + meta: + meta: + key: value + title: title + description: description + created_at: '2024-01-15T09:30:00Z' + bucket: bucket + prefix: prefix + regex_filter: regex_filter + use_blob_urls: true + region_name: region_name + external_id: external_id + role_arn: role_arn + s3_endpoint: s3_endpoint + presign_ttl: 1 + recursive_scan: true + project: 1 + audiences: + - public + get: + path: /api/storages/s3s/{id} + method: GET + auth: true + docs: > + + Get a specific S3 import storage connection. You will need to provide + the import storage ID. You can find this using [List import + storages](list). + path-parameters: + id: + type: integer + docs: Import storage ID + display-name: Get import storage + response: + docs: '' + type: root.S3SImportStorage + examples: + - path-parameters: + id: 1 + response: + body: + id: 1 + synchronizable: true + presign: true + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + status: initialized + traceback: traceback + meta: + meta: + key: value + title: title + description: description + created_at: '2024-01-15T09:30:00Z' + bucket: bucket + prefix: prefix + regex_filter: regex_filter + use_blob_urls: true + region_name: region_name + external_id: external_id + role_arn: role_arn + s3_endpoint: s3_endpoint + presign_ttl: 1 + recursive_scan: true + project: 1 + audiences: + - public + delete: + path: /api/storages/s3s/{id} + method: DELETE + auth: true + docs: > + + Delete a specific S3 import storage connection. You will need to provide + the import storage ID. You can find this using [List import + storages](list). + + + Deleting a source storage connection does not affect tasks with synced + data in Label Studio. The sync process is designed to import new or + updated tasks from the connected storage into the project, but it does + not track deletions of files from the storage. 
Therefore, if you remove + the external storage connection, the tasks that were created from that + storage will remain in the project. + + + If you want to remove the tasks that were synced from the external + storage, you will need to delete them manually from within the Label + Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + path-parameters: + id: + type: integer + docs: Import storage ID + display-name: Delete import storage + examples: + - path-parameters: + id: 1 + audiences: + - public + update: + path: /api/storages/s3s/{id} + method: PATCH + auth: true + docs: > + + Update a specific S3 import storage connection. You will need to provide + the import storage ID. You can find this using [List import + storages](list). + + + For more information about working with external storage, see [Sync data + from external storage](https://labelstud.io/guide/storage). + path-parameters: + id: + type: integer + docs: Import storage ID + display-name: Update import storage + request: + name: S3SUpdateRequest + body: + properties: + regex_filter: + type: optional + docs: >- + Cloud storage regex for filtering objects. You must specify it + otherwise no objects will be imported. + use_blob_urls: + type: optional + docs: >- + Interpret objects as BLOBs and generate URLs. For example, if + your bucket contains images, you can use this option to generate + URLs for these images. If set to False, it will read the content + of the file and load it into Label Studio. + presign: + type: optional + docs: Presign URLs for download + presign_ttl: + type: optional + docs: Presign TTL in minutes + default: 1 + recursive_scan: + type: optional + docs: Scan recursively + title: + type: optional + docs: Storage title + description: + type: optional + docs: Storage description + project: + type: optional + docs: Project ID + bucket: + type: optional + docs: S3 bucket name + prefix: + type: optional + docs: S3 bucket prefix + external_id: + type: optional + docs: AWS External ID + role_arn: + type: optional + docs: AWS Role ARN + region_name: + type: optional + docs: AWS Region + s3_endpoint: + type: optional + docs: S3 Endpoint + response: + docs: '' + type: root.S3SImportStorage + examples: + - path-parameters: + id: 1 + request: {} + response: + body: + id: 1 + synchronizable: true + presign: true + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + status: initialized + traceback: traceback + meta: + meta: + key: value + title: title + description: description + created_at: '2024-01-15T09:30:00Z' + bucket: bucket + prefix: prefix + regex_filter: regex_filter + use_blob_urls: true + region_name: region_name + external_id: external_id + role_arn: role_arn + s3_endpoint: s3_endpoint + presign_ttl: 1 + recursive_scan: true + project: 1 + audiences: + - public + validate: + path: /api/storages/s3s/validate + method: POST + auth: true + docs: > + + Validate a specific S3 import storage connection. This is useful to + ensure that the storage configuration settings are correct and + operational before attempting to import data. + display-name: Validate import storage + request: + name: S3SValidateRequest + body: + properties: + regex_filter: + type: optional + docs: >- + Cloud storage regex for filtering objects. You must specify it + otherwise no objects will be imported. + use_blob_urls: + type: optional + docs: >- + Interpret objects as BLOBs and generate URLs. 
For example, if + your bucket contains images, you can use this option to generate + URLs for these images. If set to False, it will read the content + of the file and load it into Label Studio. + presign: + type: optional + docs: Presign URLs for download + presign_ttl: + type: optional + docs: Presign TTL in minutes + default: 1 + recursive_scan: + type: optional + docs: Scan recursively + title: + type: optional + docs: Storage title + description: + type: optional + docs: Storage description + project: + type: optional + docs: Project ID + bucket: + type: optional + docs: S3 bucket name + prefix: + type: optional + docs: S3 bucket prefix + external_id: + type: optional + docs: AWS External ID + role_arn: + type: optional + docs: AWS Role ARN + region_name: + type: optional + docs: AWS Region + s3_endpoint: + type: optional + docs: S3 Endpoint + examples: + - request: {} + audiences: + - public + sync: + path: /api/storages/s3s/{id}/sync + method: POST + auth: true + docs: > + + Sync tasks from an S3 import storage connection. You will need to + provide the import storage ID. You can find this using [List import + storages](list). + path-parameters: + id: + type: integer + docs: Storage ID + display-name: Sync import storage + response: + docs: '' + type: root.S3SImportStorage + examples: + - path-parameters: + id: 1 + response: + body: + id: 1 + synchronizable: true + presign: true + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + status: initialized + traceback: traceback + meta: + meta: + key: value + title: title + description: description + created_at: '2024-01-15T09:30:00Z' + bucket: bucket + prefix: prefix + regex_filter: regex_filter + use_blob_urls: true + region_name: region_name + external_id: external_id + role_arn: role_arn + s3_endpoint: s3_endpoint + presign_ttl: 1 + recursive_scan: true + project: 1 + audiences: + - public diff --git a/.mock/definition/predictions.yml b/.mock/definition/predictions.yml index 02a25285b..6ebb2de53 100644 --- a/.mock/definition/predictions.yml +++ b/.mock/definition/predictions.yml @@ -61,7 +61,7 @@ service: rotation: 0 values: rectanglelabels: - '0': Person + - Person model_version: yolo-v8 created_ago: created_ago score: 0.95 @@ -160,7 +160,7 @@ service: rotation: 0 values: rectanglelabels: - '0': Person + - Person score: 0.95 model_version: yolo-v8 response: @@ -181,7 +181,7 @@ service: rotation: 0 values: rectanglelabels: - '0': Person + - Person model_version: yolo-v8 created_ago: created_ago score: 0.95 @@ -241,7 +241,7 @@ service: rotation: 0 values: rectanglelabels: - '0': Person + - Person model_version: yolo-v8 created_ago: created_ago score: 0.95 @@ -316,7 +316,7 @@ service: rotation: 0 values: rectanglelabels: - '0': Person + - Person score: 0.95 model_version: yolo-v8 response: @@ -337,7 +337,7 @@ service: rotation: 0 values: rectanglelabels: - '0': Person + - Person model_version: yolo-v8 created_ago: created_ago score: 0.95 @@ -438,7 +438,7 @@ service: rotation: 0 values: rectanglelabels: - '0': Person + - Person score: 0.95 model_version: yolo-v8 response: @@ -459,7 +459,7 @@ service: rotation: 0 values: rectanglelabels: - '0': Person + - Person model_version: yolo-v8 created_ago: created_ago score: 0.95 diff --git a/.mock/definition/projects.yml b/.mock/definition/projects.yml index d32b874ae..db29058d3 100644 --- a/.mock/definition/projects.yml +++ b/.mock/definition/projects.yml @@ -332,6 +332,9 @@ service: as important as Airplane, then you need to specify: {'my_bbox': {'type': 
'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} + workspace: + type: optional + docs: Workspace ID response: docs: '' type: ProjectsCreateResponse @@ -568,6 +571,9 @@ service: as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} + workspace: + type: optional + docs: Workspace ID response: docs: '' type: ProjectsUpdateResponse diff --git a/.mock/definition/tasks.yml b/.mock/definition/tasks.yml index 6f1308803..8b0d8b53a 100644 --- a/.mock/definition/tasks.yml +++ b/.mock/definition/tasks.yml @@ -189,14 +189,13 @@ service: - id: 1 predictions: - result: - '0': - from_name: sentiment + - from_name: sentiment to_name: text type: choices value: value: choices: - '0': POSITIVE + - POSITIVE score: 0.9 model_version: '1.0' task: 1 @@ -204,14 +203,13 @@ service: updated_at: '2021-01-01T00:00:00Z' annotations: - result: - '0': - from_name: sentiment + - from_name: sentiment to_name: text type: choices value: value: choices: - '0': POSITIVE + - POSITIVE created_at: '2021-01-01T00:00:00Z' updated_at: '2021-01-01T00:00:00Z' completed_by: 1 diff --git a/.mock/definition/workspaces.yml b/.mock/definition/workspaces.yml new file mode 100644 index 000000000..0f593c2db --- /dev/null +++ b/.mock/definition/workspaces.yml @@ -0,0 +1,202 @@ +imports: + root: __package__.yml +service: + auth: false + base-path: '' + endpoints: + list: + path: /api/workspaces + method: GET + auth: true + docs: > + + List all workspaces for your organization. + + + Workspaces in Label Studio let you organize your projects and users into + separate spaces. This is useful for managing different teams, + departments, or projects within your organization. + + + For more information, see [Workspaces in Label + Studio](https://docs.humansignal.com/guide/workspaces). + display-name: List workspaces + response: + docs: '' + type: list + examples: + - response: + body: + - id: 1 + title: title + description: description + is_public: true + is_personal: true + is_archived: true + created_at: '2024-01-15T09:30:00Z' + updated_at: '2024-01-15T09:30:00Z' + created_by: 1 + color: color + audiences: + - public + create: + path: /api/workspaces + method: POST + auth: true + docs: > + + Create a new workspace. + + + Workspaces in Label Studio let you organize your projects and users into + separate spaces. This is useful for managing different teams, + departments, or projects within your organization. + + + For more information, see [Workspaces in Label + Studio](https://docs.humansignal.com/guide/workspaces). + display-name: Create workspace + request: + name: WorkspacesCreateRequest + body: + properties: + title: + type: optional + docs: Workspace title + description: + type: optional + docs: Workspace description + is_public: + type: optional + docs: Is workspace public + is_personal: + type: optional + docs: Is workspace personal + color: + type: optional + docs: Workspace color in HEX format + default: '#FFFFFF' + response: + docs: '' + type: root.Workspace + examples: + - request: {} + response: + body: + id: 1 + title: title + description: description + is_public: true + is_personal: true + is_archived: true + created_at: '2024-01-15T09:30:00Z' + updated_at: '2024-01-15T09:30:00Z' + created_by: 1 + color: color + audiences: + - public + get: + path: /api/workspaces/{id} + method: GET + auth: true + docs: > + + Get information about a specific workspace. You will need to provide the + workspace ID. 
You can find this using [List workspaces](list). + path-parameters: + id: + type: integer + docs: Workspace ID + display-name: Get workspace + response: + docs: '' + type: root.Workspace + examples: + - path-parameters: + id: 1 + response: + body: + id: 1 + title: title + description: description + is_public: true + is_personal: true + is_archived: true + created_at: '2024-01-15T09:30:00Z' + updated_at: '2024-01-15T09:30:00Z' + created_by: 1 + color: color + audiences: + - public + delete: + path: /api/workspaces/{id} + method: DELETE + auth: true + docs: > + + Delete a specific workspace. You will need to provide the workspace ID. + You can find this using [List workspaces](list). + path-parameters: + id: + type: integer + docs: Workspace ID + display-name: Delete workspace + examples: + - path-parameters: + id: 1 + audiences: + - public + update: + path: /api/workspaces/{id} + method: PATCH + auth: true + docs: > + + Update a specific workspace. You will need to provide the workspace ID. + You can find this using [List workspaces](list). + path-parameters: + id: + type: integer + docs: Workspace ID + display-name: Update workspace + request: + name: WorkspacesUpdateRequest + body: + properties: + title: + type: optional + docs: Workspace title + description: + type: optional + docs: Workspace description + is_public: + type: optional + docs: Is workspace public + is_personal: + type: optional + docs: Is workspace personal + color: + type: optional + docs: Workspace color in HEX format + default: '#FFFFFF' + response: + docs: '' + type: root.Workspace + examples: + - path-parameters: + id: 1 + request: {} + response: + body: + id: 1 + title: title + description: description + is_public: true + is_personal: true + is_archived: true + created_at: '2024-01-15T09:30:00Z' + updated_at: '2024-01-15T09:30:00Z' + created_by: 1 + color: color + audiences: + - public diff --git a/.mock/definition/workspaces/members.yml b/.mock/definition/workspaces/members.yml new file mode 100644 index 000000000..fb022d338 --- /dev/null +++ b/.mock/definition/workspaces/members.yml @@ -0,0 +1,101 @@ +types: + MembersListResponseItem: + properties: + user: + type: optional + docs: User ID of the workspace member + MembersCreateResponse: + properties: + user: + type: optional + docs: User ID of the workspace member +service: + auth: false + base-path: '' + endpoints: + list: + path: /api/workspaces/{id}/memberships + method: GET + auth: true + docs: > + + List all workspace memberships for a specific workspace. You will need + to provide the workspace ID. You can find this using [List + workspaces](list). + path-parameters: + id: + type: integer + docs: Workspace ID + display-name: List workspace memberships + response: + docs: '' + type: list + examples: + - path-parameters: + id: 1 + response: + body: + - user: + user: + key: value + audiences: + - public + create: + path: /api/workspaces/{id}/memberships + method: POST + auth: true + docs: > + + Create a new workspace membership. You will need to provide the + workspace ID. You can find this using [List workspaces](list).
+ path-parameters: + id: + type: integer + docs: Workspace ID + display-name: Create workspace membership + request: + name: MembersCreateRequest + body: + properties: + user: + type: optional + docs: User ID of the workspace member + response: + docs: '' + type: MembersCreateResponse + examples: + - path-parameters: + id: 1 + request: {} + response: + body: + user: 1 + audiences: + - public + delete: + path: /api/workspaces/{id}/memberships + method: DELETE + auth: true + docs: > + + Delete a specific workspace membership. You will need to provide the + workspace ID and the user ID. You can find this using [List workspace + memberships](list). + path-parameters: + id: + type: integer + docs: Workspace ID + display-name: Delete workspace membership + request: + name: MembersDeleteRequest + body: + properties: + user: + type: optional + docs: User ID of the workspace member + examples: + - path-parameters: + id: 1 + request: {} + audiences: + - public diff --git a/.mock/fern.config.json b/.mock/fern.config.json index 8d8515df2..003c655d6 100644 --- a/.mock/fern.config.json +++ b/.mock/fern.config.json @@ -1,4 +1,4 @@ { "organization" : "humansignal-org", - "version" : "0.30.0" + "version" : "0.31.5" } \ No newline at end of file diff --git a/examples/getting_started.ipynb b/examples/getting_started.ipynb new file mode 100644 index 000000000..1b7ca3c17 --- /dev/null +++ b/examples/getting_started.ipynb @@ -0,0 +1,266 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "source": [ + "# Connect to Label Studio" + ], + "metadata": { + "collapsed": false + }, + "id": "e22fcce0ee4a12fa" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "from label_studio_sdk.client import LabelStudio\n", + "\n", + "ls = LabelStudio(\n", + " base_url='http://localhost:8080',\n", + " api_key='27c982caa9e599c9d81b25b00663e7d4f82c9e3c',\n", + ")" + ], + "metadata": { + "collapsed": true, + "ExecuteTime": { + "end_time": "2024-07-09T09:27:32.753924Z", + "start_time": "2024-07-09T09:27:31.538397Z" + } + }, + "id": "initial_id", + "execution_count": 1 + }, + { + "cell_type": "markdown", + "source": [ + "# Create New Project\n", + "\n", + "First, we need to define a **Labeling Interface** for the new project:" + ], + "metadata": { + "collapsed": false + }, + "id": "39e40e7b1120c335" + }, + { + "cell_type": "code", + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "<View>\n", + "  <Text name=\"text\" value=\"$text\"/>\n", + "  <Choices name=\"label\" toName=\"text\">\n", + "    <Choice value=\"Positive\"/>\n", + "    <Choice value=\"Negative\"/>\n", + "  </Choices>\n", + "</View>\n" + ] + } + ], + "source": [ + "from label_studio_sdk.label_interface import LabelInterface\n", + "from label_studio_sdk.label_interface.create import choices\n", + "\n", + "label_config = LabelInterface.create({\n", + " 'text': 'Text',\n", + " 'label': choices(['Positive', 'Negative'])\n", + "})\n", + "print(label_config)" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2024-07-09T08:40:57.172903Z", + "start_time": "2024-07-09T08:40:57.167649Z" + } + }, + "id": "93e728713446e20f", + "execution_count": 3 + }, + { + "cell_type": "markdown", + "source": [ + "You can manually create [XML configuration for the labeling interface](https://labelstud.io/tags), but the `LabelInterface` class provides a convenient way to create it programmatically.\n", + "\n", + "Now let's create a new project with the specified labeling interface:" + ], + "metadata": { + "collapsed": false + }, + "id": "79099883515b82c2" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "project = ls.projects.create(\n", + " title='Text Classification',\n",
label_config=label_config\n", + ")\n", + "\n", + "print(f'Created project:\\n{ls._client_wrapper.get_base_url()}/projects/{project.id}')" + ], + "metadata": { + "collapsed": false + }, + "id": "7f05d9e5f6d25125" + }, + { + "cell_type": "markdown", + "source": [ + "# Import Tasks to the Project" + ], + "metadata": { + "collapsed": false + }, + "id": "a106abb6cb3c42a1" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "ls.projects.import_tasks(\n", + " id=project.id,\n", + " request=[\n", + " {\"text\": \"Hello world\"},\n", + " {\"text\": \"Hello Label Studio\"},\n", + " {\"text\": \"What a beautiful day\"},\n", + " ]\n", + ")" + ], + "metadata": { + "collapsed": false + }, + "id": "f4d556fb04165434", + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "## Import Tasks with Pre-annotations" + ], + "metadata": { + "collapsed": false + }, + "id": "792066fed100cbda" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "ls.projects.import_tasks(\n", + " id=project.id,\n", + " request=[\n", + " {\"text\": \"Hello world\", \"sentiment\": \"Positive\"},\n", + " {\"text\": \"Goodbye Label Studio\", \"sentiment\": \"Negative\"},\n", + " {\"text\": \"What a beautiful day\", \"sentiment\": \"Positive\"},\n", + " ],\n", + " preannotated_from_fields=['sentiment']\n", + ")" + ], + "metadata": { + "collapsed": false + }, + "id": "5df9f60ae30651f8", + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "More customizable way to import preannotations:" + ], + "metadata": { + "collapsed": false + }, + "id": "47dd92fcdab936f8" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "from label_studio_sdk.label_interface.objects import PredictionValue\n", + "\n", + "# this returns the same `LabelInterface` object as above\n", + "li = ls.projects.get(id=project.id).get_label_interface()\n", + "\n", + "# by specifying what fields to `include` we can speed up task loading\n", + "for task in ls.tasks.list(project=project.id, include=[\"id\"]):\n", + " task_id = task.id\n", + " prediction = PredictionValue(\n", + " # tag predictions with specific model version string\n", + " # it can help managing multiple models in Label Studio UI\n", + " model_version='my_model_v1',\n", + " # define your labels here\n", + " result=[\n", + " li.get_control('label').label(['Positive']),\n", + " ]\n", + " )\n", + " ls.predictions.create(task=task_id, **prediction.model_dump())\n" + ], + "metadata": { + "collapsed": false + }, + "id": "687a17f9e370f275", + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "# Export Annotations" + ], + "metadata": { + "collapsed": false + }, + "id": "58cde0e2e4659c0f" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "for task in ls.tasks.list(project=project.id):\n", + " print(task.id)\n", + " print(task.annotations)\n", + " print(task.predictions)" + ], + "metadata": { + "collapsed": false + }, + "id": "24e1333f469965b", + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "Read more about task, annotations and predictions format in Label Studio [documentation](https://labelstud.io/guide/export#Label-Studio-JSON-format-of-annotated-tasks)." 
+ ], + "metadata": { + "collapsed": false + }, + "id": "b0c80cf5bbc3ffc0" + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/import_preannotations/import_preannotations.ipynb b/examples/import_preannotations/import_preannotations.ipynb index 09aa0885f..e18b4b825 100644 --- a/examples/import_preannotations/import_preannotations.ipynb +++ b/examples/import_preannotations/import_preannotations.ipynb @@ -32,16 +32,22 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, + "execution_count": 1, + "metadata": { + "ExecuteTime": { + "end_time": "2024-07-09T09:13:46.006673Z", + "start_time": "2024-07-09T09:13:44.982373Z" + } + }, "outputs": [], "source": [ - "from label_studio_sdk import Client\n", + "from label_studio_sdk.client import LabelStudio\n", "\n", "LABEL_STUDIO_URL = 'http://localhost:8080'\n", - "API_KEY = '91b3b61589784ed069b138eae3d5a5fe1e909f57'\n", + "# find your key at Account & Settings -> Access Token\n", + "API_KEY = '27c982caa9e599c9d81b25b00663e7d4f82c9e3c'\n", + "\n", - "ls = Client(url=LABEL_STUDIO_URL, api_key=API_KEY)" + "ls = LabelStudio(base_url=LABEL_STUDIO_URL, api_key=API_KEY)" ] }, { @@ -60,21 +66,41 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "project = ls.start_project(\n", + "execution_count": 2, + "metadata": { + "ExecuteTime": { + "end_time": "2024-07-09T09:15:24.191827Z", + "start_time": "2024-07-09T09:15:24.115609Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "<View>\n", + "  <Image name=\"image\" value=\"$image\"/>\n", + "  <Choices name=\"image_class\" toName=\"image\">\n", + "    <Choice value=\"Cat\"/>\n", + "    <Choice value=\"Dog\"/>\n", + "  </Choices>\n", + "</View>\n" + ] + } + ], + "source": [ + "from label_studio_sdk.label_interface import LabelInterface\n", + "from label_studio_sdk.label_interface.create import choices\n", + "\n", + "label_config = LabelInterface.create({\n", + " 'image': 'Image',\n", + " 'image_class': choices(['Cat', 'Dog'])\n", + "})\n", + "print(label_config)\n", + "\n", + "project = ls.projects.create(\n", " title='Project Created from SDK: Image Preannotation',\n", - " label_config='''\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " '''\n", + " label_config=label_config\n", ")" ] }, @@ -108,14 +134,28 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "metadata": { - "scrolled": true + "scrolled": true, + "ExecuteTime": { + "end_time": "2024-07-09T09:17:03.178950Z", + "start_time": "2024-07-09T09:17:03.124176Z" + } }, - "outputs": [], - "source": [ - "project.import_tasks(\n", - " [{\n", + "outputs": [ + { + "data": { + "text/plain": "ProjectsImportTasksResponse(task_count=2, annotation_count=0, predictions_count=None, duration=0.031484127044677734, file_upload_ids=[], could_be_tasks_list=False, found_formats=[], data_columns=[], prediction_count=2)" + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ls.projects.import_tasks(\n", + " project.id,\n", + " request=[{\n", " 'data': {'image': 'https://data.heartex.net/open-images/train_0/mini/0045dd96bf73936c.jpg'},\n", " 'predictions': [{\n", " 'result': [{\n", @@ -168,12 +208,27 @@ }, { "cell_type": "code",
"execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "project.import_tasks(\n", - " [{'image': f'https://data.heartex.net/open-images/train_0/mini/0045dd96bf73936c.jpg', 'pet': 'Dog'},\n", + "execution_count": 6, + "metadata": { + "ExecuteTime": { + "end_time": "2024-07-09T09:17:33.852323Z", + "start_time": "2024-07-09T09:17:33.804344Z" + } + }, + "outputs": [ + { + "data": { + "text/plain": "ProjectsImportTasksResponse(task_count=2, annotation_count=0, predictions_count=None, duration=0.027251005172729492, file_upload_ids=[], could_be_tasks_list=False, found_formats=[], data_columns=[], prediction_count=2)" + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ls.projects.import_tasks(\n", + " project.id,\n", + " request=[{'image': f'https://data.heartex.net/open-images/train_0/mini/0045dd96bf73936c.jpg', 'pet': 'Dog'},\n", " {'image': f'https://data.heartex.net/open-images/train_0/mini/0083d02f6ad18b38.jpg', 'pet': 'Cat'}],\n", " preannotated_from_fields=['pet']\n", ")" @@ -211,7 +266,7 @@ "outputs": [], "source": [ "import pandas as pd\n", - "pd.read_csv('data/images.csv')" + "df = pd.read_csv('data/images.csv')" ] }, { @@ -229,7 +284,10 @@ }, "outputs": [], "source": [ - "project.import_tasks('data/images.csv', preannotated_from_fields=['pet'])" + "ls.projects.import_tasks(\n", + " project.id, \n", + " request=df.to_dict(orient='records'),\n", + " preannotated_from_fields=['pet'])" ] }, { @@ -252,33 +310,22 @@ "metadata": {}, "outputs": [], "source": [ - "tasks_ids = project.get_tasks_ids()\n", - "project.create_prediction(tasks_ids[0], result='Dog', model_version='1')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "or bunch of predictions:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "predictions = [{\n", - " \"task\": tasks_ids[0],\n", - " \"result\": \"Dog\",\n", - " \"score\": 0.9\n", - "}, {\n", - " \"task\": tasks_ids[1],\n", - " \"result\": \"Cat\",\n", - " \"score\": 0.8\n", - "}]\n", - "project.create_predictions(predictions)" + "from label_studio_sdk.label_interface.objects import PredictionValue\n", + "\n", + "li = ls.projects.get(id=project.id).get_label_interface()\n", + "\n", + "for task in ls.tasks.list(project=project.id, include=['id']):\n", + " prediction = PredictionValue(\n", + " # tag predictions with specific model version string\n", + " # it can help managing multiple models in Label Studio UI\n", + " model_version='my_model_v1',\n", + " # define your labels here\n", + " result=[\n", + " li.get_control('image_class').label(['Dog']),\n", + " # ... 
if you have more labels, add them here\n", + " ]\n", + " )\n", + " ls.predictions.create(task=task.id, **prediction.model_dump())" ] }, { @@ -340,9 +387,9 @@ "\n", "total_score = 0\n", "n = 0\n", - "for task in project.tasks:\n", - " score = get_agreement(task['annotations'][0], task['predictions'][0])\n", - " print(f'{task[\"id\"]} ==> {score}')\n", + "for task in ls.tasks.list(project=project.id):\n", + " score = get_agreement(task.annotations[0], task.predictions[0])\n", + " print(f'{task.id} ==> {score}')\n", " total_score += score\n", " n += 1\n", "\n", @@ -364,6 +411,14 @@ "\n", "The `preannotated_from_fields` option for the `import_tasks()` method makes it easier to add your predictions without worrying about the intricacies of the Label Studio JSON format, but you can still use that field to add valuable metadata such as prediction scores and model versions to your pre-annotated task data. " ] + }, + { + "cell_type": "code", + "outputs": [], + "source": [], + "metadata": { + "collapsed": false + } } ], "metadata": { diff --git a/poetry.lock b/poetry.lock index a1e0c233d..8d1487335 100644 --- a/poetry.lock +++ b/poetry.lock @@ -68,13 +68,13 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p [[package]] name = "certifi" -version = "2024.6.2" +version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, - {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] @@ -839,84 +839,95 @@ xml = ["lxml (>=4.6.3)"] [[package]] name = "pillow" -version = "10.3.0" +version = "10.4.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.8" files = [ - {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, - {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, - {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, - {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, - {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, - {file = 
"pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, - {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, - {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, - {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, - {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, - {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, - {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, - {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, - {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, - {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, - {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, - {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, - {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, - {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, - {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, - {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, - {file = 
"pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, - {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, - {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, - {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, - {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, - {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, - {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, - {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, - {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, - {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, - {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, - {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, - {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, - {file 
= "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, - {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, + {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, + {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, + {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, + {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, + {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, + {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, + {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, + {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = 
"sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, + {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, + {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, + {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, + {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, + {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, + {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, + {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, + {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, + {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, + {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, + {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, + {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, + {file = "pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, + {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, + {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, + {file = 
"pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, + {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, + {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, + {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, + {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, + {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, + {file = "pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, + {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, + {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, + {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, + {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, + {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, + {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, + {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = 
"sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, + {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, + {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, + {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, + {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, + {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, + {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, ] [package.extras] -docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] fpx = ["olefile"] mic = ["olefile"] tests = 
["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] @@ -940,109 +951,122 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pydantic" -version = "2.7.4" +version = "2.8.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"}, - {file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"}, + {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, + {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.18.4" -typing-extensions = ">=4.6.1" +pydantic-core = "2.20.1" +typing-extensions = [ + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, +] [package.extras] email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.18.4" +version = "2.20.1" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"}, - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"}, - {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"}, - {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"}, - {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"}, - {file = 
"pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"}, - {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"}, - {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"}, - {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"}, - {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"}, - {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"}, - {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"}, - {file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"}, - {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"}, - {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"}, - {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"}, - {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"}, - {file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"}, - {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"}, - {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"}, - {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"}, - {file = "pydantic_core-2.18.4.tar.gz", hash = 
"sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, + {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, + {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, + {file = 
"pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, + {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, + {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, + {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, + {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, + 
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, + {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, + {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, + {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, + {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, + {file = 
"pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, + {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, + {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, + {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, ] [package.dependencies] @@ -1282,18 +1306,18 @@ fixture = ["fixtures"] [[package]] name = "setuptools" -version = "70.1.1" +version = "70.3.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-70.1.1-py3-none-any.whl", hash = "sha256:a58a8fde0541dab0419750bcc521fbdf8585f6e5cb41909df3a472ef7b81ca95"}, - {file = "setuptools-70.1.1.tar.gz", hash = "sha256:937a48c7cdb7a21eb53cd7f9b59e525503aa8abaf3584c730dc5f7a5bec3a650"}, + {file = "setuptools-70.3.0-py3-none-any.whl", hash = "sha256:fe384da74336c398e0d956d1cae0669bc02eed936cdb1d49b57de1990dc11ffc"}, + {file = "setuptools-70.3.0.tar.gz", hash = "sha256:f171bab1dfbc86b132997f26a119f6056a57950d058587841a0082e8830f9dc5"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" diff --git a/pyproject.toml b/pyproject.toml index 99b4de63b..eeddf7076 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "label-studio-sdk" -version = "1.0.4.dev" +version = "1.0.4" description = "" readme = "README.md" authors = [] diff --git a/reference.md b/reference.md new file mode 100644 index 000000000..33dd67295 --- /dev/null +++ b/reference.md @@ -0,0 +1,15741 @@ +# Reference +## Annotations +
client.annotations.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Tasks can have multiple annotations. Use this call to retrieve a specific annotation using its ID. + +You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.annotations.get( + id=1, +) + +``` +
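+
+A minimal follow-up sketch (assuming the returned annotation object exposes the `result` field documented under Parameters below):
+
+```python
+annotation = client.annotations.get(id=1)
+# Inspect the labeling result attached to this annotation.
+print(annotation.result)
+```
+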
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this annotation. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.annotations.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete an annotation. + +This action can't be undone! + +You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.annotations.delete( + id=1, +) + +``` +
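+
+Because deletion cannot be undone, a cautious sketch is to confirm the ID against the task's annotation list first (assuming, as under [Get all task annotations](list), that each returned annotation exposes an `id`):
+
+```python
+task_id = 1  # hypothetical task ID
+for annotation in client.annotations.list(id=task_id):
+    print(annotation.id)  # confirm which annotation you are about to remove
+client.annotations.delete(id=1)
+```
+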
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this annotation. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.annotations.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update attributes for an existing annotation. + +You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. + +For information about the JSON format used in the result, see [Label Studio JSON format of annotated tasks](https://labelstud.io/guide/export#Label-Studio-JSON-format-of-annotated-tasks). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.annotations.update( + id=1, + result=[ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + was_cancelled=False, + ground_truth=True, +) + +``` +
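+
+Every field besides `id` is optional, so partial updates are possible too; a sketch that only toggles the ground-truth flag without resending the result:
+
+```python
+client.annotations.update(
+    id=1,
+    ground_truth=False,  # only this field is sent in the request body
+)
+```
+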
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this annotation. + +
+
+ +
+
+ +**result:** `typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]]` — Labeling result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) + +
+
+ +
+
+ +**task:** `typing.Optional[int]` — Corresponding task for this annotation + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID for this annotation + +
+
+ +
+
+ +**completed_by:** `typing.Optional[int]` — User ID of the person who created this annotation + +
+
+ +
+
+ +**updated_by:** `typing.Optional[int]` — Last user who updated this annotation + +
+
+ +
+
+ +**was_cancelled:** `typing.Optional[bool]` — User skipped the task + +
+
+ +
+
+ +**ground_truth:** `typing.Optional[bool]` — This annotation is a Ground Truth + +
+
+ +
+
+ +**lead_time:** `typing.Optional[float]` — How much time it took to annotate the task (in seconds) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.annotations.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +List all annotations for a task. + +You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.annotations.list( + id=1, +) + +``` +
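+
+A sketch for walking the response (assuming it is a sequence of annotation objects with `id` and `result` attributes):
+
+```python
+for annotation in client.annotations.list(id=1):
+    print(annotation.id, annotation.result)
+```
+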
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Task ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.annotations.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Add annotations to a task like an annotator does. + +You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). + +The content of the result field depends on your labeling configuration. For example, send the following data as part of your POST +request to send an empty annotation with the ID of the user who completed the task: + +```json +{ +"result": {}, +"was_cancelled": true, +"ground_truth": true, +"lead_time": 0, +"task": 0, +"completed_by": 123 +} +``` +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.annotations.create( + id=1, + result=[ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + was_cancelled=False, + ground_truth=True, +) + +``` +
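+
+To mirror the empty-annotation JSON from the description through the SDK, a sketch (the SDK's `result` parameter is a sequence, so an empty list plays the role of the empty object; the user ID is a placeholder):
+
+```python
+client.annotations.create(
+    id=1,              # task ID
+    result=[],         # empty annotation
+    was_cancelled=True,
+    ground_truth=True,
+    lead_time=0,
+    completed_by=123,  # placeholder user ID
+)
+```
+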
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Task ID + +
+
+ +
+
+ +**result:** `typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]]` — Labeling result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) + +
+
+ +
+
+ +**task:** `typing.Optional[int]` — Corresponding task for this annotation + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID for this annotation + +
+
+ +
+
+ +**completed_by:** `typing.Optional[int]` — User ID of the person who created this annotation + +
+
+ +
+
+ +**updated_by:** `typing.Optional[int]` — Last user who updated this annotation + +
+
+ +
+
+ +**was_cancelled:** `typing.Optional[bool]` — User skipped the task + +
+
+ +
+
+ +**ground_truth:** `typing.Optional[bool]` — This annotation is a Ground Truth + +
+
+ +
+
+ +**lead_time:** `typing.Optional[float]` — How much time it took to annotate the task (in seconds) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Users +
client.users.reset_token() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Reset your access token or API key. When reset, any scripts or automations you have in place will need to be updated with the new key. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.users.reset_token() + +``` +
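+
+Because existing scripts must switch to the new key, one pattern is to re-create the client right after the reset; this sketch assumes the response exposes the new key as a `token` attribute:
+
+```python
+response = client.users.reset_token()
+# Re-authenticate with the freshly issued key (attribute name is an assumption).
+client = LabelStudio(api_key=response.token)
+```
+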
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.users.get_token() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get an access token to authenticate to the API as the current user. To find this in the Label Studio interface, click **Account & Settings** in the upper right. For more information, see [Access Token](https://labelstud.io/guide/user_account#Access-token). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.users.get_token() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.users.whoami() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get information about your user account, such as your username, email, and user ID. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.users.whoami() + +``` +
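+
+A sketch printing the account details the description mentions (assuming the returned user object exposes `id`, `username`, and `email`):
+
+```python
+me = client.users.whoami()
+print(me.id, me.username, me.email)
+```
+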
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.users.list() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +List all users in your Label Studio organization. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.users.list() + +``` +
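+
+A sketch for walking the organization's members (assuming a sequence of user objects with an `email` attribute):
+
+```python
+for user in client.users.list():
+    print(user.email)
+```
+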
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.users.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a user in Label Studio. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.users.create() + +``` +
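+
+The generated example sends an empty body; in practice you would pass some of the optional fields documented below, for instance (all values are placeholders):
+
+```python
+client.users.create(
+    username="jdoe",
+    email="jdoe@example.com",
+    first_name="Jane",
+    last_name="Doe",
+)
+```
+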
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `typing.Optional[int]` — User ID + +
+
+ +
+
+ +**first_name:** `typing.Optional[str]` — First name of the user + +
+
+ +
+
+ +**last_name:** `typing.Optional[str]` — Last name of the user + +
+
+ +
+
+ +**username:** `typing.Optional[str]` — Username of the user + +
+
+ +
+
+ +**email:** `typing.Optional[str]` — Email of the user + +
+
+ +
+
+ +**avatar:** `typing.Optional[str]` — Avatar URL of the user + +
+
+ +
+
+ +**initials:** `typing.Optional[str]` — Initials of the user + +
+
+ +
+
+ +**phone:** `typing.Optional[str]` — Phone number of the user + +
+
+ +
+
+ +**allow_newsletters:** `typing.Optional[bool]` — Whether the user allows newsletters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.users.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get info about a specific Label Studio user. +You will need to provide their user ID. You can find a list of all user IDs using [List users](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.users.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — User ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.users.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific Label Studio user. + +You will need to provide their user ID. You can find a list of all user IDs using [List users](list). + +Use caution when deleting a user, as this can cause issues such as breaking the "Annotated by" filter or leaving orphaned records. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.users.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — User ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.users.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update details for a specific Label Studio user, such as their name or contact information. + +You will need to provide their user ID. You can find a list of all user IDs using [List users](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.users.update( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — User ID + +
+
+ +
+
+ +**users_update_request_id:** `typing.Optional[int]` — User ID + +
+
+ +
+
+ +**first_name:** `typing.Optional[str]` — First name of the user + +
+
+ +
+
+ +**last_name:** `typing.Optional[str]` — Last name of the user + +
+
+ +
+
+ +**username:** `typing.Optional[str]` — Username of the user + +
+
+ +
+
+ +**email:** `typing.Optional[str]` — Email of the user + +
+
+ +
+
+ +**avatar:** `typing.Optional[str]` — Avatar URL of the user + +
+
+ +
+
+ +**initials:** `typing.Optional[str]` — Initials of the user + +
+
+ +
+
+ +**phone:** `typing.Optional[str]` — Phone number of the user + +
+
+ +
+
+ +**allow_newsletters:** `typing.Optional[bool]` — Whether the user allows newsletters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Actions +
client.actions.list() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Retrieve all of the registered actions, with descriptions, that the Data Manager can use. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.actions.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.actions.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Perform a Data Manager action with the selected tasks and filters. Note: More complex actions require additional parameters in the request body. Call `GET api/actions?project=<project_id>` to explore them.
Example: `GET api/actions?id=delete_tasks&project=1` +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import ( + ActionsCreateRequestFilters, + ActionsCreateRequestFiltersItemsItem, + ActionsCreateRequestSelectedItemsExcluded, +) +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.actions.create( + id="retrieve_tasks_predictions", + project=1, + filters=ActionsCreateRequestFilters( + conjunction="or", + items=[ + ActionsCreateRequestFiltersItemsItem( + filter="filter:tasks:id", + operator="greater", + type="Number", + value=123, + ) + ], + ), + selected_items=ActionsCreateRequestSelectedItemsExcluded( + all_=True, + excluded=[124, 125, 126], + ), + ordering=["tasks:total_annotations"], +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `ActionsCreateRequestId` — Action name ID, see the full list of actions in the `GET api/actions` request + +
+
+ +
+
+ +**project:** `int` — Project ID + +
+
+ +
+
+ +**view:** `typing.Optional[int]` — View ID (optional, it has higher priority than filters, selectedItems and ordering from the request body payload) + +
+
+ +
+
+ +**filters:** `typing.Optional[ActionsCreateRequestFilters]` — Filters to apply on tasks. You can use [the helper class `Filters` from this page](https://labelstud.io/sdk/data_manager.html) to create Data Manager Filters.
Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` + +
+
+ +
+
+ +**selected_items:** `typing.Optional[ActionsCreateRequestSelectedItems]` — Task selection by IDs. If filters are applied, the selection will be applied to the filtered tasks. If "all" is `false`, `"included"` must be used. If "all" is `true`, `"excluded"` must be used.
Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` + +
+
+ +
+
+ +**ordering:** `typing.Optional[typing.Sequence[ActionsCreateRequestOrderingItem]]` — List of fields to order by. Fields are similar to filters but without the `filter:` prefix. To reverse the order, add a minus sign before the field name, e.g. `-tasks:created_at`. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Views +
client.views.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +List all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize which tasks and information appear. + +You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.views.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.views.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a new Data Manager view for a specific project. A view is a tab in the Data Manager where you can set filters and customize which tasks and information appear. + +You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.views.create() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**data:** `typing.Optional[ViewsCreateRequestData]` — Custom view data + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.views.delete_all(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks appear. + +You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.views.delete_all( + project=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `int` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.views.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get the details about a specific Data Manager view (tab). You will need to supply the view ID. You can find this using [List views](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.views.get( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` — View ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.views.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific Data Manager view (tab) by ID. You can find the view using [List views](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.views.delete( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` — View ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.views.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +You can update a specific Data Manager view (tab) with additional filters and other customizations. You will need to supply the view ID. You can find this using [List views](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.views.update( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` — View ID + +
+
+ +
+
+ +**data:** `typing.Optional[ViewsUpdateRequestData]` — Custom view data + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Files +
client.files.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Retrieve details about a specific uploaded file. To get the file upload ID, use [Get files list](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.files.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this file upload. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.files.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific uploaded file. To get the file upload ID, use [Get files list](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.files.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this file upload. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.files.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a specific uploaded file. To get the file upload ID, use [Get files list](list). + +You will need to include the file data in the request body. For example: + +```bash +curl -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/import/file-upload/245' -F 'file=@path/to/my_file.csv' +``` +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import FileUpload +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.files.update( + id=1, + request=FileUpload(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this file upload. + +
+
+ +
+
+ +**request:** `FileUpload` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.files.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Retrieve the list of uploaded files used to create labeling tasks for a specific project. These are files that have been uploaded directly to Label Studio. + +You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.files.list( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this file upload. + +
+
+ +
+
+ +**all_:** `typing.Optional[bool]` — Set to "true" if you want to retrieve all file uploads + +
+
+ +
+
+ +**ids:** `typing.Optional[typing.Union[int, typing.Sequence[int]]]` — Specify the list of file upload IDs to retrieve, e.g. ids=[1,2,3] + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.files.delete_many(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete uploaded files for a specific project. These are files that have been uploaded directly to Label Studio. + +You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.files.delete_many( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this file upload. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.files.download(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Download a specific uploaded file. If you aren't sure of the file name, try [Get files list](list) first. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.files.download( + filename="filename", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**filename:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Projects +
client.projects.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Return a list of the projects within your organization. + +To perform most tasks with the Label Studio API, you must specify the project ID, sometimes referred to as the `pk`. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using this API call. + +To retrieve a list of your Label Studio projects, update the following command to match your own environment. +Replace the domain name, port, and authorization token, then run the following from the command line: + +```bash +curl -X GET https://localhost:8080/api/projects/ -H 'Authorization: Token abc123' +``` +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +response = client.projects.list() +for item in response: + print(item) +# alternatively, you can paginate page-by-page +for page in response.iter_pages(): + print(page) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**ordering:** `typing.Optional[str]` — Which field to use when ordering the results. + +
+
+ +
+
+ +**ids:** `typing.Optional[str]` — ids + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — title + +
+
+ +
+
+ +**page:** `typing.Optional[int]` — A page number within the paginated result set. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.projects.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a project and set up the labeling interface. For more information about setting up projects, see the following: + +- [Create and configure projects](https://labelstud.io/guide/setup_project) +- [Configure labeling interface](https://labelstud.io/guide/setup) +- [Project settings](https://labelstud.io/guide/project_settings) + +```bash +curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects' --data '{"label_config": "[...]"}' +``` +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.projects.create() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Project title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Project description + +
+
+ +
+
+ +**label_config:** `typing.Optional[str]` — Label config in XML format + +
+
+ +
+
+ +**expert_instruction:** `typing.Optional[str]` — Labeling instructions to show to the user + +
+
+ +
+
+ +**show_instruction:** `typing.Optional[bool]` — Show labeling instructions + +
+
+ +
+
+ +**show_skip_button:** `typing.Optional[bool]` — Show skip button + +
+
+ +
+
+ +**enable_empty_annotation:** `typing.Optional[bool]` — Allow empty annotations + +
+
+ +
+
+ +**show_annotation_history:** `typing.Optional[bool]` — Show annotation history + +
+
+ +
+
+ +**reveal_preannotations_interactively:** `typing.Optional[bool]` — Reveal preannotations interactively. If set to True, predictions will be shown to the user only after selecting the area of interest + +
+
+ +
+
+ +**show_collab_predictions:** `typing.Optional[bool]` — Show predictions to annotators + +
+
+ +
+
+ +**maximum_annotations:** `typing.Optional[int]` — Maximum annotations per task + +
+
+ +
+
+ +**color:** `typing.Optional[str]` — Project color in HEX format + +
+
+ +
+
+ +**control_weights:** `typing.Optional[typing.Dict[str, typing.Any]]` — Dict of weights for each control tag used in metric calculation. Each control tag (e.g. label or choice) has its own key in the control weight dict, with a weight for each label and an overall weight. For example, if a bounding box annotation with a control tag named `my_bbox` should be included with 0.33 weight in the agreement calculation, and the first label `Car` should be twice as important as `Airplane`, specify: `{'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}}` (see the sketch after this parameter list) + +
+
+ +
+
+ +**workspace:** `typing.Optional[int]` — Workspace ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
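+
+As noted in the `control_weights` description above, here is a minimal sketch of passing that parameter to `create`. The tag name, labels, and weights are illustrative values only, not defaults:
+
+```python
+from label_studio_sdk.client import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+# Hypothetical control tag name "my_bbox"; match the names in your label config.
+client.projects.create(
+    title="Weighted agreement project",
+    control_weights={
+        "my_bbox": {
+            "type": "RectangleLabels",
+            "labels": {"Car": 1.0, "Airplane": 0.5},
+            "overall": 0.33,
+        }
+    },
+)
+```
+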
+
+
+ +
client.projects.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Retrieve information about a specific project by project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.projects.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this project. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.projects.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a project by specified project ID. Deleting a project permanently removes all tasks, annotations, and project data from Label Studio. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.projects.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this project. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.projects.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update the project settings for a specific project. For more information, see the following: + +- [Create and configure projects](https://labelstud.io/guide/setup_project) +- [Configure labeling interface](https://labelstud.io/guide/setup) +- [Project settings](https://labelstud.io/guide/project_settings) + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). + + +If you are modifying the labeling config for a project that has in-progress work, note the following: +* You cannot remove labels or change the type of labeling being performed unless you delete any existing annotations that are using those labels. +* If you make changes to the labeling configuration, any tabs that you might have created in the Data Manager are removed. + +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.projects.update( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this project. + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Project title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Project description + +
+
+ +
+
+ +**label_config:** `typing.Optional[str]` — Label config in XML format + +
+
+ +
+
+ +**expert_instruction:** `typing.Optional[str]` — Labeling instructions to show to the user + +
+
+ +
+
+ +**show_instruction:** `typing.Optional[bool]` — Show labeling instructions + +
+
+ +
+
+ +**show_skip_button:** `typing.Optional[bool]` — Show skip button + +
+
+ +
+
+ +**enable_empty_annotation:** `typing.Optional[bool]` — Allow empty annotations + +
+
+ +
+
+ +**show_annotation_history:** `typing.Optional[bool]` — Show annotation history + +
+
+ +
+
+ +**reveal_preannotations_interactively:** `typing.Optional[bool]` — Reveal preannotations interactively. If set to True, predictions will be shown to the user only after selecting the area of interest + +
+
+ +
+
+ +**show_collab_predictions:** `typing.Optional[bool]` — Show predictions to annotators + +
+
+ +
+
+ +**maximum_annotations:** `typing.Optional[int]` — Maximum annotations per task + +
+
+ +
+
+ +**color:** `typing.Optional[str]` — Project color in HEX format + +
+
+ +
+
+ +**control_weights:** `typing.Optional[typing.Dict[str, typing.Any]]` — Dict of weights for each control tag used in metric calculation. Each control tag (e.g. label or choice) has its own key in the control weight dict, with a weight for each label and an overall weight. For example, if a bounding box annotation with a control tag named `my_bbox` should be included with 0.33 weight in the agreement calculation, and the first label `Car` should be twice as important as `Airplane`, specify: `{'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}}` + +
+
+ +
+
+ +**workspace:** `typing.Optional[int]` — Workspace ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.projects.import_tasks(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Use this API endpoint to import labeling tasks in bulk. Note that each POST request is limited to 250K tasks and 200 MB. +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + +Imported data is verified against a project *label_config* and must include all variables that were used in the *label_config*. + +For example, if the label configuration has a _$text_ variable, then each item in a data object must include a `text` field. + + +There are three possible ways to import tasks with this endpoint: + +#### 1\. **POST with data** + +Send JSON tasks as POST data. Only JSON is supported for POSTing files directly. + +Update this example to specify your authorization token and Label Studio instance host, then run the following from +the command line: + +```bash +curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ +-X POST 'https://localhost:8080/api/projects/1/import' --data '[{"text": "Some text 1"}, {"text": "Some text 2"}]' +``` + +#### 2\. **POST with files** + +Send tasks as files. You can attach multiple files with different names. + +- **JSON**: text files in JavaScript object notation format +- **CSV**: text files with tables in Comma Separated Values format +- **TSV**: text files with tables in Tab Separated Value format +- **TXT**: simple text files are similar to CSV with one column and no header, supported for projects with one source only + +Update this example to specify your authorization token, Label Studio instance host, and file name and path, +then run the following from the command line: + +```bash +curl -H 'Authorization: Token abc123' \ +-X POST 'https://localhost:8080/api/projects/1/import' -F 'file=@path/to/my_file.csv' +``` + +#### 3\. **POST with URL** + +You can also provide a URL to a file with labeling tasks. Supported file formats are the same as in option 2. + +```bash +curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ +-X POST 'https://localhost:8080/api/projects/1/import' \ +--data '[{"url": "http://example.com/test1.csv"}, {"url": "http://example.com/test2.csv"}]' +``` + +
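+
+For reference, option 1 maps to the SDK as in this minimal sketch. The project ID and task payloads are illustrative, and the project's label config is assumed to use a `$text` variable:
+
+```python
+from label_studio_sdk.client import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+# Equivalent of POSTing JSON tasks directly with curl.
+client.projects.import_tasks(
+    id=1,
+    request=[{"text": "Some text 1"}, {"text": "Some text 2"}],
+)
+```
+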
+
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.projects.import_tasks( + id=1, + request=[{}], +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this project. + +
+
+ +
+
+ +**request:** `typing.Sequence[typing.Dict[str, typing.Any]]` + +
+
+ +
+
+ +**commit_to_project:** `typing.Optional[bool]` — Set to "true" to immediately commit tasks to the project. + +
+
+ +
+
+ +**return_task_ids:** `typing.Optional[bool]` — Set to "true" to return task IDs in the response. + +
+
+ +
+
+ +**preannotated_from_fields:** `typing.Optional[typing.Union[str, typing.Sequence[str]]]` — List of fields to preannotate from the task data. For example, if you provide a list of `{"text": "text", "prediction": "label"}` items in the request, the system will create a task with the `text` field and a prediction with the `label` field when `preannotated_from_fields=["prediction"]` (see the sketch after this parameter list). + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
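+
+A minimal sketch of importing pre-annotated tasks with `preannotated_from_fields`, as referenced in the parameter description above. The field values are illustrative and assume a label config with a `$text` variable:
+
+```python
+from label_studio_sdk.client import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+# Each item carries task data ("text") plus a column ("prediction") that is
+# converted into a prediction because it is listed in preannotated_from_fields.
+client.projects.import_tasks(
+    id=1,
+    request=[
+        {"text": "Great product!", "prediction": "positive"},
+        {"text": "Arrived broken.", "prediction": "negative"},
+    ],
+    preannotated_from_fields=["prediction"],
+)
+```
+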
+
+
+ +
client.projects.validate_config(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Determine whether the label configuration for a specific project is valid. For more information about setting up labeling configs, see [Configure labeling interface](https://labelstud.io/guide/setup) and our [Tags reference](https://labelstud.io/tags/). + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import ProjectLabelConfig +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.projects.validate_config( + id=1, + request=ProjectLabelConfig( + label_config="label_config", + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this project. + +
+
+ +
+
+ +**request:** `ProjectLabelConfig` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ml +
client.ml.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +List all configured Machine Learning (ML) backends for a specific project by ID. For more information about ML backends, see [Machine learning integration](https://labelstud.io/guide/ml). + +You will need to provide the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.ml.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ml.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Add an ML backend to a project. For more information about what you need to configure when adding an ML backend, see [Connect the model to Label Studio](https://labelstud.io/guide/ml#Connect-the-model-to-Label-Studio). + +If you are using Docker Compose, you may need to adjust your ML backend URL. See [localhost and Docker containers](https://labelstud.io/guide/ml#localhost-and-Docker-containers). + +If you are using files that are located in the cloud, local storage, or uploaded to Label Studio, you must configure your environment variables to allow the ML backend to interact with those files. See [Allow the ML backend to access Label Studio](https://labelstud.io/guide/ml#Allow-the-ML-backend-to-access-Label-Studio-data). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.ml.create() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**url:** `typing.Optional[str]` — ML backend URL + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**is_interactive:** `typing.Optional[bool]` — Is interactive + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Description + +
+
+ +
+
+ +**auth_method:** `typing.Optional[MlCreateRequestAuthMethod]` — Auth method + +
+
+ +
+
+ +**basic_auth_user:** `typing.Optional[str]` — Basic auth user + +
+
+ +
+
+ +**basic_auth_pass:** `typing.Optional[str]` — Basic auth password + +
+
+ +
+
+ +**extra_params:** `typing.Optional[typing.Dict[str, typing.Any]]` — Extra parameters + +
+
+ +
+
+ +**timeout:** `typing.Optional[int]` — Response model timeout + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ml.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get details about a specific ML backend. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + +For more information, see [Machine learning integration](https://labelstud.io/guide/ml). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.ml.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this ml backend. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ml.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Remove an existing ML backend connection. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + +For more information, see [Machine learning integration](https://labelstud.io/guide/ml). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.ml.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this ml backend. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ml.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update the ML backend parameters. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + +For more information, see [Machine learning integration](https://labelstud.io/guide/ml). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.ml.update( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this ml backend. + +
+
+ +
+
+ +**url:** `typing.Optional[str]` — ML backend URL + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**is_interactive:** `typing.Optional[bool]` — Is interactive + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Description + +
+
+ +
+
+ +**auth_method:** `typing.Optional[MlUpdateRequestAuthMethod]` — Auth method + +
+
+ +
+
+ +**basic_auth_user:** `typing.Optional[str]` — Basic auth user + +
+
+ +
+
+ +**basic_auth_pass:** `typing.Optional[str]` — Basic auth password + +
+
+ +
+
+ +**extra_params:** `typing.Optional[typing.Dict[str, typing.Any]]` — Extra parameters + +
+
+ +
+
+ +**timeout:** `typing.Optional[int]` — Response model timeout + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ml.predict_interactive(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Enable interactive pre-annotations for a specific task. + +ML-assisted labeling with interactive pre-annotations works with image segmentation and object detection tasks using rectangles, ellipses, polygons, brush masks, and keypoints, as well as with HTML and text named entity recognition tasks. Your ML backend must support the type of labeling that you're performing, recognize the input that you create, and be able to respond with the relevant output for a prediction. For more information, see [Interactive pre-annotations](https://labelstud.io/guide/ml.html#Interactive-pre-annotations). + +Before you can use interactive pre-annotations, they must be enabled for your ML backend connection (`"is_interactive": true`). + +You will need the task ID and the ML backend connection ID. The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](../tasks/list). The ML backend connection ID is available via [List ML backends](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.ml.predict_interactive( + id=1, + task=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this ML backend. + +
+
+ +
+
+ +**task:** `int` — ID of task to annotate + +
+
+ +
+
+ +**context:** `typing.Optional[typing.Dict[str, typing.Any]]` — Context for ML model + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ml.train(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +After you connect a model to Label Studio as a machine learning backend and annotate at least one task, you can start training the model. Training logs appear in stdout and the console. + +For more information, see [Model training](https://labelstud.io/guide/ml.html#Model-training). + +You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.ml.train( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this ML backend. + +
+
+ +
+
+ +**use_ground_truth:** `typing.Optional[bool]` — Whether to include ground truth annotations in training + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ml.list_model_versions(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get available versions of the model. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.ml.list_model_versions( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Predictions +
client.predictions.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a list of all predictions. You can optionally filter these by task or by project. If you want to filter, you will need the project ID and/or task ID. Both of these can be found in the Label Studio URL when viewing a task, or you can use [List all projects](../projects/list) and [Get tasks list](../tasks/list). + +The terms "predictions" and "pre-annotations" are used interchangeably. + +Predictions can be [imported directly into Label Studio](https://labelstud.io/guide/predictions) or [generated by a connected ML backend](https://labelstud.io/guide/ml.html#Pre-annotations-predictions). + +To import predictions via the API, see [Create prediction](create). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.predictions.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**task:** `typing.Optional[int]` — Filter predictions by task ID + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Filter predictions by project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.predictions.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +If you have predictions generated for your dataset from a model, either as pre-annotated tasks or pre-labeled tasks, you can import the predictions with your dataset into Label Studio for review and correction. + +To import predicted labels into Label Studio, you must use the [Basic Label Studio JSON format](https://labelstud.io/guide/tasks#Basic-Label-Studio-JSON-format) and set up your tasks with the predictions JSON key. The Label Studio ML backend also outputs tasks in this format. + +#### JSON format for predictions + +Label Studio JSON format for pre-annotations must contain two sections: + +- A data object which references the source of the data that the pre-annotations apply to. This can be a URL to an audio file, a pre-signed cloud storage link to an image, plain text, a reference to a CSV file stored in Label Studio, or something else. +- A predictions array that contains the pre-annotation results for the different types of labeling. See how to add results to the predictions array. + +For more information, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations) +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.predictions.create( + result=[ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "rectanglelabels": ["Person"], + }, + } + ], + score=0.95, + model_version="yolo-v8", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**task:** `typing.Optional[int]` — Task ID for which the prediction is created + +
+
+ +
+
+ +**result:** `typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]]` — Prediction result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) + +
+
+ +
+
+ +**score:** `typing.Optional[float]` — Prediction score. Can be used in the Data Manager to sort tasks by model confidence. Tasks with the lowest score are shown first. + +
+
+ +
+
+ +**model_version:** `typing.Optional[str]` — Model version - a tag for predictions that can be used to filter tasks in the Data Manager, as well as to select a specific model version for showing pre-annotations in the labeling interface + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.predictions.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get details about a specific prediction by its ID. To find the prediction ID, use [List predictions](list). + +For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.predictions.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Prediction ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.predictions.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a prediction. To find the prediction ID, use [List predictions](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.predictions.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Prediction ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.predictions.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a prediction. To find the prediction ID, use [List predictions](list). + +For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.predictions.update( + id=1, + result=[ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "rectanglelabels": ["Person"], + }, + } + ], + score=0.95, + model_version="yolo-v8", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Prediction ID + +
+
+ +
+
+ +**task:** `typing.Optional[int]` — Task ID for which the prediction is created + +
+
+ +
+
+ +**result:** `typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]]` — Prediction result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) + +
+
+ +
+
+ +**score:** `typing.Optional[float]` — Prediction score. Can be used in the Data Manager to sort tasks by model confidence. Tasks with the lowest score are shown first. + +
+
+ +
+
+ +**model_version:** `typing.Optional[str]` — Model version - a tag for predictions that can be used to filter tasks in the Data Manager, as well as to select a specific model version for showing pre-annotations in the labeling interface + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Projects Exports +
client.projects.exports.create_export(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +If you have a large project, it's recommended to use export snapshots; this simple export endpoint might time out. +Export annotated tasks as a file in a specific format. +For example, to export JSON annotations for a project to a file called `annotations.json`, +run the following from the command line: + +```bash +curl -X GET 'https://localhost:8080/api/projects/{id}/export?exportType=JSON' -H 'Authorization: Token abc123' --output 'annotations.json' +``` + +To export all tasks, including skipped tasks and others without annotations, run the following from the command line (the URL is quoted so the shell does not treat `&` as a background operator): + +```bash +curl -X GET 'https://localhost:8080/api/projects/{id}/export?exportType=JSON&download_all_tasks=true' -H 'Authorization: Token abc123' --output 'annotations.json' +``` + +To export specific tasks with IDs of 123 and 345, run the following from the command line: + +```bash +curl -X GET 'https://localhost:8080/api/projects/{id}/export?ids[]=123&ids[]=345' -H 'Authorization: Token abc123' --output 'annotations.json' +``` + +You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.projects.exports.create_export( + id=1, + export_type="string", + download_all_tasks="string", + download_resources=True, + ids=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this project. + +
+
+ +
+
+ +**export_type:** `typing.Optional[str]` — Selected export format (JSON by default) + +
+
+ +
+
+ +**download_all_tasks:** `typing.Optional[str]` — If true, download all tasks regardless of status. If false, download only annotated tasks. + +
+
+ +
+
+ +**download_resources:** `typing.Optional[bool]` — If true, download all resource files such as images, audio, and others relevant to the tasks. + +
+
+ +
+
+ +**ids:** `typing.Optional[typing.Union[int, typing.Sequence[int]]]` — Specify a list of task IDs to retrieve only the details for those tasks. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.projects.exports.list_formats(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Before exporting annotations, you can check which formats are supported by the specified project. For more information about export formats, see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). + +You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.projects.exports.list_formats( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this project. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.projects.exports.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of export files (snapshots) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + +Included in the response is information about each snapshot, such as who created it and what format it is in. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.projects.exports.list( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this project. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.projects.exports.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a new export request to start a background task and generate an export file (snapshot) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + +A snapshot is a static export of your project's data and annotations at a specific point in time. It captures the current state of your tasks, annotations, and other relevant data, allowing you to download and review them later. Snapshots are particularly useful for large projects as they help avoid timeouts during export operations by processing the data asynchronously. + +For more information, see the [Label Studio documentation on exporting annotations](https://labelstud.io/guide/export.html). +
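+
+A minimal sketch of the full snapshot workflow under one assumption: that the snapshot object returned by `create` exposes its primary key as `id` (the exact field name is not confirmed here):
+
+```python
+from label_studio_sdk import ExportCreate
+from label_studio_sdk.client import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+# 1. Start the background export (snapshot) job.
+snapshot = client.projects.exports.create(id=1, request=ExportCreate())
+export_pk = str(snapshot.id)  # assumed attribute name
+# 2. Inspect the snapshot's status.
+info = client.projects.exports.get(id=1, export_pk=export_pk)
+# 3. Download it once it is ready (readiness check omitted in this sketch).
+client.projects.exports.download(id=1, export_pk=export_pk, export_type="JSON")
+```
+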
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import ExportCreate +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.projects.exports.create( + id=1, + request=ExportCreate(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this project. + +
+
+ +
+
+ +**request:** `ExportCreate` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.projects.exports.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Retrieve information about a specific export file (snapshot). + +You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). + +You will also need the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.projects.exports.get( + id=1, + export_pk="export_pk", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this project. + +
+
+ +
+
+ +**export_pk:** `str` — Primary key identifying the export file. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.projects.exports.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete an export file by specified export ID. + +You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.projects.exports.delete( + id=1, + export_pk="export_pk", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this project. + +
+
+ +
+
+ +**export_pk:** `str` — Primary key identifying the export file. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.projects.exports.convert(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +You can use this to convert an export snapshot into the selected format. + +To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). + +You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import ExportConvert +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.projects.exports.convert( + id=1, + export_pk="export_pk", + request=ExportConvert( + export_type="export_type", + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this project. + +
+
+ +
+
+ +**export_pk:** `str` — Primary key identifying the export file. + +
+
+ +
+
+ +**request:** `ExportConvert` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.projects.exports.download(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Download an export snapshot as a file in a specified format. To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). + +You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.projects.exports.download( + id=1, + export_pk="export_pk", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this project. + +
+
+ +
+
+ +**export_pk:** `str` — Primary key identifying the export file. + +
+
+ +
+
+ +**export_type:** `typing.Optional[str]` — Selected export format + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Tasks +
client.tasks.create_many_status(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get information about an async project import operation. This can be especially useful to monitor status, as large import jobs can take time. + +You will need the project ID and the unique ID of the import operation. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + +The import ID is returned as part of the response when you call [Import tasks](import-tasks). +
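+
+A hedged polling sketch; the `status` attribute and its values ("completed", "failed") are assumptions about the response shape, and `"import_pk"` stands in for the ID returned by [Import tasks](import-tasks):
+
+```python
+import time
+
+from label_studio_sdk.client import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+# Poll until the async import reaches a terminal state.
+while True:
+    job = client.tasks.create_many_status(id=1, import_pk="import_pk")
+    if getattr(job, "status", None) in ("completed", "failed"):
+        break
+    time.sleep(2)  # fixed delay keeps the sketch simple
+```
+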
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.tasks.create_many_status( + id=1, + import_pk="import_pk", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — The project ID. + +
+
+ +
+
+ +**import_pk:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.tasks.delete_all_tasks(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete all tasks from a specific project. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.tasks.delete_all_tasks( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this project. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.tasks.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Retrieve a list of tasks. + +You can use the query parameters to filter the list by project and/or view (a tab within the Data Manager). You can also optionally add pagination to make the response easier to parse. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). The view ID can be found using [List views](../views/list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +response = client.tasks.list() +for item in response: + print(item) +# alternatively, you can paginate page-by-page +for page in response.iter_pages(): + print(page) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**page:** `typing.Optional[int]` — A page number within the paginated result set. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**view:** `typing.Optional[int]` — View ID + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**resolve_uri:** `typing.Optional[bool]` — Resolve task data URIs using Cloud Storage + +
+
+ +
+
+ +**fields:** `typing.Optional[TasksListRequestFields]` — Set to "all" if you want to include annotations and predictions in the response + +
+
+ +
+
+ +**review:** `typing.Optional[bool]` — Get tasks for review + +
+
+ +
+
+ +**include:** `typing.Optional[str]` — Specify which fields to include in the response + +
+
+ +
+
+ +**query:** `typing.Optional[str]` + +Additional query to filter tasks. It must be a JSON-encoded string of a dict containing one of the following parameters: `{"filters": ..., "selectedItems": ..., "ordering": ...}`. Check [Data Manager > Create View > see `data` field](#tag/Data-Manager/operation/api_dm_views_create) for more details about filters, selectedItems and ordering; a construction sketch follows the list below. + +- **filters**: dict with a `"conjunction"` string (`"or"` or `"and"`) and a list of filters in the `"items"` array. Each filter is a dictionary with keys: `"filter"`, `"operator"`, `"type"`, `"value"`. [Read more about available filters](https://labelstud.io/sdk/data_manager.html)
Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` +- **selectedItems**: dictionary with keys: `"all"`, `"included"`, `"excluded"`. If `"all"` is `false`, `"included"` must be used. If `"all"` is `true`, `"excluded"` must be used.
Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` +- **ordering**: list of fields to order by. Currently, only a single ordering field is supported.
+ Example: `["completed_at"]` + +
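+
+Because `query` must be a JSON-encoded string, it is typically built as a plain dict and serialized with `json.dumps`. A minimal sketch using the filter example above:
+
+```python
+import json
+
+from label_studio_sdk.client import LabelStudio
+
+client = LabelStudio(api_key="YOUR_API_KEY")
+
+# Build the filter dict, then JSON-encode it for the `query` parameter.
+filters = {
+    "filters": {
+        "conjunction": "or",
+        "items": [
+            {
+                "filter": "filter:tasks:completed_at",
+                "operator": "greater",
+                "type": "Datetime",
+                "value": "2021-01-01T00:00:00.000Z",
+            }
+        ],
+    }
+}
+tasks = client.tasks.list(project=1, query=json.dumps(filters))
+```
+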
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.tasks.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a new labeling task in Label Studio. + +The data you provide depends on your labeling config and data type. + +You will also need to provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.tasks.create( + data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, + project=1, +) + +``` +
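+
+The keys in `data` must match the value variables in the project's labeling config. A minimal sketch for a hypothetical config that references `$image` and `$text`:
+
+```python
+from label_studio_sdk.client import LabelStudio
+
+client = LabelStudio(api_key="YOUR_API_KEY")
+
+# For a labeling config such as:
+#   <View>
+#     <Image name="img" value="$image"/>
+#     <Text name="txt" value="$text"/>
+#   </View>
+# the task data keys must be "image" and "text".
+task = client.tasks.create(
+    project=1,
+    data={"image": "https://example.com/image.jpg", "text": "Hello, world!"},
+)
+```
+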
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**data:** `typing.Optional[typing.Dict[str, typing.Any]]` — Task data dictionary with arbitrary keys and values + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.tasks.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get task data, metadata, annotations and other attributes for a specific labeling task by task ID. +The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.tasks.get( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` — Task ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.tasks.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a task in Label Studio. + +You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). + +This action cannot be undone. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.tasks.delete( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` — Task ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.tasks.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update the attributes of an existing labeling task. + +You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.tasks.update( + id="id", + data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, + project=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` — Task ID + +
+
+ +
+
+ +**data:** `typing.Optional[typing.Dict[str, typing.Any]]` — Task data dictionary with arbitrary keys and values + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## ImportStorage +
client.import_storage.list_types() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Retrieve a list of the available import storage types. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.list_types() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## ImportStorage Azure +
client.import_storage.azure.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. Use this API request to get a list of all Azure import (source) storage connections for a specific project. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.azure.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.azure.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a new source storage connection to Microsoft Azure Blob storage. + +For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. + +Ensure you configure CORS before adding cloud storage; otherwise you will see only links to your data rather than its content. + +After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.azure.create() + +``` +
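+
+Putting the steps together, a minimal create → validate → sync sketch. The container and credential values below are placeholders, and it is assumed the create call returns the new storage record with its `id`:
+
+```python
+from label_studio_sdk.client import LabelStudio
+
+client = LabelStudio(api_key="YOUR_API_KEY")
+
+# 1. Create the source storage connection (all values are placeholders).
+storage = client.import_storage.azure.create(
+    project=1,
+    container="my-container",
+    prefix="images/",
+    account_name="my-account",
+    account_key="my-account-key",
+    regex_filter=r".*\.jpg",
+    use_blob_urls=True,
+)
+
+# 2. Validate the configuration before importing anything.
+client.import_storage.azure.validate(
+    id=storage.id,
+    project=1,
+    container="my-container",
+    account_name="my-account",
+    account_key="my-account-key",
+)
+
+# 3. Sync: tasks are not imported until this step runs.
+client.import_storage.azure.sync(id=storage.id)
+```
+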
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported. + +
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, the content of the file is read and loaded into Label Studio. + +
+
+ +
+
+ +**presign:** `typing.Optional[bool]` — Presign URLs for direct download + +
+
+ +
+
+ +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**container:** `typing.Optional[str]` — Azure blob container + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — Azure blob prefix name + +
+
+ +
+
+ +**account_name:** `typing.Optional[str]` — Azure Blob account name + +
+
+ +
+
+ +**account_key:** `typing.Optional[str]` — Azure Blob account key + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.azure.validate(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Validate a specific Azure import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.azure.validate() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `typing.Optional[int]` — Storage ID. If set, the storage with the specified ID will be updated + +
+
+ +
+
+ +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported. + +
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, the content of the file is read and loaded into Label Studio. + +
+
+ +
+
+ +**presign:** `typing.Optional[bool]` — Presign URLs for direct download + +
+
+ +
+
+ +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**container:** `typing.Optional[str]` — Azure blob container + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — Azure blob prefix name + +
+
+ +
+
+ +**account_name:** `typing.Optional[str]` — Azure Blob account name + +
+
+ +
+
+ +**account_key:** `typing.Optional[str]` — Azure Blob account key + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.azure.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.azure.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this azure blob import storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.azure.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + +If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.azure.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this azure blob import storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.azure.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.azure.update( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this azure blob import storage. + +
+
+ +
+
+ +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported. + +
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, the content of the file is read and loaded into Label Studio. + +
+
+ +
+
+ +**presign:** `typing.Optional[bool]` — Presign URLs for direct download + +
+
+ +
+
+ +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**container:** `typing.Optional[str]` — Azure blob container + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — Azure blob prefix name + +
+
+ +
+
+ +**account_name:** `typing.Optional[str]` — Azure Blob account name + +
+
+ +
+
+ +**account_key:** `typing.Optional[str]` — Azure Blob account key + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.azure.sync(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Sync tasks from an Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Sync operations with external containers only go one way. They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. + +Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.azure.sync( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Storage ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## ExportStorage +
client.export_storage.list_types() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Retrieve a list of the available export storage types. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.list_types() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## ExportStorage Azure +
client.export_storage.azure.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. Use this API request to get a list of all Azure export (target) storage connections for a specific project. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.azure.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.azure.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a new target storage connection to Microsoft Azure Blob storage. + +For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. + +After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.azure.create() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**container:** `typing.Optional[str]` — Azure blob container + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — Azure blob prefix name + +
+
+ +
+
+ +**account_name:** `typing.Optional[str]` — Azure Blob account name + +
+
+ +
+
+ +**account_key:** `typing.Optional[str]` — Azure Blob account key + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.azure.validate(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Validate a specific Azure export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.azure.validate() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `typing.Optional[int]` — Storage ID. If set, the storage with the specified ID will be updated + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**container:** `typing.Optional[str]` — Azure blob container + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — Azure blob prefix name + +
+
+ +
+
+ +**account_name:** `typing.Optional[str]` — Azure Blob account name + +
+
+ +
+
+ +**account_key:** `typing.Optional[str]` — Azure Blob account key + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.azure.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.azure.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this azure blob export storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.azure.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.azure.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this azure blob export storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.azure.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.azure.update( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this azure blob export storage. + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**container:** `typing.Optional[str]` — Azure blob container + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — Azure blob prefix name + +
+
+ +
+
+ +**account_name:** `typing.Optional[str]` — Azure Blob account name + +
+
+ +
+
+ +**account_key:** `typing.Optional[str]` — Azure Blob account key + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.azure.sync(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Sync tasks to an Azure export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +Sync operations with external containers only go one way. They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. + +Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.azure.sync( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` — Export storage ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## ExportStorage Gcs +
client.export_storage.gcs.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +You can connect your Google Cloud Storage bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all GCS export (target) storage connections for a specific project. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.gcs.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.gcs.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a new target storage connection to Google Cloud Storage. + +For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. + +After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.gcs.create() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — GCS bucket name + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — GCS bucket prefix + +
+
+ +
+
+ +**google_application_credentials:** `typing.Optional[str]` — The content of the GOOGLE_APPLICATION_CREDENTIALS JSON file. Check the official Google Cloud authentication documentation for more details. + +
+
+ +
+
+ +**google_project_id:** `typing.Optional[str]` — Google project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.gcs.validate(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Validate a specific GCS export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.gcs.validate() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `typing.Optional[int]` — Storage ID. If set, the storage with the specified ID will be updated + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — GCS bucket name + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — GCS bucket prefix + +
+
+ +
+
+ +**google_application_credentials:** `typing.Optional[str]` — The content of the GOOGLE_APPLICATION_CREDENTIALS JSON file. Check the official Google Cloud authentication documentation for more details. + +
+
+ +
+
+ +**google_project_id:** `typing.Optional[str]` — Google project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.gcs.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.gcs.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this gcs export storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.gcs.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.gcs.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this gcs export storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.gcs.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.gcs.update( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this gcs export storage. + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — GCS bucket name + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — GCS bucket prefix + +
+
+ +
+
+ +**google_application_credentials:** `typing.Optional[str]` — The content of the GOOGLE_APPLICATION_CREDENTIALS JSON file. Check the official Google Cloud authentication documentation for more details. + +
+
+ +
+
+ +**google_project_id:** `typing.Optional[str]` — Google project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.gcs.sync(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Sync tasks to a GCS export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. + +Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.gcs.sync( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` — Export storage ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## ExportStorage Local +
client.export_storage.local.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +You can connect a local file directory to Label Studio as a source storage or target storage. Use this API request to get a list of all local file export (target) storage connections for a specific project. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.local.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.local.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a new target storage connection to a local file directory. + +For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. + +After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.local.create() + +``` +
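+
+Note that the path must point to a directory the Label Studio server itself can read and write; local storage generally has to be enabled on the server first (see the Local storage guide linked above). A minimal sketch with a hypothetical path:
+
+```python
+from label_studio_sdk.client import LabelStudio
+
+client = LabelStudio(api_key="YOUR_API_KEY")
+
+# The path below is a placeholder and must be accessible to the
+# Label Studio server, not just to the machine running this script.
+storage = client.export_storage.local.create(
+    project=1,
+    title="Local annotations export",
+    path="/data/label-studio/annotations",
+)
+```
+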
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**path:** `typing.Optional[str]` — Path to local directory + +
+
+ +
+
+ +**regex_filter:** `typing.Optional[str]` — Regex for filtering objects + +
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, the content of the file is read and loaded into Label Studio. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.local.validate(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Validate a specific local file export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.local.validate() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `typing.Optional[int]` — Storage ID. If set, the storage with the specified ID will be updated + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**path:** `typing.Optional[str]` — Path to local directory + +
+
+ +
+
+ +**regex_filter:** `typing.Optional[str]` — Regex for filtering objects + +
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, the content of the file is read and loaded into Label Studio. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.local.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.local.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this local files export storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.local.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.local.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this local files export storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.local.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.local.update( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this local files export storage. + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**path:** `typing.Optional[str]` — Path to local directory + +
+
+ +
+
+ +**regex_filter:** `typing.Optional[str]` — Regex for filtering objects + +
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, the content of the file is read and loaded into Label Studio. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.local.sync(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Sync tasks to a local file export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +Sync operations with external local file directories only go one way. They either create tasks from objects in the directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results. + +Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.local.sync( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` — Export storage ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## ExportStorage Redis +
client.export_storage.redis.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis export (target) storage connections for a specific project. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.redis.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.redis.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a new target storage connection to Redis. + +For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. + +After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.redis.create() + +``` +
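+
+A filled-in sketch with placeholder connection values (note that `port` is a string in this API):
+
+```python
+from label_studio_sdk.client import LabelStudio
+
+client = LabelStudio(api_key="YOUR_API_KEY")
+
+# Host, port, password, and path below are placeholders.
+storage = client.export_storage.redis.create(
+    project=1,
+    host="localhost",
+    port="6379",
+    password="redis-password",
+    db=1,
+    path="annotations",
+)
+```
+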
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**db:** `typing.Optional[int]` — ID of the database to use + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**path:** `typing.Optional[str]` — Storage prefix (optional) + +
+
+ +
+
+ +**host:** `typing.Optional[str]` — Server Host IP (optional) + +
+
+ +
+
+ +**port:** `typing.Optional[str]` — Server Port (optional) + +
+
+ +
+
+ +**password:** `typing.Optional[str]` — Server Password (optional) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.redis.validate(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Validate a specific Redis export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.redis.validate() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `typing.Optional[int]` — Storage ID. If set, the storage with the specified ID will be updated + +
+
+ +
+
+ +**db:** `typing.Optional[int]` — ID of the database to use + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**path:** `typing.Optional[str]` — Storage prefix (optional) + +
+
+ +
+
+ +**host:** `typing.Optional[str]` — Server Host IP (optional) + +
+
+ +
+
+ +**port:** `typing.Optional[str]` — Server Port (optional) + +
+
+ +
+
+ +**password:** `typing.Optional[str]` — Server Password (optional) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.redis.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.redis.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this redis export storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.redis.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.redis.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this redis export storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.redis.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.redis.update( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this redis export storage. + +
+
+ +
+
+ +**db:** `typing.Optional[int]` — ID of the database to use + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**path:** `typing.Optional[str]` — Storage prefix (optional) + +
+
+ +
+
+ +**host:** `typing.Optional[str]` — Server Host IP (optional) + +
+
+ +
+
+ +**port:** `typing.Optional[str]` — Server Port (optional) + +
+
+ +
+
+ +**password:** `typing.Optional[str]` — Server Password (optional) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.redis.sync(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Sync tasks to a Redis export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +Sync operations with external databases only go one way. They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results. + +Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.redis.sync( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` — Export storage ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## ExportStorage S3 +
client.export_storage.s3.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.s3.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a new target storage connection to S3 storage. + +For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. + +After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3.create() + +``` +
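+
+A filled-in sketch with placeholder values; `aws_session_token` is only needed for temporary credentials, and `s3endpoint` only for S3-compatible services with a custom endpoint:
+
+```python
+from label_studio_sdk.client import LabelStudio
+
+client = LabelStudio(api_key="YOUR_API_KEY")
+
+# All bucket and credential values below are placeholders.
+storage = client.export_storage.s3.create(
+    project=1,
+    bucket="my-bucket",
+    prefix="annotations/",
+    region_name="us-east-1",
+    aws_access_key_id="YOUR_ACCESS_KEY_ID",
+    aws_secret_access_key="YOUR_SECRET_ACCESS_KEY",
+    can_delete_objects=False,
+)
+```
+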
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — S3 bucket name + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — S3 bucket prefix + +
+
+ +
+
+ +**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID + +
+
+ +
+
+ +**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY + +
+
+ +
+
+ +**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN + +
+
+ +
+
+ +**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID + +
+
+ +
+
+ +**region_name:** `typing.Optional[str]` — AWS Region + +
+
+ +
+
+ +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.s3.validate(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3.validate() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `typing.Optional[int]` — Storage ID. If set, the storage with the specified ID will be updated + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — S3 bucket name + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — S3 bucket prefix + +
+
+ +
+
+ +**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID + +
+
+ +
+
+ +**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY + +
+
+ +
+
+ +**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN + +
+
+ +
+
+ +**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID + +
+
+ +
+
+ +**region_name:** `typing.Optional[str]` — AWS Region + +
+
+ +
+
+ +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.s3.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this s3 export storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.s3.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this s3 export storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.s3.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3.update( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this s3 export storage. + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — S3 bucket name + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — S3 bucket prefix + +
+
+ +
+
+ +**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID + +
+
+ +
+
+ +**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY + +
+
+ +
+
+ +**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN + +
+
+ +
+
+ +**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID + +
+
+ +
+
+ +**region_name:** `typing.Optional[str]` — AWS Region + +
+
+ +
+
+ +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.s3.sync(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+
+Sync tasks to an S3 export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list).
+
+Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results.
+
+Before proceeding, you should review [How sync operations work - Target storage](https://labelstud.io/guide/storage#Target-storage) to ensure that your data remains secure and private.
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3.sync( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+
+**id:** `str` — Export storage ID
+
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## ImportStorage Gcs +
client.import_storage.gcs.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +You can connect your Google Cloud Storage bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.gcs.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.gcs.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+
+Create a new source storage connection to a Google Cloud Storage bucket.
+
+For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation.
+
+Ensure you configure CORS before adding cloud storage, so that you will be able to see the content of the data rather than just a link.
+
+After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync).
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.gcs.create() + +``` +
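+
+A bare `create()` call registers an empty connection. In practice you will usually pass the connection details documented below; here is a sketch with placeholder values (the project ID, bucket name, and credentials are assumptions):
+
+```python
+from label_studio_sdk.client import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+# Placeholder values: replace the project ID, bucket, and credentials with your own.
+client.import_storage.gcs.create(
+    project=1,
+    bucket="my-gcs-bucket",
+    prefix="tasks/",
+    regex_filter=r".*\.json",
+    use_blob_urls=False,
+    google_application_credentials="<contents of your service account JSON key file>",
+    title="GCS source storage",
+)
+```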
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+
+**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported.
+
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
+
+ +
+
+ +**presign:** `typing.Optional[bool]` — Presign URLs for direct download + +
+
+ +
+
+ +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — GCS bucket name + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — GCS bucket prefix + +
+
+ +
+
+ +**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + +
+
+ +
+
+ +**google_project_id:** `typing.Optional[str]` — Google project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.gcs.validate(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Validate a specific GCS import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.gcs.validate() + +``` +
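+
+`validate` accepts the same connection fields as `create`, so you can check a configuration before saving it. A sketch, assuming the same placeholder bucket and credentials as in the `create` example above (the `validate` methods for the other storage types follow the same pattern):
+
+```python
+from label_studio_sdk.client import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+# Checks the configuration without creating the storage (no `id` passed).
+client.import_storage.gcs.validate(
+    project=1,
+    bucket="my-gcs-bucket",
+    google_application_credentials="<contents of your service account JSON key file>",
+)
+```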
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+
+**id:** `typing.Optional[int]` — Storage ID. If set, the storage with the specified ID will be updated
+
+
+ +
+
+
+**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported.
+
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
+
+ +
+
+ +**presign:** `typing.Optional[bool]` — Presign URLs for direct download + +
+
+ +
+
+ +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — GCS bucket name + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — GCS bucket prefix + +
+
+ +
+
+ +**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + +
+
+ +
+
+ +**google_project_id:** `typing.Optional[str]` — Google project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.gcs.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.gcs.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this gcs import storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.gcs.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + +If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.gcs.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this gcs import storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.gcs.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.gcs.update( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this gcs import storage. + +
+
+ +
+
+
+**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported.
+
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
+
+ +
+
+ +**presign:** `typing.Optional[bool]` — Presign URLs for direct download + +
+
+ +
+
+ +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — GCS bucket name + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — GCS bucket prefix + +
+
+ +
+
+ +**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + +
+
+ +
+
+ +**google_project_id:** `typing.Optional[str]` — Google project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.gcs.sync(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Sync tasks from a GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. + +Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.gcs.sync( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Storage ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## ImportStorage Local +
client.import_storage.local.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+
+If you have local files that you want to add to Label Studio from a specific directory, you can set up a specific local directory on the machine where Label Studio is running as source or target storage. Use this API request to get a list of all local file import (source) storage connections for a specific project.
+
+The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
+
+For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.local.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.local.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a new source storage connection to a local file directory. + +For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. + +After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.local.create() + +``` +
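+
+A minimal sketch using the fields documented below. The path and title are placeholders; the directory must be readable by the Label Studio server (see the linked guide for the local storage prerequisites):
+
+```python
+from label_studio_sdk.client import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+# Placeholder path: adjust it to a directory your Label Studio instance can serve.
+client.import_storage.local.create(
+    project=1,
+    path="/label-studio/data/my-dataset",
+    regex_filter=r".*\.(jpg|png)",
+    use_blob_urls=True,
+    title="Local source storage",
+)
+```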
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**path:** `typing.Optional[str]` — Path to local directory + +
+
+ +
+
+ +**regex_filter:** `typing.Optional[str]` — Regex for filtering objects + +
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.local.validate(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Validate a specific local file import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.local.validate() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+
+**id:** `typing.Optional[int]` — Storage ID. If set, the storage with the specified ID will be updated
+
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**path:** `typing.Optional[str]` — Path to local directory + +
+
+ +
+
+ +**regex_filter:** `typing.Optional[str]` — Regex for filtering objects + +
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.local.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific local file import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.local.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this local files import storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.local.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + +If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.local.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this local files import storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.local.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.local.update( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this local files import storage. + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**path:** `typing.Optional[str]` — Path to local directory + +
+
+ +
+
+ +**regex_filter:** `typing.Optional[str]` — Regex for filtering objects + +
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.local.sync(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Sync tasks from a local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Sync operations with external sources only go one way. They either create tasks from objects in the source directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results. + +Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.local.sync( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Storage ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## ImportStorage Redis +
client.import_storage.redis.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis import (source) storage connections for a specific project. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.redis.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.redis.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a new source storage connection to a Redis database. + +For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. + +After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.redis.create() + +``` +
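+
+A sketch using the connection fields documented below (host, port, and path are placeholder values):
+
+```python
+from label_studio_sdk.client import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+# Note that `port` is a string in this API, not an integer.
+client.import_storage.redis.create(
+    project=1,
+    host="localhost",
+    port="6379",
+    path="tasks",
+    title="Redis source storage",
+)
+```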
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+
+**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported.
+
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**path:** `typing.Optional[str]` — Storage prefix (optional) + +
+
+ +
+
+ +**host:** `typing.Optional[str]` — Server Host IP (optional) + +
+
+ +
+
+ +**port:** `typing.Optional[str]` — Server Port (optional) + +
+
+ +
+
+ +**password:** `typing.Optional[str]` — Server Password (optional) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.redis.validate(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Validate a specific Redis import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.redis.validate() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+
+**id:** `typing.Optional[int]` — Storage ID. If set, the storage with the specified ID will be updated
+
+
+ +
+
+
+**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported.
+
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**path:** `typing.Optional[str]` — Storage prefix (optional) + +
+
+ +
+
+ +**host:** `typing.Optional[str]` — Server Host IP (optional) + +
+
+ +
+
+ +**port:** `typing.Optional[str]` — Server Port (optional) + +
+
+ +
+
+ +**password:** `typing.Optional[str]` — Server Password (optional) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.redis.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.redis.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this redis import storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.redis.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + +If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.redis.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this redis import storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.redis.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.redis.update( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this redis import storage. + +
+
+ +
+
+
+**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported.
+
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**path:** `typing.Optional[str]` — Storage prefix (optional) + +
+
+ +
+
+ +**host:** `typing.Optional[str]` — Server Host IP (optional) + +
+
+ +
+
+ +**port:** `typing.Optional[str]` — Server Port (optional) + +
+
+ +
+
+ +**password:** `typing.Optional[str]` — Server Password (optional) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.redis.sync(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Sync tasks from a Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Sync operations with external databases only go one way. They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results. + +Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.redis.sync( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Storage ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## ImportStorage S3 +
client.import_storage.s3.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+
+You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 import (source) storage connections for a specific project.
+
+The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
+
+For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.s3.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.s3.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+
+Create a new source storage connection to an S3 bucket.
+
+For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation.
+
+Ensure you configure CORS before adding cloud storage, so that you will be able to see the content of the data rather than just a link.
+
+After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync).
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.s3.create() + +``` +
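+
+In practice you will pass the bucket and credential fields documented below. A sketch with placeholder values:
+
+```python
+from label_studio_sdk.client import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+# All values below are placeholders; use IAM credentials scoped to this bucket.
+client.import_storage.s3.create(
+    project=1,
+    bucket="my-bucket",
+    prefix="tasks/",
+    regex_filter=r".*\.json",
+    use_blob_urls=False,
+    aws_access_key_id="YOUR_ACCESS_KEY_ID",
+    aws_secret_access_key="YOUR_SECRET_ACCESS_KEY",
+    region_name="us-east-1",
+    title="S3 source storage",
+)
+```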
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+
+**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported.
+
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
+
+ +
+
+ +**presign:** `typing.Optional[bool]` — Presign URLs for download + +
+
+ +
+
+ +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes + +
+
+ +
+
+ +**recursive_scan:** `typing.Optional[bool]` — Scan recursively + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — S3 bucket name + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — S3 bucket prefix + +
+
+ +
+
+ +**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID + +
+
+ +
+
+ +**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY + +
+
+ +
+
+ +**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN + +
+
+ +
+
+ +**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID + +
+
+ +
+
+ +**region_name:** `typing.Optional[str]` — AWS Region + +
+
+ +
+
+ +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.s3.validate(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.s3.validate() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+
+**id:** `typing.Optional[int]` — Storage ID. If set, the storage with the specified ID will be updated
+
+
+ +
+
+
+**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported.
+
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
+
+ +
+
+ +**presign:** `typing.Optional[bool]` — Presign URLs for download + +
+
+ +
+
+ +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes + +
+
+ +
+
+ +**recursive_scan:** `typing.Optional[bool]` — Scan recursively + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — S3 bucket name + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — S3 bucket prefix + +
+
+ +
+
+ +**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID + +
+
+ +
+
+ +**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY + +
+
+ +
+
+ +**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN + +
+
+ +
+
+ +**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID + +
+
+ +
+
+ +**region_name:** `typing.Optional[str]` — AWS Region + +
+
+ +
+
+ +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.s3.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.s3.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this s3 import storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.s3.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + +If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.s3.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this s3 import storage. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.s3.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.s3.update( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this s3 import storage. + +
+
+ +
+
+
+**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported.
+
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
+
+ +
+
+ +**presign:** `typing.Optional[bool]` — Presign URLs for download + +
+
+ +
+
+ +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes + +
+
+ +
+
+ +**recursive_scan:** `typing.Optional[bool]` — Scan recursively + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — S3 bucket name + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — S3 bucket prefix + +
+
+ +
+
+ +**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID + +
+
+ +
+
+ +**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY + +
+
+ +
+
+ +**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN + +
+
+ +
+
+ +**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID + +
+
+ +
+
+ +**region_name:** `typing.Optional[str]` — AWS Region + +
+
+ +
+
+ +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.s3.sync(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. + +Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.s3.sync( + id=1, +) + +``` +
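+
+Putting the pieces together, a hypothetical end-to-end flow that creates a connection, validates it, and then syncs. It assumes the `create` response exposes the new storage's `id`, and the project and bucket values are placeholders:
+
+```python
+from label_studio_sdk.client import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+# 1. Create the source storage.
+storage = client.import_storage.s3.create(
+    project=1,
+    bucket="my-bucket",
+    regex_filter=r".*\.json",
+)
+# 2. Re-check the saved configuration by passing its ID.
+client.import_storage.s3.validate(
+    id=storage.id,
+    project=1,
+    bucket="my-bucket",
+)
+# 3. Import tasks from the bucket.
+client.import_storage.s3.sync(id=storage.id)
+```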
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Storage ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Webhooks +
client.webhooks.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +List all webhooks set up for your organization. + +Webhooks in Label Studio let you set up integrations that subscribe to certain events that occur inside Label Studio. When an event is triggered, Label Studio sends an HTTP POST request to the configured webhook URL. + +For more information, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.webhooks.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `typing.Optional[str]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.webhooks.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+
+Create a webhook.
+Label Studio provides several out-of-the-box webhook events, which you can find listed here: [Available Label Studio webhooks](https://labelstud.io/guide/webhooks#Available-Label-Studio-webhooks).
+
+If you want to create your own custom webhook, refer to [Create custom events for webhooks in Label Studio](https://labelstud.io/guide/webhook_create).
+
+Label Studio makes two main types of events available to integrate with webhooks: project-level task events and organization events. If you want to use organization-level webhook events, you will need to set `LABEL_STUDIO_ALLOW_ORGANIZATION_WEBHOOKS=true`.
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import Webhook +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.webhooks.create( + request=Webhook( + url="url", + ), +) + +``` +
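+
+A slightly fuller sketch, assuming the `Webhook` model accepts the same optional flags that are documented for `update` below (the URL is a placeholder):
+
+```python
+from label_studio_sdk import Webhook
+from label_studio_sdk.client import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+client.webhooks.create(
+    request=Webhook(
+        url="https://example.com/label-studio-events",
+        send_payload=True,  # include the full event payload, not just the action
+        is_active=True,
+    ),
+)
+```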
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request:** `Webhook` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.webhooks.info(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get descriptions of all available webhook actions to set up webhooks. For more information, see the [Webhook event reference](https://labelstud.io/guide/webhook_reference). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.webhooks.info() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+
+**organization_only:** `typing.Optional[bool]` — Whether to return only organization-level webhook actions
+
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.webhooks.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get information about a specific webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). + +For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.webhooks.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this webhook. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.webhooks.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). + +For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.webhooks.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this webhook. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.webhooks.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). + +For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import WebhookSerializerForUpdate +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.webhooks.update( + id=1, + url="url", + request=WebhookSerializerForUpdate( + url="url", + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — A unique integer value identifying this webhook. + +
+
+ +
+
+ +**url:** `str` — URL of webhook + +
+
+ +
+
+ +**request:** `WebhookSerializerForUpdate` + +
+
+ +
+
+
+**send_payload:** `typing.Optional[bool]` — If value is False, send only the action
+
+
+ +
+
+
+**send_for_all_actions:** `typing.Optional[bool]` — If value is False, the webhook is used only for actions from WebhookAction
+
+
+ +
+
+
+**headers:** `typing.Optional[str]` — Key-value JSON of headers
+
+
+ +
+
+
+**is_active:** `typing.Optional[bool]` — If value is False, the webhook is disabled
+
+
+ +
+
+ +**actions:** `typing.Optional[ + typing.Union[ + WebhooksUpdateRequestActionsItem, + typing.Sequence[WebhooksUpdateRequestActionsItem], + ] +]` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## ImportStorage S3S +
client.import_storage.s3s.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+
+You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 import (source) storage connections for a specific project.
+
+The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
+
+For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.s3s.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.s3s.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+
+Create a new source storage connection to an S3 bucket.
+
+For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation.
+
+Ensure you configure CORS before adding cloud storage, so that you will be able to see the content of the data rather than just a link.
+
+After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync).
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.s3s.create() + +``` +
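+
+The S3S connection authenticates with IAM role access instead of access keys. A sketch using the fields documented below; the role ARN, external ID, and bucket are placeholders from your AWS setup:
+
+```python
+from label_studio_sdk.client import LabelStudio
+
+client = LabelStudio(
+    api_key="YOUR_API_KEY",
+)
+client.import_storage.s3s.create(
+    project=1,
+    bucket="my-bucket",
+    prefix="tasks/",
+    regex_filter=r".*\.json",
+    role_arn="arn:aws:iam::123456789012:role/LabelStudioRole",
+    external_id="YOUR_EXTERNAL_ID",
+    region_name="us-east-1",
+)
+```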
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+
+**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported.
+
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
+
+ +
+
+ +**presign:** `typing.Optional[bool]` — Presign URLs for download + +
+
+ +
+
+ +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes + +
+
+ +
+
+ +**recursive_scan:** `typing.Optional[bool]` — Scan recursively + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — S3 bucket name + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — S3 bucket prefix + +
+
+ +
+
+ +**external_id:** `typing.Optional[str]` — AWS External ID + +
+
+ +
+
+ +**role_arn:** `typing.Optional[str]` — AWS Role ARN + +
+
+ +
+
+ +**region_name:** `typing.Optional[str]` — AWS Region + +
+
+ +
+
+ +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.s3s.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.s3s.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Import storage ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.s3s.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + +If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.s3s.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Import storage ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.s3s.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.s3s.update( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Import storage ID + +
+
+ +
+
+
+**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported.
+
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
+
+ +
+
+ +**presign:** `typing.Optional[bool]` — Presign URLs for download + +
+
+ +
+
+ +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes + +
+
+ +
+
+ +**recursive_scan:** `typing.Optional[bool]` — Scan recursively + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — S3 bucket name + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — S3 bucket prefix + +
+
+ +
+
+ +**external_id:** `typing.Optional[str]` — AWS External ID + +
+
+ +
+
+ +**role_arn:** `typing.Optional[str]` — AWS Role ARN + +
+
+ +
+
+ +**region_name:** `typing.Optional[str]` — AWS Region + +
+
+ +
+
+ +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.s3s.validate(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.s3s.validate() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+
+**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported.
+
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
+
+ +
+
+ +**presign:** `typing.Optional[bool]` — Presign URLs for download + +
+
+ +
+
+ +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes + +
+
+ +
+
+ +**recursive_scan:** `typing.Optional[bool]` — Scan recursively + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — S3 bucket name + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — S3 bucket prefix + +
+
+ +
+
+ +**external_id:** `typing.Optional[str]` — AWS External ID + +
+
+ +
+
+ +**role_arn:** `typing.Optional[str]` — AWS Role ARN + +
+
+ +
+
+ +**region_name:** `typing.Optional[str]` — AWS Region + +
+
+ +
+
+ +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
+### `client.import_storage.s3s.sync(...)` +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.s3s.sync( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Import storage ID +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## ExportStorage S3S +
+### `client.export_storage.s3s.list(...)` +
+
+ +#### 📝 Description + +
+
+ +
+
+ +You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project. + +The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + +For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3s.list() + +``` +
+
+
+
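+To scope the results to a single project, pass its ID (the value 1 here is just an example):
+
+```python
+client.export_storage.s3s.list(
+    project=1,  # example project ID
+)
+
+```
+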
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
+### `client.export_storage.s3s.create(...)` +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a new target storage connection to an S3 bucket with IAM role access. + +For information about the required fields and prerequisites, see [Amazon S3](https://docs.humansignal.com/guide/storage#Set-up-an-S3-connection-with-IAM-role-access) in the Label Studio documentation. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3s.create() + +``` +
+
+
+
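+All fields are optional in the signature, but a working connection typically needs at least the project and bucket details. A sketch with placeholder values (bucket name and role ARN are assumptions):
+
+```python
+client.export_storage.s3s.create(
+    project=1,
+    bucket="my-bucket",  # placeholder bucket name
+    prefix="annotations/",
+    role_arn="arn:aws:iam::123456789012:role/LabelStudioRole",  # placeholder ARN
+    region_name="us-east-1",
+    title="S3 target storage",
+)
+
+```
+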
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Whether deletion of objects from storage is enabled. +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — S3 bucket name + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — S3 bucket prefix + +
+
+ +
+
+ +**external_id:** `typing.Optional[str]` — AWS External ID + +
+
+ +
+
+ +**role_arn:** `typing.Optional[str]` — AWS Role ARN + +
+
+ +
+
+ +**region_name:** `typing.Optional[str]` — AWS Region + +
+
+ +
+
+ +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
+### `client.export_storage.s3s.get(...)` +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3s.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Export storage ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
+### `client.export_storage.s3s.delete(...)` +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3s.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Export storage ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
+### `client.export_storage.s3s.update(...)` +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3s.update( + id=1, +) + +``` +
+
+
+
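+For example, to enable deletion from the target bucket (a sketch; confirm this matches your retention policy before enabling it):
+
+```python
+client.export_storage.s3s.update(
+    id=1,
+    can_delete_objects=True,  # allow Label Studio to delete objects from the target bucket
+)
+
+```
+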
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Export storage ID + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Whether deletion of objects from storage is enabled. +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — S3 bucket name + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — S3 bucket prefix + +
+
+ +
+
+ +**external_id:** `typing.Optional[str]` — AWS External ID + +
+
+ +
+
+ +**role_arn:** `typing.Optional[str]` — AWS Role ARN + +
+
+ +
+
+ +**region_name:** `typing.Optional[str]` — AWS Region + +
+
+ +
+
+ +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
+### `client.export_storage.s3s.validate(...)` +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3s.validate() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Whether deletion of objects from storage is enabled. +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — S3 bucket name + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — S3 bucket prefix + +
+
+ +
+
+ +**external_id:** `typing.Optional[str]` — AWS External ID + +
+
+ +
+
+ +**role_arn:** `typing.Optional[str]` — AWS Role ARN + +
+
+ +
+
+ +**region_name:** `typing.Optional[str]` — AWS Region + +
+
+ +
+
+ +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Workspaces +
+### `client.workspaces.list()` +
+
+ +#### 📝 Description + +
+
+ +
+
+ +List all workspaces for your organization. + +Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. + +For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.workspaces.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
+### `client.workspaces.create(...)` +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a new workspace. + +Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. + +For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.workspaces.create() + +``` +
+
+
+
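+A sketch that sets the documented fields; all values below are illustrative:
+
+```python
+client.workspaces.create(
+    title="Fraud Review",  # illustrative values throughout
+    description="Projects for the fraud-review team",
+    is_public=False,
+    color="#FF6900",  # HEX color
+)
+
+```
+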
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Workspace title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Workspace description + +
+
+ +
+
+ +**is_public:** `typing.Optional[bool]` — Is workspace public + +
+
+ +
+
+ +**is_personal:** `typing.Optional[bool]` — Is workspace personal + +
+
+ +
+
+ +**color:** `typing.Optional[str]` — Workspace color in HEX format + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
+### `client.workspaces.get(...)` +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get information about a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.workspaces.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Workspace ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
+### `client.workspaces.delete(...)` +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.workspaces.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Workspace ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
+### `client.workspaces.update(...)` +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.workspaces.update( + id=1, +) + +``` +
+
+
+
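+For example, to rename a workspace (the new title is illustrative):
+
+```python
+client.workspaces.update(
+    id=1,
+    title="Fraud Review (archive)",  # illustrative new title
+)
+
+```
+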
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Workspace ID + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Workspace title + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Workspace description + +
+
+ +
+
+ +**is_public:** `typing.Optional[bool]` — Is workspace public + +
+
+ +
+
+ +**is_personal:** `typing.Optional[bool]` — Is workspace personal + +
+
+ +
+
+ +**color:** `typing.Optional[str]` — Workspace color in HEX format + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Workspaces Members +
+### `client.workspaces.members.list(...)` +
+
+ +#### 📝 Description + +
+
+ +
+
+ +List all workspace memberships for a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.workspaces.members.list( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Workspace ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
+### `client.workspaces.members.create(...)` +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a new workspace membership. You will need to provide the workspace ID. You can find this using [List workspaces](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.workspaces.members.create( + id=1, +) + +``` +
+
+
+
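+Since `user` identifies the member to add, a typical call passes both IDs (the values below are illustrative):
+
+```python
+client.workspaces.members.create(
+    id=1,  # workspace ID
+    user=42,  # ID of the user to add (illustrative)
+)
+
+```
+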
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Workspace ID + +
+
+ +
+
+ +**user:** `typing.Optional[int]` — User ID of the workspace member + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
+### `client.workspaces.members.delete(...)` +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific workspace membership. You will need to provide the workspace ID and the user ID. You can find these using [List workspace memberships](list). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.workspaces.members.delete( + id=1, +) + +``` +
+
+
+
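+Pass the user ID of the member to remove along with the workspace ID (both values below are illustrative):
+
+```python
+client.workspaces.members.delete(
+    id=1,  # workspace ID
+    user=42,  # ID of the user to remove (illustrative)
+)
+
+```
+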
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Workspace ID + +
+
+ +
+
+ +**user:** `typing.Optional[int]` — User ID of the workspace member + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ diff --git a/src/label_studio_sdk/__init__.py b/src/label_studio_sdk/__init__.py index 214643ca6..67f5ef6b0 100644 --- a/src/label_studio_sdk/__init__.py +++ b/src/label_studio_sdk/__init__.py @@ -54,6 +54,9 @@ S3ExportStorageStatus, S3ImportStorage, S3ImportStorageStatus, + S3SExportStorage, + S3SImportStorage, + S3SImportStorageStatus, SerializationOption, SerializationOptions, Task, @@ -65,6 +68,7 @@ WebhookActionsItem, WebhookSerializerForUpdate, WebhookSerializerForUpdateActionsItem, + Workspace, ) from .errors import BadRequestError, InternalServerError from . import ( @@ -80,6 +84,7 @@ users, views, webhooks, + workspaces, ) from ._legacy import Client from .actions import ( @@ -211,6 +216,9 @@ "S3ExportStorageStatus", "S3ImportStorage", "S3ImportStorageStatus", + "S3SExportStorage", + "S3SImportStorage", + "S3SImportStorageStatus", "SerializationOption", "SerializationOptions", "Task", @@ -243,6 +251,7 @@ "WebhookSerializerForUpdate", "WebhookSerializerForUpdateActionsItem", "WebhooksUpdateRequestActionsItem", + "Workspace", "__version__", "actions", "annotations", @@ -256,4 +265,5 @@ "users", "views", "webhooks", + "workspaces", ] diff --git a/src/label_studio_sdk/actions/client.py b/src/label_studio_sdk/actions/client.py index d05865fb2..f674a5db2 100644 --- a/src/label_studio_sdk/actions/client.py +++ b/src/label_studio_sdk/actions/client.py @@ -44,9 +44,9 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> No _response = self._client_wrapper.httpx_client.request( "api/dm/actions/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -134,9 +134,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -172,9 +172,9 @@ async def list(self, *, request_options: typing.Optional[RequestOptions] = None) _response = await self._client_wrapper.httpx_client.request( "api/dm/actions/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -262,9 +262,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/annotations/client.py b/src/label_studio_sdk/annotations/client.py index aa0f5aabc..6e62ac670 100644 --- a/src/label_studio_sdk/annotations/client.py +++ b/src/label_studio_sdk/annotations/client.py @@ -51,9 +51,9 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non _response = self._client_wrapper.httpx_client.request( f"api/annotations/{jsonable_encoder(id)}/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Annotation, _response.json()) # type: ignore try: + if 200 <= 
_response.status_code < 300: + return pydantic_v1.parse_obj_as(Annotation, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -93,9 +93,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = _response = self._client_wrapper.httpx_client.request( f"api/annotations/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -182,7 +182,7 @@ def update( "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -206,9 +206,9 @@ def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Annotation, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Annotation, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -247,9 +247,9 @@ def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = No _response = self._client_wrapper.httpx_client.request( f"api/tasks/{jsonable_encoder(id)}/annotations/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Annotation], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[Annotation], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -348,7 +348,7 @@ def create( "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -372,9 +372,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Annotation, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Annotation, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -418,9 +418,9 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] _response = await self._client_wrapper.httpx_client.request( f"api/annotations/{jsonable_encoder(id)}/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Annotation, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Annotation, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -460,9 +460,9 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio _response = await self._client_wrapper.httpx_client.request( f"api/annotations/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 
300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -549,7 +549,7 @@ async def update( "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -573,9 +573,9 @@ async def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Annotation, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Annotation, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -616,9 +616,9 @@ async def list( _response = await self._client_wrapper.httpx_client.request( f"api/tasks/{jsonable_encoder(id)}/annotations/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Annotation], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[Annotation], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -717,7 +717,7 @@ async def create( "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -741,9 +741,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Annotation, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Annotation, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/base_client.py b/src/label_studio_sdk/base_client.py index 260a7a46b..ffe10f44a 100644 --- a/src/label_studio_sdk/base_client.py +++ b/src/label_studio_sdk/base_client.py @@ -20,6 +20,7 @@ from .users.client import AsyncUsersClient, UsersClient from .views.client import AsyncViewsClient, ViewsClient from .webhooks.client import AsyncWebhooksClient, WebhooksClient +from .workspaces.client import AsyncWorkspacesClient, WorkspacesClient class LabelStudioBase: @@ -96,6 +97,7 @@ def __init__( self.import_storage = ImportStorageClient(client_wrapper=self._client_wrapper) self.export_storage = ExportStorageClient(client_wrapper=self._client_wrapper) self.webhooks = WebhooksClient(client_wrapper=self._client_wrapper) + self.workspaces = WorkspacesClient(client_wrapper=self._client_wrapper) class AsyncLabelStudioBase: @@ -172,6 +174,7 @@ def __init__( self.import_storage = AsyncImportStorageClient(client_wrapper=self._client_wrapper) self.export_storage = AsyncExportStorageClient(client_wrapper=self._client_wrapper) self.webhooks = AsyncWebhooksClient(client_wrapper=self._client_wrapper) + self.workspaces = AsyncWorkspacesClient(client_wrapper=self._client_wrapper) def _get_base_url(*, base_url: typing.Optional[str] = None, environment: LabelStudioEnvironment) -> str: diff --git a/src/label_studio_sdk/core/client_wrapper.py b/src/label_studio_sdk/core/client_wrapper.py index 2a35c5f24..6c289ae07 100644 --- 
a/src/label_studio_sdk/core/client_wrapper.py +++ b/src/label_studio_sdk/core/client_wrapper.py @@ -17,7 +17,7 @@ def get_headers(self) -> typing.Dict[str, str]: headers: typing.Dict[str, str] = { "X-Fern-Language": "Python", "X-Fern-SDK-Name": "label-studio-sdk", - "X-Fern-SDK-Version": "1.0.3", + "X-Fern-SDK-Version": "1.0.4", } headers["Authorization"] = f"Token {self.api_key}" return headers diff --git a/src/label_studio_sdk/core/http_client.py b/src/label_studio_sdk/core/http_client.py index ed7cf5d3d..09c68368b 100644 --- a/src/label_studio_sdk/core/http_client.py +++ b/src/label_studio_sdk/core/http_client.py @@ -2,6 +2,7 @@ import asyncio import email.utils +import json import re import time import typing @@ -125,6 +126,25 @@ def maybe_filter_request_body( return data_content +# Abstracted out for testing purposes +def get_request_body( + *, + json: typing.Optional[typing.Any], + data: typing.Optional[typing.Any], + request_options: typing.Optional[RequestOptions], + omit: typing.Optional[typing.Any], +) -> typing.Tuple[typing.Optional[typing.Any], typing.Optional[typing.Any]]: + json_body = None + data_body = None + if data is not None: + data_body = maybe_filter_request_body(data, request_options, omit) + else: + # If both data and json are None, we send json data in the event extra properties are specified + json_body = maybe_filter_request_body(json, request_options, omit) + + return json_body, data_body + + class HttpClient: def __init__( self, @@ -168,6 +188,8 @@ def request( else self.base_timeout ) + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + response = self.httpx_client.request( method=method, url=urllib.parse.urljoin(f"{base_url}/", path), @@ -197,8 +219,8 @@ def request( ) ) ), - json=maybe_filter_request_body(json, request_options, omit), - data=maybe_filter_request_body(data, request_options, omit), + json=json_body, + data=data_body, content=content, files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None, timeout=timeout, @@ -248,6 +270,8 @@ def stream( else self.base_timeout ) + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + with self.httpx_client.stream( method=method, url=urllib.parse.urljoin(f"{base_url}/", path), @@ -277,8 +301,8 @@ def stream( ) ) ), - json=maybe_filter_request_body(json, request_options, omit), - data=maybe_filter_request_body(data, request_options, omit), + json=json_body, + data=data_body, content=content, files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None, timeout=timeout, @@ -329,6 +353,8 @@ async def request( else self.base_timeout ) + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + # Add the input to each of these and do None-safety checks response = await self.httpx_client.request( method=method, @@ -359,8 +385,8 @@ async def request( ) ) ), - json=maybe_filter_request_body(json, request_options, omit), - data=maybe_filter_request_body(data, request_options, omit), + json=json_body, + data=data_body, content=content, files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None, timeout=timeout, @@ -409,6 +435,8 @@ async def stream( else self.base_timeout ) + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + async with self.httpx_client.stream( method=method, 
url=urllib.parse.urljoin(f"{base_url}/", path), @@ -438,8 +466,8 @@ async def stream( ) ) ), - json=maybe_filter_request_body(json, request_options, omit), - data=maybe_filter_request_body(data, request_options, omit), + json=json_body, + data=data_body, content=content, files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None, timeout=timeout, diff --git a/src/label_studio_sdk/core/request_options.py b/src/label_studio_sdk/core/request_options.py index cd6f27a7e..d0bf0dbce 100644 --- a/src/label_studio_sdk/core/request_options.py +++ b/src/label_studio_sdk/core/request_options.py @@ -5,10 +5,10 @@ try: from typing import NotRequired # type: ignore except ImportError: - from typing_extensions import NotRequired # type: ignore + from typing_extensions import NotRequired -class RequestOptions(typing.TypedDict): +class RequestOptions(typing.TypedDict, total=False): """ Additional options for request-specific configuration when calling APIs via the SDK. This is used primarily as an optional final parameter for service functions. diff --git a/src/label_studio_sdk/export_storage/__init__.py b/src/label_studio_sdk/export_storage/__init__.py index 5a2dbf0c6..0203a293b 100644 --- a/src/label_studio_sdk/export_storage/__init__.py +++ b/src/label_studio_sdk/export_storage/__init__.py @@ -1,7 +1,7 @@ # This file was auto-generated by Fern from our API Definition. from .types import ExportStorageListTypesResponseItem -from . import azure, gcs, local, redis, s3 +from . import azure, gcs, local, redis, s3, s3s from .azure import AzureCreateResponse, AzureUpdateResponse from .gcs import GcsCreateResponse, GcsUpdateResponse from .local import LocalCreateResponse, LocalUpdateResponse @@ -25,4 +25,5 @@ "local", "redis", "s3", + "s3s", ] diff --git a/src/label_studio_sdk/export_storage/azure/client.py b/src/label_studio_sdk/export_storage/azure/client.py index 9a762b378..fb58ccb1d 100644 --- a/src/label_studio_sdk/export_storage/azure/client.py +++ b/src/label_studio_sdk/export_storage/azure/client.py @@ -55,9 +55,9 @@ def list( _response = self._client_wrapper.httpx_client.request( "api/storages/export/azure", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[AzureBlobExportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[AzureBlobExportStorage], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -142,9 +142,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(AzureCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -229,9 +229,9 @@ def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -270,9 +270,9 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = 
Non _response = self._client_wrapper.httpx_client.request( f"api/storages/export/azure/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureBlobExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(AzureBlobExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -310,9 +310,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = _response = self._client_wrapper.httpx_client.request( f"api/storages/export/azure/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -401,9 +401,9 @@ def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureUpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(AzureUpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -443,9 +443,9 @@ def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = No _response = self._client_wrapper.httpx_client.request( f"api/storages/export/azure/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureBlobExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(AzureBlobExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -491,9 +491,9 @@ async def list( _response = await self._client_wrapper.httpx_client.request( "api/storages/export/azure", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[AzureBlobExportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[AzureBlobExportStorage], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -578,9 +578,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(AzureCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -665,9 +665,9 @@ async def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise 
ApiError(status_code=_response.status_code, body=_response.text) @@ -706,9 +706,9 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/azure/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureBlobExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(AzureBlobExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -746,9 +746,9 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/azure/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -837,9 +837,9 @@ async def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureUpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(AzureUpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -879,9 +879,9 @@ async def sync(self, id: str, *, request_options: typing.Optional[RequestOptions _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/azure/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureBlobExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(AzureBlobExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/export_storage/client.py b/src/label_studio_sdk/export_storage/client.py index bcc514153..c683ebb8c 100644 --- a/src/label_studio_sdk/export_storage/client.py +++ b/src/label_studio_sdk/export_storage/client.py @@ -12,6 +12,7 @@ from .local.client import AsyncLocalClient, LocalClient from .redis.client import AsyncRedisClient, RedisClient from .s3.client import AsyncS3Client, S3Client +from .s3s.client import AsyncS3SClient, S3SClient from .types.export_storage_list_types_response_item import ExportStorageListTypesResponseItem @@ -23,6 +24,7 @@ def __init__(self, *, client_wrapper: SyncClientWrapper): self.local = LocalClient(client_wrapper=self._client_wrapper) self.redis = RedisClient(client_wrapper=self._client_wrapper) self.s3 = S3Client(client_wrapper=self._client_wrapper) + self.s3s = S3SClient(client_wrapper=self._client_wrapper) def list_types( self, *, request_options: typing.Optional[RequestOptions] = None @@ -52,9 +54,9 @@ def list_types( _response = self._client_wrapper.httpx_client.request( "api/storages/export/types", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(typing.List[ExportStorageListTypesResponseItem], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[ExportStorageListTypesResponseItem], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -69,6 +71,7 @@ def __init__(self, *, client_wrapper: AsyncClientWrapper): self.local = AsyncLocalClient(client_wrapper=self._client_wrapper) self.redis = AsyncRedisClient(client_wrapper=self._client_wrapper) self.s3 = AsyncS3Client(client_wrapper=self._client_wrapper) + self.s3s = AsyncS3SClient(client_wrapper=self._client_wrapper) async def list_types( self, *, request_options: typing.Optional[RequestOptions] = None @@ -98,9 +101,9 @@ async def list_types( _response = await self._client_wrapper.httpx_client.request( "api/storages/export/types", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[ExportStorageListTypesResponseItem], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[ExportStorageListTypesResponseItem], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/export_storage/gcs/client.py b/src/label_studio_sdk/export_storage/gcs/client.py index 6a27f4d35..f7a687f3a 100644 --- a/src/label_studio_sdk/export_storage/gcs/client.py +++ b/src/label_studio_sdk/export_storage/gcs/client.py @@ -55,9 +55,9 @@ def list( _response = self._client_wrapper.httpx_client.request( "api/storages/export/gcs", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[GcsExportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[GcsExportStorage], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -142,9 +142,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(GcsCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -229,9 +229,9 @@ def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -270,9 +270,9 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non _response = self._client_wrapper.httpx_client.request( f"api/storages/export/gcs/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return 
pydantic_v1.parse_obj_as(GcsExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -310,9 +310,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = _response = self._client_wrapper.httpx_client.request( f"api/storages/export/gcs/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -401,9 +401,9 @@ def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsUpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(GcsUpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -443,9 +443,9 @@ def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = No _response = self._client_wrapper.httpx_client.request( f"api/storages/export/gcs/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(GcsExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -491,9 +491,9 @@ async def list( _response = await self._client_wrapper.httpx_client.request( "api/storages/export/gcs", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[GcsExportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[GcsExportStorage], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -578,9 +578,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(GcsCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -665,9 +665,9 @@ async def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -706,9 +706,9 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/gcs/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(GcsExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(GcsExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -746,9 +746,9 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/gcs/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -837,9 +837,9 @@ async def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsUpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(GcsUpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -879,9 +879,9 @@ async def sync(self, id: str, *, request_options: typing.Optional[RequestOptions _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/gcs/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(GcsExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/export_storage/local/client.py b/src/label_studio_sdk/export_storage/local/client.py index 3b6070a45..08fc66798 100644 --- a/src/label_studio_sdk/export_storage/local/client.py +++ b/src/label_studio_sdk/export_storage/local/client.py @@ -55,9 +55,9 @@ def list( _response = self._client_wrapper.httpx_client.request( "api/storages/export/localfiles", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[LocalFilesExportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[LocalFilesExportStorage], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -132,9 +132,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(LocalCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -209,9 +209,9 @@ def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except 
JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -250,9 +250,9 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non _response = self._client_wrapper.httpx_client.request( f"api/storages/export/localfiles/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalFilesExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(LocalFilesExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -290,9 +290,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = _response = self._client_wrapper.httpx_client.request( f"api/storages/export/localfiles/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -371,9 +371,9 @@ def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalUpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(LocalUpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -415,9 +415,9 @@ def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = No method="POST", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalFilesExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(LocalFilesExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -463,9 +463,9 @@ async def list( _response = await self._client_wrapper.httpx_client.request( "api/storages/export/localfiles", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[LocalFilesExportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[LocalFilesExportStorage], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -540,9 +540,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(LocalCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -617,9 +617,9 @@ async def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code 
< 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -658,9 +658,9 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/localfiles/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalFilesExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(LocalFilesExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -698,9 +698,9 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/localfiles/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -779,9 +779,9 @@ async def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalUpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(LocalUpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -825,9 +825,9 @@ async def sync( method="POST", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalFilesExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(LocalFilesExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/export_storage/redis/client.py b/src/label_studio_sdk/export_storage/redis/client.py index fbbb9f2e8..1b515f40c 100644 --- a/src/label_studio_sdk/export_storage/redis/client.py +++ b/src/label_studio_sdk/export_storage/redis/client.py @@ -55,9 +55,9 @@ def list( _response = self._client_wrapper.httpx_client.request( "api/storages/export/redis", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[RedisExportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[RedisExportStorage], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -147,9 +147,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(RedisCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: 
raise ApiError(status_code=_response.status_code, body=_response.text) @@ -239,9 +239,9 @@ def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -280,9 +280,9 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non _response = self._client_wrapper.httpx_client.request( f"api/storages/export/redis/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(RedisExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -320,9 +320,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = _response = self._client_wrapper.httpx_client.request( f"api/storages/export/redis/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -416,9 +416,9 @@ def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisUpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(RedisUpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -458,9 +458,9 @@ def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = No _response = self._client_wrapper.httpx_client.request( f"api/storages/export/redis/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(RedisExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -506,9 +506,9 @@ async def list( _response = await self._client_wrapper.httpx_client.request( "api/storages/export/redis", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[RedisExportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[RedisExportStorage], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -598,9 +598,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return 
pydantic_v1.parse_obj_as(RedisCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -690,9 +690,9 @@ async def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -731,9 +731,9 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/redis/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(RedisExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -771,9 +771,9 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/redis/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -867,9 +867,9 @@ async def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisUpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(RedisUpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -909,9 +909,9 @@ async def sync(self, id: str, *, request_options: typing.Optional[RequestOptions _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/redis/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(RedisExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/export_storage/s3/client.py b/src/label_studio_sdk/export_storage/s3/client.py index 8671096b8..42a248c0a 100644 --- a/src/label_studio_sdk/export_storage/s3/client.py +++ b/src/label_studio_sdk/export_storage/s3/client.py @@ -55,9 +55,9 @@ def list( _response = self._client_wrapper.httpx_client.request( "api/storages/export/s3", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[S3ExportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[S3ExportStorage], _response.json()) # type: ignore _response_json 
= _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -162,9 +162,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3CreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3CreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -269,9 +269,9 @@ def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -310,9 +310,9 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non _response = self._client_wrapper.httpx_client.request( f"api/storages/export/s3/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3ExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3ExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -350,9 +350,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = _response = self._client_wrapper.httpx_client.request( f"api/storages/export/s3/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -461,9 +461,9 @@ def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3UpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3UpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -503,9 +503,9 @@ def sync(self, id: str, *, request_options: typing.Optional[RequestOptions] = No _response = self._client_wrapper.httpx_client.request( f"api/storages/export/s3/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3ExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3ExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -551,9 +551,9 @@ async def list( _response = await self._client_wrapper.httpx_client.request( "api/storages/export/s3", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[S3ExportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return 
pydantic_v1.parse_obj_as(typing.List[S3ExportStorage], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -658,9 +658,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3CreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3CreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -765,9 +765,9 @@ async def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -806,9 +806,9 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/s3/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3ExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3ExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -846,9 +846,9 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/s3/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -957,9 +957,9 @@ async def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3UpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3UpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -999,9 +999,9 @@ async def sync(self, id: str, *, request_options: typing.Optional[RequestOptions _response = await self._client_wrapper.httpx_client.request( f"api/storages/export/s3/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3ExportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3ExportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/export_storage/s3s/__init__.py b/src/label_studio_sdk/export_storage/s3s/__init__.py new file mode 100644 index 000000000..f3ea2659b --- /dev/null +++ b/src/label_studio_sdk/export_storage/s3s/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated 
by Fern from our API Definition. + diff --git a/src/label_studio_sdk/export_storage/s3s/client.py b/src/label_studio_sdk/export_storage/s3s/client.py new file mode 100644 index 000000000..6a42d4726 --- /dev/null +++ b/src/label_studio_sdk/export_storage/s3s/client.py @@ -0,0 +1,836 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import pydantic_v1 +from ...core.request_options import RequestOptions +from ...types.s3s_export_storage import S3SExportStorage + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class S3SClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> typing.List[S3SExportStorage]: + """ + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.List[S3SExportStorage] + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.export_storage.s3s.list() + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3s", method="GET", params={"project": project}, request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[S3SExportStorage], _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def create( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> S3SExportStorage: + """ + Create a new target storage connection to an S3 bucket with IAM role access. + + For information about the required fields and prerequisites, see [Amazon S3](https://docs.humansignal.com/guide/storage#Set-up-an-S3-connection-with-IAM-role-access) in the Label Studio documentation. + + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled.
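The parameter list above reads abstractly in diff form, so a concrete invocation may help. Here is a minimal sketch of registering an IAM-role S3 target storage with the sync client; every keyword comes from the create() signature in this file, while the bucket, role ARN, external ID, and region values are placeholders, not working credentials:

    from label_studio_sdk.client import LabelStudio

    client = LabelStudio(api_key="YOUR_API_KEY")

    # Field names are taken from create() above; the values are illustrative
    # placeholders only.
    storage = client.export_storage.s3s.create(
        project=1,
        title="Annotation exports",
        bucket="my-export-bucket",
        prefix="label-studio/exports",
        role_arn="arn:aws:iam::123456789012:role/LabelStudioExport",
        external_id="my-external-id",
        region_name="us-east-1",
        can_delete_objects=False,
    )
    print(storage)  # an S3SExportStorage model on success

On success the response body is parsed into S3SExportStorage via pydantic_v1.parse_obj_as, exactly as the list() body above does for the list case.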
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + S3SExportStorage + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.export_storage.s3s.create() + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3s", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3SExportStorage, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SExportStorage: + """ + Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Parameters + ---------- + id : int + Export storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + S3SExportStorage + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.export_storage.s3s.get( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", method="GET", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3SExportStorage, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: + """ + Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Parameters + ---------- + id : int + Export storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
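get() and delete() both take the storage ID as their only required argument. A short sketch chaining them off list(); it assumes, as the sibling storage models suggest, that S3SExportStorage exposes an id field, and that project 1 exists:

    from label_studio_sdk.client import LabelStudio

    client = LabelStudio(api_key="YOUR_API_KEY")

    # Fetch every s3s target storage attached to project 1, inspect the first,
    # then remove it. The .id attribute is assumed from the sibling models.
    connections = client.export_storage.s3s.list(project=1)
    if connections:
        storage = client.export_storage.s3s.get(id=connections[0].id)
        client.export_storage.s3s.delete(id=storage.id)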
+ + Returns + ------- + None + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.export_storage.s3s.delete( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def update( + self, + id: int, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> S3SExportStorage: + """ + Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Parameters + ---------- + id : int + Export storage ID + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
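update() issues a PATCH, and every optional argument defaults to the OMIT sentinel that this file's own comment describes as "the default value for optional parameters", so keywords you do not pass are meant to be stripped from the request body rather than sent as nulls. A sketch that renames only the prefix, assuming a storage with ID 1 already exists:

    from label_studio_sdk.client import LabelStudio

    client = LabelStudio(api_key="YOUR_API_KEY")

    # Only "prefix" should reach the PATCH payload; the other fields stay at
    # OMIT and are dropped by the omit= handling shown in this diff.
    updated = client.export_storage.s3s.update(
        id=1,  # assumed existing storage ID
        prefix="label-studio/2024",
    )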
+ + Returns + ------- + S3SExportStorage + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.export_storage.s3s.update( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3SExportStorage, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def validate( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> None: + """ + Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
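validate() returns None on success and, like every method in this file, raises ApiError otherwise, so the natural flow is to probe the settings first and only create the connection when no exception surfaces. A sketch with two stated assumptions: the absolute import path mirrors the relative `from ...core.api_error import ApiError` at the top of this file, and ApiError keeps the status_code and body it is constructed with:

    from label_studio_sdk.client import LabelStudio
    from label_studio_sdk.core.api_error import ApiError  # assumed absolute path

    client = LabelStudio(api_key="YOUR_API_KEY")

    settings = dict(
        project=1,
        bucket="my-export-bucket",  # placeholder values throughout
        role_arn="arn:aws:iam::123456789012:role/LabelStudioExport",
        region_name="us-east-1",
    )
    try:
        client.export_storage.s3s.validate(**settings)
    except ApiError as exc:
        print(f"storage settings rejected: {exc.status_code} {exc.body}")
    else:
        client.export_storage.s3s.create(**settings)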
+ + Returns + ------- + None + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.export_storage.s3s.validate() + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3s/validate", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncS3SClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> typing.List[S3SExportStorage]: + """ + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.List[S3SExportStorage] + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.export_storage.s3s.list() + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3s", method="GET", params={"project": project}, request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[S3SExportStorage], _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def create( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> S3SExportStorage: + """ + Create a new target storage connection to an S3 bucket with IAM role access. + + For information about the required fields and prerequisites, see [Amazon S3](https://docs.humansignal.com/guide/storage#Set-up-an-S3-connection-with-IAM-role-access) in the Label Studio documentation.
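AsyncS3SClient mirrors the sync client method for method; the differences are the AsyncLabelStudio entry point shown in these docstrings and awaiting each call. A minimal driver, reusing the same placeholder values as before:

    import asyncio

    from label_studio_sdk.client import AsyncLabelStudio

    async def main() -> None:
        client = AsyncLabelStudio(api_key="YOUR_API_KEY")
        # Same keywords as the sync create(); values are placeholders.
        storage = await client.export_storage.s3s.create(
            project=1,
            bucket="my-export-bucket",
            role_arn="arn:aws:iam::123456789012:role/LabelStudioExport",
            region_name="us-east-1",
        )
        print(storage)

    asyncio.run(main())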
+ + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + S3SExportStorage + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.export_storage.s3s.create() + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3s", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3SExportStorage, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SExportStorage: + """ + Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Parameters + ---------- + id : int + Export storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + S3SExportStorage + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.export_storage.s3s.get( + id=1, + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", method="GET", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3SExportStorage, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: + """ + Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Parameters + ---------- + id : int + Export storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
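One naming quirk worth noting: s3endpoint is the only argument whose Python name and wire name differ, since the json= payloads in this diff send it as "s3_endpoint". The field is documented only as "S3 Endpoint"; pointing it at an S3-compatible service such as a self-hosted MinIO is an assumption on our part, not something the diff states:

    from label_studio_sdk.client import LabelStudio

    client = LabelStudio(api_key="YOUR_API_KEY")

    # s3endpoint= in Python becomes "s3_endpoint" in the request body.
    client.export_storage.s3s.validate(
        project=1,
        bucket="my-export-bucket",
        region_name="us-east-1",
        s3endpoint="https://minio.internal.example.com",  # assumed S3-compatible endpoint
    )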
+ + Returns + ------- + None + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.export_storage.s3s.delete( + id=1, + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def update( + self, + id: int, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> S3SExportStorage: + """ + Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Parameters + ---------- + id : int + Export storage ID + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
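Stepping back from the s3s additions: every modified hunk in this patch applies the same reshuffle, moving the 2xx status check inside the try block. The practical effect is that a success response with a malformed JSON body now surfaces as an ApiError carrying the raw text instead of escaping as an unhandled JSONDecodeError. A condensed, framework-free sketch of the resulting pattern; parse_or_raise is a hypothetical helper name, and the ApiError path is assumed from this file's relative imports:

    import typing
    from json.decoder import JSONDecodeError

    import httpx

    from label_studio_sdk.core.api_error import ApiError  # assumed absolute path

    def parse_or_raise(response: httpx.Response) -> typing.Any:
        try:
            if 200 <= response.status_code < 300:
                # Before this patch, the success branch sat outside the try,
                # so a malformed 2xx body raised a bare JSONDecodeError.
                return response.json()
            response_json = response.json()
        except JSONDecodeError:
            raise ApiError(status_code=response.status_code, body=response.text)
        raise ApiError(status_code=response.status_code, body=response_json)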
+ + Returns + ------- + S3SExportStorage + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.export_storage.s3s.update( + id=1, + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3SExportStorage, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def validate( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> None: + """ + Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + None + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.export_storage.s3s.validate() + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3s/validate", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/files/client.py b/src/label_studio_sdk/files/client.py index 41035bcf5..a047936f5 100644 --- a/src/label_studio_sdk/files/client.py +++ b/src/label_studio_sdk/files/client.py @@ -49,9 +49,9 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non _response = self._client_wrapper.httpx_client.request( f"api/import/file-upload/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FileUpload, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(FileUpload, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -87,9 +87,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = _response = self._client_wrapper.httpx_client.request( f"api/import/file-upload/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -142,9 +142,9 @@ def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FileUpload, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(FileUpload, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -199,9 +199,9 @@ def list( params={"all": all_, "ids": ids}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[FileUpload], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[FileUpload], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -239,9 +239,9 @@ def delete_many(self, id: int, *, request_options: typing.Optional[RequestOption _response = self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/file-uploads", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= 
_response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -276,9 +276,9 @@ def download(self, filename: str, *, request_options: typing.Optional[RequestOpt _response = self._client_wrapper.httpx_client.request( f"data/upload/{jsonable_encoder(filename)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -320,9 +320,9 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] _response = await self._client_wrapper.httpx_client.request( f"api/import/file-upload/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FileUpload, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(FileUpload, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -358,9 +358,9 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio _response = await self._client_wrapper.httpx_client.request( f"api/import/file-upload/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -413,9 +413,9 @@ async def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FileUpload, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(FileUpload, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -470,9 +470,9 @@ async def list( params={"all": all_, "ids": ids}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[FileUpload], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[FileUpload], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -510,9 +510,9 @@ async def delete_many(self, id: int, *, request_options: typing.Optional[Request _response = await self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/file-uploads", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -547,9 +547,9 @@ async def download(self, filename: str, *, request_options: typing.Optional[Requ _response = await self._client_wrapper.httpx_client.request( f"data/upload/{jsonable_encoder(filename)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: 
+ if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/import_storage/__init__.py b/src/label_studio_sdk/import_storage/__init__.py index 5c81bf675..51599b165 100644 --- a/src/label_studio_sdk/import_storage/__init__.py +++ b/src/label_studio_sdk/import_storage/__init__.py @@ -1,7 +1,7 @@ # This file was auto-generated by Fern from our API Definition. from .types import ImportStorageListTypesResponseItem -from . import azure, gcs, local, redis, s3 +from . import azure, gcs, local, redis, s3, s3s from .azure import AzureCreateResponse, AzureUpdateResponse from .gcs import GcsCreateResponse, GcsUpdateResponse from .local import LocalCreateResponse, LocalUpdateResponse @@ -25,4 +25,5 @@ "local", "redis", "s3", + "s3s", ] diff --git a/src/label_studio_sdk/import_storage/azure/client.py b/src/label_studio_sdk/import_storage/azure/client.py index f083a6075..5518a133f 100644 --- a/src/label_studio_sdk/import_storage/azure/client.py +++ b/src/label_studio_sdk/import_storage/azure/client.py @@ -55,9 +55,9 @@ def list( _response = self._client_wrapper.httpx_client.request( "api/storages/azure/", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[AzureBlobImportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[AzureBlobImportStorage], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -159,9 +159,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(AzureCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -261,9 +261,9 @@ def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -302,9 +302,9 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non _response = self._client_wrapper.httpx_client.request( f"api/storages/azure/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureBlobImportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(AzureBlobImportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -344,9 +344,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = _response = self._client_wrapper.httpx_client.request( f"api/storages/azure/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return 
_response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -450,9 +450,9 @@ def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureUpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(AzureUpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -493,9 +493,9 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No _response = self._client_wrapper.httpx_client.request( f"api/storages/azure/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureBlobImportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(AzureBlobImportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -541,9 +541,9 @@ async def list( _response = await self._client_wrapper.httpx_client.request( "api/storages/azure/", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[AzureBlobImportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[AzureBlobImportStorage], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -645,9 +645,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(AzureCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -747,9 +747,9 @@ async def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -788,9 +788,9 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] _response = await self._client_wrapper.httpx_client.request( f"api/storages/azure/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureBlobImportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(AzureBlobImportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -830,9 +830,9 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio _response = await self._client_wrapper.httpx_client.request( f"api/storages/azure/{jsonable_encoder(id)}", method="DELETE", 
request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -936,9 +936,9 @@ async def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureUpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(AzureUpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -979,9 +979,9 @@ async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions _response = await self._client_wrapper.httpx_client.request( f"api/storages/azure/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AzureBlobImportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(AzureBlobImportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/import_storage/client.py b/src/label_studio_sdk/import_storage/client.py index 8f7a41554..7fb7adbca 100644 --- a/src/label_studio_sdk/import_storage/client.py +++ b/src/label_studio_sdk/import_storage/client.py @@ -12,6 +12,7 @@ from .local.client import AsyncLocalClient, LocalClient from .redis.client import AsyncRedisClient, RedisClient from .s3.client import AsyncS3Client, S3Client +from .s3s.client import AsyncS3SClient, S3SClient from .types.import_storage_list_types_response_item import ImportStorageListTypesResponseItem @@ -23,6 +24,7 @@ def __init__(self, *, client_wrapper: SyncClientWrapper): self.local = LocalClient(client_wrapper=self._client_wrapper) self.redis = RedisClient(client_wrapper=self._client_wrapper) self.s3 = S3Client(client_wrapper=self._client_wrapper) + self.s3s = S3SClient(client_wrapper=self._client_wrapper) def list_types( self, *, request_options: typing.Optional[RequestOptions] = None @@ -52,9 +54,9 @@ def list_types( _response = self._client_wrapper.httpx_client.request( "api/storages/types", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[ImportStorageListTypesResponseItem], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[ImportStorageListTypesResponseItem], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -69,6 +71,7 @@ def __init__(self, *, client_wrapper: AsyncClientWrapper): self.local = AsyncLocalClient(client_wrapper=self._client_wrapper) self.redis = AsyncRedisClient(client_wrapper=self._client_wrapper) self.s3 = AsyncS3Client(client_wrapper=self._client_wrapper) + self.s3s = AsyncS3SClient(client_wrapper=self._client_wrapper) async def list_types( self, *, request_options: typing.Optional[RequestOptions] = None @@ -98,9 +101,9 @@ async def list_types( _response = await self._client_wrapper.httpx_client.request( "api/storages/types", method="GET", 
request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[ImportStorageListTypesResponseItem], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[ImportStorageListTypesResponseItem], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/import_storage/gcs/client.py b/src/label_studio_sdk/import_storage/gcs/client.py index 80ad49e74..b36dbe5a8 100644 --- a/src/label_studio_sdk/import_storage/gcs/client.py +++ b/src/label_studio_sdk/import_storage/gcs/client.py @@ -55,9 +55,9 @@ def list( _response = self._client_wrapper.httpx_client.request( "api/storages/gcs/", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[GcsImportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[GcsImportStorage], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -159,9 +159,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(GcsCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -261,9 +261,9 @@ def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -302,9 +302,9 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non _response = self._client_wrapper.httpx_client.request( f"api/storages/gcs/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsImportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(GcsImportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -344,9 +344,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = _response = self._client_wrapper.httpx_client.request( f"api/storages/gcs/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -450,9 +450,9 @@ def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsUpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(GcsUpdateResponse, 
_response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -493,9 +493,9 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No _response = self._client_wrapper.httpx_client.request( f"api/storages/gcs/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsImportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(GcsImportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -541,9 +541,9 @@ async def list( _response = await self._client_wrapper.httpx_client.request( "api/storages/gcs/", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[GcsImportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[GcsImportStorage], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -645,9 +645,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(GcsCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -747,9 +747,9 @@ async def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -788,9 +788,9 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] _response = await self._client_wrapper.httpx_client.request( f"api/storages/gcs/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsImportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(GcsImportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -830,9 +830,9 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio _response = await self._client_wrapper.httpx_client.request( f"api/storages/gcs/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -936,9 +936,9 @@ async def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsUpdateResponse, _response.json()) # type: ignore try: + if 
200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(GcsUpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -979,9 +979,9 @@ async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions _response = await self._client_wrapper.httpx_client.request( f"api/storages/gcs/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GcsImportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(GcsImportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/import_storage/local/client.py b/src/label_studio_sdk/import_storage/local/client.py index f2d8732b3..d0613d8c0 100644 --- a/src/label_studio_sdk/import_storage/local/client.py +++ b/src/label_studio_sdk/import_storage/local/client.py @@ -55,9 +55,9 @@ def list( _response = self._client_wrapper.httpx_client.request( "api/storages/localfiles/", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[LocalFilesImportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[LocalFilesImportStorage], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -132,9 +132,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(LocalCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -209,9 +209,9 @@ def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -250,9 +250,9 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non _response = self._client_wrapper.httpx_client.request( f"api/storages/localfiles/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalFilesImportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(LocalFilesImportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -292,9 +292,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = _response = self._client_wrapper.httpx_client.request( f"api/storages/localfiles/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= 
_response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -373,9 +373,9 @@ def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalUpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(LocalUpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -416,9 +416,9 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No _response = self._client_wrapper.httpx_client.request( f"api/storages/localfiles/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalFilesImportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(LocalFilesImportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -464,9 +464,9 @@ async def list( _response = await self._client_wrapper.httpx_client.request( "api/storages/localfiles/", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[LocalFilesImportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[LocalFilesImportStorage], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -541,9 +541,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(LocalCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -618,9 +618,9 @@ async def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -659,9 +659,9 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] _response = await self._client_wrapper.httpx_client.request( f"api/storages/localfiles/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalFilesImportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(LocalFilesImportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -701,9 +701,9 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio _response = await self._client_wrapper.httpx_client.request( 
f"api/storages/localfiles/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -782,9 +782,9 @@ async def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalUpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(LocalUpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -827,9 +827,9 @@ async def sync( _response = await self._client_wrapper.httpx_client.request( f"api/storages/localfiles/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LocalFilesImportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(LocalFilesImportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/import_storage/redis/client.py b/src/label_studio_sdk/import_storage/redis/client.py index a6885b152..338d9bdfd 100644 --- a/src/label_studio_sdk/import_storage/redis/client.py +++ b/src/label_studio_sdk/import_storage/redis/client.py @@ -55,9 +55,9 @@ def list( _response = self._client_wrapper.httpx_client.request( "api/storages/redis/", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[RedisImportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[RedisImportStorage], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -147,9 +147,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(RedisCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -239,9 +239,9 @@ def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -280,9 +280,9 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non _response = self._client_wrapper.httpx_client.request( f"api/storages/redis/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisImportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(RedisImportStorage, 
_response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -322,9 +322,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = _response = self._client_wrapper.httpx_client.request( f"api/storages/redis/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -418,9 +418,9 @@ def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisUpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(RedisUpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -461,9 +461,9 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No _response = self._client_wrapper.httpx_client.request( f"api/storages/redis/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisImportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(RedisImportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -509,9 +509,9 @@ async def list( _response = await self._client_wrapper.httpx_client.request( "api/storages/redis/", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[RedisImportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[RedisImportStorage], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -601,9 +601,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(RedisCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -693,9 +693,9 @@ async def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -734,9 +734,9 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] _response = await self._client_wrapper.httpx_client.request( f"api/storages/redis/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisImportStorage, _response.json()) # type: 
ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(RedisImportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -776,9 +776,9 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio _response = await self._client_wrapper.httpx_client.request( f"api/storages/redis/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -872,9 +872,9 @@ async def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisUpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(RedisUpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -915,9 +915,9 @@ async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions _response = await self._client_wrapper.httpx_client.request( f"api/storages/redis/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RedisImportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(RedisImportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/import_storage/s3/client.py b/src/label_studio_sdk/import_storage/s3/client.py index d01cd328c..5c4a68ad8 100644 --- a/src/label_studio_sdk/import_storage/s3/client.py +++ b/src/label_studio_sdk/import_storage/s3/client.py @@ -55,9 +55,9 @@ def list( _response = self._client_wrapper.httpx_client.request( "api/storages/s3/", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[S3ImportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[S3ImportStorage], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -184,9 +184,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3CreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3CreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -311,9 +311,9 @@ def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -352,9 +352,9 @@ def get(self, id: 
int, *, request_options: typing.Optional[RequestOptions] = Non _response = self._client_wrapper.httpx_client.request( f"api/storages/s3/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3ImportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3ImportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -394,9 +394,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = _response = self._client_wrapper.httpx_client.request( f"api/storages/s3/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -525,9 +525,9 @@ def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3UpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3UpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -568,9 +568,9 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No _response = self._client_wrapper.httpx_client.request( f"api/storages/s3/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3ImportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3ImportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -616,9 +616,9 @@ async def list( _response = await self._client_wrapper.httpx_client.request( "api/storages/s3/", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[S3ImportStorage], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[S3ImportStorage], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -745,9 +745,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3CreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3CreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -872,9 +872,9 @@ async def validate( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, 
body=_response.text) @@ -913,9 +913,9 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] _response = await self._client_wrapper.httpx_client.request( f"api/storages/s3/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3ImportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3ImportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -955,9 +955,9 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio _response = await self._client_wrapper.httpx_client.request( f"api/storages/s3/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -1086,9 +1086,9 @@ async def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3UpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3UpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -1129,9 +1129,9 @@ async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions _response = await self._client_wrapper.httpx_client.request( f"api/storages/s3/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(S3ImportStorage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3ImportStorage, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/import_storage/s3s/__init__.py b/src/label_studio_sdk/import_storage/s3s/__init__.py new file mode 100644 index 000000000..f3ea2659b --- /dev/null +++ b/src/label_studio_sdk/import_storage/s3s/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/src/label_studio_sdk/import_storage/s3s/client.py b/src/label_studio_sdk/import_storage/s3s/client.py new file mode 100644 index 000000000..80c617a70 --- /dev/null +++ b/src/label_studio_sdk/import_storage/s3s/client.py @@ -0,0 +1,1054 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import pydantic_v1 +from ...core.request_options import RequestOptions +from ...types.s3s_import_storage import S3SImportStorage + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
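A minimal usage sketch (not part of this patch) for the new s3s (IAM-role) import storage client defined below. The project ID, bucket, role ARN, and external ID are placeholders, and it assumes the returned storage object exposes its database ID as `storage.id`, which is not shown in this diff. Parameters left at the OMIT sentinel above are dropped from the request body entirely, which is how the SDK distinguishes "not provided" from an explicit None:

from label_studio_sdk.client import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# Create an IAM-role based S3 source storage connection (placeholder values).
storage = client.import_storage.s3s.create(
    project=1,                                                  # placeholder project ID
    bucket="my-bucket",                                         # placeholder bucket name
    role_arn="arn:aws:iam::123456789012:role/LabelStudioRole",  # placeholder role ARN
    external_id="my-external-id",                               # placeholder AWS External ID
    regex_filter=r".*\.json",  # required, otherwise no objects are imported
)

# Validate the settings first, then sync to actually import tasks.
client.import_storage.s3s.validate(project=1, bucket="my-bucket")
client.import_storage.s3s.sync(id=storage.id)  # assumes the response model has an `id` field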
+ + +class S3SClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> typing.List[S3SImportStorage]: + """ + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.List[S3SImportStorage] + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.s3s.list() + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3s", method="GET", params={"project": project}, request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[S3SImportStorage], _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> S3SImportStorage: + """ + Create a new source storage connection to an S3 bucket. + + For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. + + Ensure you configure CORS before adding cloud storage; this allows you to see the content of the data rather than just a link. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it, otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
+ + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + S3SImportStorage + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.s3s.create() + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3s", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: + """ + Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Parameters + ---------- + id : int + Import storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + S3SImportStorage + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.s3s.get( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", method="GET", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: + """ + Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. 
Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + Import storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + None + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.s3s.delete( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> S3SImportStorage: + """ + Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + Import storage ID + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + S3SImportStorage + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.s3s.update( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def validate( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> None: + """ + Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + None + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.s3s.validate() + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3s/validate", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: + """ + Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + S3SImportStorage + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.s3s.sync( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncS3SClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> typing.List[S3SImportStorage]: + """ + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration.
+ + Returns + ------- + typing.List[S3SImportStorage] + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.import_storage.s3s.list() + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3s", method="GET", params={"project": project}, request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[S3SImportStorage], _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> S3SImportStorage: + """ + Create a new source storage connection to an S3 bucket. + + For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. + + Ensure you configure CORS before adding cloud storage; this allows you to see the content of the data rather than just a link. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it, otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration.
+ + Returns + ------- + S3SImportStorage + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.import_storage.s3s.create() + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3s", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: + """ + Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Parameters + ---------- + id : int + Import storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + S3SImportStorage + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.import_storage.s3s.get( + id=1, + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", method="GET", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: + """ + Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + Import storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + None + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.import_storage.s3s.delete( + id=1, + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> S3SImportStorage: + """ + Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + Import storage ID + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + S3SImportStorage + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.import_storage.s3s.update( + id=1, + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def validate( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> None: + """ + Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + None + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.import_storage.s3s.validate() + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3s/validate", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: + """ + Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + S3SImportStorage + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.import_storage.s3s.sync( + id=1, + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/label_interface/control_tags.py b/src/label_studio_sdk/label_interface/control_tags.py index c82f2b855..94f343eab 100644 --- a/src/label_studio_sdk/label_interface/control_tags.py +++ b/src/label_studio_sdk/label_interface/control_tags.py @@ -145,6 +145,14 @@ def parse_node(cls, tag: xml.etree.ElementTree.Element, tags_mapping=None) -> "C return tag_class(**tag_info) + def collect_attrs(self): + """Return tag attrs as a single dict""" + return { + **self.attr, + "name": self.name, + "toName": self.to_name + } + def get_object(self, name=None): """ This method retrieves the object tag that the control tag maps to. 
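The collect_attrs() helper added above merges a control tag's parsed XML attributes with its name and toName (object tags get an analogous helper that restores the $-prefixed value; see below). A rough sketch of how it might be exercised, assuming the LabelInterface import path and that the existing `controls` property returns the parsed control tags:

from label_studio_sdk.label_interface import LabelInterface

config = """
<View>
  <Text name="text" value="$text"/>
  <Choices name="sentiment" toName="text" choice="single">
    <Choice value="positive"/>
    <Choice value="negative"/>
  </Choices>
</View>
"""

li = LabelInterface(config)
choices = li.controls[0]  # the parsed ChoicesTag

# Expected shape (illustrative): the tag's attrs plus name/toName,
# e.g. {"choice": "single", "name": "sentiment", "toName": ["text"]}.
print(choices.collect_attrs())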
@@ -473,7 +481,7 @@ class ChoicesValue(BaseModel): class ChoicesTag(ControlTag): """ """ - + tag: str = "Choices" _label_attr_name: str = "choices" _value_class: Type[ChoicesValue] = ChoicesValue @@ -484,7 +492,7 @@ class LabelsValue(SpanSelection): class LabelsTag(ControlTag): """ """ - + tag: str = "Labels" _label_attr_name: str = "labels" _value_class: Type[LabelsValue] = LabelsValue @@ -534,7 +542,7 @@ class BrushLabelsValue(BrushValue): class BrushTag(ControlTag): """ """ - + tag: str = "Brush" _value_class: Type[BrushValue] = BrushValue # def validate_value(self, value) -> bool: @@ -546,7 +554,7 @@ class BrushTag(ControlTag): class BrushLabelsTag(BrushTag): """ """ - + tag: str = "BrushLabels" _label_attr_name: str = "brushlabels" _value_class: Type[BrushLabelsValue] = BrushLabelsValue @@ -565,13 +573,13 @@ class EllipseLabelsValue(EllipseValue): class EllipseTag(ControlTag): """ """ - + tag: str = "Ellipse" _value_class: Type[EllipseValue] = EllipseValue class EllipseLabelsTag(ControlTag): """ """ - + tag: str = "EllipseLabels" _label_attr_name: str = "ellipselabels" _value_class: Type[EllipseLabelsValue] = EllipseLabelsValue @@ -587,13 +595,13 @@ class KeyPointLabelsValue(KeyPointValue): class KeyPointTag(ControlTag): """ """ - + tag: str = "KeyPoint" _value_class: Type[KeyPointValue] = KeyPointValue class KeyPointLabelsTag(ControlTag): """ """ - + tag: str = "KeyPointLabels" _label_attr_name: str = "keypointlabels" _value_class: Type[KeyPointLabelsValue] = KeyPointLabelsValue @@ -608,13 +616,13 @@ class PolygonLabelsValue(PolygonValue): class PolygonTag(ControlTag): """ """ - + tag: str = "Polygon" _value_class: Type[PolygonValue] = PolygonValue class PolygonLabelsTag(ControlTag): """ """ - + tag: str = "PolygonLabels" _label_attr_name: str = "polygonlabels" _value_class: Type[PolygonLabelsValue] = PolygonLabelsValue @@ -633,13 +641,13 @@ class RectangleLabelsValue(RectangleValue): class RectangleTag(ControlTag): """ """ - + tag: str = "Rectangle" _value_class: Type[RectangleValue] = RectangleValue class RectangleLabelsTag(ControlTag): """ """ - + tag: str = "RectangleLabels" _label_attr_name: str = "rectanglelabels" _value_class: Type[RectangleLabelsValue] = RectangleLabelsValue @@ -663,6 +671,7 @@ class VideoRectangleValue(BaseModel): class VideoRectangleTag(ControlTag): """ """ + tag: str = "VideoRectangle" _label_attr_name: str = "labels" _value_class: Type[VideoRectangleValue] = VideoRectangleValue @@ -673,6 +682,7 @@ class NumberValue(BaseModel): class NumberTag(ControlTag): """ """ + tag: str = "Number" _value_class: Type[NumberValue] = NumberValue @@ -682,6 +692,7 @@ class DateTimeValue(BaseModel): class DateTimeTag(ControlTag): """ """ + tag: str = "DateTime" _value_class: Type[DateTimeValue] = DateTimeValue @@ -691,7 +702,7 @@ class HyperTextLabelsValue(SpanSelectionOffsets): class HyperTextLabelsTag(ControlTag): """ """ - + tag: str = "HyperTextLabels" _label_attr_name: str = "htmllabels" _value_class: Type[HyperTextLabelsValue] = HyperTextLabelsValue @@ -702,7 +713,7 @@ class PairwiseValue(BaseModel): class PairwiseTag(ControlTag): """ """ - + tag: str = "Pairwise" _value_class: Type[PairwiseValue] = PairwiseValue def label(self, side): @@ -717,7 +728,7 @@ class ParagraphLabelsValue(SpanSelectionOffsets): class ParagraphLabelsTag(ControlTag): """ """ - + tag: str = "ParagraphsLabels" _label_attr_name: str = "paragraphlabels" _value_class: Type[ParagraphLabelsValue] = ParagraphLabelsValue @@ -736,6 +747,7 @@ class RankerValue(BaseModel): class RankerTag(ControlTag): """ 
""" + tag: str = "Ranker" _value_class: Type[RankerValue] = RankerValue @@ -745,11 +757,13 @@ class RatingValue(BaseModel): class RatingTag(ControlTag): """ """ + tag: str = "Rating" _value_class: Type[RatingValue] = RatingValue class RelationsTag(ControlTag): """ """ + tag: str = "Relations" def validate_value(self, ) -> bool: """ """ raise NotImplemented("""Should not be called directly, instead @@ -768,7 +782,7 @@ class TaxonomyValue(BaseModel): class TaxonomyTag(ControlTag): """ """ - + tag: str = "Taxonomy" _value_class: Type[TaxonomyValue] = TaxonomyValue @@ -778,7 +792,7 @@ class TextAreaValue(BaseModel): class TextAreaTag(ControlTag): """ """ - + tag: str = "TextArea" _value_class: Type[TextAreaValue] = TextAreaValue @@ -789,6 +803,6 @@ class TimeSeriesValue(SpanSelection): class TimeSeriesLabelsTag(ControlTag): """ """ - + tag: str = "TimeSeriesLabels" _label_attr_name: str = "timeserieslabels" _value_class: Type[TimeSeriesValue] = TimeSeriesValue diff --git a/src/label_studio_sdk/label_interface/create.py b/src/label_studio_sdk/label_interface/create.py index 07b53a716..c2c04bd4f 100644 --- a/src/label_studio_sdk/label_interface/create.py +++ b/src/label_studio_sdk/label_interface/create.py @@ -80,6 +80,9 @@ def _convert_to_tuple(args, tag_type="Choice"): Returns: A tuple containing all labels in specified format. """ + if not args: + return None + return tuple(((tag_type, {"value": arg}, {})) for arg in args) @@ -118,14 +121,15 @@ def _convert(name: str, tag: Union[str, list, tuple, LabelStudioTag]) -> tuple: if isinstance(tag, LabelStudioTag): tag.name = tag.name or name - el = tag.tag, tag.attrs + child_tag_type = "Choice" if tag.tag in ["Choices", "Taxonomy"] else "Label" + el = tag.tag, tag.collect_attrs(), _convert_to_tuple(getattr(tag, "labels", None), tag_type=child_tag_type) elif isinstance(tag, (list, tuple)): el = (*tag, ()) if len(tag) < 3 else tag elif isinstance(tag, str): el = tag, {}, () else: raise TypeError("Input tag must be one of str, list, tuple, LabelStudioTag") - + el[1].setdefault("name", name) if el[0].lower() in OT._TAG_TO_CLASS and not el[1].get("value"): @@ -226,7 +230,7 @@ def convert_tags_description(tags: Dict[str, Any], # The value of `toName` key is set based on whether `name` of # tag is in the mapping dictionary or the `name` of the first # object tag if it's not in mapping - if el[0].lower() in CT._TAG_TO_CLASS and "toName" not in el[1]: + if el[0].lower() in CT._TAG_TO_CLASS and ("toName" not in el[1] or el[1]["toName"] is None): if mapping and el[1].get("name") in mapping: el[1]["toName"] = mapping.get(el[1]["name"]) else: diff --git a/src/label_studio_sdk/label_interface/interface.py b/src/label_studio_sdk/label_interface/interface.py index a99b1ac51..3408d6753 100644 --- a/src/label_studio_sdk/label_interface/interface.py +++ b/src/label_studio_sdk/label_interface/interface.py @@ -303,6 +303,11 @@ def __init__(self, config: str, tags_mapping=None, *args, **kwargs): ##### NEW API + @property + def config(self): + """Returns the XML configuration string""" + return self._config + @property def controls(self): """Returns list of control tags""" diff --git a/src/label_studio_sdk/label_interface/object_tags.py b/src/label_studio_sdk/label_interface/object_tags.py index eac4a3ed8..89a4ef84d 100644 --- a/src/label_studio_sdk/label_interface/object_tags.py +++ b/src/label_studio_sdk/label_interface/object_tags.py @@ -144,7 +144,14 @@ def value_is_variable(self) -> bool: """Check if value has variable""" pattern = re.compile(r"^\$[^, ]+$") return 
bool(pattern.fullmatch(self.value)) - + + def collect_attrs(self): + """Return tag attrs as a single dict""" + return { + **self.attr, + "name": self.name, + "value": '$' + self.value if self.value is not None else None + } # and have generate_example in each def generate_example_value(self, mode="upload", secure_mode=False): @@ -174,7 +181,8 @@ def generate_example_value(self, mode="upload", secure_mode=False): class AudioTag(ObjectTag): """ """ - + tag: str = "Audio" + def _generate_example(self, examples, only_urls=False): """ """ return examples.get("Audio") @@ -182,7 +190,8 @@ def _generate_example(self, examples, only_urls=False): class ImageTag(ObjectTag): """ """ - + tag: str = "Image" + def _generate_example(self, examples, only_urls=False): """ """ return examples.get("Image") @@ -190,7 +199,8 @@ def _generate_example(self, examples, only_urls=False): class TableTag(ObjectTag): """ """ - + tag: str = "Table" + def _generate_example(self, examples, only_urls=False): """ """ return examples.get("Table") @@ -198,7 +208,8 @@ def _generate_example(self, examples, only_urls=False): class TextTag(ObjectTag): """ """ - + tag: str = "Text" + def _generate_example(self, examples, only_urls=False): """ """ if only_urls: @@ -209,7 +220,8 @@ def _generate_example(self, examples, only_urls=False): class VideoTag(ObjectTag): """ """ - + tag: str = "Video" + def _generate_example(self, examples, only_urls=False): """ """ return examples.get("Video") @@ -217,7 +229,8 @@ def _generate_example(self, examples, only_urls=False): class HyperTextTag(ObjectTag): """ """ - + tag: str = "HyperText" + def _generate_example(self, examples, only_urls=False): """ """ examples = data_examples(mode="upload") @@ -229,7 +242,8 @@ def _generate_example(self, examples, only_urls=False): class ListTag(ObjectTag): """ """ - + tag: str = "List" + def _generate_example(self, examples, only_urls=False): """ """ examples = data_examples(mode="upload") @@ -238,7 +252,8 @@ def _generate_example(self, examples, only_urls=False): class ParagraphsTag(ObjectTag): """ """ - + tag: str = "Paragraphs" + def _generate_example(self, examples, only_urls=False): """ """ # Paragraphs special case - replace nameKey/textKey if presented @@ -259,7 +274,8 @@ def _generate_example(self, examples, only_urls=False): class TimeSeriesTag(ObjectTag): """ """ - + tag: str = "TimeSeries" + def _generate_example(self, examples, only_urls=False): """ """ p = self.attr diff --git a/src/label_studio_sdk/label_interface/objects.py b/src/label_studio_sdk/label_interface/objects.py index b5e7d5065..a6a67938e 100644 --- a/src/label_studio_sdk/label_interface/objects.py +++ b/src/label_studio_sdk/label_interface/objects.py @@ -20,13 +20,13 @@ def serialize_regions(result): class PredictionValue(BaseModel): """ """ - model_version: Optional[Any] = None score: Optional[float] = 0.00 result: Optional[List[Union[Dict[str, Any], Region]]] class Config: - allow_population_by_field_name = True + populate_by_name = True + protected_namespaces = () @field_serializer('result') def serialize_result(self, result): @@ -45,7 +45,7 @@ class AnnotationValue(BaseModel): result: Optional[List[Union[Dict[str, Any], Region]]] class Config: - allow_population_by_field_name = True + populate_by_name = True @field_serializer('result') def serialize_result(self, result): @@ -56,5 +56,5 @@ class TaskValue(BaseModel): """ """ data: Optional[dict] - annotations: Optional[List[AnnotationValue]] - predictions: Optional[List[PredictionValue]] + annotations: 
Optional[List[AnnotationValue]] = Field(default_factory=list) + predictions: Optional[List[PredictionValue]] = Field(default_factory=list) diff --git a/src/label_studio_sdk/ml/client.py b/src/label_studio_sdk/ml/client.py index d39e5a52b..f48afbecf 100644 --- a/src/label_studio_sdk/ml/client.py +++ b/src/label_studio_sdk/ml/client.py @@ -56,9 +56,9 @@ def list( _response = self._client_wrapper.httpx_client.request( "api/ml/", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[MlBackend], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[MlBackend], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -153,9 +153,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MlCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(MlCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -194,9 +194,9 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non _response = self._client_wrapper.httpx_client.request( f"api/ml/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MlBackend, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(MlBackend, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -234,9 +234,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = _response = self._client_wrapper.httpx_client.request( f"api/ml/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -335,9 +335,9 @@ def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MlUpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(MlUpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -397,9 +397,9 @@ def predict_interactive( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -452,11 +452,11 @@ def train( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return - if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(str, _response.json())) # type: ignore try: + if 200 <= _response.status_code < 300: + return + 
if _response.status_code == 500: + raise InternalServerError(pydantic_v1.parse_obj_as(str, _response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -491,9 +491,9 @@ def list_model_versions(self, id: str, *, request_options: typing.Optional[Reque _response = self._client_wrapper.httpx_client.request( f"api/ml/{jsonable_encoder(id)}/versions", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -537,9 +537,9 @@ async def list( _response = await self._client_wrapper.httpx_client.request( "api/ml/", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[MlBackend], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[MlBackend], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -634,9 +634,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MlCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(MlCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -675,9 +675,9 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] _response = await self._client_wrapper.httpx_client.request( f"api/ml/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MlBackend, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(MlBackend, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -715,9 +715,9 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio _response = await self._client_wrapper.httpx_client.request( f"api/ml/{jsonable_encoder(id)}", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -816,9 +816,9 @@ async def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MlUpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(MlUpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -878,9 +878,9 @@ async def predict_interactive( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + 
return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -933,11 +933,11 @@ async def train( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return - if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(str, _response.json())) # type: ignore try: + if 200 <= _response.status_code < 300: + return + if _response.status_code == 500: + raise InternalServerError(pydantic_v1.parse_obj_as(str, _response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -972,9 +972,9 @@ async def list_model_versions(self, id: str, *, request_options: typing.Optional _response = await self._client_wrapper.httpx_client.request( f"api/ml/{jsonable_encoder(id)}/versions", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/predictions/client.py b/src/label_studio_sdk/predictions/client.py index 3e0fffa4f..5ce13b391 100644 --- a/src/label_studio_sdk/predictions/client.py +++ b/src/label_studio_sdk/predictions/client.py @@ -62,9 +62,9 @@ def list( _response = self._client_wrapper.httpx_client.request( "api/predictions/", method="GET", params={"task": task, "project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Prediction], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[Prediction], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -137,7 +137,7 @@ def create( "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -152,9 +152,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prediction, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Prediction, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -193,9 +193,9 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non _response = self._client_wrapper.httpx_client.request( f"api/predictions/{jsonable_encoder(id)}/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prediction, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Prediction, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -231,9 +231,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = _response = self._client_wrapper.httpx_client.request( f"api/predictions/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options ) - if 200 <= 
_response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -302,7 +302,7 @@ def update( "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -317,9 +317,9 @@ def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prediction, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Prediction, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -374,9 +374,9 @@ async def list( _response = await self._client_wrapper.httpx_client.request( "api/predictions/", method="GET", params={"task": task, "project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Prediction], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[Prediction], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -449,7 +449,7 @@ async def create( "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -464,9 +464,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prediction, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Prediction, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -505,9 +505,9 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] _response = await self._client_wrapper.httpx_client.request( f"api/predictions/{jsonable_encoder(id)}/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Prediction, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Prediction, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -543,9 +543,9 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio _response = await self._client_wrapper.httpx_client.request( f"api/predictions/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -614,7 +614,7 @@ async def update( "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -629,9 +629,9 @@ async def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(Prediction, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Prediction, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/projects/client.py b/src/label_studio_sdk/projects/client.py index 908802625..4edf2f574 100644 --- a/src/label_studio_sdk/projects/client.py +++ b/src/label_studio_sdk/projects/client.py @@ -81,7 +81,12 @@ def list( client = LabelStudio( api_key="YOUR_API_KEY", ) - client.projects.list() + response = client.projects.list() + for item in response: + yield item + # alternatively, you can paginate page-by-page + for page in response.iter_pages(): + yield page """ page = page or 1 _response = self._client_wrapper.httpx_client.request( @@ -90,20 +95,20 @@ def list( params={"ordering": ordering, "ids": ids, "title": title, "page": page, "page_size": page_size}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - _parsed_response = pydantic_v1.parse_obj_as(ProjectsListResponse, _response.json()) # type: ignore - _has_next = True - _get_next = lambda: self.list( - ordering=ordering, - ids=ids, - title=title, - page=page + 1, - page_size=page_size, - request_options=request_options, - ) - _items = _parsed_response.results - return SyncPager(has_next=_has_next, items=_items, get_next=_get_next) try: + if 200 <= _response.status_code < 300: + _parsed_response = pydantic_v1.parse_obj_as(ProjectsListResponse, _response.json()) # type: ignore + _has_next = True + _get_next = lambda: self.list( + ordering=ordering, + ids=ids, + title=title, + page=page + 1, + page_size=page_size, + request_options=request_options, + ) + _items = _parsed_response.results + return SyncPager(has_next=_has_next, items=_items, get_next=_get_next) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -125,6 +130,7 @@ def create( maximum_annotations: typing.Optional[int] = OMIT, color: typing.Optional[str] = OMIT, control_weights: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + workspace: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> ProjectsCreateResponse: """ @@ -179,6 +185,9 @@ def create( control_weights : typing.Optional[typing.Dict[str, typing.Any]] Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} + workspace : typing.Optional[int] + Workspace ID + request_options : typing.Optional[RequestOptions] Request-specific configuration. 
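The `rectanglelabels` docstring fix above deserves a concrete sketch: Label Studio expects a JSON array of label names, not an index-keyed dict. A minimal example mirroring the corrected docstring (the task id, tag names, and score are illustrative, and the import path follows the SDK's own docstring examples):

    from label_studio_sdk.client import LabelStudio

    client = LabelStudio(api_key="YOUR_API_KEY")
    client.predictions.create(
        task=1,
        model_version="v1",
        score=0.95,
        result=[
            {
                "original_width": 1920,
                "original_height": 1080,
                "image_rotation": 0,
                "from_name": "bboxes",
                "to_name": "image",
                "type": "rectanglelabels",
                "value": {
                    "x": 20, "y": 30, "width": 50, "height": 60,
                    "rotation": 0,
                    # a list of label names -- not {"0": "Person"} as the old docstring showed
                    "values": {"rectanglelabels": ["Person"]},
                },
            }
        ],
    )
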
@@ -213,13 +222,14 @@ def create( "maximum_annotations": maximum_annotations, "color": color, "control_weights": control_weights, + "workspace": workspace, }, request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectsCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(ProjectsCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -256,9 +266,9 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non _response = self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Project, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Project, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -296,9 +306,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = _response = self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -321,6 +331,7 @@ def update( maximum_annotations: typing.Optional[int] = OMIT, color: typing.Optional[str] = OMIT, control_weights: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + workspace: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> ProjectsUpdateResponse: """ @@ -382,6 +393,9 @@ def update( control_weights : typing.Optional[typing.Dict[str, typing.Any]] Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} + workspace : typing.Optional[int] + Workspace ID + request_options : typing.Optional[RequestOptions] Request-specific configuration. 
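Two of the additions above compose naturally: projects can now be created directly into a workspace, and `list()` returns a pager rather than a bare list. A short sketch under those assumptions (the api key, title, workspace id, and ordering value are illustrative):

    from label_studio_sdk.client import LabelStudio

    client = LabelStudio(api_key="YOUR_API_KEY")

    # `workspace` is forwarded in the POST body by the new parameter
    project = client.projects.create(title="Sentiment v2", workspace=42)

    # the SyncPager is iterable across pages; iter_pages() yields whole pages instead
    for p in client.projects.list(ordering="id"):
        print(p.id, p.title)
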
@@ -418,13 +432,14 @@ def update( "maximum_annotations": maximum_annotations, "color": color, "control_weights": control_weights, + "workspace": workspace, }, request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectsUpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(ProjectsUpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -541,11 +556,11 @@ def import_tasks( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectsImportTasksResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(ProjectsImportTasksResponse, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -596,9 +611,9 @@ def validate_config( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectLabelConfig, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(ProjectLabelConfig, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -664,7 +679,12 @@ async def list( client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.projects.list() + response = await client.projects.list() + async for item in response: + yield item + # alternatively, you can paginate page-by-page + async for page in response.iter_pages(): + yield page """ page = page or 1 _response = await self._client_wrapper.httpx_client.request( @@ -673,20 +693,20 @@ async def list( params={"ordering": ordering, "ids": ids, "title": title, "page": page, "page_size": page_size}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - _parsed_response = pydantic_v1.parse_obj_as(ProjectsListResponse, _response.json()) # type: ignore - _has_next = True - _get_next = lambda: self.list( - ordering=ordering, - ids=ids, - title=title, - page=page + 1, - page_size=page_size, - request_options=request_options, - ) - _items = _parsed_response.results - return AsyncPager(has_next=_has_next, items=_items, get_next=_get_next) try: + if 200 <= _response.status_code < 300: + _parsed_response = pydantic_v1.parse_obj_as(ProjectsListResponse, _response.json()) # type: ignore + _has_next = True + _get_next = lambda: self.list( + ordering=ordering, + ids=ids, + title=title, + page=page + 1, + page_size=page_size, + request_options=request_options, + ) + _items = _parsed_response.results + return AsyncPager(has_next=_has_next, items=_items, get_next=_get_next) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -708,6 +728,7 @@ async def create( maximum_annotations: typing.Optional[int] = OMIT, color: typing.Optional[str] = OMIT, control_weights: 
typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + workspace: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> ProjectsCreateResponse: """ @@ -762,6 +783,9 @@ async def create( control_weights : typing.Optional[typing.Dict[str, typing.Any]] Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} + workspace : typing.Optional[int] + Workspace ID + request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -796,13 +820,14 @@ async def create( "maximum_annotations": maximum_annotations, "color": color, "control_weights": control_weights, + "workspace": workspace, }, request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectsCreateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(ProjectsCreateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -839,9 +864,9 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] _response = await self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Project, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Project, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -879,9 +904,9 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio _response = await self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -904,6 +929,7 @@ async def update( maximum_annotations: typing.Optional[int] = OMIT, color: typing.Optional[str] = OMIT, control_weights: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + workspace: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> ProjectsUpdateResponse: """ @@ -965,6 +991,9 @@ async def update( control_weights : typing.Optional[typing.Dict[str, typing.Any]] Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. 
For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} + workspace : typing.Optional[int] + Workspace ID + request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -1001,13 +1030,14 @@ async def update( "maximum_annotations": maximum_annotations, "color": color, "control_weights": control_weights, + "workspace": workspace, }, request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectsUpdateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(ProjectsUpdateResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -1124,11 +1154,11 @@ async def import_tasks( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectsImportTasksResponse, _response.json()) # type: ignore - if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(ProjectsImportTasksResponse, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -1179,9 +1209,9 @@ async def validate_config( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectLabelConfig, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(ProjectLabelConfig, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/projects/client_ext.py b/src/label_studio_sdk/projects/client_ext.py index 0628d97bb..ab5589353 100644 --- a/src/label_studio_sdk/projects/client_ext.py +++ b/src/label_studio_sdk/projects/client_ext.py @@ -1,6 +1,18 @@ +import typing +from typing_extensions import Annotated from .client import ProjectsClient, AsyncProjectsClient - +from pydantic import model_validator, validator, Field, ConfigDict from label_studio_sdk._extensions.pager_ext import SyncPagerExt, AsyncPagerExt, T +from label_studio_sdk.types.project import Project +from label_studio_sdk.label_interface import LabelInterface + +from ..core import RequestOptions + + +class ProjectExt(Project): + + def get_label_interface(self): + return LabelInterface(self.label_config) class ProjectsClientExt(ProjectsClient): @@ -10,6 +22,9 @@ def list(self, **kwargs) -> SyncPagerExt[T]: list.__doc__ = ProjectsClient.list.__doc__ + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> ProjectExt: + return ProjectExt(**dict(super().get(id, request_options=request_options))) + class AsyncProjectsClientExt(AsyncProjectsClient): diff --git a/src/label_studio_sdk/projects/exports/client.py 
b/src/label_studio_sdk/projects/exports/client.py index 388ad5334..131d0f05d 100644 --- a/src/label_studio_sdk/projects/exports/client.py +++ b/src/label_studio_sdk/projects/exports/client.py @@ -105,12 +105,12 @@ def create_export( }, request_options=request_options, ) as _response: - if 200 <= _response.status_code < 300: - for _chunk in _response.iter_bytes(): - yield _chunk - return - _response.read() try: + if 200 <= _response.status_code < 300: + for _chunk in _response.iter_bytes(): + yield _chunk + return + _response.read() _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -149,9 +149,9 @@ def list_formats(self, id: int, *, request_options: typing.Optional[RequestOptio _response = self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/export/formats", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[str], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[str], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -190,9 +190,9 @@ def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = No _response = self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/exports/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Export], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[Export], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -243,9 +243,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExportCreate, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(ExportCreate, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -292,9 +292,9 @@ def get(self, id: int, export_pk: str, *, request_options: typing.Optional[Reque method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Export, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Export, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -338,9 +338,9 @@ def delete(self, id: int, export_pk: str, *, request_options: typing.Optional[Re method="DELETE", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -404,9 +404,9 @@ def convert( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExportConvert, _response.json()) # type: ignore try: + if 200 <= 
_response.status_code < 300: + return pydantic_v1.parse_obj_as(ExportConvert, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -463,9 +463,9 @@ def download( params={"exportType": export_type}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -561,12 +561,12 @@ async def create_export( }, request_options=request_options, ) as _response: - if 200 <= _response.status_code < 300: - async for _chunk in _response.aiter_bytes(): - yield _chunk - return - await _response.aread() try: + if 200 <= _response.status_code < 300: + async for _chunk in _response.aiter_bytes(): + yield _chunk + return + await _response.aread() _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -607,9 +607,9 @@ async def list_formats( _response = await self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/export/formats", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[str], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[str], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -648,9 +648,9 @@ async def list(self, id: int, *, request_options: typing.Optional[RequestOptions _response = await self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/exports/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Export], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[Export], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -701,9 +701,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExportCreate, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(ExportCreate, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -750,9 +750,9 @@ async def get(self, id: int, export_pk: str, *, request_options: typing.Optional method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Export, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Export, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -796,9 +796,9 @@ async def delete(self, id: int, export_pk: str, *, request_options: typing.Optio method="DELETE", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 
300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -862,9 +862,9 @@ async def convert( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExportConvert, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(ExportConvert, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -921,9 +921,9 @@ async def download( params={"exportType": export_type}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/tasks/client.py b/src/label_studio_sdk/tasks/client.py index 2d9f74910..d5e0b58e4 100644 --- a/src/label_studio_sdk/tasks/client.py +++ b/src/label_studio_sdk/tasks/client.py @@ -68,9 +68,9 @@ def create_many_status( method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectImport, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(ProjectImport, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -108,9 +108,9 @@ def delete_all_tasks(self, id: int, *, request_options: typing.Optional[RequestO _response = self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/tasks/", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -186,7 +186,12 @@ def list( client = LabelStudio( api_key="YOUR_API_KEY", ) - client.tasks.list() + response = client.tasks.list() + for item in response: + yield item + # alternatively, you can paginate page-by-page + for page in response.iter_pages(): + yield page """ page = page or 1 _response = self._client_wrapper.httpx_client.request( @@ -205,24 +210,24 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - _parsed_response = pydantic_v1.parse_obj_as(TasksListResponse, _response.json()) # type: ignore - _has_next = True - _get_next = lambda: self.list( - page=page + 1, - page_size=page_size, - view=view, - project=project, - resolve_uri=resolve_uri, - fields=fields, - review=review, - include=include, - query=query, - request_options=request_options, - ) - _items = _parsed_response.tasks - return SyncPager(has_next=_has_next, items=_items, get_next=_get_next) try: + if 200 <= _response.status_code < 300: + _parsed_response = pydantic_v1.parse_obj_as(TasksListResponse, _response.json()) # type: ignore + _has_next = True + _get_next = lambda: self.list( + page=page + 1, + page_size=page_size, + view=view, + project=project, + resolve_uri=resolve_uri, + fields=fields, + review=review, + include=include, + query=query, + request_options=request_options, + ) + _items = _parsed_response.tasks + return SyncPager(has_next=_has_next, items=_items, get_next=_get_next) 
_response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -277,9 +282,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseTask, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(BaseTask, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -317,9 +322,9 @@ def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = Non _response = self._client_wrapper.httpx_client.request( f"api/tasks/{jsonable_encoder(id)}/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DataManagerTaskSerializer, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(DataManagerTaskSerializer, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -359,9 +364,9 @@ def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = _response = self._client_wrapper.httpx_client.request( f"api/tasks/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -419,9 +424,9 @@ def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseTask, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(BaseTask, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -476,9 +481,9 @@ async def create_many_status( method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ProjectImport, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(ProjectImport, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -516,9 +521,9 @@ async def delete_all_tasks(self, id: int, *, request_options: typing.Optional[Re _response = await self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/tasks/", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -594,7 +599,12 @@ async def list( client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - await client.tasks.list() + response = await client.tasks.list() + async for item in response: + yield item + # alternatively, you can paginate page-by-page + async for page in response.iter_pages(): + yield page """ page = page or 1 _response = await self._client_wrapper.httpx_client.request( @@ -613,24 +623,24 @@ async def list( }, 
request_options=request_options, ) - if 200 <= _response.status_code < 300: - _parsed_response = pydantic_v1.parse_obj_as(TasksListResponse, _response.json()) # type: ignore - _has_next = True - _get_next = lambda: self.list( - page=page + 1, - page_size=page_size, - view=view, - project=project, - resolve_uri=resolve_uri, - fields=fields, - review=review, - include=include, - query=query, - request_options=request_options, - ) - _items = _parsed_response.tasks - return AsyncPager(has_next=_has_next, items=_items, get_next=_get_next) try: + if 200 <= _response.status_code < 300: + _parsed_response = pydantic_v1.parse_obj_as(TasksListResponse, _response.json()) # type: ignore + _has_next = True + _get_next = lambda: self.list( + page=page + 1, + page_size=page_size, + view=view, + project=project, + resolve_uri=resolve_uri, + fields=fields, + review=review, + include=include, + query=query, + request_options=request_options, + ) + _items = _parsed_response.tasks + return AsyncPager(has_next=_has_next, items=_items, get_next=_get_next) _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -685,9 +695,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseTask, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(BaseTask, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -727,9 +737,9 @@ async def get( _response = await self._client_wrapper.httpx_client.request( f"api/tasks/{jsonable_encoder(id)}/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DataManagerTaskSerializer, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(DataManagerTaskSerializer, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -769,9 +779,9 @@ async def delete(self, id: str, *, request_options: typing.Optional[RequestOptio _response = await self._client_wrapper.httpx_client.request( f"api/tasks/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -829,9 +839,9 @@ async def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseTask, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(BaseTask, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/tasks/client_ext.py b/src/label_studio_sdk/tasks/client_ext.py index a4202ed54..b54d423f1 100644 --- a/src/label_studio_sdk/tasks/client_ext.py +++ b/src/label_studio_sdk/tasks/client_ext.py @@ -5,6 +5,8 @@ class TasksClientExt(TasksClient): def list(self, **kwargs) -> SyncPagerExt[T]: + # use `fields: all` by default and return the full data + 
kwargs['fields'] = kwargs.get('fields', 'all') return SyncPagerExt.from_sync_pager(super().list(**kwargs)) list.__doc__ = TasksClient.list.__doc__ @@ -13,6 +15,8 @@ def list(self, **kwargs) -> SyncPagerExt[T]: class AsyncTasksClientExt(AsyncTasksClient): async def list(self, **kwargs): + # use `fields: all` by default and return the full data + kwargs['fields'] = kwargs.get('fields', 'all') return await AsyncPagerExt.from_async_pager(await super().list(**kwargs)) list.__doc__ = AsyncTasksClient.list.__doc__ diff --git a/src/label_studio_sdk/types/__init__.py b/src/label_studio_sdk/types/__init__.py index 2ea41088c..a47aaaadd 100644 --- a/src/label_studio_sdk/types/__init__.py +++ b/src/label_studio_sdk/types/__init__.py @@ -53,6 +53,9 @@ from .s3export_storage_status import S3ExportStorageStatus from .s3import_storage import S3ImportStorage from .s3import_storage_status import S3ImportStorageStatus +from .s3s_export_storage import S3SExportStorage +from .s3s_import_storage import S3SImportStorage +from .s3s_import_storage_status import S3SImportStorageStatus from .serialization_option import SerializationOption from .serialization_options import SerializationOptions from .task import Task @@ -64,6 +67,7 @@ from .webhook_actions_item import WebhookActionsItem from .webhook_serializer_for_update import WebhookSerializerForUpdate from .webhook_serializer_for_update_actions_item import WebhookSerializerForUpdateActionsItem +from .workspace import Workspace __all__ = [ "Annotation", @@ -119,6 +123,9 @@ "S3ExportStorageStatus", "S3ImportStorage", "S3ImportStorageStatus", + "S3SExportStorage", + "S3SImportStorage", + "S3SImportStorageStatus", "SerializationOption", "SerializationOptions", "Task", @@ -130,4 +137,5 @@ "WebhookActionsItem", "WebhookSerializerForUpdate", "WebhookSerializerForUpdateActionsItem", + "Workspace", ] diff --git a/src/label_studio_sdk/types/s3s_export_storage.py b/src/label_studio_sdk/types/s3s_export_storage.py new file mode 100644 index 000000000..89579d331 --- /dev/null +++ b/src/label_studio_sdk/types/s3s_export_storage.py @@ -0,0 +1,80 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 + + +class S3SExportStorage(pydantic_v1.BaseModel): + id: typing.Optional[int] = None + title: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + Cloud storage title + """ + + description: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + Cloud storage description + """ + + created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + """ + Creation time + """ + + bucket: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + S3 bucket name + """ + + prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + S3 bucket prefix + """ + + external_id: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + AWS External ID + """ + + role_arn: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + AWS Role ARN + """ + + region_name: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + AWS Region + """ + + s3endpoint: typing.Optional[str] = pydantic_v1.Field(alias="s3_endpoint", default=None) + """ + S3 Endpoint + """ + + project: int = pydantic_v1.Field() + """ + A unique integer value identifying this project. 
+ """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/label_studio_sdk/types/s3s_import_storage.py b/src/label_studio_sdk/types/s3s_import_storage.py new file mode 100644 index 000000000..4b7f7b099 --- /dev/null +++ b/src/label_studio_sdk/types/s3s_import_storage.py @@ -0,0 +1,129 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .s3s_import_storage_status import S3SImportStorageStatus + + +class S3SImportStorage(pydantic_v1.BaseModel): + id: typing.Optional[int] = None + synchronizable: typing.Optional[bool] = None + presign: typing.Optional[bool] = None + last_sync: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + """ + Last sync finished time + """ + + last_sync_count: typing.Optional[int] = pydantic_v1.Field(default=None) + """ + Count of tasks synced last time + """ + + last_sync_job: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + Last sync job ID + """ + + status: typing.Optional[S3SImportStorageStatus] = None + traceback: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + Traceback report for the last failed sync + """ + + meta: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + """ + Meta and debug information about storage processes + """ + + title: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + Cloud storage title + """ + + description: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + Cloud storage description + """ + + created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + """ + Creation time + """ + + bucket: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + S3 bucket name + """ + + prefix: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + S3 bucket prefix + """ + + regex_filter: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + Cloud storage regex for filtering objects + """ + + use_blob_urls: typing.Optional[bool] = pydantic_v1.Field(default=None) + """ + Interpret objects as BLOBs and generate URLs + """ + + region_name: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + AWS Region + """ + + external_id: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + AWS External ID + """ + + role_arn: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + AWS Role ARN + """ + + s3endpoint: typing.Optional[str] = pydantic_v1.Field(alias="s3_endpoint", default=None) + """ + S3 Endpoint + """ + + presign_ttl: typing.Optional[int] = pydantic_v1.Field(default=None) + """ + Presigned URLs TTL (in minutes) + """ + + recursive_scan: 
typing.Optional[bool] = pydantic_v1.Field(default=None) + """ + Perform recursive scan over the bucket content + """ + + project: int = pydantic_v1.Field() + """ + A unique integer value identifying this project. + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/label_studio_sdk/types/s3s_import_storage_status.py b/src/label_studio_sdk/types/s3s_import_storage_status.py new file mode 100644 index 000000000..f3765ab47 --- /dev/null +++ b/src/label_studio_sdk/types/s3s_import_storage_status.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +S3SImportStorageStatus = typing.Union[ + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any +] diff --git a/src/label_studio_sdk/types/workspace.py b/src/label_studio_sdk/types/workspace.py new file mode 100644 index 000000000..029f32691 --- /dev/null +++ b/src/label_studio_sdk/types/workspace.py @@ -0,0 +1,77 @@ +# This file was auto-generated by Fern from our API Definition. 
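Both new S3S models expose the endpoint under a pydantic alias (`s3endpoint` in Python, `s3_endpoint` on the wire, with `allow_population_by_field_name = True`), and the sync status is a literal union rather than an enum. A small sketch of parsing a payload (all values illustrative):

    from label_studio_sdk.types import S3SImportStorage

    storage = S3SImportStorage.parse_obj(
        {
            "project": 1,                      # the only required field
            "bucket": "my-bucket",
            "s3_endpoint": "https://s3.us-east-1.amazonaws.com",
            "status": "initialized",           # one of the five literal values
        }
    )
    # the wire alias maps onto the s3endpoint attribute
    assert storage.s3endpoint == "https://s3.us-east-1.amazonaws.com"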
+ +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 + + +class Workspace(pydantic_v1.BaseModel): + id: typing.Optional[int] = pydantic_v1.Field(default=None) + """ + Unique ID of the workspace + """ + + title: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + Workspace title + """ + + description: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + Workspace description + """ + + is_public: typing.Optional[bool] = pydantic_v1.Field(default=None) + """ + Whether the workspace is public or not + """ + + is_personal: typing.Optional[bool] = pydantic_v1.Field(default=None) + """ + Whether the workspace is personal or not + """ + + is_archived: typing.Optional[bool] = pydantic_v1.Field(default=None) + """ + Whether the workspace is archived or not + """ + + created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + """ + Creation time of the workspace + """ + + updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + """ + Last updated time of the workspace + """ + + created_by: typing.Optional[int] = pydantic_v1.Field(default=None) + """ + User ID of the workspace creator + """ + + color: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + Workspace color + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/label_studio_sdk/users/client.py b/src/label_studio_sdk/users/client.py index 696308aa7..2b9b21999 100644 --- a/src/label_studio_sdk/users/client.py +++ b/src/label_studio_sdk/users/client.py @@ -46,9 +46,9 @@ def reset_token(self, *, request_options: typing.Optional[RequestOptions] = None _response = self._client_wrapper.httpx_client.request( "api/current-user/reset-token/", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(UsersResetTokenResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(UsersResetTokenResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -80,9 +80,9 @@ def get_token(self, *, request_options: typing.Optional[RequestOptions] = None) _response = self._client_wrapper.httpx_client.request( "api/current-user/token", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(UsersGetTokenResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(UsersGetTokenResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise 
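The `Workspace` model overrides `dict()` to deep-merge an `exclude_unset` pass with an `exclude_none` pass, so fields that were never set (and default to `None`) drop out of the output, while explicitly assigned fields survive. A sketch of the effect:

    from label_studio_sdk.types import Workspace

    ws = Workspace(title="Research")

    # exclude_unset keeps only assigned fields; exclude_none strips the
    # None defaults; deep_union_pydantic_dicts merges the two passes
    print(ws.dict())  # {'title': 'Research'}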
ApiError(status_code=_response.status_code, body=_response.text) @@ -114,9 +114,9 @@ def whoami(self, *, request_options: typing.Optional[RequestOptions] = None) -> _response = self._client_wrapper.httpx_client.request( "api/current-user/whoami", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -148,9 +148,9 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty _response = self._client_wrapper.httpx_client.request( "api/users/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[BaseUser], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[BaseUser], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -236,9 +236,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -276,9 +276,9 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non _response = self._client_wrapper.httpx_client.request( f"api/users/{jsonable_encoder(id)}/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -318,9 +318,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = _response = self._client_wrapper.httpx_client.request( f"api/users/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -414,9 +414,9 @@ def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -453,9 +453,9 @@ async def reset_token(self, *, request_options: typing.Optional[RequestOptions] _response = await self._client_wrapper.httpx_client.request( "api/current-user/reset-token/", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - 
return pydantic_v1.parse_obj_as(UsersResetTokenResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(UsersResetTokenResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -487,9 +487,9 @@ async def get_token(self, *, request_options: typing.Optional[RequestOptions] = _response = await self._client_wrapper.httpx_client.request( "api/current-user/token", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(UsersGetTokenResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(UsersGetTokenResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -521,9 +521,9 @@ async def whoami(self, *, request_options: typing.Optional[RequestOptions] = Non _response = await self._client_wrapper.httpx_client.request( "api/current-user/whoami", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -555,9 +555,9 @@ async def list(self, *, request_options: typing.Optional[RequestOptions] = None) _response = await self._client_wrapper.httpx_client.request( "api/users/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[BaseUser], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[BaseUser], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -643,9 +643,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -683,9 +683,9 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] _response = await self._client_wrapper.httpx_client.request( f"api/users/{jsonable_encoder(id)}/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -725,9 +725,9 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio _response = await self._client_wrapper.httpx_client.request( f"api/users/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options ) - if 200 <= 
_response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -821,9 +821,9 @@ async def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(BaseUser, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/views/client.py b/src/label_studio_sdk/views/client.py index 686c3d7ac..15ed4cd14 100644 --- a/src/label_studio_sdk/views/client.py +++ b/src/label_studio_sdk/views/client.py @@ -53,9 +53,9 @@ def list( _response = self._client_wrapper.httpx_client.request( "api/dm/views/", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[View], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[View], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -105,9 +105,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(View, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(View, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -148,9 +148,9 @@ def delete_all(self, *, project: int, request_options: typing.Optional[RequestOp request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -187,9 +187,9 @@ def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = Non _response = self._client_wrapper.httpx_client.request( f"api/dm/views/{jsonable_encoder(id)}/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(View, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(View, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -225,9 +225,9 @@ def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = _response = self._client_wrapper.httpx_client.request( f"api/dm/views/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -281,9 +281,9 @@ def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(View, _response.json()) # type: ignore 
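Every hunk in this stretch of the diff applies the same mechanical refactor: the 2xx parse used to run before the `try`, so a successful status with a non-JSON body (a proxy error page, for instance) leaked a raw `JSONDecodeError` to the caller. Moving it inside the `try` converts that case into an `ApiError` carrying the status code and raw text. The shared shape, condensed from the hunks (not a verbatim copy of any one method):

    from json.decoder import JSONDecodeError
    # ApiError, pydantic_v1 and the response models come from the SDK modules above

    try:
        if 200 <= _response.status_code < 300:      # success path now inside try
            return pydantic_v1.parse_obj_as(View, _response.json())
        _response_json = _response.json()           # error body, also inside try
    except JSONDecodeError:
        # a non-JSON body on either path now surfaces as a structured ApiError
        raise ApiError(status_code=_response.status_code, body=_response.text)
    raise ApiError(status_code=_response.status_code, body=_response_json)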
try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(View, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -327,9 +327,9 @@ async def list( _response = await self._client_wrapper.httpx_client.request( "api/dm/views/", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[View], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[View], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -379,9 +379,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(View, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(View, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -422,9 +422,9 @@ async def delete_all(self, *, project: int, request_options: typing.Optional[Req request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -461,9 +461,9 @@ async def get(self, id: str, *, request_options: typing.Optional[RequestOptions] _response = await self._client_wrapper.httpx_client.request( f"api/dm/views/{jsonable_encoder(id)}/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(View, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(View, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -499,9 +499,9 @@ async def delete(self, id: str, *, request_options: typing.Optional[RequestOptio _response = await self._client_wrapper.httpx_client.request( f"api/dm/views/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -555,9 +555,9 @@ async def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(View, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(View, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/webhooks/client.py b/src/label_studio_sdk/webhooks/client.py index 9b04e0034..f77322b6d 100644 --- a/src/label_studio_sdk/webhooks/client.py +++ b/src/label_studio_sdk/webhooks/client.py @@ -55,9 +55,9 @@ def list( _response = self._client_wrapper.httpx_client.request( "api/webhooks/", method="GET", params={"project": 
project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Webhook], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[Webhook], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -101,9 +101,9 @@ def create(self, *, request: Webhook, request_options: typing.Optional[RequestOp _response = self._client_wrapper.httpx_client.request( "api/webhooks/", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Webhook, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Webhook, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -145,9 +145,9 @@ def info( params={"organization-only": organization_only}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -186,9 +186,9 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non _response = self._client_wrapper.httpx_client.request( f"api/webhooks/{jsonable_encoder(id)}/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Webhook, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Webhook, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -226,9 +226,9 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = _response = self._client_wrapper.httpx_client.request( f"api/webhooks/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -317,9 +317,9 @@ def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(WebhookSerializerForUpdate, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(WebhookSerializerForUpdate, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -365,9 +365,9 @@ async def list( _response = await self._client_wrapper.httpx_client.request( "api/webhooks/", method="GET", params={"project": project}, request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[Webhook], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[Webhook], _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, 
body=_response.text) @@ -411,9 +411,9 @@ async def create(self, *, request: Webhook, request_options: typing.Optional[Req _response = await self._client_wrapper.httpx_client.request( "api/webhooks/", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Webhook, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Webhook, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -455,9 +455,9 @@ async def info( params={"organization-only": organization_only}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -496,9 +496,9 @@ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] _response = await self._client_wrapper.httpx_client.request( f"api/webhooks/{jsonable_encoder(id)}/", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Webhook, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Webhook, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -536,9 +536,9 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio _response = await self._client_wrapper.httpx_client.request( f"api/webhooks/{jsonable_encoder(id)}/", method="DELETE", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -627,9 +627,9 @@ async def update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(WebhookSerializerForUpdate, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(WebhookSerializerForUpdate, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/label_studio_sdk/workspaces/__init__.py b/src/label_studio_sdk/workspaces/__init__.py new file mode 100644 index 000000000..ddc7fa13c --- /dev/null +++ b/src/label_studio_sdk/workspaces/__init__.py @@ -0,0 +1,6 @@ +# This file was auto-generated by Fern from our API Definition. + +from . import members +from .members import MembersCreateResponse, MembersListResponseItem + +__all__ = ["MembersCreateResponse", "MembersListResponseItem", "members"] diff --git a/src/label_studio_sdk/workspaces/client.py b/src/label_studio_sdk/workspaces/client.py new file mode 100644 index 000000000..febb191e4 --- /dev/null +++ b/src/label_studio_sdk/workspaces/client.py @@ -0,0 +1,549 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import pydantic_v1 +from ..core.request_options import RequestOptions +from ..types.workspace import Workspace +from .members.client import AsyncMembersClient, MembersClient + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class WorkspacesClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + self.members = MembersClient(client_wrapper=self._client_wrapper) + + def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Workspace]: + """ + List all workspaces for your organization. + + Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. + + For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.List[Workspace] + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.workspaces.list() + """ + _response = self._client_wrapper.httpx_client.request( + "api/workspaces", method="GET", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[Workspace], _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def create( + self, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + is_public: typing.Optional[bool] = OMIT, + is_personal: typing.Optional[bool] = OMIT, + color: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> Workspace: + """ + Create a new workspace. + + Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. + + For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). + + Parameters + ---------- + title : typing.Optional[str] + Workspace title + + description : typing.Optional[str] + Workspace description + + is_public : typing.Optional[bool] + Is workspace public + + is_personal : typing.Optional[bool] + Is workspace personal + + color : typing.Optional[str] + Workspace color in HEX format + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + Workspace + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.workspaces.create() + """ + _response = self._client_wrapper.httpx_client.request( + "api/workspaces", + method="POST", + json={ + "title": title, + "description": description, + "is_public": is_public, + "is_personal": is_personal, + "color": color, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Workspace, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Workspace: + """ + Get information about a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + Workspace + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.workspaces.get( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", method="GET", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Workspace, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: + """ + Delete a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + None + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.workspaces.delete( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def update( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + is_public: typing.Optional[bool] = OMIT, + is_personal: typing.Optional[bool] = OMIT, + color: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> Workspace: + """ + Update a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). 
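Taken together, the new `WorkspacesClient` covers the full CRUD surface. A round-trip sketch built from the methods above (title, color, and description are illustrative; the API key is a placeholder, and the endpoints require a Label Studio edition that supports workspaces):

    from label_studio_sdk.client import LabelStudio

    client = LabelStudio(api_key="YOUR_API_KEY")

    ws = client.workspaces.create(title="Research", color="#FF7F50")
    ws = client.workspaces.update(id=ws.id, description="NLP annotation projects")
    print([w.title for w in client.workspaces.list()])
    client.workspaces.delete(id=ws.id)  # returns None on success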
+ + Parameters + ---------- + id : int + Workspace ID + + title : typing.Optional[str] + Workspace title + + description : typing.Optional[str] + Workspace description + + is_public : typing.Optional[bool] + Is workspace public + + is_personal : typing.Optional[bool] + Is workspace personal + + color : typing.Optional[str] + Workspace color in HEX format + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + Workspace + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.workspaces.update( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "is_public": is_public, + "is_personal": is_personal, + "color": color, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Workspace, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncWorkspacesClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + self.members = AsyncMembersClient(client_wrapper=self._client_wrapper) + + async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Workspace]: + """ + List all workspaces for your organization. + + Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. + + For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.List[Workspace] + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.workspaces.list() + """ + _response = await self._client_wrapper.httpx_client.request( + "api/workspaces", method="GET", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[Workspace], _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def create( + self, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + is_public: typing.Optional[bool] = OMIT, + is_personal: typing.Optional[bool] = OMIT, + color: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> Workspace: + """ + Create a new workspace. + + Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. + + For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). 
+ + Parameters + ---------- + title : typing.Optional[str] + Workspace title + + description : typing.Optional[str] + Workspace description + + is_public : typing.Optional[bool] + Is workspace public + + is_personal : typing.Optional[bool] + Is workspace personal + + color : typing.Optional[str] + Workspace color in HEX format + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + Workspace + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.workspaces.create() + """ + _response = await self._client_wrapper.httpx_client.request( + "api/workspaces", + method="POST", + json={ + "title": title, + "description": description, + "is_public": is_public, + "is_personal": is_personal, + "color": color, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Workspace, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Workspace: + """ + Get information about a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + Workspace + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.workspaces.get( + id=1, + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", method="GET", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Workspace, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: + """ + Delete a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
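The async client mirrors the sync surface one-to-one; every call returns a coroutine and is normally driven by `asyncio.run`. A sketch (placeholder credentials):

    import asyncio

    from label_studio_sdk.client import AsyncLabelStudio


    async def main() -> None:
        client = AsyncLabelStudio(api_key="YOUR_API_KEY")
        for ws in await client.workspaces.list():
            print(ws.id, ws.title)


    asyncio.run(main())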
+ + Returns + ------- + None + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.workspaces.delete( + id=1, + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", method="DELETE", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def update( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + is_public: typing.Optional[bool] = OMIT, + is_personal: typing.Optional[bool] = OMIT, + color: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> Workspace: + """ + Update a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + title : typing.Optional[str] + Workspace title + + description : typing.Optional[str] + Workspace description + + is_public : typing.Optional[bool] + Is workspace public + + is_personal : typing.Optional[bool] + Is workspace personal + + color : typing.Optional[str] + Workspace color in HEX format + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + Workspace + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.workspaces.update( + id=1, + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "is_public": is_public, + "is_personal": is_personal, + "color": color, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Workspace, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/workspaces/members/__init__.py b/src/label_studio_sdk/workspaces/members/__init__.py new file mode 100644 index 000000000..2e3a8f37d --- /dev/null +++ b/src/label_studio_sdk/workspaces/members/__init__.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +from .types import MembersCreateResponse, MembersListResponseItem + +__all__ = ["MembersCreateResponse", "MembersListResponseItem"] diff --git a/src/label_studio_sdk/workspaces/members/client.py b/src/label_studio_sdk/workspaces/members/client.py new file mode 100644 index 000000000..db168e10b --- /dev/null +++ b/src/label_studio_sdk/workspaces/members/client.py @@ -0,0 +1,297 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import pydantic_v1 +from ...core.request_options import RequestOptions +from .types.members_create_response import MembersCreateResponse +from .types.members_list_response_item import MembersListResponseItem + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class MembersClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> typing.List[MembersListResponseItem]: + """ + List all workspace memberships for a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.List[MembersListResponseItem] + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.workspaces.members.list( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", method="GET", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[MembersListResponseItem], _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def create( + self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None + ) -> MembersCreateResponse: + """ + Create a new workspace membership. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + user : typing.Optional[int] + User ID of the workspace member + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + MembersCreateResponse + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.workspaces.members.create( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="POST", + json={"user": user}, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(MembersCreateResponse, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def delete( + self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None + ) -> None: + """ + Delete a specific workspace membership. You will need to provide the workspace ID and the user ID. You can find this using [List workspace memberships](list). 
+ + Parameters + ---------- + id : int + Workspace ID + + user : typing.Optional[int] + User ID of the workspace member + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + None + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.workspaces.members.delete( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="DELETE", + json={"user": user}, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncMembersClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> typing.List[MembersListResponseItem]: + """ + List all workspace memberships for a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.List[MembersListResponseItem] + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.workspaces.members.list( + id=1, + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", method="GET", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[MembersListResponseItem], _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def create( + self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None + ) -> MembersCreateResponse: + """ + Create a new workspace membership. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + user : typing.Optional[int] + User ID of the workspace member + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
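One design detail worth noting in the members client: `delete` identifies the member via a `user` field in the JSON body of the DELETE request, since the route is keyed by workspace ID alone. A sketch of adding, listing, and removing a member (the workspace and user IDs are placeholders):

    from label_studio_sdk.client import LabelStudio

    client = LabelStudio(api_key="YOUR_API_KEY")

    client.workspaces.members.create(id=1, user=42)   # add user 42 to workspace 1
    for member in client.workspaces.members.list(id=1):
        print(member.user)                            # a dict describing the member
    client.workspaces.members.delete(id=1, user=42)   # DELETE with a JSON body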
+ + Returns + ------- + MembersCreateResponse + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.workspaces.members.create( + id=1, + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="POST", + json={"user": user}, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(MembersCreateResponse, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def delete( + self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None + ) -> None: + """ + Delete a specific workspace membership. You will need to provide the workspace ID and the user ID. You can find this using [List workspace memberships](list). + + Parameters + ---------- + id : int + Workspace ID + + user : typing.Optional[int] + User ID of the workspace member + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + None + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.workspaces.members.delete( + id=1, + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="DELETE", + json={"user": user}, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/workspaces/members/types/__init__.py b/src/label_studio_sdk/workspaces/members/types/__init__.py new file mode 100644 index 000000000..b6f51dbc8 --- /dev/null +++ b/src/label_studio_sdk/workspaces/members/types/__init__.py @@ -0,0 +1,6 @@ +# This file was auto-generated by Fern from our API Definition. + +from .members_create_response import MembersCreateResponse +from .members_list_response_item import MembersListResponseItem + +__all__ = ["MembersCreateResponse", "MembersListResponseItem"] diff --git a/src/label_studio_sdk/workspaces/members/types/members_create_response.py b/src/label_studio_sdk/workspaces/members/types/members_create_response.py new file mode 100644 index 000000000..3c0f08a0c --- /dev/null +++ b/src/label_studio_sdk/workspaces/members/types/members_create_response.py @@ -0,0 +1,32 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import typing + +from ....core.datetime_utils import serialize_datetime +from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 + + +class MembersCreateResponse(pydantic_v1.BaseModel): + user: typing.Optional[int] = pydantic_v1.Field(default=None) + """ + User ID of the workspace member + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/label_studio_sdk/workspaces/members/types/members_list_response_item.py b/src/label_studio_sdk/workspaces/members/types/members_list_response_item.py new file mode 100644 index 000000000..69ccff3ee --- /dev/null +++ b/src/label_studio_sdk/workspaces/members/types/members_list_response_item.py @@ -0,0 +1,32 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ....core.datetime_utils import serialize_datetime +from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 + + +class MembersListResponseItem(pydantic_v1.BaseModel): + user: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) + """ + User ID of the workspace member + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/tests/custom/test_interface/test_create.py b/tests/custom/test_interface/test_create.py index 858694c01..151021e68 100644 --- a/tests/custom/test_interface/test_create.py +++ b/tests/custom/test_interface/test_create.py @@ -1,3 +1,5 @@ + +from label_studio_sdk.label_interface import LabelInterface import label_studio_sdk.label_interface.create as CE from label_studio_sdk.label_interface.control_tags import ChoicesTag from label_studio_sdk.label_interface.object_tags import TextTag @@ -87,3 +89,39 @@ def test_create_image_labels(): assert res[0] is tag_type assert len(res[2]) is len(label_names) + + +def test_using_lpi_tags(): + """ """ + tags = { + 'choices': ChoicesTag(name='sentiment_class', labels=['Positive', 'Negative', 'Neutral']), + 'input': TextTag(name='message', value='my_text'), + } + + tuples = CE.convert_tags_description(tags, mapping=None) + + assert len(tags) is 2 + + ftag = tuples[0] + stag = tuples[1] + + 
assert ftag[0] == "Choices" + assert stag[0] == "Text" + assert ftag[1]["name"] == "sentiment_class" + assert ftag[1]["toName"] == "message" + assert stag[1]["name"] == "message" + assert stag[1]["value"] == '$my_text' + + tags = { + 'choices': ChoicesTag(labels=['Positive', 'Negative', 'Neutral']), + 'input': TextTag(), + } + + tuples = CE.convert_tags_description(tags, mapping=None) + ftag = tuples[0] + stag = tuples[1] + + assert ftag[1]["name"] == "choices" + assert ftag[1]["toName"] == "input" + assert stag[1]["name"] == "input" + assert stag[1]["value"] == "$input" diff --git a/tests/export_storage/test_s3s.py b/tests/export_storage/test_s3s.py new file mode 100644 index 000000000..2fcba66a7 --- /dev/null +++ b/tests/export_storage/test_s3s.py @@ -0,0 +1,164 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +from label_studio_sdk.client import AsyncLabelStudio, LabelStudio + +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "external_id": "external_id", + "role_arn": "role_arn", + "region_name": "region_name", + "s3_endpoint": "s3_endpoint", + "project": 1, + } + ] + expected_types: typing.Any = ( + "list", + { + 0: { + "id": "integer", + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "external_id": None, + "role_arn": None, + "region_name": None, + "s3_endpoint": None, + "project": "integer", + } + }, + ) + response = client.export_storage.s3s.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.s3s.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "external_id": "external_id", + "role_arn": "role_arn", + "region_name": "region_name", + "s3_endpoint": "s3_endpoint", + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "external_id": None, + "role_arn": None, + "region_name": None, + "s3_endpoint": None, + "project": "integer", + } + response = client.export_storage.s3s.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.s3s.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "external_id": "external_id", + "role_arn": "role_arn", + "region_name": "region_name", + "s3_endpoint": "s3_endpoint", + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "external_id": None, + "role_arn": None, + "region_name": None, + "s3_endpoint": None, + "project": 
"integer", + } + response = client.export_storage.s3s.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.s3s.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert client.export_storage.s3s.delete(id=1) is None # type: ignore[func-returns-value] + + assert await async_client.export_storage.s3s.delete(id=1) is None # type: ignore[func-returns-value] + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "external_id": "external_id", + "role_arn": "role_arn", + "region_name": "region_name", + "s3_endpoint": "s3_endpoint", + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "external_id": None, + "role_arn": None, + "region_name": None, + "s3_endpoint": None, + "project": "integer", + } + response = client.export_storage.s3s.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.s3s.update(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert client.export_storage.s3s.validate() is None # type: ignore[func-returns-value] + + assert await async_client.export_storage.s3s.validate() is None # type: ignore[func-returns-value] diff --git a/tests/import_storage/test_s3s.py b/tests/import_storage/test_s3s.py new file mode 100644 index 000000000..a2c85931a --- /dev/null +++ b/tests/import_storage/test_s3s.py @@ -0,0 +1,318 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +from label_studio_sdk.client import AsyncLabelStudio, LabelStudio + +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "synchronizable": True, + "presign": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"meta": {"key": "value"}}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "region_name": "region_name", + "external_id": "external_id", + "role_arn": "role_arn", + "s3_endpoint": "s3_endpoint", + "presign_ttl": 1, + "recursive_scan": True, + "project": 1, + } + ] + expected_types: typing.Any = ( + "list", + { + 0: { + "id": "integer", + "synchronizable": None, + "presign": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "region_name": None, + "external_id": None, + "role_arn": None, + "s3_endpoint": None, + "presign_ttl": "integer", + "recursive_scan": None, + "project": "integer", + } + }, + ) + response = client.import_storage.s3s.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.s3s.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "synchronizable": True, + "presign": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"meta": {"key": "value"}}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "region_name": "region_name", + "external_id": "external_id", + "role_arn": "role_arn", + "s3_endpoint": "s3_endpoint", + "presign_ttl": 1, + "recursive_scan": True, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "synchronizable": None, + "presign": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "region_name": None, + "external_id": None, + "role_arn": None, + "s3_endpoint": None, + "presign_ttl": "integer", + "recursive_scan": None, + "project": "integer", + } + response = client.import_storage.s3s.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.s3s.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "synchronizable": True, + "presign": True, + "last_sync": 
"2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"meta": {"key": "value"}}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "region_name": "region_name", + "external_id": "external_id", + "role_arn": "role_arn", + "s3_endpoint": "s3_endpoint", + "presign_ttl": 1, + "recursive_scan": True, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "synchronizable": None, + "presign": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "region_name": None, + "external_id": None, + "role_arn": None, + "s3_endpoint": None, + "presign_ttl": "integer", + "recursive_scan": None, + "project": "integer", + } + response = client.import_storage.s3s.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.s3s.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert client.import_storage.s3s.delete(id=1) is None # type: ignore[func-returns-value] + + assert await async_client.import_storage.s3s.delete(id=1) is None # type: ignore[func-returns-value] + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "synchronizable": True, + "presign": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"meta": {"key": "value"}}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "region_name": "region_name", + "external_id": "external_id", + "role_arn": "role_arn", + "s3_endpoint": "s3_endpoint", + "presign_ttl": 1, + "recursive_scan": True, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "synchronizable": None, + "presign": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "region_name": None, + "external_id": None, + "role_arn": None, + "s3_endpoint": None, + "presign_ttl": "integer", + "recursive_scan": None, + "project": "integer", + } + response = client.import_storage.s3s.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.s3s.update(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return 
a value + assert client.import_storage.s3s.validate() is None # type: ignore[func-returns-value] + + assert await async_client.import_storage.s3s.validate() is None # type: ignore[func-returns-value] + + +async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "synchronizable": True, + "presign": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"meta": {"key": "value"}}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "region_name": "region_name", + "external_id": "external_id", + "role_arn": "role_arn", + "s3_endpoint": "s3_endpoint", + "presign_ttl": 1, + "recursive_scan": True, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "synchronizable": None, + "presign": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "region_name": None, + "external_id": None, + "role_arn": None, + "s3_endpoint": None, + "presign_ttl": "integer", + "recursive_scan": None, + "project": "integer", + } + response = client.import_storage.s3s.sync(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.s3s.sync(id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_annotations.py b/tests/test_annotations.py index 9a0c664e9..1a371b7f9 100644 --- a/tests/test_annotations.py +++ b/tests/test_annotations.py @@ -24,7 +24,7 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -116,7 +116,7 @@ async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> No "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -193,7 +193,7 @@ async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> No "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -218,7 +218,7 @@ async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> No "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -246,7 +246,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -337,7 +337,7 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -414,7 +414,7 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No "width": 50, 
"height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -439,7 +439,7 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], diff --git a/tests/test_predictions.py b/tests/test_predictions.py index fa62160cc..6806f4250 100644 --- a/tests/test_predictions.py +++ b/tests/test_predictions.py @@ -25,7 +25,7 @@ async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> Non "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -104,7 +104,7 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -168,7 +168,7 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -192,7 +192,7 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -219,7 +219,7 @@ async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -299,7 +299,7 @@ async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> No "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -364,7 +364,7 @@ async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> No "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], @@ -389,7 +389,7 @@ async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> No "width": 50, "height": 60, "rotation": 0, - "values": {"rectanglelabels": {"0": "Person"}}, + "values": {"rectanglelabels": ["Person"]}, }, } ], diff --git a/tests/test_workspaces.py b/tests/test_workspaces.py new file mode 100644 index 000000000..521b6be78 --- /dev/null +++ b/tests/test_workspaces.py @@ -0,0 +1,149 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +from label_studio_sdk.client import AsyncLabelStudio, LabelStudio + +from .utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "title": "title", + "description": "description", + "is_public": True, + "is_personal": True, + "is_archived": True, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "created_by": 1, + "color": "color", + } + ] + expected_types: typing.Any = ( + "list", + { + 0: { + "id": "integer", + "title": None, + "description": None, + "is_public": None, + "is_personal": None, + "is_archived": None, + "created_at": "datetime", + "updated_at": "datetime", + "created_by": "integer", + "color": None, + } + }, + ) + response = client.workspaces.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.workspaces.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "title": "title", + "description": "description", + "is_public": True, + "is_personal": True, + "is_archived": True, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "created_by": 1, + "color": "color", + } + expected_types: typing.Any = { + "id": "integer", + "title": None, + "description": None, + "is_public": None, + "is_personal": None, + "is_archived": None, + "created_at": "datetime", + "updated_at": "datetime", + "created_by": "integer", + "color": None, + } + response = client.workspaces.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.workspaces.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "title": "title", + "description": "description", + "is_public": True, + "is_personal": True, + "is_archived": True, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "created_by": 1, + "color": "color", + } + expected_types: typing.Any = { + "id": "integer", + "title": None, + "description": None, + "is_public": None, + "is_personal": None, + "is_archived": None, + "created_at": "datetime", + "updated_at": "datetime", + "created_by": "integer", + "color": None, + } + response = client.workspaces.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.workspaces.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert client.workspaces.delete(id=1) is None # type: ignore[func-returns-value] + + assert await async_client.workspaces.delete(id=1) is None # type: ignore[func-returns-value] + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "title": "title", + "description": "description", + "is_public": True, + "is_personal": True, + "is_archived": True, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "created_by": 1, + "color": "color", + } + expected_types: 
typing.Any = { + "id": "integer", + "title": None, + "description": None, + "is_public": None, + "is_personal": None, + "is_archived": None, + "created_at": "datetime", + "updated_at": "datetime", + "created_by": "integer", + "color": None, + } + response = client.workspaces.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.workspaces.update(id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/utils/test_http_client.py b/tests/utils/test_http_client.py new file mode 100644 index 000000000..01bb6055f --- /dev/null +++ b/tests/utils/test_http_client.py @@ -0,0 +1,47 @@ +# This file was auto-generated by Fern from our API Definition. + +from label_studio_sdk.core.http_client import get_request_body +from label_studio_sdk.core.request_options import RequestOptions + + +def get_request_options() -> RequestOptions: + return {"additional_body_parameters": {"see you": "later"}} + + +def test_get_json_request_body() -> None: + json_body, data_body = get_request_body(json={"hello": "world"}, data=None, request_options=None, omit=None) + assert json_body == {"hello": "world"} + assert data_body is None + + json_body_extras, data_body_extras = get_request_body( + json={"goodbye": "world"}, data=None, request_options=get_request_options(), omit=None + ) + + assert json_body_extras == {"goodbye": "world", "see you": "later"} + assert data_body_extras is None + + +def test_get_files_request_body() -> None: + json_body, data_body = get_request_body(json=None, data={"hello": "world"}, request_options=None, omit=None) + assert data_body == {"hello": "world"} + assert json_body is None + + json_body_extras, data_body_extras = get_request_body( + json=None, data={"goodbye": "world"}, request_options=get_request_options(), omit=None + ) + + assert data_body_extras == {"goodbye": "world", "see you": "later"} + assert json_body_extras is None + + +def test_get_none_request_body() -> None: + json_body, data_body = get_request_body(json=None, data=None, request_options=None, omit=None) + assert data_body is None + assert json_body is None + + json_body_extras, data_body_extras = get_request_body( + json=None, data=None, request_options=get_request_options(), omit=None + ) + + assert json_body_extras == {"see you": "later"} + assert data_body_extras is None diff --git a/tests/utils/test_query_encoding.py b/tests/utils/test_query_encoding.py new file mode 100644 index 000000000..dd8c6a886 --- /dev/null +++ b/tests/utils/test_query_encoding.py @@ -0,0 +1,13 @@ +# This file was auto-generated by Fern from our API Definition. + +from label_studio_sdk.core.query_encoder import encode_query + + +def test_query_encoding() -> None: + assert encode_query({"hello world": "hello world"}) == {"hello world": "hello world"} + assert encode_query({"hello_world": {"hello": "world"}}) == {"hello_world[hello]": "world"} + assert encode_query({"hello_world": {"hello": {"world": "today"}, "test": "this"}, "hi": "there"}) == { + "hello_world[hello][world]": "today", + "hello_world[test]": "this", + "hi": "there", + } diff --git a/tests/workspaces/__init__.py b/tests/workspaces/__init__.py new file mode 100644 index 000000000..f3ea2659b --- /dev/null +++ b/tests/workspaces/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. 
+ diff --git a/tests/workspaces/test_members.py b/tests/workspaces/test_members.py new file mode 100644 index 000000000..05ba66ec3 --- /dev/null +++ b/tests/workspaces/test_members.py @@ -0,0 +1,34 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +from label_studio_sdk.client import AsyncLabelStudio, LabelStudio + +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [{"user": {"user": {"key": "value"}}}] + expected_types: typing.Any = ("list", {0: {"user": ("dict", {0: (None, None)})}}) + response = client.workspaces.members.list(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.workspaces.members.list(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = {"user": 1} + expected_types: typing.Any = {"user": "integer"} + response = client.workspaces.members.create(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.workspaces.members.create(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert client.workspaces.members.delete(id=1) is None # type: ignore[func-returns-value] + + assert await async_client.workspaces.members.delete(id=1) is None # type: ignore[func-returns-value] From 0b7ece0554de291d05d446ea5240e56724e384e8 Mon Sep 17 00:00:00 2001 From: niklub Date: Sat, 13 Jul 2024 08:50:26 +0100 Subject: [PATCH 2/5] fix: RND-103: add workspace.is_archived (#264) * fix: RND-103: add workspace.is_archived * :herb: Fern Regeneration -- July 13, 2024 (#263) SDK regeneration Co-authored-by: fern-api <115122769+fern-api[bot]@users.noreply.github.com> --------- Co-authored-by: nik Co-authored-by: fern-api[bot] <115122769+fern-api[bot]@users.noreply.github.com> --- .mock/definition/workspaces.yml | 6 + README.md | 16 +- poetry.lock | 6 +- reference.md | 472 +--------------------- src/label_studio_sdk/workspaces/client.py | 20 + 5 files changed, 38 insertions(+), 482 deletions(-) diff --git a/.mock/definition/workspaces.yml b/.mock/definition/workspaces.yml index 0f593c2db..022f75b22 100644 --- a/.mock/definition/workspaces.yml +++ b/.mock/definition/workspaces.yml @@ -72,6 +72,9 @@ service: is_personal: type: optional docs: Is workspace personal + is_archived: + type: optional + docs: Is workspace archived color: type: optional docs: Workspace color in HEX format @@ -175,6 +178,9 @@ service: is_personal: type: optional docs: Is workspace personal + is_archived: + type: optional + docs: Is workspace archived color: type: optional docs: Workspace color in HEX format diff --git a/README.md b/README.md index 84213b651..48804904d 100644 --- a/README.md +++ b/README.md @@ -62,18 +62,16 @@ Check more examples [here](https://api.labelstud.io/). 
## Create a new project ```python +from label_studio_sdk.label_interface import LabelInterface +from label_studio_sdk.label_interface.create import labels + project = ls.projects.create( name="Project name", description="Project description", - label_config=""" - - - - - - """ + label_config=LabelInterface.create({ + "image": "Image", + "bbox": labels(["cat", "dog"], tag_type="RectangleLabels") + }) ) ``` diff --git a/poetry.lock b/poetry.lock index 8d1487335..ef2d3b9f3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -203,13 +203,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] diff --git a/reference.md b/reference.md index 33dd67295..aeb2f8d2f 100644 --- a/reference.md +++ b/reference.md @@ -15242,249 +15242,7 @@ client.workspaces.create()
-**color:** `typing.Optional[str]` — Workspace color in HEX format
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-client.workspaces.get(...)
-
-#### 📝 Description
-
-Get information about a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list).
-
-#### 🔌 Usage
-
-```python
-from label_studio_sdk.client import LabelStudio
-
-client = LabelStudio(
-    api_key="YOUR_API_KEY",
-)
-client.workspaces.get(
-    id=1,
-)
-
-```
-
-#### ⚙️ Parameters
-
-**id:** `int` — Workspace ID
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-client.workspaces.delete(...)
-
-#### 📝 Description
-
-Delete a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list).
-
-#### 🔌 Usage
-
-```python
-from label_studio_sdk.client import LabelStudio
-
-client = LabelStudio(
-    api_key="YOUR_API_KEY",
-)
-client.workspaces.delete(
-    id=1,
-)
-
-```
-
-#### ⚙️ Parameters
-
-**id:** `int` — Workspace ID
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-client.workspaces.update(...)
-
-#### 📝 Description
-
-Update a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list).
-
-#### 🔌 Usage
-
-```python
-from label_studio_sdk.client import LabelStudio
-
-client = LabelStudio(
-    api_key="YOUR_API_KEY",
-)
-client.workspaces.update(
-    id=1,
-)
-
-```
-
-#### ⚙️ Parameters
-
-**id:** `int` — Workspace ID
-
-**title:** `typing.Optional[str]` — Workspace title
-
-**description:** `typing.Optional[str]` — Workspace description
-
-**is_public:** `typing.Optional[bool]` — Is workspace public
-
-**is_personal:** `typing.Optional[bool]` — Is workspace personal
+**is_archived:** `typing.Optional[bool]` — Is workspace archived
@@ -15512,230 +15270,4 @@ client.workspaces.update(
-## Workspaces Members
-client.workspaces.members.list(...)
-
-#### 📝 Description
-
-List all workspace memberships for a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list).
-
-#### 🔌 Usage
-
-```python
-from label_studio_sdk.client import LabelStudio
-
-client = LabelStudio(
-    api_key="YOUR_API_KEY",
-)
-client.workspaces.members.list(
-    id=1,
-)
-
-```
-
-#### ⚙️ Parameters
-
-**id:** `int` — Workspace ID
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-client.workspaces.members.create(...)
-
-#### 📝 Description
-
-Create a new workspace membership. You will need to provide the workspace ID. You can find this using [List workspaces](list).
-
-#### 🔌 Usage
-
-```python
-from label_studio_sdk.client import LabelStudio
-
-client = LabelStudio(
-    api_key="YOUR_API_KEY",
-)
-client.workspaces.members.create(
-    id=1,
-)
-
-```
-
-#### ⚙️ Parameters
-
-**id:** `int` — Workspace ID
-
-**user:** `typing.Optional[int]` — User ID of the workspace member
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-client.workspaces.members.delete(...)
-
-#### 📝 Description
-
-Delete a specific workspace membership. You will need to provide the workspace ID and the user ID. You can find this using [List workspace memberships](list).
-
-#### 🔌 Usage
-
-```python
-from label_studio_sdk.client import LabelStudio
-
-client = LabelStudio(
-    api_key="YOUR_API_KEY",
-)
-client.workspaces.members.delete(
-    id=1,
-)
-
-```
-
-#### ⚙️ Parameters
-
-**id:** `int` — Workspace ID
-
-**user:** `typing.Optional[int]` — User ID of the workspace member
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
+
client.workspaces.get(...) diff --git a/src/label_studio_sdk/workspaces/client.py b/src/label_studio_sdk/workspaces/client.py index febb191e4..452ff3f6d 100644 --- a/src/label_studio_sdk/workspaces/client.py +++ b/src/label_studio_sdk/workspaces/client.py @@ -65,6 +65,7 @@ def create( description: typing.Optional[str] = OMIT, is_public: typing.Optional[bool] = OMIT, is_personal: typing.Optional[bool] = OMIT, + is_archived: typing.Optional[bool] = OMIT, color: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> Workspace: @@ -89,6 +90,9 @@ def create( is_personal : typing.Optional[bool] Is workspace personal + is_archived : typing.Optional[bool] + Is workspace archived + color : typing.Optional[str] Workspace color in HEX format @@ -117,6 +121,7 @@ def create( "description": description, "is_public": is_public, "is_personal": is_personal, + "is_archived": is_archived, "color": color, }, request_options=request_options, @@ -215,6 +220,7 @@ def update( description: typing.Optional[str] = OMIT, is_public: typing.Optional[bool] = OMIT, is_personal: typing.Optional[bool] = OMIT, + is_archived: typing.Optional[bool] = OMIT, color: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> Workspace: @@ -238,6 +244,9 @@ def update( is_personal : typing.Optional[bool] Is workspace personal + is_archived : typing.Optional[bool] + Is workspace archived + color : typing.Optional[str] Workspace color in HEX format @@ -268,6 +277,7 @@ def update( "description": description, "is_public": is_public, "is_personal": is_personal, + "is_archived": is_archived, "color": color, }, request_options=request_options, @@ -332,6 +342,7 @@ async def create( description: typing.Optional[str] = OMIT, is_public: typing.Optional[bool] = OMIT, is_personal: typing.Optional[bool] = OMIT, + is_archived: typing.Optional[bool] = OMIT, color: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> Workspace: @@ -356,6 +367,9 @@ async def create( is_personal : typing.Optional[bool] Is workspace personal + is_archived : typing.Optional[bool] + Is workspace archived + color : typing.Optional[str] Workspace color in HEX format @@ -384,6 +398,7 @@ async def create( "description": description, "is_public": is_public, "is_personal": is_personal, + "is_archived": is_archived, "color": color, }, request_options=request_options, @@ -482,6 +497,7 @@ async def update( description: typing.Optional[str] = OMIT, is_public: typing.Optional[bool] = OMIT, is_personal: typing.Optional[bool] = OMIT, + is_archived: typing.Optional[bool] = OMIT, color: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> Workspace: @@ -505,6 +521,9 @@ async def update( is_personal : typing.Optional[bool] Is workspace personal + is_archived : typing.Optional[bool] + Is workspace archived + color : typing.Optional[str] Workspace color in HEX format @@ -535,6 +554,7 @@ async def update( "description": description, "is_public": is_public, "is_personal": is_personal, + "is_archived": is_archived, "color": color, }, request_options=request_options, From 2bb09d6cedfc635d2cb9db1993b4c21273f6d372 Mon Sep 17 00:00:00 2001 From: niklub <> Date: Mon, 15 Jul 2024 03:09:26 +0000 Subject: [PATCH 3/5] chore: Bump version to 1.0.5.dev --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index eeddf7076..9d1ceb3c3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ 
[tool.poetry] name = "label-studio-sdk" -version = "1.0.4" +version = "1.0.5.dev" description = "" readme = "README.md" authors = [] From b4e48ad946c8937ab3559d84a02931670d7529e8 Mon Sep 17 00:00:00 2001 From: Caitlin Wheeless Date: Fri, 19 Jul 2024 15:19:22 -0500 Subject: [PATCH 4/5] docs: Update README to include HumanSignal URL (#265) --- examples/migrate_ls_to_ls/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/examples/migrate_ls_to_ls/README.md b/examples/migrate_ls_to_ls/README.md index e95b8a5f9..88d64c2f7 100644 --- a/examples/migrate_ls_to_ls/README.md +++ b/examples/migrate_ls_to_ls/README.md @@ -66,7 +66,7 @@ pip install label-studio-sdk==0.0.34 ``` python3 migrate-ls-to-ls.py --project-ids=123,456 \ --src-url http://localhost:8000 --src-key \ - --dst-url https://app.heartex.com --dst-key + --dst-url https://app.humansignal.com --dst-key ``` To migrate all projects: @@ -74,7 +74,7 @@ pip install label-studio-sdk==0.0.34 ``` python3 migrate-ls-to-ls.py \ --src-url http://localhost:8000 --src-key \ - --dst-url https://app.heartex.com --dst-key + --dst-url https://app.humansignal.com --dst-key ``` 5. Migrate all projects to the specific workspace (optional) @@ -85,7 +85,7 @@ pip install label-studio-sdk==0.0.34 ``` python3 migrate-ls-to-ls.py --project-ids=123,456 \ --src-url http://localhost:8000 --src-key \ - --dst-url https://app.heartex.com --dst-key \ + --dst-url https://app.humansignal.com --dst-key \ --dest-workspace ``` From d660198ce8afec6e7c6bcf4e1004e440ff51fd11 Mon Sep 17 00:00:00 2001 From: Caitlin Wheeless Date: Fri, 2 Aug 2024 18:16:46 -0500 Subject: [PATCH 5/5] docs: Fix import Client command (#268) Co-authored-by: caitlinwheeless --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 48804904d..879f2c87f 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ > > OR you can change your import statements as follows: > ```python -> from label_studio_sdk._legacy import Client +> from label_studio_sdk import Client > from label_studio_sdk.data_manager import Filters, Column, Operator, Type > from label_studio_sdk._legacy import Project > ```
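
Taken together, the five patches add the `workspaces` and `workspaces.members` clients, the `is_archived` workspace flag, and the IAM-role S3 (`s3s`) storage endpoints. Below is a minimal sketch of the new surface, assuming a reachable Label Studio instance; the API key, user ID, and titles are placeholders rather than values from the patches.

```python
from label_studio_sdk.client import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",  # placeholder, as in the reference.md snippets
)

# Workspace CRUD, including the is_archived flag added in PATCH 2/5.
workspace = client.workspaces.create(
    title="S3 imports",  # illustrative title
    description="Workspace for S3 import experiments",
    is_public=False,
    is_archived=False,
)

# Workspace membership endpoints added in PATCH 1/5; user is a numeric user ID.
client.workspaces.members.create(id=workspace.id, user=1)  # user=1 is a placeholder ID
members = client.workspaces.members.list(id=workspace.id)

# IAM-role S3 ("s3s") import storage endpoints, also from PATCH 1/5.
storage = client.import_storage.s3s.create()
client.import_storage.s3s.sync(id=storage.id)
```

As the generated tests above show, every one of these endpoints also has an asynchronous counterpart with the same signature on `AsyncLabelStudio`.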