diff --git a/.mock/definition/__package__.yml b/.mock/definition/__package__.yml index ef8b98866..9e608b616 100644 --- a/.mock/definition/__package__.yml +++ b/.mock/definition/__package__.yml @@ -28,6 +28,8 @@ types: - fixed_and_accepted - deleted_review docs: Action which was performed in the last annotation history item + source: + openapi: openapi/openapi.yaml Annotation: properties: id: optional @@ -92,6 +94,8 @@ types: last_created_by: type: optional docs: User who created the last annotation history item + source: + openapi: openapi/openapi.yaml BaseUser: properties: id: optional @@ -124,6 +128,8 @@ types: allow_newsletters: type: optional docs: Allow sending newsletters to user + source: + openapi: openapi/openapi.yaml Filter: properties: id: optional @@ -151,6 +157,8 @@ types: value: type: optional> docs: Filter value + source: + openapi: openapi/openapi.yaml FilterGroup: properties: id: optional @@ -161,6 +169,8 @@ types: validation: minLength: 1 maxLength: 1024 + source: + openapi: openapi/openapi.yaml View: properties: id: optional @@ -180,10 +190,14 @@ types: project: type: integer docs: Project ID + source: + openapi: openapi/openapi.yaml FileUpload: properties: id: optional file: optional + source: + openapi: openapi/openapi.yaml OrganizationInvite: properties: token: @@ -194,6 +208,8 @@ types: type: optional validation: minLength: 1 + source: + openapi: openapi/openapi.yaml LabelLink: properties: id: optional @@ -206,6 +222,8 @@ types: maxLength: 2048 project: integer label: integer + source: + openapi: openapi/openapi.yaml Label: properties: id: optional @@ -217,8 +235,8 @@ types: type: optional docs: Time of label modification value: - docs: Label value type: map + docs: Label value title: type: string docs: Label title @@ -239,6 +257,8 @@ types: docs: User who approved this label organization: integer projects: optional> + source: + openapi: openapi/openapi.yaml LabelCreate: properties: id: optional @@ -256,8 +276,8 @@ types: type: optional docs: 
Time of label modification value: - docs: Label value type: map + docs: Label value title: type: string docs: Label title @@ -274,6 +294,8 @@ types: type: optional docs: User who approved this label projects: optional> + source: + openapi: openapi/openapi.yaml MlBackendState: enum: - CO @@ -281,10 +303,14 @@ types: - ER - TR - PR + source: + openapi: openapi/openapi.yaml MlBackendAuthMethod: enum: - NONE - BASIC_AUTH + source: + openapi: openapi/openapi.yaml MlBackend: properties: id: optional @@ -332,6 +358,8 @@ types: If false, model version is set by the user, if true - getting latest version from backend. project: integer + source: + openapi: openapi/openapi.yaml OrganizationId: properties: id: optional @@ -345,6 +373,8 @@ types: validation: format: email maxLength: 254 + source: + openapi: openapi/openapi.yaml Organization: properties: id: optional @@ -366,6 +396,8 @@ types: maxLength: 254 created_by: optional users: optional> + source: + openapi: openapi/openapi.yaml UserSerializerWithProjects: properties: id: optional @@ -400,6 +432,8 @@ types: docs: Allow sending newsletters to user created_projects: optional contributed_to_projects: optional + source: + openapi: openapi/openapi.yaml OrganizationMemberUser: properties: id: optional @@ -407,6 +441,8 @@ types: type: integer docs: Organization ID user: UserSerializerWithProjects + source: + openapi: openapi/openapi.yaml Prediction: properties: id: optional @@ -447,6 +483,8 @@ types: docs: A run of a ModelVersion that created the prediction. 
task: integer project: optional + source: + openapi: openapi/openapi.yaml UserSimple: docs: Project owner properties: @@ -465,6 +503,8 @@ types: format: email maxLength: 254 avatar: optional + source: + openapi: openapi/openapi.yaml ProjectSampling: enum: - value: Sequential sampling @@ -473,11 +513,15 @@ types: name: UniformSampling - value: Uncertainty sampling name: UncertaintySampling + source: + openapi: openapi/openapi.yaml ProjectSkipQueue: enum: - REQUEUE_FOR_ME - REQUEUE_FOR_OTHERS - IGNORE_SKIPPED + source: + openapi: openapi/openapi.yaml Project: properties: id: optional @@ -616,6 +660,8 @@ types: docs: Finished tasks queue_total: optional queue_done: optional + source: + openapi: openapi/openapi.yaml ProjectLabelConfig: properties: label_config: @@ -623,12 +669,16 @@ types: docs: Label config in XML format. See more about it in documentation validation: minLength: 1 + source: + openapi: openapi/openapi.yaml ConvertedFormatStatus: enum: - created - in_progress - failed - completed + source: + openapi: openapi/openapi.yaml ConvertedFormat: properties: id: optional @@ -641,12 +691,16 @@ types: traceback: type: optional docs: Traceback report in case of errors + source: + openapi: openapi/openapi.yaml ExportStatus: enum: - created - in_progress - failed - completed + source: + openapi: openapi/openapi.yaml Export: properties: title: @@ -669,6 +723,8 @@ types: maxLength: 128 counters: optional> converted_formats: optional> + source: + openapi: openapi/openapi.yaml TaskFilterOptions: properties: view: @@ -693,6 +749,8 @@ types: only_with_annotations: type: optional default: false + source: + openapi: openapi/openapi.yaml AnnotationFilterOptions: properties: usual: @@ -705,6 +763,8 @@ types: skipped: type: optional docs: Include skipped annotations + source: + openapi: openapi/openapi.yaml SerializationOption: docs: JSON dict with parameters properties: @@ -712,6 +772,8 @@ types: type: optional docs: Include a full json body or IDs only default: false + 
source: + openapi: openapi/openapi.yaml SerializationOptions: properties: drafts: optional @@ -725,12 +787,16 @@ types: type: optional docs: Interpolate video key frames default: false + source: + openapi: openapi/openapi.yaml ExportCreateStatus: enum: - created - in_progress - failed - completed + source: + openapi: openapi/openapi.yaml ExportCreate: properties: title: @@ -756,6 +822,8 @@ types: task_filter_options: optional annotation_filter_options: optional serialization_options: optional + source: + openapi: openapi/openapi.yaml ExportConvert: properties: export_type: @@ -763,12 +831,16 @@ types: docs: Export file format. validation: minLength: 1 + source: + openapi: openapi/openapi.yaml ProjectImportStatus: enum: - created - in_progress - failed - completed + source: + openapi: openapi/openapi.yaml ProjectImport: properties: id: optional @@ -802,12 +874,16 @@ types: tasks: optional> task_ids: optional> project: optional + source: + openapi: openapi/openapi.yaml ProjectReimportStatus: enum: - created - in_progress - failed - completed + source: + openapi: openapi/openapi.yaml ProjectReimport: properties: id: optional @@ -823,6 +899,8 @@ types: data_columns: optional> traceback: optional project: optional + source: + openapi: openapi/openapi.yaml AzureBlobImportStorageStatus: enum: - initialized @@ -830,6 +908,8 @@ types: - in_progress - failed - completed + source: + openapi: openapi/openapi.yaml AzureBlobImportStorage: properties: id: optional @@ -895,6 +975,8 @@ types: project: type: integer docs: A unique integer value identifying this project. + source: + openapi: openapi/openapi.yaml AzureBlobExportStorageStatus: enum: - initialized @@ -902,6 +984,8 @@ types: - in_progress - failed - completed + source: + openapi: openapi/openapi.yaml AzureBlobExportStorage: properties: id: optional @@ -964,6 +1048,8 @@ types: project: type: integer docs: A unique integer value identifying this project. 
+ source: + openapi: openapi/openapi.yaml GcsExportStorageStatus: enum: - initialized @@ -971,6 +1057,8 @@ types: - in_progress - failed - completed + source: + openapi: openapi/openapi.yaml GcsExportStorage: properties: id: optional @@ -1033,6 +1121,8 @@ types: project: type: integer docs: A unique integer value identifying this project. + source: + openapi: openapi/openapi.yaml LocalFilesExportStorageStatus: enum: - initialized @@ -1040,6 +1130,8 @@ types: - in_progress - failed - completed + source: + openapi: openapi/openapi.yaml LocalFilesExportStorage: properties: id: optional @@ -1093,6 +1185,8 @@ types: project: type: integer docs: A unique integer value identifying this project. + source: + openapi: openapi/openapi.yaml RedisExportStorageStatus: enum: - initialized @@ -1100,6 +1194,8 @@ types: - in_progress - failed - completed + source: + openapi: openapi/openapi.yaml RedisExportStorage: properties: id: optional @@ -1165,6 +1261,8 @@ types: project: type: integer docs: A unique integer value identifying this project. + source: + openapi: openapi/openapi.yaml S3ExportStorageStatus: enum: - initialized @@ -1172,6 +1270,8 @@ types: - in_progress - failed - completed + source: + openapi: openapi/openapi.yaml S3ExportStorage: properties: id: optional @@ -1246,6 +1346,8 @@ types: project: type: integer docs: A unique integer value identifying this project. + source: + openapi: openapi/openapi.yaml GcsImportStorageStatus: enum: - initialized @@ -1253,6 +1355,8 @@ types: - in_progress - failed - completed + source: + openapi: openapi/openapi.yaml GcsImportStorage: properties: id: optional @@ -1318,6 +1422,8 @@ types: project: type: integer docs: A unique integer value identifying this project. 
+ source: + openapi: openapi/openapi.yaml LocalFilesImportStorageStatus: enum: - initialized @@ -1325,6 +1431,8 @@ types: - in_progress - failed - completed + source: + openapi: openapi/openapi.yaml LocalFilesImportStorage: properties: id: optional @@ -1375,6 +1483,8 @@ types: project: type: integer docs: A unique integer value identifying this project. + source: + openapi: openapi/openapi.yaml RedisImportStorageStatus: enum: - initialized @@ -1382,6 +1492,8 @@ types: - in_progress - failed - completed + source: + openapi: openapi/openapi.yaml RedisImportStorage: properties: id: optional @@ -1444,6 +1556,8 @@ types: project: type: integer docs: A unique integer value identifying this project. + source: + openapi: openapi/openapi.yaml S3ImportStorageStatus: enum: - initialized @@ -1451,6 +1565,8 @@ types: - in_progress - failed - completed + source: + openapi: openapi/openapi.yaml S3ImportStorage: properties: id: optional @@ -1531,6 +1647,8 @@ types: project: type: integer docs: A unique integer value identifying this project. + source: + openapi: openapi/openapi.yaml BaseTaskUpdatedBy: discriminated: false docs: Last annotator or reviewer who updated this task @@ -1539,6 +1657,8 @@ types: docs: Last annotator who updated this task - type: optional>> docs: Last annotators or reviewers who updated this task + source: + openapi: openapi/openapi.yaml BaseTaskFileUpload: discriminated: false docs: Uploaded file used as data source for this task @@ -1547,15 +1667,17 @@ types: docs: Uploaded file used as data source for this task - type: optional docs: Uploaded file ID used as data source for this task + source: + openapi: openapi/openapi.yaml BaseTask: properties: id: optional data: + type: map docs: >- User imported or uploaded data for a task. Data is formatted according to the project label config. You can find examples of data for your project on the Import page in the Label Studio Data Manager UI. 
- type: map meta: type: optional> docs: >- @@ -1611,6 +1733,8 @@ types: comment_authors: type: optional> docs: Users who wrote comments + source: + openapi: openapi/openapi.yaml AnnotationsDmFieldLastAction: enum: - prediction @@ -1624,6 +1748,8 @@ types: - fixed_and_accepted - deleted_review docs: Action which was performed in the last annotation history item + source: + openapi: openapi/openapi.yaml AnnotationsDmField: properties: id: optional @@ -1690,6 +1816,8 @@ types: last_created_by: type: optional docs: User who created the last annotation history item + source: + openapi: openapi/openapi.yaml DataManagerTaskSerializerPredictionsItem: properties: result: optional>> @@ -1701,16 +1829,22 @@ types: project: optional created_at: optional updated_at: optional + source: + openapi: openapi/openapi.yaml DataManagerTaskSerializerDraftsItem: properties: result: optional>> created_at: optional updated_at: optional + source: + openapi: openapi/openapi.yaml DataManagerTaskSerializerAnnotatorsItem: discriminated: false union: - integer - map + source: + openapi: openapi/openapi.yaml DataManagerTaskSerializer: properties: id: optional @@ -1742,11 +1876,11 @@ types: type: optional>> docs: User IDs who updated this task data: + type: map docs: >- User imported or uploaded data for a task. Data is formatted according to the project label config. You can find examples of data for your project on the Import page in the Label Studio Data Manager UI. 
- type: map meta: type: optional> docs: >- @@ -1782,6 +1916,8 @@ types: comment_authors: type: optional> docs: Users who wrote comments + source: + openapi: openapi/openapi.yaml WebhookActionsItem: enum: - PROJECT_CREATED @@ -1796,6 +1932,8 @@ types: - LABEL_LINK_CREATED - LABEL_LINK_UPDATED - LABEL_LINK_DELETED + source: + openapi: openapi/openapi.yaml Webhook: properties: id: optional @@ -1827,6 +1965,8 @@ types: updated_at: type: optional docs: Last update time + source: + openapi: openapi/openapi.yaml WebhookSerializerForUpdateActionsItem: enum: - PROJECT_CREATED @@ -1841,6 +1981,8 @@ types: - LABEL_LINK_CREATED - LABEL_LINK_UPDATED - LABEL_LINK_DELETED + source: + openapi: openapi/openapi.yaml WebhookSerializerForUpdate: properties: id: optional @@ -1872,11 +2014,15 @@ types: updated_at: type: optional docs: Last update time + source: + openapi: openapi/openapi.yaml TaskAnnotatorsItem: discriminated: false union: - integer - map + source: + openapi: openapi/openapi.yaml Task: properties: id: @@ -1957,6 +2103,8 @@ types: comment_authors: type: optional> docs: List of comment authors' IDs for this task + source: + openapi: openapi/openapi.yaml Workspace: properties: id: @@ -1989,6 +2137,8 @@ types: color: type: optional docs: Workspace color + source: + openapi: openapi/openapi.yaml S3SImportStorageStatus: enum: - initialized @@ -1996,6 +2146,8 @@ types: - in_progress - failed - completed + source: + openapi: openapi/openapi.yaml S3SImportStorage: properties: id: optional @@ -2067,6 +2219,8 @@ types: project: type: integer docs: A unique integer value identifying this project. + source: + openapi: openapi/openapi.yaml S3SExportStorage: properties: id: optional @@ -2102,11 +2256,191 @@ types: project: type: integer docs: A unique integer value identifying this project. 
+ source: + openapi: openapi/openapi.yaml + PromptCreatedBy: + discriminated: false + docs: User ID of the creator of the prompt + union: + - integer + - map + source: + openapi: openapi/openapi.yaml + PromptOrganization: + discriminated: false + docs: Organization ID of the prompt + union: + - integer + - map + source: + openapi: openapi/openapi.yaml + Prompt: + properties: + title: + type: string + docs: Title of the prompt + validation: + maxLength: 500 + description: + type: optional + docs: Description of the prompt + created_by: + type: optional + docs: User ID of the creator of the prompt + created_at: + type: optional + docs: Date and time the prompt was created + updated_at: + type: optional + docs: Date and time the prompt was last updated + organization: + type: optional + docs: Organization ID of the prompt + input_fields: + docs: List of input fields + type: list + output_classes: + docs: List of output classes + type: list + associated_projects: + type: optional> + docs: List of associated projects IDs + skill_name: + type: optional + docs: Name of the skill + source: + openapi: openapi/openapi.yaml + PromptVersionProvider: + enum: + - OpenAI + - AzureOpenAI + source: + openapi: openapi/openapi.yaml + PromptVersionCreatedBy: + discriminated: false + union: + - integer + - map + source: + openapi: openapi/openapi.yaml + PromptVersionOrganization: + discriminated: false + union: + - integer + - map + source: + openapi: openapi/openapi.yaml + PromptVersion: + properties: + title: + type: string + validation: + maxLength: 500 + parent_model: optional + prompt: string + provider: PromptVersionProvider + provider_model_id: string + created_by: optional + created_at: optional + updated_at: optional + organization: optional + source: + openapi: openapi/openapi.yaml + InferenceRunOrganization: + discriminated: false + union: + - integer + - map + source: + openapi: openapi/openapi.yaml + InferenceRunCreatedBy: + discriminated: false + union: + - integer + - 
map + source: + openapi: openapi/openapi.yaml + InferenceRunProjectSubset: + enum: + - All + - HasGT + - Sample + source: + openapi: openapi/openapi.yaml + InferenceRunStatus: + enum: + - Pending + - InProgress + - Completed + - Failed + - Canceled + source: + openapi: openapi/openapi.yaml + InferenceRun: + properties: + organization: optional + project: integer + model_version: optional + created_by: optional + project_subset: InferenceRunProjectSubset + status: optional + job_id: optional + total_predictions: optional + total_correct_predictions: optional + total_tasks: optional + created_at: optional + triggered_at: optional + predictions_updated_at: optional + completed_at: optional + source: + openapi: openapi/openapi.yaml + ModelProviderConnectionProvider: + enum: + - OpenAI + - AzureOpenAI + source: + openapi: openapi/openapi.yaml + ModelProviderConnectionScope: + enum: + - Organization + - User + - Model + source: + openapi: openapi/openapi.yaml + ModelProviderConnectionOrganization: + discriminated: false + union: + - integer + - map + source: + openapi: openapi/openapi.yaml + ModelProviderConnectionCreatedBy: + discriminated: false + union: + - integer + - map + source: + openapi: openapi/openapi.yaml + ModelProviderConnection: + properties: + provider: ModelProviderConnectionProvider + api_key: optional + deployment_name: optional + endpoint: optional + scope: optional + organization: optional + created_by: optional + created_at: optional + updated_at: optional + source: + openapi: openapi/openapi.yaml CommentCreatedBy: discriminated: false union: - integer - map + source: + openapi: openapi/openapi.yaml Comment: properties: id: integer @@ -2119,3 +2453,5 @@ types: updated_at: datetime is_resolved: optional resolved_at: optional + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/actions.yml b/.mock/definition/actions.yml index bdb7165fc..212d65f01 100644 --- a/.mock/definition/actions.yml +++ b/.mock/definition/actions.yml @@ -87,6 
+87,8 @@ service: - tasks:total_annotations audiences: - public + source: + openapi: openapi/openapi.yaml types: ActionsCreateRequestId: enum: @@ -100,6 +102,8 @@ types: - delete_tasks_predictions - delete_reviewers - delete_annotators + source: + openapi: openapi/openapi.yaml ActionsCreateRequestFiltersConjunction: enum: - or @@ -109,6 +113,8 @@ types: "and") will be applied to all items in the filters list. It is not possible to combine "or" and "and" within one list of filters. All filters will be either combined with "or" or with "and", but not a mix of both. + source: + openapi: openapi/openapi.yaml ActionsCreateRequestFiltersItemsItemFilter: enum: - value: filter:tasks:agreement @@ -202,6 +208,8 @@ types: Number of unresolved comments in a task
  • `filter:tasks:updated_at`
    (Datetime) Time the task was updated at (e.g. new annotation was created, review added, etc)
  • + source: + openapi: openapi/openapi.yaml ActionsCreateRequestFiltersItemsItemOperator: enum: - contains @@ -231,6 +239,8 @@ types: to
  • `not_exists`
    Does not exist

  • `not_in`
    Is not between min and max values, so the filter `value` should be e.g. `{"min": 1, "max": 7}`

  • `starts_with`
    Starts with
  • + source: + openapi: openapi/openapi.yaml ActionsCreateRequestFiltersItemsItemValue: discriminated: false docs: Value to filter by @@ -243,8 +253,10 @@ types: docs: Float - type: boolean docs: Boolean - - docs: List of strings or integers - type: map + - type: map + docs: List of strings or integers + source: + openapi: openapi/openapi.yaml ActionsCreateRequestFiltersItemsItem: properties: filter: @@ -326,6 +338,8 @@ types: value: type: ActionsCreateRequestFiltersItemsItemValue docs: Value to filter by + source: + openapi: openapi/openapi.yaml ActionsCreateRequestFilters: docs: >- Filters to apply on tasks. You can use [the helper class `Filters` from @@ -345,6 +359,8 @@ types: items: docs: List of filter items type: list + source: + openapi: openapi/openapi.yaml ActionsCreateRequestSelectedItemsIncluded: properties: all: @@ -353,6 +369,8 @@ types: included: type: optional> docs: List of included task IDs + source: + openapi: openapi/openapi.yaml ActionsCreateRequestSelectedItemsExcluded: properties: all: @@ -361,6 +379,8 @@ types: excluded: type: optional> docs: List of excluded task IDs + source: + openapi: openapi/openapi.yaml ActionsCreateRequestSelectedItems: discriminated: false docs: >- @@ -371,6 +391,8 @@ types: union: - ActionsCreateRequestSelectedItemsIncluded - ActionsCreateRequestSelectedItemsExcluded + source: + openapi: openapi/openapi.yaml ActionsCreateRequestOrderingItem: enum: - value: tasks:agreement @@ -417,3 +439,5 @@ types: name: TasksUnresolvedCommentCount - value: tasks:updated_at name: TasksUpdatedAt + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/annotations.yml b/.mock/definition/annotations.yml index d5dd51922..56ebc783f 100644 --- a/.mock/definition/annotations.yml +++ b/.mock/definition/annotations.yml @@ -421,3 +421,5 @@ service: last_created_by: 1 audiences: - public + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/comments.yml b/.mock/definition/comments.yml index 
140a556c5..7cb0181f4 100644 --- a/.mock/definition/comments.yml +++ b/.mock/definition/comments.yml @@ -167,3 +167,5 @@ service: resolved_at: '2024-01-15T09:30:00Z' audiences: - public + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/dataManager.yml b/.mock/definition/dataManager.yml index 6a87d6f23..215350e70 100644 --- a/.mock/definition/dataManager.yml +++ b/.mock/definition/dataManager.yml @@ -106,6 +106,8 @@ service: project: 1 audiences: - internal + source: + openapi: openapi/openapi.yaml types: ApiDmViewsUpdateRequestDataFiltersConjunction: enum: @@ -116,6 +118,8 @@ types: "and") will be applied to all items in the filters list. It is not possible to combine "or" and "and" within one list of filters. All filters will be either combined with "or" or with "and", but not a mix of both. + source: + openapi: openapi/openapi.yaml ApiDmViewsUpdateRequestDataFiltersItemsItemFilter: enum: - value: filter:tasks:agreement @@ -209,6 +213,8 @@ types: Number of unresolved comments in a task
  • `filter:tasks:updated_at`
    (Datetime) Time the task was updated at (e.g. new annotation was created, review added, etc)
  • + source: + openapi: openapi/openapi.yaml ApiDmViewsUpdateRequestDataFiltersItemsItemOperator: enum: - contains @@ -238,6 +244,8 @@ types: to
  • `not_exists`
    Does not exist

  • `not_in`
    Is not between min and max values, so the filter `value` should be e.g. `{"min": 1, "max": 7}`

  • `starts_with`
    Starts with
  • + source: + openapi: openapi/openapi.yaml ApiDmViewsUpdateRequestDataFiltersItemsItemValue: discriminated: false docs: Value to filter by @@ -250,8 +258,10 @@ types: docs: Float - type: boolean docs: Boolean - - docs: List of strings or integers - type: map + - type: map + docs: List of strings or integers + source: + openapi: openapi/openapi.yaml ApiDmViewsUpdateRequestDataFiltersItemsItem: properties: filter: @@ -333,6 +343,8 @@ types: value: type: ApiDmViewsUpdateRequestDataFiltersItemsItemValue docs: Value to filter by + source: + openapi: openapi/openapi.yaml ApiDmViewsUpdateRequestDataFilters: docs: >- Filters to apply on tasks. You can use [the helper class `Filters` from @@ -352,6 +364,8 @@ types: items: docs: List of filter items type: list + source: + openapi: openapi/openapi.yaml ApiDmViewsUpdateRequestDataOrderingItem: enum: - value: tasks:agreement @@ -398,6 +412,8 @@ types: name: TasksUnresolvedCommentCount - value: tasks:updated_at name: TasksUpdatedAt + source: + openapi: openapi/openapi.yaml ApiDmViewsUpdateRequestData: docs: Custom view data properties: @@ -415,6 +431,8 @@ types: List of fields to order by. Fields are similar to filters but without the `filter:` prefix. To reverse the order, add a minus sign before the field name, e.g. `-tasks:created_at`. + source: + openapi: openapi/openapi.yaml ApiDmViewsUpdateResponseDataFiltersConjunction: enum: - or @@ -424,6 +442,8 @@ types: "and") will be applied to all items in the filters list. It is not possible to combine "or" and "and" within one list of filters. All filters will be either combined with "or" or with "and", but not a mix of both. + source: + openapi: openapi/openapi.yaml ApiDmViewsUpdateResponseDataFiltersItemsItemFilter: enum: - value: filter:tasks:agreement @@ -517,6 +537,8 @@ types: Number of unresolved comments in a task
  • `filter:tasks:updated_at`
    (Datetime) Time the task was updated at (e.g. new annotation was created, review added, etc)
  • + source: + openapi: openapi/openapi.yaml ApiDmViewsUpdateResponseDataFiltersItemsItemOperator: enum: - contains @@ -546,6 +568,8 @@ types: to
  • `not_exists`
    Does not exist

  • `not_in`
    Is not between min and max values, so the filter `value` should be e.g. `{"min": 1, "max": 7}`

  • `starts_with`
    Starts with
  • + source: + openapi: openapi/openapi.yaml ApiDmViewsUpdateResponseDataFiltersItemsItemValue: discriminated: false docs: Value to filter by @@ -558,8 +582,10 @@ types: docs: Float - type: boolean docs: Boolean - - docs: List of strings or integers - type: map + - type: map + docs: List of strings or integers + source: + openapi: openapi/openapi.yaml ApiDmViewsUpdateResponseDataFiltersItemsItem: properties: filter: @@ -641,6 +667,8 @@ types: value: type: ApiDmViewsUpdateResponseDataFiltersItemsItemValue docs: Value to filter by + source: + openapi: openapi/openapi.yaml ApiDmViewsUpdateResponseDataFilters: docs: >- Filters to apply on tasks. You can use [the helper class `Filters` from @@ -660,6 +688,8 @@ types: items: docs: List of filter items type: list + source: + openapi: openapi/openapi.yaml ApiDmViewsUpdateResponseDataOrderingItem: enum: - value: tasks:agreement @@ -706,6 +736,8 @@ types: name: TasksUnresolvedCommentCount - value: tasks:updated_at name: TasksUpdatedAt + source: + openapi: openapi/openapi.yaml ApiDmViewsUpdateResponseData: docs: Custom view data properties: @@ -723,6 +755,8 @@ types: List of fields to order by. Fields are similar to filters but without the `filter:` prefix. To reverse the order, add a minus sign before the field name, e.g. `-tasks:created_at`. 
+ source: + openapi: openapi/openapi.yaml ApiDmViewsUpdateResponse: properties: data: @@ -731,3 +765,5 @@ types: project: type: optional docs: Project ID + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/exportStorage.yml b/.mock/definition/export_storage.yml similarity index 90% rename from .mock/definition/exportStorage.yml rename to .mock/definition/export_storage.yml index 2f4972807..b279e4063 100644 --- a/.mock/definition/exportStorage.yml +++ b/.mock/definition/export_storage.yml @@ -28,8 +28,12 @@ service: title: title audiences: - public + source: + openapi: openapi/openapi.yaml types: ExportStorageListTypesResponseItem: properties: name: optional title: optional + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/exportStorage/azure.yml b/.mock/definition/export_storage/azure.yml similarity index 98% rename from .mock/definition/exportStorage/azure.yml rename to .mock/definition/export_storage/azure.yml index 1eb33f483..43bf70682 100644 --- a/.mock/definition/exportStorage/azure.yml +++ b/.mock/definition/export_storage/azure.yml @@ -366,6 +366,8 @@ service: project: 1 audiences: - public + source: + openapi: openapi/openapi.yaml types: AzureCreateResponse: properties: @@ -394,6 +396,8 @@ types: account_key: type: optional docs: Azure Blob account key + source: + openapi: openapi/openapi.yaml AzureUpdateResponse: properties: can_delete_objects: @@ -421,3 +425,5 @@ types: account_key: type: optional docs: Azure Blob account key + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/exportStorage/gcs.yml b/.mock/definition/export_storage/gcs.yml similarity index 99% rename from .mock/definition/exportStorage/gcs.yml rename to .mock/definition/export_storage/gcs.yml index fd12cfcb7..116621875 100644 --- a/.mock/definition/exportStorage/gcs.yml +++ b/.mock/definition/export_storage/gcs.yml @@ -369,6 +369,8 @@ service: project: 1 audiences: - public + source: + openapi: openapi/openapi.yaml types: 
GcsCreateResponse: properties: @@ -398,6 +400,8 @@ types: google_project_id: type: optional docs: Google project ID + source: + openapi: openapi/openapi.yaml GcsUpdateResponse: properties: can_delete_objects: @@ -426,3 +430,5 @@ types: google_project_id: type: optional docs: Google project ID + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/exportStorage/local.yml b/.mock/definition/export_storage/local.yml similarity index 98% rename from .mock/definition/exportStorage/local.yml rename to .mock/definition/export_storage/local.yml index eaf16bff0..d32c1789e 100644 --- a/.mock/definition/exportStorage/local.yml +++ b/.mock/definition/export_storage/local.yml @@ -345,6 +345,8 @@ service: project: 1 audiences: - public + source: + openapi: openapi/openapi.yaml types: LocalCreateResponse: properties: @@ -371,6 +373,8 @@ types: for these images. If set to False, it will read the content of the file and load it into Label Studio. default: false + source: + openapi: openapi/openapi.yaml LocalUpdateResponse: properties: title: @@ -396,3 +400,5 @@ types: for these images. If set to False, it will read the content of the file and load it into Label Studio. 
default: false + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/exportStorage/redis.yml b/.mock/definition/export_storage/redis.yml similarity index 98% rename from .mock/definition/exportStorage/redis.yml rename to .mock/definition/export_storage/redis.yml index d7d6d9852..4de217341 100644 --- a/.mock/definition/exportStorage/redis.yml +++ b/.mock/definition/export_storage/redis.yml @@ -375,6 +375,8 @@ service: project: 1 audiences: - public + source: + openapi: openapi/openapi.yaml types: RedisCreateResponse: properties: @@ -405,6 +407,8 @@ types: password: type: optional docs: Server Password (optional) + source: + openapi: openapi/openapi.yaml RedisUpdateResponse: properties: db: @@ -434,3 +438,5 @@ types: password: type: optional docs: Server Password (optional) + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/exportStorage/s3.yml b/.mock/definition/export_storage/s3.yml similarity index 99% rename from .mock/definition/exportStorage/s3.yml rename to .mock/definition/export_storage/s3.yml index 305a2b904..28844250c 100644 --- a/.mock/definition/exportStorage/s3.yml +++ b/.mock/definition/export_storage/s3.yml @@ -416,6 +416,8 @@ service: project: 1 audiences: - public + source: + openapi: openapi/openapi.yaml types: S3CreateResponse: properties: @@ -455,6 +457,8 @@ types: s3_endpoint: type: optional docs: S3 Endpoint + source: + openapi: openapi/openapi.yaml S3UpdateResponse: properties: can_delete_objects: @@ -493,3 +497,5 @@ types: s3_endpoint: type: optional docs: S3 Endpoint + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/exportStorage/s3S.yml b/.mock/definition/export_storage/s3s.yml similarity index 99% rename from .mock/definition/exportStorage/s3S.yml rename to .mock/definition/export_storage/s3s.yml index d0685fe20..6bcdad026 100644 --- a/.mock/definition/exportStorage/s3S.yml +++ b/.mock/definition/export_storage/s3s.yml @@ -288,3 +288,5 @@ service: - request: {} audiences: - 
public + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/files.yml b/.mock/definition/files.yml index 45031998b..5f098b9d9 100644 --- a/.mock/definition/files.yml +++ b/.mock/definition/files.yml @@ -163,3 +163,5 @@ service: filename: filename audiences: - public + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/importStorage.yml b/.mock/definition/import_storage.yml similarity index 90% rename from .mock/definition/importStorage.yml rename to .mock/definition/import_storage.yml index d02ebc0dd..5c4bea19e 100644 --- a/.mock/definition/importStorage.yml +++ b/.mock/definition/import_storage.yml @@ -28,8 +28,12 @@ service: title: title audiences: - public + source: + openapi: openapi/openapi.yaml types: ImportStorageListTypesResponseItem: properties: name: optional title: optional + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/importStorage/azure.yml b/.mock/definition/import_storage/azure.yml similarity index 99% rename from .mock/definition/importStorage/azure.yml rename to .mock/definition/import_storage/azure.yml index 2f22d28bd..13515cc6b 100644 --- a/.mock/definition/importStorage/azure.yml +++ b/.mock/definition/import_storage/azure.yml @@ -439,6 +439,8 @@ service: project: 1 audiences: - public + source: + openapi: openapi/openapi.yaml types: AzureCreateResponse: properties: @@ -484,6 +486,8 @@ types: account_key: type: optional docs: Azure Blob account key + source: + openapi: openapi/openapi.yaml AzureUpdateResponse: properties: regex_filter: @@ -528,3 +532,5 @@ types: account_key: type: optional docs: Azure Blob account key + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/importStorage/gcs.yml b/.mock/definition/import_storage/gcs.yml similarity index 99% rename from .mock/definition/importStorage/gcs.yml rename to .mock/definition/import_storage/gcs.yml index 04c7a6483..b6edf9518 100644 --- a/.mock/definition/importStorage/gcs.yml +++ 
b/.mock/definition/import_storage/gcs.yml @@ -446,6 +446,8 @@ service: project: 1 audiences: - public + source: + openapi: openapi/openapi.yaml types: GcsCreateResponse: properties: @@ -493,6 +495,8 @@ types: google_project_id: type: optional docs: Google project ID + source: + openapi: openapi/openapi.yaml GcsUpdateResponse: properties: regex_filter: @@ -539,3 +543,5 @@ types: google_project_id: type: optional docs: Google project ID + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/importStorage/local.yml b/.mock/definition/import_storage/local.yml similarity index 98% rename from .mock/definition/importStorage/local.yml rename to .mock/definition/import_storage/local.yml index fc718535c..606640048 100644 --- a/.mock/definition/importStorage/local.yml +++ b/.mock/definition/import_storage/local.yml @@ -352,6 +352,8 @@ service: project: 1 audiences: - public + source: + openapi: openapi/openapi.yaml types: LocalCreateResponse: properties: @@ -378,6 +380,8 @@ types: for these images. If set to False, it will read the content of the file and load it into Label Studio. default: false + source: + openapi: openapi/openapi.yaml LocalUpdateResponse: properties: title: @@ -403,3 +407,5 @@ types: for these images. If set to False, it will read the content of the file and load it into Label Studio. 
default: false + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/importStorage/redis.yml b/.mock/definition/import_storage/redis.yml similarity index 99% rename from .mock/definition/importStorage/redis.yml rename to .mock/definition/import_storage/redis.yml index ed00425b2..d607d9605 100644 --- a/.mock/definition/importStorage/redis.yml +++ b/.mock/definition/import_storage/redis.yml @@ -401,6 +401,8 @@ service: project: 1 audiences: - public + source: + openapi: openapi/openapi.yaml types: RedisCreateResponse: properties: @@ -438,6 +440,8 @@ types: password: type: optional docs: Server Password (optional) + source: + openapi: openapi/openapi.yaml RedisUpdateResponse: properties: regex_filter: @@ -474,3 +478,5 @@ types: password: type: optional docs: Server Password (optional) + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/importStorage/s3.yml b/.mock/definition/import_storage/s3.yml similarity index 99% rename from .mock/definition/importStorage/s3.yml rename to .mock/definition/import_storage/s3.yml index 222a50d93..e78c18d00 100644 --- a/.mock/definition/importStorage/s3.yml +++ b/.mock/definition/import_storage/s3.yml @@ -506,6 +506,8 @@ service: project: 1 audiences: - public + source: + openapi: openapi/openapi.yaml types: S3CreateResponse: properties: @@ -566,6 +568,8 @@ types: s3_endpoint: type: optional docs: S3 Endpoint + source: + openapi: openapi/openapi.yaml S3UpdateResponse: properties: regex_filter: @@ -625,3 +629,5 @@ types: s3_endpoint: type: optional docs: S3 Endpoint + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/importStorage/s3S.yml b/.mock/definition/import_storage/s3s.yml similarity index 99% rename from .mock/definition/importStorage/s3S.yml rename to .mock/definition/import_storage/s3s.yml index 9ed647561..a5f52f6c0 100644 --- a/.mock/definition/importStorage/s3S.yml +++ b/.mock/definition/import_storage/s3s.yml @@ -477,3 +477,5 @@ service: project: 1 audiences: - 
public + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/labels.yml b/.mock/definition/labels.yml index d2987f609..1d2a1f7c4 100644 --- a/.mock/definition/labels.yml +++ b/.mock/definition/labels.yml @@ -13,6 +13,8 @@ types: validation: format: uri results: list + source: + openapi: openapi/openapi.yaml service: auth: false base-path: '' @@ -188,3 +190,5 @@ service: - 1 audiences: - internal + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/ml.yml b/.mock/definition/ml.yml index 0121a8141..f154a5c8c 100644 --- a/.mock/definition/ml.yml +++ b/.mock/definition/ml.yml @@ -397,17 +397,23 @@ service: id: id audiences: - public + source: + openapi: openapi/openapi.yaml types: MlCreateRequestAuthMethod: enum: - NONE - BASIC_AUTH docs: Auth method + source: + openapi: openapi/openapi.yaml MlCreateResponseAuthMethod: enum: - NONE - BASIC_AUTH docs: Auth method + source: + openapi: openapi/openapi.yaml MlCreateResponse: properties: url: @@ -440,16 +446,22 @@ types: timeout: type: optional docs: Response model timeout + source: + openapi: openapi/openapi.yaml MlUpdateRequestAuthMethod: enum: - NONE - BASIC_AUTH docs: Auth method + source: + openapi: openapi/openapi.yaml MlUpdateResponseAuthMethod: enum: - NONE - BASIC_AUTH docs: Auth method + source: + openapi: openapi/openapi.yaml MlUpdateResponse: properties: url: @@ -482,3 +494,5 @@ types: timeout: type: optional docs: Response model timeout + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/model_providers.yml b/.mock/definition/model_providers.yml new file mode 100644 index 000000000..fbc22f9c1 --- /dev/null +++ b/.mock/definition/model_providers.yml @@ -0,0 +1,36 @@ +imports: + root: __package__.yml +service: + auth: false + base-path: '' + endpoints: + create: + path: /api/model-provider-connections/ + method: POST + auth: true + docs: | + Create a new model provider connection. 
+ display-name: Create model provider connection + request: + body: root.ModelProviderConnection + response: + docs: '' + type: root.ModelProviderConnection + examples: + - request: + provider: OpenAI + response: + body: + provider: OpenAI + api_key: api_key + deployment_name: deployment_name + endpoint: endpoint + scope: Organization + organization: 1 + created_by: 1 + created_at: '2024-01-15T09:30:00Z' + updated_at: '2024-01-15T09:30:00Z' + audiences: + - public + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/organizations.yml b/.mock/definition/organizations.yml index 4e504512a..cdedea3f1 100644 --- a/.mock/definition/organizations.yml +++ b/.mock/definition/organizations.yml @@ -112,3 +112,5 @@ service: created_by: 1 users: - 1 + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/organizations/members.yml b/.mock/definition/organizations/members.yml index 830b15deb..21409d0ea 100644 --- a/.mock/definition/organizations/members.yml +++ b/.mock/definition/organizations/members.yml @@ -13,6 +13,8 @@ types: validation: format: uri results: list + source: + openapi: openapi/openapi.yaml service: auth: false base-path: '' @@ -73,3 +75,5 @@ service: - path-parameters: id: id user_pk: 1 + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/predictions.yml b/.mock/definition/predictions.yml index a9996ec33..f665f5726 100644 --- a/.mock/definition/predictions.yml +++ b/.mock/definition/predictions.yml @@ -471,3 +471,5 @@ service: project: 1 audiences: - public + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/projects.yml b/.mock/definition/projects.yml index 3532b2bb9..5aead99bb 100644 --- a/.mock/definition/projects.yml +++ b/.mock/definition/projects.yml @@ -13,6 +13,8 @@ types: validation: format: uri results: list + source: + openapi: openapi/openapi.yaml ProjectsCreateResponse: docs: Project properties: @@ -66,6 +68,8 @@ types: first label Car should be twice as important as 
Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} + source: + openapi: openapi/openapi.yaml ProjectsUpdateResponse: docs: Project properties: @@ -119,6 +123,8 @@ types: first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} + source: + openapi: openapi/openapi.yaml ProjectsImportTasksResponse: docs: Task creation response properties: @@ -146,6 +152,8 @@ types: data_columns: type: optional> docs: The list of found data columns + source: + openapi: openapi/openapi.yaml service: auth: false base-path: '' @@ -876,3 +884,5 @@ service: label_config: label_config audiences: - public + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/projects/exports.yml b/.mock/definition/projects/exports.yml index b21fcf186..1f0e3adb0 100644 --- a/.mock/definition/projects/exports.yml +++ b/.mock/definition/projects/exports.yml @@ -409,5 +409,7 @@ service: export_pk: export_pk audiences: - public + source: + openapi: openapi/openapi.yaml imports: root: ../__package__.yml diff --git a/.mock/definition/projects/labels.yml b/.mock/definition/projects/labels.yml index 561d70637..42df7cf9c 100644 --- a/.mock/definition/projects/labels.yml +++ b/.mock/definition/projects/labels.yml @@ -13,6 +13,8 @@ types: validation: format: uri results: list + source: + openapi: openapi/openapi.yaml service: auth: false base-path: '' @@ -163,3 +165,5 @@ service: - {} audiences: - internal + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/prompts.yml b/.mock/definition/prompts.yml new file mode 100644 index 000000000..355d276c6 --- /dev/null +++ b/.mock/definition/prompts.yml @@ -0,0 +1,101 @@ +imports: + root: __package__.yml +service: + auth: false + base-path: '' + endpoints: + list: + path: /api/prompts/ + method: GET + auth: true + docs: | + Get a list of 
prompts. + display-name: List prompts + response: + docs: '' + type: list + examples: + - response: + body: + - title: title + description: description + created_by: 1 + created_at: '2024-01-15T09:30:00Z' + updated_at: '2024-01-15T09:30:00Z' + organization: 1 + input_fields: + - input_fields + output_classes: + - output_classes + associated_projects: + - 1 + skill_name: skill_name + audiences: + - public + create: + path: /api/prompts/ + method: POST + auth: true + docs: | + Create a new prompt. + display-name: Create prompt + request: + body: root.Prompt + response: + docs: '' + type: root.Prompt + examples: + - request: + title: title + input_fields: + - input_fields + output_classes: + - output_classes + response: + body: + title: title + description: description + created_by: 1 + created_at: '2024-01-15T09:30:00Z' + updated_at: '2024-01-15T09:30:00Z' + organization: 1 + input_fields: + - input_fields + output_classes: + - output_classes + associated_projects: + - 1 + skill_name: skill_name + audiences: + - public + batch_predictions: + path: /api/model-run/batch-predictions + method: POST + auth: true + docs: | + Create a new batch prediction. 
+ display-name: Create batch predictions + request: + name: PromptsBatchPredictionsRequest + body: + properties: + modelrun_id: optional + results: optional>> + response: + docs: '' + type: PromptsBatchPredictionsResponse + examples: + - request: {} + response: + body: + detail: detail + audiences: + - public + source: + openapi: openapi/openapi.yaml +types: + PromptsBatchPredictionsResponse: + properties: + detail: optional + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/prompts/versions.yml b/.mock/definition/prompts/versions.yml new file mode 100644 index 000000000..55b3a7469 --- /dev/null +++ b/.mock/definition/prompts/versions.yml @@ -0,0 +1,89 @@ +imports: + root: ../__package__.yml +service: + auth: false + base-path: '' + endpoints: + create: + path: /api/prompts/{id}/versions + method: POST + auth: true + docs: | + Create a new version of a prompt. + path-parameters: + id: + type: integer + docs: Prompt ID + display-name: Create prompt version + request: + body: root.PromptVersion + response: + docs: '' + type: root.PromptVersion + examples: + - path-parameters: + id: 1 + request: + title: title + prompt: prompt + provider: OpenAI + provider_model_id: provider_model_id + response: + body: + title: title + parent_model: 1 + prompt: prompt + provider: OpenAI + provider_model_id: provider_model_id + created_by: 1 + created_at: '2024-01-15T09:30:00Z' + updated_at: '2024-01-15T09:30:00Z' + organization: 1 + audiences: + - public + create_run: + path: /api/prompts/{id}/versions/{version_id}/inference-runs + method: POST + auth: true + docs: | + Run a prompt inference. 
+ path-parameters: + id: + type: integer + docs: Prompt ID + version_id: + type: integer + docs: Prompt Version ID + display-name: Run prompt inference + request: + body: root.InferenceRun + response: + docs: '' + type: root.InferenceRun + examples: + - path-parameters: + id: 1 + version_id: 1 + request: + project: 1 + project_subset: All + response: + body: + organization: 1 + project: 1 + model_version: model_version + created_by: 1 + project_subset: All + status: Pending + job_id: job_id + total_predictions: 1 + total_correct_predictions: 1 + total_tasks: 1 + created_at: '2024-01-15T09:30:00Z' + triggered_at: '2024-01-15T09:30:00Z' + predictions_updated_at: '2024-01-15T09:30:00Z' + completed_at: '2024-01-15T09:30:00Z' + audiences: + - public + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/tasks.yml b/.mock/definition/tasks.yml index 5e6f25a45..6578fa6bb 100644 --- a/.mock/definition/tasks.yml +++ b/.mock/definition/tasks.yml @@ -501,12 +501,16 @@ service: - 1 audiences: - public + source: + openapi: openapi/openapi.yaml types: TasksListRequestFields: enum: - all - task_only default: task_only + source: + openapi: openapi/openapi.yaml TasksListResponse: properties: tasks: @@ -523,3 +527,5 @@ types: total_predictions: type: optional docs: Total number of predictions + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/users.yml b/.mock/definition/users.yml index 27a3b11fb..288839e79 100644 --- a/.mock/definition/users.yml +++ b/.mock/definition/users.yml @@ -5,12 +5,16 @@ types: token: type: optional docs: Token + source: + openapi: openapi/openapi.yaml UsersGetTokenResponse: docs: User token properties: detail: type: optional docs: Token + source: + openapi: openapi/openapi.yaml service: auth: false base-path: '' @@ -297,5 +301,7 @@ service: allow_newsletters: true audiences: - public + source: + openapi: openapi/openapi.yaml imports: root: __package__.yml diff --git a/.mock/definition/views.yml 
b/.mock/definition/views.yml index 34e864cf4..a0c0772f0 100644 --- a/.mock/definition/views.yml +++ b/.mock/definition/views.yml @@ -230,6 +230,8 @@ service: project: 1 audiences: - public + source: + openapi: openapi/openapi.yaml types: ViewsCreateRequestDataFiltersConjunction: enum: @@ -240,6 +242,8 @@ types: "and") will be applied to all items in the filters list. It is not possible to combine "or" and "and" within one list of filters. All filters will be either combined with "or" or with "and", but not a mix of both. + source: + openapi: openapi/openapi.yaml ViewsCreateRequestDataFiltersItemsItemFilter: enum: - value: filter:tasks:agreement @@ -333,6 +337,8 @@ types: Number of unresolved comments in a task
  • `filter:tasks:updated_at`
    (Datetime) Time the task was updated at (e.g. new annotation was created, review added, etc)
  • + source: + openapi: openapi/openapi.yaml ViewsCreateRequestDataFiltersItemsItemOperator: enum: - contains @@ -362,6 +368,8 @@ types: to
  • `not_exists`
    Does not exist

  • `not_in`
    Is not between min and max values, so the filter `value` should be e.g. `{"min": 1, "max": 7}`

  • `starts_with`
    Starts with
  • + source: + openapi: openapi/openapi.yaml ViewsCreateRequestDataFiltersItemsItemValue: discriminated: false docs: Value to filter by @@ -374,8 +382,10 @@ types: docs: Float - type: boolean docs: Boolean - - docs: List of strings or integers - type: map + - type: map + docs: List of strings or integers + source: + openapi: openapi/openapi.yaml ViewsCreateRequestDataFiltersItemsItem: properties: filter: @@ -457,6 +467,8 @@ types: value: type: ViewsCreateRequestDataFiltersItemsItemValue docs: Value to filter by + source: + openapi: openapi/openapi.yaml ViewsCreateRequestDataFilters: docs: >- Filters to apply on tasks. You can use [the helper class `Filters` from @@ -476,6 +488,8 @@ types: items: docs: List of filter items type: list + source: + openapi: openapi/openapi.yaml ViewsCreateRequestDataOrderingItem: enum: - value: tasks:agreement @@ -522,6 +536,8 @@ types: name: TasksUnresolvedCommentCount - value: tasks:updated_at name: TasksUpdatedAt + source: + openapi: openapi/openapi.yaml ViewsCreateRequestData: docs: Custom view data properties: @@ -539,6 +555,8 @@ types: List of fields to order by. Fields are similar to filters but without the `filter:` prefix. To reverse the order, add a minus sign before the field name, e.g. `-tasks:created_at`. + source: + openapi: openapi/openapi.yaml ViewsUpdateRequestDataFiltersConjunction: enum: - or @@ -548,6 +566,8 @@ types: "and") will be applied to all items in the filters list. It is not possible to combine "or" and "and" within one list of filters. All filters will be either combined with "or" or with "and", but not a mix of both. + source: + openapi: openapi/openapi.yaml ViewsUpdateRequestDataFiltersItemsItemFilter: enum: - value: filter:tasks:agreement @@ -641,6 +661,8 @@ types: Number of unresolved comments in a task
  • `filter:tasks:updated_at`
    (Datetime) Time the task was updated at (e.g. new annotation was created, review added, etc)
  • + source: + openapi: openapi/openapi.yaml ViewsUpdateRequestDataFiltersItemsItemOperator: enum: - contains @@ -670,6 +692,8 @@ types: to
  • `not_exists`
    Does not exist

  • `not_in`
    Is not between min and max values, so the filter `value` should be e.g. `{"min": 1, "max": 7}`

  • `starts_with`
    Starts with
  • + source: + openapi: openapi/openapi.yaml ViewsUpdateRequestDataFiltersItemsItemValue: discriminated: false docs: Value to filter by @@ -682,8 +706,10 @@ types: docs: Float - type: boolean docs: Boolean - - docs: List of strings or integers - type: map + - type: map + docs: List of strings or integers + source: + openapi: openapi/openapi.yaml ViewsUpdateRequestDataFiltersItemsItem: properties: filter: @@ -765,6 +791,8 @@ types: value: type: ViewsUpdateRequestDataFiltersItemsItemValue docs: Value to filter by + source: + openapi: openapi/openapi.yaml ViewsUpdateRequestDataFilters: docs: >- Filters to apply on tasks. You can use [the helper class `Filters` from @@ -784,6 +812,8 @@ types: items: docs: List of filter items type: list + source: + openapi: openapi/openapi.yaml ViewsUpdateRequestDataOrderingItem: enum: - value: tasks:agreement @@ -830,6 +860,8 @@ types: name: TasksUnresolvedCommentCount - value: tasks:updated_at name: TasksUpdatedAt + source: + openapi: openapi/openapi.yaml ViewsUpdateRequestData: docs: Custom view data properties: @@ -847,3 +879,5 @@ types: List of fields to order by. Fields are similar to filters but without the `filter:` prefix. To reverse the order, add a minus sign before the field name, e.g. `-tasks:created_at`. 
+ source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/webhooks.yml b/.mock/definition/webhooks.yml index 737e2d08f..7c09ba02e 100644 --- a/.mock/definition/webhooks.yml +++ b/.mock/definition/webhooks.yml @@ -307,6 +307,8 @@ service: updated_at: '2024-01-15T09:30:00Z' audiences: - public + source: + openapi: openapi/openapi.yaml types: ApiWebhooksUpdateRequestActionsItem: enum: @@ -322,6 +324,8 @@ types: - LABEL_LINK_CREATED - LABEL_LINK_UPDATED - LABEL_LINK_DELETED + source: + openapi: openapi/openapi.yaml WebhooksUpdateRequestActionsItem: enum: - PROJECT_CREATED @@ -336,3 +340,5 @@ types: - LABEL_LINK_CREATED - LABEL_LINK_UPDATED - LABEL_LINK_DELETED + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/workspaces.yml b/.mock/definition/workspaces.yml index fb5cc1713..6c016308c 100644 --- a/.mock/definition/workspaces.yml +++ b/.mock/definition/workspaces.yml @@ -206,3 +206,5 @@ service: color: color audiences: - public + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/workspaces/members.yml b/.mock/definition/workspaces/members.yml index 1ceac3c4b..713020e45 100644 --- a/.mock/definition/workspaces/members.yml +++ b/.mock/definition/workspaces/members.yml @@ -4,11 +4,15 @@ types: user: type: optional> docs: User ID of the workspace member + source: + openapi: openapi/openapi.yaml MembersCreateResponse: properties: user: type: optional docs: User ID of the workspace member + source: + openapi: openapi/openapi.yaml service: auth: false base-path: '' @@ -98,3 +102,5 @@ service: request: {} audiences: - public + source: + openapi: openapi/openapi.yaml diff --git a/.mock/fern.config.json b/.mock/fern.config.json index 6ca34b87f..f261c50fb 100644 --- a/.mock/fern.config.json +++ b/.mock/fern.config.json @@ -1,4 +1,4 @@ { "organization" : "humansignal-org", - "version" : "0.37.6" + "version" : "0.39.1" } \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index fe476df59..efc4be167 100644 --- 
a/poetry.lock +++ b/poetry.lock @@ -387,21 +387,21 @@ files = [ [[package]] name = "importlib-resources" -version = "6.4.0" +version = "6.4.3" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, - {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, + {file = "importlib_resources-6.4.3-py3-none-any.whl", hash = "sha256:2d6dfe3b9e055f72495c2085890837fc8c758984e209115c8792bddcb762cd93"}, + {file = "importlib_resources-6.4.3.tar.gz", hash = "sha256:4a202b9b9d38563b46da59221d77bb73862ab5d79d461307bcb826d725448b98"}, ] [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] [[package]] name = "iniconfig" @@ -676,13 +676,13 @@ files = [ [[package]] name = "nltk" -version = "3.8.2" +version = "3.9.1" description = "Natural Language Toolkit" optional = false python-versions = ">=3.8" files = [ - {file = "nltk-3.8.2-py3-none-any.whl", hash = "sha256:bae044ae22ebe0b694a87c0012233373209f27d5c76d3572599c842740a62fe0"}, - {file = "nltk-3.8.2.tar.gz", hash = "sha256:9c051aa981c6745894906d5c3aad27417f3d1c10d91eefca50382fc922966f31"}, + {file = 
"nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1"}, + {file = "nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868"}, ] [package.dependencies] @@ -1597,4 +1597,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "f2c8147564391bfc18ecd7453f7e2cdcbbc343f8e1eab32ce1966f9df74e1978" +content-hash = "ca60e17c7aa3f1b33475d3fa20713f581799d6ea53bffe4059c232bc46e1c1d7" diff --git a/pyproject.toml b/pyproject.toml index cae3faa2a..5865894eb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,7 +38,7 @@ httpx = ">=0.21.2" ijson = ">=3.2.3" jsonschema = ">=4.23.0" lxml = ">=4.2.5" -nltk = "^3.8.2" +nltk = "^3.8.1" numpy = "<2.0.0" pandas = ">=0.24.0" pydantic = ">= 1.9.2" diff --git a/reference.md b/reference.md index 684bf9038..3d2dce7ae 100644 --- a/reference.md +++ b/reference.md @@ -2022,15 +2022,13 @@ curl -H 'Authorization: Token abc123' \ -X POST 'https://localhost:8080/api/impo
    ```python -from label_studio_sdk import FileUpload from label_studio_sdk.client import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) client.files.update( - id=1, - request=FileUpload(), + id_=1, ) ``` @@ -2047,7 +2045,15 @@ client.files.update(
    -**id:** `int` — A unique integer value identifying this file upload. +**id_:** `int` — A unique integer value identifying this file upload. + +
    +
    + +
    +
    + +**id:** `typing.Optional[int]`
    @@ -2055,7 +2061,7 @@ client.files.update(
    -**request:** `FileUpload` +**file:** `typing.Optional[str]`
    @@ -3134,7 +3140,6 @@ The project ID can be found in the URL when viewing the project in Label Studio,
    ```python -from label_studio_sdk import ProjectLabelConfig from label_studio_sdk.client import LabelStudio client = LabelStudio( @@ -3142,9 +3147,7 @@ client = LabelStudio( ) client.projects.validate_config( id=1, - request=ProjectLabelConfig( - label_config="label_config", - ), + label_config="label_config", ) ``` @@ -3169,7 +3172,7 @@ client.projects.validate_config(
    -**request:** `ProjectLabelConfig` +**label_config:** `str` — Label config in XML format. See more about it in documentation
    @@ -4721,15 +4724,13 @@ For more information, see the [Label Studio documentation on exporting annotatio
    ```python -from label_studio_sdk import ExportCreate from label_studio_sdk.client import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) client.projects.exports.create( - id=1, - request=ExportCreate(), + id_=1, ) ``` @@ -4746,7 +4747,95 @@ client.projects.exports.create(
    -**id:** `int` — A unique integer value identifying this project. +**id_:** `int` — A unique integer value identifying this project. + +
    +
    + +
    +
    + +**title:** `typing.Optional[str]` + +
    +
    + +
    +
    + +**id:** `typing.Optional[int]` + +
    +
    + +
    +
    + +**created_by:** `typing.Optional[UserSimple]` + +
    +
    + +
    +
    + +**created_at:** `typing.Optional[dt.datetime]` — Creation time + +
    +
    + +
    +
    + +**finished_at:** `typing.Optional[dt.datetime]` — Complete or fail time + +
    +
    + +
    +
    + +**status:** `typing.Optional[ExportCreateStatus]` + +
    +
    + +
    +
    + +**md5:** `typing.Optional[str]` + +
    +
    + +
    +
    + +**counters:** `typing.Optional[typing.Dict[str, typing.Any]]` + +
    +
    + +
    +
    + +**converted_formats:** `typing.Optional[typing.Sequence[ConvertedFormat]]` + +
    +
    + +
    +
    + +**task_filter_options:** `typing.Optional[TaskFilterOptions]` + +
    +
    + +
    +
    + +**annotation_filter_options:** `typing.Optional[AnnotationFilterOptions]`
    @@ -4754,7 +4843,7 @@ client.projects.exports.create(
    -**request:** `ExportCreate` +**serialization_options:** `typing.Optional[SerializationOptions]`
    @@ -4971,7 +5060,6 @@ The project ID can be found in the URL when viewing the project in Label Studio,
    ```python -from label_studio_sdk import ExportConvert from label_studio_sdk.client import LabelStudio client = LabelStudio( @@ -4980,9 +5068,7 @@ client = LabelStudio( client.projects.exports.convert( id=1, export_pk="export_pk", - request=ExportConvert( - export_type="export_type", - ), + export_type="export_type", ) ``` @@ -5015,7 +5101,7 @@ client.projects.exports.convert(
    -**request:** `ExportConvert` +**export_type:** `str` — Export file format.
    @@ -13247,16 +13333,13 @@ If you want to create your own custom webhook, refer to [Create custom events fo
    ```python -from label_studio_sdk import Webhook from label_studio_sdk.client import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) client.webhooks.create( - request=Webhook( - url="url", - ), + url="url", ) ``` @@ -13273,7 +13356,87 @@ client.webhooks.create(
    -**request:** `Webhook` +**url:** `str` — URL of webhook + +
    +
    + +
    +
    + +**id:** `typing.Optional[int]` + +
    +
    + +
    +
    + +**organization:** `typing.Optional[int]` + +
    +
    + +
    +
    + +**project:** `typing.Optional[int]` + +
    +
    + +
    +
    + +**send_payload:** `typing.Optional[bool]` — If value is False send only action + +
    +
    + +
    +
    + +**send_for_all_actions:** `typing.Optional[bool]` — If value is False - used only for actions from WebhookAction + +
    +
    + +
    +
    + +**headers:** `typing.Optional[typing.Dict[str, typing.Any]]` — Key Value Json of headers + +
    +
    + +
    +
    + +**is_active:** `typing.Optional[bool]` — If value is False the webhook is disabled + +
    +
    + +
    +
    + +**actions:** `typing.Optional[typing.Sequence[WebhookActionsItem]]` + +
    +
    + +
    +
    + +**created_at:** `typing.Optional[dt.datetime]` — Creation time + +
    +
    + +
    +
    + +**updated_at:** `typing.Optional[dt.datetime]` — Last update time
    @@ -13534,18 +13697,15 @@ For more information about webhooks, see [Set up webhooks in Label Studio](https
    ```python -from label_studio_sdk import WebhookSerializerForUpdate from label_studio_sdk.client import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) client.webhooks.update( - id=1, + id_=1, url="url", - request=WebhookSerializerForUpdate( - url="url", - ), + webhook_serializer_for_update_url="url", ) ``` @@ -13562,7 +13722,7 @@ client.webhooks.update(
    -**id:** `int` — A unique integer value identifying this webhook. +**id_:** `int` — A unique integer value identifying this webhook.
    @@ -13578,7 +13738,7 @@ client.webhooks.update(
    -**request:** `WebhookSerializerForUpdate` +**webhook_serializer_for_update_url:** `str` — URL of webhook
    @@ -13631,6 +13791,842 @@ client.webhooks.update(
    +**id:** `typing.Optional[int]` + +
    +
    + +
    +
    + +**organization:** `typing.Optional[int]` + +
    +
    + +
    +
    + +**project:** `typing.Optional[int]` + +
    +
    + +
    +
    + +**webhook_serializer_for_update_send_payload:** `typing.Optional[bool]` — If value is False send only action + +
    +
    + +
    +
    + +**webhook_serializer_for_update_send_for_all_actions:** `typing.Optional[bool]` — If value is False - used only for actions from WebhookAction + +
    +
    + +
    +
    + +**webhook_serializer_for_update_headers:** `typing.Optional[typing.Dict[str, typing.Any]]` — Key Value Json of headers + +
    +
    + +
    +
    + +**webhook_serializer_for_update_is_active:** `typing.Optional[bool]` — If value is False the webhook is disabled + +
    +
    + +
    +
    + +**webhook_serializer_for_update_actions:** `typing.Optional[typing.Sequence[WebhookSerializerForUpdateActionsItem]]` + +
    +
    + +
    +
    + +**created_at:** `typing.Optional[dt.datetime]` — Creation time + +
    +
    + +
    +
    + +**updated_at:** `typing.Optional[dt.datetime]` — Last update time + +
    +
    + +
    +
    + +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
    +
    +
    + + + + + + + +## Prompts +
    client.prompts.list() +
    +
    + +#### 📝 Description + +
    +
    + +
    +
    + +Get a list of prompts. +
    +
    +
    +
    + +#### 🔌 Usage + +
    +
    + +
    +
    + +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.prompts.list() + +``` +
    +
    +
    +
    + +#### ⚙️ Parameters + +
    +
    + +
    +
    + +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
    +
    +
    +
    + + +
    +
    +
    + +
    client.prompts.create(...) +
    +
    + +#### 📝 Description + +
    +
    + +
    +
    + +Create a new prompt. +
    +
    +
    +
    + +#### 🔌 Usage + +
    +
    + +
    +
    + +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.prompts.create( + title="title", + input_fields=["input_fields"], + output_classes=["output_classes"], +) + +``` +
    +
    +
    +
    + +#### ⚙️ Parameters + +
    +
    + +
    +
    + +**title:** `str` — Title of the prompt + +
    +
    + +
    +
    + +**input_fields:** `typing.Sequence[str]` — List of input fields + +
    +
    + +
    +
    + +**output_classes:** `typing.Sequence[str]` — List of output classes + +
    +
    + +
    +
    + +**description:** `typing.Optional[str]` — Description of the prompt + +
    +
    + +
    +
    + +**created_by:** `typing.Optional[PromptCreatedBy]` — User ID of the creator of the prompt + +
    +
    + +
    +
    + +**created_at:** `typing.Optional[dt.datetime]` — Date and time the prompt was created + +
    +
    + +
    +
    + +**updated_at:** `typing.Optional[dt.datetime]` — Date and time the prompt was last updated + +
    +
    + +
    +
    + +**organization:** `typing.Optional[PromptOrganization]` — Organization ID of the prompt + +
    +
    + +
    +
    + +**associated_projects:** `typing.Optional[typing.Sequence[int]]` — List of associated projects IDs + +
    +
    + +
    +
    + +**skill_name:** `typing.Optional[str]` — Name of the skill + +
    +
    + +
    +
    + +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
    +
    +
    +
    + + +
    +
    +
    + +
    client.prompts.batch_predictions(...) +
    +
    + +#### 📝 Description + +
    +
    + +
    +
    + +Create a new batch prediction. +
    +
    +
    +
    + +#### 🔌 Usage + +
    +
    + +
    +
    + +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.prompts.batch_predictions() + +``` +
    +
    +
    +
    + +#### ⚙️ Parameters + +
    +
    + +
    +
    + +**modelrun_id:** `typing.Optional[int]` + +
    +
    + +
    +
    + +**results:** `typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]]` + +
    +
    + +
    +
    + +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
    +
    +
    +
    + + +
    +
    +
    + +## Prompts Versions +
    client.prompts.versions.create(...) +
    +
    + +#### 📝 Description + +
    +
    + +
    +
    + +Create a new version of a prompt. +
    +
    +
    +
    + +#### 🔌 Usage + +
    +
    + +
    +
    + +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.prompts.versions.create( + id=1, + title="title", + prompt="prompt", + provider="OpenAI", + provider_model_id="provider_model_id", +) + +``` +
    +
    +
    +
    + +#### ⚙️ Parameters + +
    +
    + +
    +
    + +**id:** `int` — Prompt ID + +
    +
    + +
    +
    + +**title:** `str` + +
    +
    + +
    +
    + +**prompt:** `str` + +
    +
    + +
    +
    + +**provider:** `PromptVersionProvider` + +
    +
    + +
    +
    + +**provider_model_id:** `str` + +
    +
    + +
    +
    + +**parent_model:** `typing.Optional[int]` + +
    +
    + +
    +
    + +**created_by:** `typing.Optional[PromptVersionCreatedBy]` + +
    +
    + +
    +
    + +**created_at:** `typing.Optional[dt.datetime]` + +
    +
    + +
    +
    + +**updated_at:** `typing.Optional[dt.datetime]` + +
    +
    + +
    +
    + +**organization:** `typing.Optional[PromptVersionOrganization]` + +
    +
    + +
    +
    + +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
    +
    +
    +
    + + +
    +
    +
    + +
    client.prompts.versions.create_run(...) +
    +
    + +#### 📝 Description + +
    +
    + +
    +
    + +Run a prompt inference. +
    +
    +
    +
    + +#### 🔌 Usage + +
    +
    + +
    +
    + +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.prompts.versions.create_run( + id=1, + version_id=1, + project=1, + project_subset="All", +) + +``` +
    +
    +
    +
    + +#### ⚙️ Parameters + +
    +
    + +
    +
    + +**id:** `int` — Prompt ID + +
    +
    + +
    +
    + +**version_id:** `int` — Prompt Version ID + +
    +
    + +
    +
    + +**project:** `int` + +
    +
    + +
    +
    + +**project_subset:** `InferenceRunProjectSubset` + +
    +
    + +
    +
    + +**organization:** `typing.Optional[InferenceRunOrganization]` + +
    +
    + +
    +
    + +**model_version:** `typing.Optional[str]` + +
    +
    + +
    +
    + +**created_by:** `typing.Optional[InferenceRunCreatedBy]` + +
    +
    + +
    +
    + +**status:** `typing.Optional[InferenceRunStatus]` + +
    +
    + +
    +
    + +**job_id:** `typing.Optional[str]` + +
    +
    + +
    +
    + +**total_predictions:** `typing.Optional[int]` + +
    +
    + +
    +
    + +**total_correct_predictions:** `typing.Optional[int]` + +
    +
    + +
    +
    + +**total_tasks:** `typing.Optional[int]` + +
    +
    + +
    +
    + +**created_at:** `typing.Optional[dt.datetime]` + +
    +
    + +
    +
    + +**triggered_at:** `typing.Optional[dt.datetime]` + +
    +
    + +
    +
    + +**predictions_updated_at:** `typing.Optional[dt.datetime]` + +
    +
    + +
    +
    + +**completed_at:** `typing.Optional[dt.datetime]` + +
    +
    + +
    +
    + +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
    +
    +
    +
    + + +
    +
    +
    + +## ModelProviders +
    client.model_providers.create(...) +
    +
    + +#### 📝 Description + +
    +
    + +
    +
    + +Create a new model provider connection. +
    +
    +
    +
    + +#### 🔌 Usage + +
    +
    + +
    +
    + +```python +from label_studio_sdk.client import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.model_providers.create( + provider="OpenAI", +) + +``` +
    +
    +
    +
    + +#### ⚙️ Parameters + +
    +
    + +
    +
    + +**provider:** `ModelProviderConnectionProvider` + +
    +
    + +
    +
    + +**api_key:** `typing.Optional[str]` + +
    +
    + +
    +
    + +**deployment_name:** `typing.Optional[str]` + +
    +
    + +
    +
    + +**endpoint:** `typing.Optional[str]` + +
    +
    + +
    +
    + +**scope:** `typing.Optional[ModelProviderConnectionScope]` + +
    +
    + +
    +
    + +**organization:** `typing.Optional[ModelProviderConnectionOrganization]` + +
    +
    + +
    +
    + +**created_by:** `typing.Optional[ModelProviderConnectionCreatedBy]` + +
    +
    + +
    +
    + +**created_at:** `typing.Optional[dt.datetime]` + +
    +
    + +
    +
    + +**updated_at:** `typing.Optional[dt.datetime]` + +
    +
    + +
    +
    + **request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
    diff --git a/src/label_studio_sdk/__init__.py b/src/label_studio_sdk/__init__.py index e89545133..c3cc754cb 100644 --- a/src/label_studio_sdk/__init__.py +++ b/src/label_studio_sdk/__init__.py @@ -34,6 +34,11 @@ GcsExportStorageStatus, GcsImportStorage, GcsImportStorageStatus, + InferenceRun, + InferenceRunCreatedBy, + InferenceRunOrganization, + InferenceRunProjectSubset, + InferenceRunStatus, LocalFilesExportStorage, LocalFilesExportStorageStatus, LocalFilesImportStorage, @@ -41,6 +46,11 @@ MlBackend, MlBackendAuthMethod, MlBackendState, + ModelProviderConnection, + ModelProviderConnectionCreatedBy, + ModelProviderConnectionOrganization, + ModelProviderConnectionProvider, + ModelProviderConnectionScope, Prediction, Project, ProjectImport, @@ -48,6 +58,13 @@ ProjectLabelConfig, ProjectSampling, ProjectSkipQueue, + Prompt, + PromptCreatedBy, + PromptOrganization, + PromptVersion, + PromptVersionCreatedBy, + PromptVersionOrganization, + PromptVersionProvider, RedisExportStorage, RedisExportStorageStatus, RedisImportStorage, @@ -81,8 +98,10 @@ files, import_storage, ml, + model_providers, predictions, projects, + prompts, tasks, users, views, @@ -115,6 +134,7 @@ MlUpdateResponseAuthMethod, ) from .projects import ProjectsCreateResponse, ProjectsImportTasksResponse, ProjectsListResponse, ProjectsUpdateResponse +from .prompts import PromptsBatchPredictionsResponse from .tasks import TasksListRequestFields, TasksListResponse from .users import UsersGetTokenResponse, UsersResetTokenResponse from .version import __version__ @@ -187,6 +207,11 @@ "GcsImportStorage", "GcsImportStorageStatus", "ImportStorageListTypesResponseItem", + "InferenceRun", + "InferenceRunCreatedBy", + "InferenceRunOrganization", + "InferenceRunProjectSubset", + "InferenceRunStatus", "InternalServerError", "LabelStudioEnvironment", "LocalFilesExportStorage", @@ -202,6 +227,11 @@ "MlUpdateRequestAuthMethod", "MlUpdateResponse", "MlUpdateResponseAuthMethod", + "ModelProviderConnection", + 
"ModelProviderConnectionCreatedBy", + "ModelProviderConnectionOrganization", + "ModelProviderConnectionProvider", + "ModelProviderConnectionScope", "Prediction", "Project", "ProjectImport", @@ -213,6 +243,14 @@ "ProjectsImportTasksResponse", "ProjectsListResponse", "ProjectsUpdateResponse", + "Prompt", + "PromptCreatedBy", + "PromptOrganization", + "PromptVersion", + "PromptVersionCreatedBy", + "PromptVersionOrganization", + "PromptVersionProvider", + "PromptsBatchPredictionsResponse", "RedisExportStorage", "RedisExportStorageStatus", "RedisImportStorage", @@ -265,8 +303,10 @@ "files", "import_storage", "ml", + "model_providers", "predictions", "projects", + "prompts", "tasks", "users", "views", diff --git a/src/label_studio_sdk/base_client.py b/src/label_studio_sdk/base_client.py index 8594ad277..175150891 100644 --- a/src/label_studio_sdk/base_client.py +++ b/src/label_studio_sdk/base_client.py @@ -15,8 +15,10 @@ from .files.client import AsyncFilesClient, FilesClient from .import_storage.client import AsyncImportStorageClient, ImportStorageClient from .ml.client import AsyncMlClient, MlClient +from .model_providers.client import AsyncModelProvidersClient, ModelProvidersClient from .predictions.client import AsyncPredictionsClient, PredictionsClient from .projects.client import AsyncProjectsClient, ProjectsClient +from .prompts.client import AsyncPromptsClient, PromptsClient from .tasks.client import AsyncTasksClient, TasksClient from .users.client import AsyncUsersClient, UsersClient from .views.client import AsyncViewsClient, ViewsClient @@ -98,6 +100,8 @@ def __init__( self.import_storage = ImportStorageClient(client_wrapper=self._client_wrapper) self.export_storage = ExportStorageClient(client_wrapper=self._client_wrapper) self.webhooks = WebhooksClient(client_wrapper=self._client_wrapper) + self.prompts = PromptsClient(client_wrapper=self._client_wrapper) + self.model_providers = ModelProvidersClient(client_wrapper=self._client_wrapper) self.comments = 
CommentsClient(client_wrapper=self._client_wrapper) self.workspaces = WorkspacesClient(client_wrapper=self._client_wrapper) @@ -176,6 +180,8 @@ def __init__( self.import_storage = AsyncImportStorageClient(client_wrapper=self._client_wrapper) self.export_storage = AsyncExportStorageClient(client_wrapper=self._client_wrapper) self.webhooks = AsyncWebhooksClient(client_wrapper=self._client_wrapper) + self.prompts = AsyncPromptsClient(client_wrapper=self._client_wrapper) + self.model_providers = AsyncModelProvidersClient(client_wrapper=self._client_wrapper) self.comments = AsyncCommentsClient(client_wrapper=self._client_wrapper) self.workspaces = AsyncWorkspacesClient(client_wrapper=self._client_wrapper) diff --git a/src/label_studio_sdk/core/client_wrapper.py b/src/label_studio_sdk/core/client_wrapper.py index 6676cb579..1c4002b5d 100644 --- a/src/label_studio_sdk/core/client_wrapper.py +++ b/src/label_studio_sdk/core/client_wrapper.py @@ -17,7 +17,7 @@ def get_headers(self) -> typing.Dict[str, str]: headers: typing.Dict[str, str] = { "X-Fern-Language": "Python", "X-Fern-SDK-Name": "label-studio-sdk", - "X-Fern-SDK-Version": "1.0.5", + "X-Fern-SDK-Version": "1.0.6", } headers["Authorization"] = f"Token {self.api_key}" return headers diff --git a/src/label_studio_sdk/files/client.py b/src/label_studio_sdk/files/client.py index a047936f5..4a68269a4 100644 --- a/src/label_studio_sdk/files/client.py +++ b/src/label_studio_sdk/files/client.py @@ -96,7 +96,12 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = raise ApiError(status_code=_response.status_code, body=_response_json) def update( - self, id: int, *, request: FileUpload, request_options: typing.Optional[RequestOptions] = None + self, + id_: int, + *, + id: typing.Optional[int] = OMIT, + file: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, ) -> FileUpload: """ Update a specific uploaded file. 
To get the file upload ID, use [Get files list](list). @@ -109,10 +114,12 @@ def update( Parameters ---------- - id : int + id_ : int A unique integer value identifying this file upload. - request : FileUpload + id : typing.Optional[int] + + file : typing.Optional[str] request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -124,21 +131,19 @@ def update( Examples -------- - from label_studio_sdk import FileUpload from label_studio_sdk.client import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) client.files.update( - id=1, - request=FileUpload(), + id_=1, ) """ _response = self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id)}", + f"api/import/file-upload/{jsonable_encoder(id_)}", method="PATCH", - json=request, + json={"id": id, "file": file}, request_options=request_options, omit=OMIT, ) @@ -367,7 +372,12 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio raise ApiError(status_code=_response.status_code, body=_response_json) async def update( - self, id: int, *, request: FileUpload, request_options: typing.Optional[RequestOptions] = None + self, + id_: int, + *, + id: typing.Optional[int] = OMIT, + file: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, ) -> FileUpload: """ Update a specific uploaded file. To get the file upload ID, use [Get files list](list). @@ -380,10 +390,12 @@ async def update( Parameters ---------- - id : int + id_ : int A unique integer value identifying this file upload. - request : FileUpload + id : typing.Optional[int] + + file : typing.Optional[str] request_options : typing.Optional[RequestOptions] Request-specific configuration. 
@@ -395,21 +407,19 @@ async def update( Examples -------- - from label_studio_sdk import FileUpload from label_studio_sdk.client import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) await client.files.update( - id=1, - request=FileUpload(), + id_=1, ) """ _response = await self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id)}", + f"api/import/file-upload/{jsonable_encoder(id_)}", method="PATCH", - json=request, + json={"id": id, "file": file}, request_options=request_options, omit=OMIT, ) diff --git a/src/label_studio_sdk/label_interface/interface.py b/src/label_studio_sdk/label_interface/interface.py index 3408d6753..e14b7eb88 100644 --- a/src/label_studio_sdk/label_interface/interface.py +++ b/src/label_studio_sdk/label_interface/interface.py @@ -30,9 +30,11 @@ ) from .object_tags import ObjectTag from .label_tags import LabelTag -from .objects import AnnotationValue, TaskValue, PredictionValue +from .objects import AnnotationValue, TaskValue, PredictionValue, Region from . import create as CE +logger = logging.getLogger(__name__) + dir_path = os.path.dirname(os.path.realpath(__file__)) file_path = os.path.join(dir_path, "..", "_legacy", "schema", "label_config_schema.json") @@ -250,8 +252,7 @@ def create_instance(cls, *args, **kwargs): """ config = cls.create(*args, **kwargs) return cls(config=config, **kwargs) - - + def __init__(self, config: str, tags_mapping=None, *args, **kwargs): """ Initialize a LabelInterface instance using a config string. @@ -299,9 +300,35 @@ def __init__(self, config: str, tags_mapping=None, *args, **kwargs): self._labels = labels self._tree = tree - + def create_regions(self, data: Dict[str, Union[Dict, List[Dict]]]) -> List[Region]: + """ + Takes raw data representation and maps keys to control tag names. + If name is not found, it will be skipped + + Args: + data (Dict): Raw data representation. 
Example: {"choices_name": "Positive", "labels_name": [{"start": 0, "end": 10, "label": "person"}]} + Returns: + List[Region] built from the recognized control tags; unknown control tag names are logged and skipped. + """ + regions = [] + for control_tag_name, payload in data.items(): + if control_tag_name not in self._controls: + logger.info(f"Control tag '{control_tag_name}' not found in the config") + continue + + control = self._controls[control_tag_name] + # TODO: I don't really like this part, looks like a workaround + # 1. we should allow control.label to process custom payload outside of those strictly containing "label" + # 2. we should be less open regarding the payload type and defining the strict typing elsewhere, + # but likely that requires rewriting of how ControlTag.label() is working now + if isinstance(payload, str): + payload = {'label': payload} + if isinstance(payload, Dict): + payload = [payload] + for item in payload: + + regions.append(control.label(**item)) - ##### NEW API + return regions @property def config(self): diff --git a/src/label_studio_sdk/model_providers/__init__.py b/src/label_studio_sdk/model_providers/__init__.py new file mode 100644 index 000000000..f3ea2659b --- /dev/null +++ b/src/label_studio_sdk/model_providers/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/src/label_studio_sdk/model_providers/client.py b/src/label_studio_sdk/model_providers/client.py new file mode 100644 index 000000000..0cd59e8a5 --- /dev/null +++ b/src/label_studio_sdk/model_providers/client.py @@ -0,0 +1,190 @@ +# This file was auto-generated by Fern from our API Definition.
+ +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.pydantic_utilities import pydantic_v1 +from ..core.request_options import RequestOptions +from ..types.model_provider_connection import ModelProviderConnection +from ..types.model_provider_connection_created_by import ModelProviderConnectionCreatedBy +from ..types.model_provider_connection_organization import ModelProviderConnectionOrganization +from ..types.model_provider_connection_provider import ModelProviderConnectionProvider +from ..types.model_provider_connection_scope import ModelProviderConnectionScope + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class ModelProvidersClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def create( + self, + *, + provider: ModelProviderConnectionProvider, + api_key: typing.Optional[str] = OMIT, + deployment_name: typing.Optional[str] = OMIT, + endpoint: typing.Optional[str] = OMIT, + scope: typing.Optional[ModelProviderConnectionScope] = OMIT, + organization: typing.Optional[ModelProviderConnectionOrganization] = OMIT, + created_by: typing.Optional[ModelProviderConnectionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + request_options: typing.Optional[RequestOptions] = None + ) -> ModelProviderConnection: + """ + Create a new model provider connection. 
+ + Parameters + ---------- + provider : ModelProviderConnectionProvider + + api_key : typing.Optional[str] + + deployment_name : typing.Optional[str] + + endpoint : typing.Optional[str] + + scope : typing.Optional[ModelProviderConnectionScope] + + organization : typing.Optional[ModelProviderConnectionOrganization] + + created_by : typing.Optional[ModelProviderConnectionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + ModelProviderConnection + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.model_providers.create( + provider="OpenAI", + ) + """ + _response = self._client_wrapper.httpx_client.request( + "api/model-provider-connections/", + method="POST", + json={ + "provider": provider, + "api_key": api_key, + "deployment_name": deployment_name, + "endpoint": endpoint, + "scope": scope, + "organization": organization, + "created_by": created_by, + "created_at": created_at, + "updated_at": updated_at, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(ModelProviderConnection, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncModelProvidersClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def create( + self, + *, + provider: ModelProviderConnectionProvider, + api_key: typing.Optional[str] = OMIT, + deployment_name: typing.Optional[str] = OMIT, + endpoint: typing.Optional[str] = OMIT, + scope: typing.Optional[ModelProviderConnectionScope] = OMIT, + 
organization: typing.Optional[ModelProviderConnectionOrganization] = OMIT, + created_by: typing.Optional[ModelProviderConnectionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + request_options: typing.Optional[RequestOptions] = None + ) -> ModelProviderConnection: + """ + Create a new model provider connection. + + Parameters + ---------- + provider : ModelProviderConnectionProvider + + api_key : typing.Optional[str] + + deployment_name : typing.Optional[str] + + endpoint : typing.Optional[str] + + scope : typing.Optional[ModelProviderConnectionScope] + + organization : typing.Optional[ModelProviderConnectionOrganization] + + created_by : typing.Optional[ModelProviderConnectionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + ModelProviderConnection + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.model_providers.create( + provider="OpenAI", + ) + """ + _response = await self._client_wrapper.httpx_client.request( + "api/model-provider-connections/", + method="POST", + json={ + "provider": provider, + "api_key": api_key, + "deployment_name": deployment_name, + "endpoint": endpoint, + "scope": scope, + "organization": organization, + "created_by": created_by, + "created_at": created_at, + "updated_at": updated_at, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(ModelProviderConnection, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git 
a/src/label_studio_sdk/projects/client.py b/src/label_studio_sdk/projects/client.py index e880e8e18..5bb99bc20 100644 --- a/src/label_studio_sdk/projects/client.py +++ b/src/label_studio_sdk/projects/client.py @@ -577,7 +577,7 @@ def import_tasks( raise ApiError(status_code=_response.status_code, body=_response_json) def validate_config( - self, id: int, *, request: ProjectLabelConfig, request_options: typing.Optional[RequestOptions] = None + self, id: int, *, label_config: str, request_options: typing.Optional[RequestOptions] = None ) -> ProjectLabelConfig: """ Determine whether the label configuration for a specific project is valid. For more information about setting up labeling configs, see [Configure labeling interface](https://labelstud.io/guide/setup) and our [Tags reference](https://labelstud.io/tags/). @@ -589,7 +589,8 @@ def validate_config( id : int A unique integer value identifying this project. - request : ProjectLabelConfig + label_config : str + Label config in XML format. See more about it in documentation request_options : typing.Optional[RequestOptions] Request-specific configuration. 
@@ -601,7 +602,6 @@ def validate_config( Examples -------- - from label_studio_sdk import ProjectLabelConfig from label_studio_sdk.client import LabelStudio client = LabelStudio( @@ -609,15 +609,13 @@ def validate_config( ) client.projects.validate_config( id=1, - request=ProjectLabelConfig( - label_config="label_config", - ), + label_config="label_config", ) """ _response = self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/validate/", method="POST", - json=request, + json={"label_config": label_config}, request_options=request_options, omit=OMIT, ) @@ -1185,7 +1183,7 @@ async def import_tasks( raise ApiError(status_code=_response.status_code, body=_response_json) async def validate_config( - self, id: int, *, request: ProjectLabelConfig, request_options: typing.Optional[RequestOptions] = None + self, id: int, *, label_config: str, request_options: typing.Optional[RequestOptions] = None ) -> ProjectLabelConfig: """ Determine whether the label configuration for a specific project is valid. For more information about setting up labeling configs, see [Configure labeling interface](https://labelstud.io/guide/setup) and our [Tags reference](https://labelstud.io/tags/). @@ -1197,7 +1195,8 @@ async def validate_config( id : int A unique integer value identifying this project. - request : ProjectLabelConfig + label_config : str + Label config in XML format. See more about it in documentation request_options : typing.Optional[RequestOptions] Request-specific configuration. 
@@ -1209,7 +1208,6 @@ async def validate_config( Examples -------- - from label_studio_sdk import ProjectLabelConfig from label_studio_sdk.client import AsyncLabelStudio client = AsyncLabelStudio( @@ -1217,15 +1215,13 @@ async def validate_config( ) await client.projects.validate_config( id=1, - request=ProjectLabelConfig( - label_config="label_config", - ), + label_config="label_config", ) """ _response = await self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/validate/", method="POST", - json=request, + json={"label_config": label_config}, request_options=request_options, omit=OMIT, ) diff --git a/src/label_studio_sdk/projects/exports/client.py b/src/label_studio_sdk/projects/exports/client.py index 131d0f05d..07a3f9e6f 100644 --- a/src/label_studio_sdk/projects/exports/client.py +++ b/src/label_studio_sdk/projects/exports/client.py @@ -1,5 +1,6 @@ # This file was auto-generated by Fern from our API Definition. +import datetime as dt import typing from json.decoder import JSONDecodeError @@ -8,9 +9,15 @@ from ...core.jsonable_encoder import jsonable_encoder from ...core.pydantic_utilities import pydantic_v1 from ...core.request_options import RequestOptions +from ...types.annotation_filter_options import AnnotationFilterOptions +from ...types.converted_format import ConvertedFormat from ...types.export import Export from ...types.export_convert import ExportConvert from ...types.export_create import ExportCreate +from ...types.export_create_status import ExportCreateStatus +from ...types.serialization_options import SerializationOptions +from ...types.task_filter_options import TaskFilterOptions +from ...types.user_simple import UserSimple # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -199,7 +206,22 @@ def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = No raise ApiError(status_code=_response.status_code, body=_response_json) def create( - self, id: int, *, request: ExportCreate, request_options: typing.Optional[RequestOptions] = None + self, + id_: int, + *, + title: typing.Optional[str] = OMIT, + id: typing.Optional[int] = OMIT, + created_by: typing.Optional[UserSimple] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + finished_at: typing.Optional[dt.datetime] = OMIT, + status: typing.Optional[ExportCreateStatus] = OMIT, + md5: typing.Optional[str] = OMIT, + counters: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + converted_formats: typing.Optional[typing.Sequence[ConvertedFormat]] = OMIT, + task_filter_options: typing.Optional[TaskFilterOptions] = OMIT, + annotation_filter_options: typing.Optional[AnnotationFilterOptions] = OMIT, + serialization_options: typing.Optional[SerializationOptions] = OMIT, + request_options: typing.Optional[RequestOptions] = None, ) -> ExportCreate: """ Create a new export request to start a background task and generate an export file (snapshot) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). @@ -210,10 +232,34 @@ def create( Parameters ---------- - id : int + id_ : int A unique integer value identifying this project. 
- request : ExportCreate + title : typing.Optional[str] + + id : typing.Optional[int] + + created_by : typing.Optional[UserSimple] + + created_at : typing.Optional[dt.datetime] + Creation time + + finished_at : typing.Optional[dt.datetime] + Complete or fail time + + status : typing.Optional[ExportCreateStatus] + + md5 : typing.Optional[str] + + counters : typing.Optional[typing.Dict[str, typing.Any]] + + converted_formats : typing.Optional[typing.Sequence[ConvertedFormat]] + + task_filter_options : typing.Optional[TaskFilterOptions] + + annotation_filter_options : typing.Optional[AnnotationFilterOptions] + + serialization_options : typing.Optional[SerializationOptions] request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -225,21 +271,32 @@ def create( Examples -------- - from label_studio_sdk import ExportCreate from label_studio_sdk.client import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) client.projects.exports.create( - id=1, - request=ExportCreate(), + id_=1, ) """ _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/exports/", + f"api/projects/{jsonable_encoder(id_)}/exports/", method="POST", - json=request, + json={ + "title": title, + "id": id, + "created_by": created_by, + "created_at": created_at, + "finished_at": finished_at, + "status": status, + "md5": md5, + "counters": counters, + "converted_formats": converted_formats, + "task_filter_options": task_filter_options, + "annotation_filter_options": annotation_filter_options, + "serialization_options": serialization_options, + }, request_options=request_options, omit=OMIT, ) @@ -347,12 +404,7 @@ def delete(self, id: int, export_pk: str, *, request_options: typing.Optional[Re raise ApiError(status_code=_response.status_code, body=_response_json) def convert( - self, - id: int, - export_pk: str, - *, - request: ExportConvert, - request_options: typing.Optional[RequestOptions] = None, + self, id: int, export_pk: str, 
*, export_type: str, request_options: typing.Optional[RequestOptions] = None ) -> ExportConvert: """ You can use this to convert an export snapshot into the selected format. @@ -371,7 +423,8 @@ def convert( export_pk : str Primary key identifying the export file. - request : ExportConvert + export_type : str + Export file format. request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -383,7 +436,6 @@ def convert( Examples -------- - from label_studio_sdk import ExportConvert from label_studio_sdk.client import LabelStudio client = LabelStudio( @@ -392,15 +444,13 @@ def convert( client.projects.exports.convert( id=1, export_pk="export_pk", - request=ExportConvert( - export_type="export_type", - ), + export_type="export_type", ) """ _response = self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/exports/{jsonable_encoder(export_pk)}/convert", method="POST", - json=request, + json={"export_type": export_type}, request_options=request_options, omit=OMIT, ) @@ -657,7 +707,22 @@ async def list(self, id: int, *, request_options: typing.Optional[RequestOptions raise ApiError(status_code=_response.status_code, body=_response_json) async def create( - self, id: int, *, request: ExportCreate, request_options: typing.Optional[RequestOptions] = None + self, + id_: int, + *, + title: typing.Optional[str] = OMIT, + id: typing.Optional[int] = OMIT, + created_by: typing.Optional[UserSimple] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + finished_at: typing.Optional[dt.datetime] = OMIT, + status: typing.Optional[ExportCreateStatus] = OMIT, + md5: typing.Optional[str] = OMIT, + counters: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + converted_formats: typing.Optional[typing.Sequence[ConvertedFormat]] = OMIT, + task_filter_options: typing.Optional[TaskFilterOptions] = OMIT, + annotation_filter_options: typing.Optional[AnnotationFilterOptions] = OMIT, + serialization_options: 
typing.Optional[SerializationOptions] = OMIT, + request_options: typing.Optional[RequestOptions] = None, ) -> ExportCreate: """ Create a new export request to start a background task and generate an export file (snapshot) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). @@ -668,10 +733,34 @@ async def create( Parameters ---------- - id : int + id_ : int A unique integer value identifying this project. - request : ExportCreate + title : typing.Optional[str] + + id : typing.Optional[int] + + created_by : typing.Optional[UserSimple] + + created_at : typing.Optional[dt.datetime] + Creation time + + finished_at : typing.Optional[dt.datetime] + Complete or fail time + + status : typing.Optional[ExportCreateStatus] + + md5 : typing.Optional[str] + + counters : typing.Optional[typing.Dict[str, typing.Any]] + + converted_formats : typing.Optional[typing.Sequence[ConvertedFormat]] + + task_filter_options : typing.Optional[TaskFilterOptions] + + annotation_filter_options : typing.Optional[AnnotationFilterOptions] + + serialization_options : typing.Optional[SerializationOptions] request_options : typing.Optional[RequestOptions] Request-specific configuration. 
@@ -683,21 +772,32 @@ async def create( Examples -------- - from label_studio_sdk import ExportCreate from label_studio_sdk.client import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) await client.projects.exports.create( - id=1, - request=ExportCreate(), + id_=1, ) """ _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/exports/", + f"api/projects/{jsonable_encoder(id_)}/exports/", method="POST", - json=request, + json={ + "title": title, + "id": id, + "created_by": created_by, + "created_at": created_at, + "finished_at": finished_at, + "status": status, + "md5": md5, + "counters": counters, + "converted_formats": converted_formats, + "task_filter_options": task_filter_options, + "annotation_filter_options": annotation_filter_options, + "serialization_options": serialization_options, + }, request_options=request_options, omit=OMIT, ) @@ -805,12 +905,7 @@ async def delete(self, id: int, export_pk: str, *, request_options: typing.Optio raise ApiError(status_code=_response.status_code, body=_response_json) async def convert( - self, - id: int, - export_pk: str, - *, - request: ExportConvert, - request_options: typing.Optional[RequestOptions] = None, + self, id: int, export_pk: str, *, export_type: str, request_options: typing.Optional[RequestOptions] = None ) -> ExportConvert: """ You can use this to convert an export snapshot into the selected format. @@ -829,7 +924,8 @@ async def convert( export_pk : str Primary key identifying the export file. - request : ExportConvert + export_type : str + Export file format. request_options : typing.Optional[RequestOptions] Request-specific configuration. 
@@ -841,7 +937,6 @@ async def convert( Examples -------- - from label_studio_sdk import ExportConvert from label_studio_sdk.client import AsyncLabelStudio client = AsyncLabelStudio( @@ -850,15 +945,13 @@ async def convert( await client.projects.exports.convert( id=1, export_pk="export_pk", - request=ExportConvert( - export_type="export_type", - ), + export_type="export_type", ) """ _response = await self._client_wrapper.httpx_client.request( f"api/projects/{jsonable_encoder(id)}/exports/{jsonable_encoder(export_pk)}/convert", method="POST", - json=request, + json={"export_type": export_type}, request_options=request_options, omit=OMIT, ) diff --git a/src/label_studio_sdk/prompts/__init__.py b/src/label_studio_sdk/prompts/__init__.py new file mode 100644 index 000000000..99b6e8137 --- /dev/null +++ b/src/label_studio_sdk/prompts/__init__.py @@ -0,0 +1,6 @@ +# This file was auto-generated by Fern from our API Definition. + +from .types import PromptsBatchPredictionsResponse +from . import versions + +__all__ = ["PromptsBatchPredictionsResponse", "versions"] diff --git a/src/label_studio_sdk/prompts/client.py b/src/label_studio_sdk/prompts/client.py new file mode 100644 index 000000000..310f21423 --- /dev/null +++ b/src/label_studio_sdk/prompts/client.py @@ -0,0 +1,388 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.pydantic_utilities import pydantic_v1 +from ..core.request_options import RequestOptions +from ..types.prompt import Prompt +from ..types.prompt_created_by import PromptCreatedBy +from ..types.prompt_organization import PromptOrganization +from .types.prompts_batch_predictions_response import PromptsBatchPredictionsResponse +from .versions.client import AsyncVersionsClient, VersionsClient + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class PromptsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + self.versions = VersionsClient(client_wrapper=self._client_wrapper) + + def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Prompt]: + """ + Get a list of prompts. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + typing.List[Prompt] + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.prompts.list() + """ + _response = self._client_wrapper.httpx_client.request( + "api/prompts/", method="GET", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[Prompt], _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def create( + self, + *, + title: str, + input_fields: typing.Sequence[str], + output_classes: typing.Sequence[str], + description: typing.Optional[str] = OMIT, + created_by: typing.Optional[PromptCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptOrganization] = OMIT, + associated_projects: typing.Optional[typing.Sequence[int]] = OMIT, + skill_name: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None + ) -> Prompt: + """ + Create a new prompt. 
+ + Parameters + ---------- + title : str + Title of the prompt + + input_fields : typing.Sequence[str] + List of input fields + + output_classes : typing.Sequence[str] + List of output classes + + description : typing.Optional[str] + Description of the prompt + + created_by : typing.Optional[PromptCreatedBy] + User ID of the creator of the prompt + + created_at : typing.Optional[dt.datetime] + Date and time the prompt was created + + updated_at : typing.Optional[dt.datetime] + Date and time the prompt was last updated + + organization : typing.Optional[PromptOrganization] + Organization ID of the prompt + + associated_projects : typing.Optional[typing.Sequence[int]] + List of associated projects IDs + + skill_name : typing.Optional[str] + Name of the skill + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + Prompt + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.prompts.create( + title="title", + input_fields=["input_fields"], + output_classes=["output_classes"], + ) + """ + _response = self._client_wrapper.httpx_client.request( + "api/prompts/", + method="POST", + json={ + "title": title, + "description": description, + "created_by": created_by, + "created_at": created_at, + "updated_at": updated_at, + "organization": organization, + "input_fields": input_fields, + "output_classes": output_classes, + "associated_projects": associated_projects, + "skill_name": skill_name, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def batch_predictions( + self, + *, + modelrun_id: 
typing.Optional[int] = OMIT, + results: typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] = OMIT, + request_options: typing.Optional[RequestOptions] = None + ) -> PromptsBatchPredictionsResponse: + """ + Create a new batch prediction. + + Parameters + ---------- + modelrun_id : typing.Optional[int] + + results : typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + PromptsBatchPredictionsResponse + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.prompts.batch_predictions() + """ + _response = self._client_wrapper.httpx_client.request( + "api/model-run/batch-predictions", + method="POST", + json={"modelrun_id": modelrun_id, "results": results}, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(PromptsBatchPredictionsResponse, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncPromptsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + self.versions = AsyncVersionsClient(client_wrapper=self._client_wrapper) + + async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Prompt]: + """ + Get a list of prompts. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + typing.List[Prompt] + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.prompts.list() + """ + _response = await self._client_wrapper.httpx_client.request( + "api/prompts/", method="GET", request_options=request_options + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[Prompt], _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def create( + self, + *, + title: str, + input_fields: typing.Sequence[str], + output_classes: typing.Sequence[str], + description: typing.Optional[str] = OMIT, + created_by: typing.Optional[PromptCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptOrganization] = OMIT, + associated_projects: typing.Optional[typing.Sequence[int]] = OMIT, + skill_name: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None + ) -> Prompt: + """ + Create a new prompt. 
+ + Parameters + ---------- + title : str + Title of the prompt + + input_fields : typing.Sequence[str] + List of input fields + + output_classes : typing.Sequence[str] + List of output classes + + description : typing.Optional[str] + Description of the prompt + + created_by : typing.Optional[PromptCreatedBy] + User ID of the creator of the prompt + + created_at : typing.Optional[dt.datetime] + Date and time the prompt was created + + updated_at : typing.Optional[dt.datetime] + Date and time the prompt was last updated + + organization : typing.Optional[PromptOrganization] + Organization ID of the prompt + + associated_projects : typing.Optional[typing.Sequence[int]] + List of associated projects IDs + + skill_name : typing.Optional[str] + Name of the skill + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + Prompt + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.prompts.create( + title="title", + input_fields=["input_fields"], + output_classes=["output_classes"], + ) + """ + _response = await self._client_wrapper.httpx_client.request( + "api/prompts/", + method="POST", + json={ + "title": title, + "description": description, + "created_by": created_by, + "created_at": created_at, + "updated_at": updated_at, + "organization": organization, + "input_fields": input_fields, + "output_classes": output_classes, + "associated_projects": associated_projects, + "skill_name": skill_name, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def batch_predictions( + self, 
+ *, + modelrun_id: typing.Optional[int] = OMIT, + results: typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] = OMIT, + request_options: typing.Optional[RequestOptions] = None + ) -> PromptsBatchPredictionsResponse: + """ + Create a new batch prediction. + + Parameters + ---------- + modelrun_id : typing.Optional[int] + + results : typing.Optional[typing.Sequence[typing.Dict[str, typing.Any]]] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + PromptsBatchPredictionsResponse + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.prompts.batch_predictions() + """ + _response = await self._client_wrapper.httpx_client.request( + "api/model-run/batch-predictions", + method="POST", + json={"modelrun_id": modelrun_id, "results": results}, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(PromptsBatchPredictionsResponse, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/prompts/types/__init__.py b/src/label_studio_sdk/prompts/types/__init__.py new file mode 100644 index 000000000..0d11673b2 --- /dev/null +++ b/src/label_studio_sdk/prompts/types/__init__.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from .prompts_batch_predictions_response import PromptsBatchPredictionsResponse + +__all__ = ["PromptsBatchPredictionsResponse"] diff --git a/src/label_studio_sdk/prompts/types/prompts_batch_predictions_response.py b/src/label_studio_sdk/prompts/types/prompts_batch_predictions_response.py new file mode 100644 index 000000000..432b25b29 --- /dev/null +++ b/src/label_studio_sdk/prompts/types/prompts_batch_predictions_response.py @@ -0,0 +1,29 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ...core.datetime_utils import serialize_datetime +from ...core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 + + +class PromptsBatchPredictionsResponse(pydantic_v1.BaseModel): + detail: typing.Optional[str] = None + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/label_studio_sdk/prompts/versions/__init__.py b/src/label_studio_sdk/prompts/versions/__init__.py new file mode 100644 index 000000000..f3ea2659b --- /dev/null +++ b/src/label_studio_sdk/prompts/versions/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. 
+ diff --git a/src/label_studio_sdk/prompts/versions/client.py b/src/label_studio_sdk/prompts/versions/client.py new file mode 100644 index 000000000..2fc52b0ec --- /dev/null +++ b/src/label_studio_sdk/prompts/versions/client.py @@ -0,0 +1,435 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import pydantic_v1 +from ...core.request_options import RequestOptions +from ...types.inference_run import InferenceRun +from ...types.inference_run_created_by import InferenceRunCreatedBy +from ...types.inference_run_organization import InferenceRunOrganization +from ...types.inference_run_project_subset import InferenceRunProjectSubset +from ...types.inference_run_status import InferenceRunStatus +from ...types.prompt_version import PromptVersion +from ...types.prompt_version_created_by import PromptVersionCreatedBy +from ...types.prompt_version_organization import PromptVersionOrganization +from ...types.prompt_version_provider import PromptVersionProvider + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
+ + +class VersionsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def create( + self, + id: int, + *, + title: str, + prompt: str, + provider: PromptVersionProvider, + provider_model_id: str, + parent_model: typing.Optional[int] = OMIT, + created_by: typing.Optional[PromptVersionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptVersionOrganization] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> PromptVersion: + """ + Create a new version of a prompt. + + Parameters + ---------- + id : int + Prompt ID + + title : str + + prompt : str + + provider : PromptVersionProvider + + provider_model_id : str + + parent_model : typing.Optional[int] + + created_by : typing.Optional[PromptVersionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + organization : typing.Optional[PromptVersionOrganization] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + PromptVersion + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.prompts.versions.create( + id=1, + title="title", + prompt="prompt", + provider="OpenAI", + provider_model_id="provider_model_id", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions", + method="POST", + json={ + "title": title, + "parent_model": parent_model, + "prompt": prompt, + "provider": provider, + "provider_model_id": provider_model_id, + "created_by": created_by, + "created_at": created_at, + "updated_at": updated_at, + "organization": organization, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(PromptVersion, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def create_run( + self, + id: int, + version_id: int, + *, + project: int, + project_subset: InferenceRunProjectSubset, + organization: typing.Optional[InferenceRunOrganization] = OMIT, + model_version: typing.Optional[str] = OMIT, + created_by: typing.Optional[InferenceRunCreatedBy] = OMIT, + status: typing.Optional[InferenceRunStatus] = OMIT, + job_id: typing.Optional[str] = OMIT, + total_predictions: typing.Optional[int] = OMIT, + total_correct_predictions: typing.Optional[int] = OMIT, + total_tasks: typing.Optional[int] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + triggered_at: typing.Optional[dt.datetime] = OMIT, + predictions_updated_at: typing.Optional[dt.datetime] = OMIT, + completed_at: typing.Optional[dt.datetime] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> InferenceRun: + """ + Run a prompt inference. 
+ + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + project : int + + project_subset : InferenceRunProjectSubset + + organization : typing.Optional[InferenceRunOrganization] + + model_version : typing.Optional[str] + + created_by : typing.Optional[InferenceRunCreatedBy] + + status : typing.Optional[InferenceRunStatus] + + job_id : typing.Optional[str] + + total_predictions : typing.Optional[int] + + total_correct_predictions : typing.Optional[int] + + total_tasks : typing.Optional[int] + + created_at : typing.Optional[dt.datetime] + + triggered_at : typing.Optional[dt.datetime] + + predictions_updated_at : typing.Optional[dt.datetime] + + completed_at : typing.Optional[dt.datetime] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + InferenceRun + + + Examples + -------- + from label_studio_sdk.client import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.prompts.versions.create_run( + id=1, + version_id=1, + project=1, + project_subset="All", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", + method="POST", + json={ + "organization": organization, + "project": project, + "model_version": model_version, + "created_by": created_by, + "project_subset": project_subset, + "status": status, + "job_id": job_id, + "total_predictions": total_predictions, + "total_correct_predictions": total_correct_predictions, + "total_tasks": total_tasks, + "created_at": created_at, + "triggered_at": triggered_at, + "predictions_updated_at": predictions_updated_at, + "completed_at": completed_at, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(InferenceRun, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + 
raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncVersionsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def create( + self, + id: int, + *, + title: str, + prompt: str, + provider: PromptVersionProvider, + provider_model_id: str, + parent_model: typing.Optional[int] = OMIT, + created_by: typing.Optional[PromptVersionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptVersionOrganization] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> PromptVersion: + """ + Create a new version of a prompt. + + Parameters + ---------- + id : int + Prompt ID + + title : str + + prompt : str + + provider : PromptVersionProvider + + provider_model_id : str + + parent_model : typing.Optional[int] + + created_by : typing.Optional[PromptVersionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + organization : typing.Optional[PromptVersionOrganization] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + PromptVersion + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.prompts.versions.create( + id=1, + title="title", + prompt="prompt", + provider="OpenAI", + provider_model_id="provider_model_id", + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions", + method="POST", + json={ + "title": title, + "parent_model": parent_model, + "prompt": prompt, + "provider": provider, + "provider_model_id": provider_model_id, + "created_by": created_by, + "created_at": created_at, + "updated_at": updated_at, + "organization": organization, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(PromptVersion, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def create_run( + self, + id: int, + version_id: int, + *, + project: int, + project_subset: InferenceRunProjectSubset, + organization: typing.Optional[InferenceRunOrganization] = OMIT, + model_version: typing.Optional[str] = OMIT, + created_by: typing.Optional[InferenceRunCreatedBy] = OMIT, + status: typing.Optional[InferenceRunStatus] = OMIT, + job_id: typing.Optional[str] = OMIT, + total_predictions: typing.Optional[int] = OMIT, + total_correct_predictions: typing.Optional[int] = OMIT, + total_tasks: typing.Optional[int] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + triggered_at: typing.Optional[dt.datetime] = OMIT, + predictions_updated_at: typing.Optional[dt.datetime] = OMIT, + completed_at: typing.Optional[dt.datetime] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> InferenceRun: + """ + Run a prompt 
inference. + + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + project : int + + project_subset : InferenceRunProjectSubset + + organization : typing.Optional[InferenceRunOrganization] + + model_version : typing.Optional[str] + + created_by : typing.Optional[InferenceRunCreatedBy] + + status : typing.Optional[InferenceRunStatus] + + job_id : typing.Optional[str] + + total_predictions : typing.Optional[int] + + total_correct_predictions : typing.Optional[int] + + total_tasks : typing.Optional[int] + + created_at : typing.Optional[dt.datetime] + + triggered_at : typing.Optional[dt.datetime] + + predictions_updated_at : typing.Optional[dt.datetime] + + completed_at : typing.Optional[dt.datetime] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + InferenceRun + + + Examples + -------- + from label_studio_sdk.client import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + await client.prompts.versions.create_run( + id=1, + version_id=1, + project=1, + project_subset="All", + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", + method="POST", + json={ + "organization": organization, + "project": project, + "model_version": model_version, + "created_by": created_by, + "project_subset": project_subset, + "status": status, + "job_id": job_id, + "total_predictions": total_predictions, + "total_correct_predictions": total_correct_predictions, + "total_tasks": total_tasks, + "created_at": created_at, + "triggered_at": triggered_at, + "predictions_updated_at": predictions_updated_at, + "completed_at": completed_at, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(InferenceRun, _response.json()) # type: ignore + _response_json = 
_response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/types/__init__.py b/src/label_studio_sdk/types/__init__.py index ed9e52d77..407b9e7cb 100644 --- a/src/label_studio_sdk/types/__init__.py +++ b/src/label_studio_sdk/types/__init__.py @@ -33,6 +33,11 @@ from .gcs_export_storage_status import GcsExportStorageStatus from .gcs_import_storage import GcsImportStorage from .gcs_import_storage_status import GcsImportStorageStatus +from .inference_run import InferenceRun +from .inference_run_created_by import InferenceRunCreatedBy +from .inference_run_organization import InferenceRunOrganization +from .inference_run_project_subset import InferenceRunProjectSubset +from .inference_run_status import InferenceRunStatus from .local_files_export_storage import LocalFilesExportStorage from .local_files_export_storage_status import LocalFilesExportStorageStatus from .local_files_import_storage import LocalFilesImportStorage @@ -40,6 +45,11 @@ from .ml_backend import MlBackend from .ml_backend_auth_method import MlBackendAuthMethod from .ml_backend_state import MlBackendState +from .model_provider_connection import ModelProviderConnection +from .model_provider_connection_created_by import ModelProviderConnectionCreatedBy +from .model_provider_connection_organization import ModelProviderConnectionOrganization +from .model_provider_connection_provider import ModelProviderConnectionProvider +from .model_provider_connection_scope import ModelProviderConnectionScope from .prediction import Prediction from .project import Project from .project_import import ProjectImport @@ -47,6 +57,13 @@ from .project_label_config import ProjectLabelConfig from .project_sampling import ProjectSampling from .project_skip_queue import ProjectSkipQueue +from .prompt import Prompt +from .prompt_created_by import PromptCreatedBy +from 
.prompt_organization import PromptOrganization +from .prompt_version import PromptVersion +from .prompt_version_created_by import PromptVersionCreatedBy +from .prompt_version_organization import PromptVersionOrganization +from .prompt_version_provider import PromptVersionProvider from .redis_export_storage import RedisExportStorage from .redis_export_storage_status import RedisExportStorageStatus from .redis_import_storage import RedisImportStorage @@ -105,6 +122,11 @@ "GcsExportStorageStatus", "GcsImportStorage", "GcsImportStorageStatus", + "InferenceRun", + "InferenceRunCreatedBy", + "InferenceRunOrganization", + "InferenceRunProjectSubset", + "InferenceRunStatus", "LocalFilesExportStorage", "LocalFilesExportStorageStatus", "LocalFilesImportStorage", @@ -112,6 +134,11 @@ "MlBackend", "MlBackendAuthMethod", "MlBackendState", + "ModelProviderConnection", + "ModelProviderConnectionCreatedBy", + "ModelProviderConnectionOrganization", + "ModelProviderConnectionProvider", + "ModelProviderConnectionScope", "Prediction", "Project", "ProjectImport", @@ -119,6 +146,13 @@ "ProjectLabelConfig", "ProjectSampling", "ProjectSkipQueue", + "Prompt", + "PromptCreatedBy", + "PromptOrganization", + "PromptVersion", + "PromptVersionCreatedBy", + "PromptVersionOrganization", + "PromptVersionProvider", "RedisExportStorage", "RedisExportStorageStatus", "RedisImportStorage", diff --git a/src/label_studio_sdk/types/inference_run.py b/src/label_studio_sdk/types/inference_run.py new file mode 100644 index 000000000..427bcb6dd --- /dev/null +++ b/src/label_studio_sdk/types/inference_run.py @@ -0,0 +1,46 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .inference_run_created_by import InferenceRunCreatedBy +from .inference_run_organization import InferenceRunOrganization +from .inference_run_project_subset import InferenceRunProjectSubset +from .inference_run_status import InferenceRunStatus + + +class InferenceRun(pydantic_v1.BaseModel): + organization: typing.Optional[InferenceRunOrganization] = None + project: int + model_version: typing.Optional[str] = None + created_by: typing.Optional[InferenceRunCreatedBy] = None + project_subset: InferenceRunProjectSubset + status: typing.Optional[InferenceRunStatus] = None + job_id: typing.Optional[str] = None + total_predictions: typing.Optional[int] = None + total_correct_predictions: typing.Optional[int] = None + total_tasks: typing.Optional[int] = None + created_at: typing.Optional[dt.datetime] = None + triggered_at: typing.Optional[dt.datetime] = None + predictions_updated_at: typing.Optional[dt.datetime] = None + completed_at: typing.Optional[dt.datetime] = None + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/label_studio_sdk/types/inference_run_created_by.py 
b/src/label_studio_sdk/types/inference_run_created_by.py new file mode 100644 index 000000000..2da9ece87 --- /dev/null +++ b/src/label_studio_sdk/types/inference_run_created_by.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +InferenceRunCreatedBy = typing.Union[int, typing.Dict[str, typing.Any]] diff --git a/src/label_studio_sdk/types/inference_run_organization.py b/src/label_studio_sdk/types/inference_run_organization.py new file mode 100644 index 000000000..d430254f0 --- /dev/null +++ b/src/label_studio_sdk/types/inference_run_organization.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +InferenceRunOrganization = typing.Union[int, typing.Dict[str, typing.Any]] diff --git a/src/label_studio_sdk/types/inference_run_project_subset.py b/src/label_studio_sdk/types/inference_run_project_subset.py new file mode 100644 index 000000000..f0ae0442e --- /dev/null +++ b/src/label_studio_sdk/types/inference_run_project_subset.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +InferenceRunProjectSubset = typing.Union[typing.Literal["All", "HasGT", "Sample"], typing.Any] diff --git a/src/label_studio_sdk/types/inference_run_status.py b/src/label_studio_sdk/types/inference_run_status.py new file mode 100644 index 000000000..b832b23ad --- /dev/null +++ b/src/label_studio_sdk/types/inference_run_status.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +InferenceRunStatus = typing.Union[ + typing.Literal["Pending", "InProgress", "Completed", "Failed", "Canceled"], typing.Any +] diff --git a/src/label_studio_sdk/types/model_provider_connection.py b/src/label_studio_sdk/types/model_provider_connection.py new file mode 100644 index 000000000..6e9f784d9 --- /dev/null +++ b/src/label_studio_sdk/types/model_provider_connection.py @@ -0,0 +1,41 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .model_provider_connection_created_by import ModelProviderConnectionCreatedBy +from .model_provider_connection_organization import ModelProviderConnectionOrganization +from .model_provider_connection_provider import ModelProviderConnectionProvider +from .model_provider_connection_scope import ModelProviderConnectionScope + + +class ModelProviderConnection(pydantic_v1.BaseModel): + provider: ModelProviderConnectionProvider + api_key: typing.Optional[str] = None + deployment_name: typing.Optional[str] = None + endpoint: typing.Optional[str] = None + scope: typing.Optional[ModelProviderConnectionScope] = None + organization: typing.Optional[ModelProviderConnectionOrganization] = None + created_by: typing.Optional[ModelProviderConnectionCreatedBy] = None + created_at: typing.Optional[dt.datetime] = None + updated_at: typing.Optional[dt.datetime] = None + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return 
deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/label_studio_sdk/types/model_provider_connection_created_by.py b/src/label_studio_sdk/types/model_provider_connection_created_by.py new file mode 100644 index 000000000..9ec9d319d --- /dev/null +++ b/src/label_studio_sdk/types/model_provider_connection_created_by.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +ModelProviderConnectionCreatedBy = typing.Union[int, typing.Dict[str, typing.Any]] diff --git a/src/label_studio_sdk/types/model_provider_connection_organization.py b/src/label_studio_sdk/types/model_provider_connection_organization.py new file mode 100644 index 000000000..0ce796632 --- /dev/null +++ b/src/label_studio_sdk/types/model_provider_connection_organization.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +ModelProviderConnectionOrganization = typing.Union[int, typing.Dict[str, typing.Any]] diff --git a/src/label_studio_sdk/types/model_provider_connection_provider.py b/src/label_studio_sdk/types/model_provider_connection_provider.py new file mode 100644 index 000000000..c4f11b7df --- /dev/null +++ b/src/label_studio_sdk/types/model_provider_connection_provider.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +ModelProviderConnectionProvider = typing.Union[typing.Literal["OpenAI", "AzureOpenAI"], typing.Any] diff --git a/src/label_studio_sdk/types/model_provider_connection_scope.py b/src/label_studio_sdk/types/model_provider_connection_scope.py new file mode 100644 index 000000000..e5586694a --- /dev/null +++ b/src/label_studio_sdk/types/model_provider_connection_scope.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +ModelProviderConnectionScope = typing.Union[typing.Literal["Organization", "User", "Model"], typing.Any] diff --git a/src/label_studio_sdk/types/prompt.py b/src/label_studio_sdk/types/prompt.py new file mode 100644 index 000000000..c732a57b4 --- /dev/null +++ b/src/label_studio_sdk/types/prompt.py @@ -0,0 +1,79 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .prompt_created_by import PromptCreatedBy +from .prompt_organization import PromptOrganization + + +class Prompt(pydantic_v1.BaseModel): + title: str = pydantic_v1.Field() + """ + Title of the prompt + """ + + description: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + Description of the prompt + """ + + created_by: typing.Optional[PromptCreatedBy] = pydantic_v1.Field(default=None) + """ + User ID of the creator of the prompt + """ + + created_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + """ + Date and time the prompt was created + """ + + updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field(default=None) + """ + Date and time the prompt was last updated + """ + + organization: typing.Optional[PromptOrganization] = pydantic_v1.Field(default=None) + """ + Organization ID of the prompt + """ + + input_fields: typing.List[str] = pydantic_v1.Field() + """ + List of input fields + """ + + 
output_classes: typing.List[str] = pydantic_v1.Field() + """ + List of output classes + """ + + associated_projects: typing.Optional[typing.List[int]] = pydantic_v1.Field(default=None) + """ + List of associated projects IDs + """ + + skill_name: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + Name of the skill + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/label_studio_sdk/types/prompt_created_by.py b/src/label_studio_sdk/types/prompt_created_by.py new file mode 100644 index 000000000..efe14c6c3 --- /dev/null +++ b/src/label_studio_sdk/types/prompt_created_by.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +PromptCreatedBy = typing.Union[int, typing.Dict[str, typing.Any]] diff --git a/src/label_studio_sdk/types/prompt_organization.py b/src/label_studio_sdk/types/prompt_organization.py new file mode 100644 index 000000000..1f1a1158c --- /dev/null +++ b/src/label_studio_sdk/types/prompt_organization.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +PromptOrganization = typing.Union[int, typing.Dict[str, typing.Any]] diff --git a/src/label_studio_sdk/types/prompt_version.py b/src/label_studio_sdk/types/prompt_version.py new file mode 100644 index 000000000..786f39b51 --- /dev/null +++ b/src/label_studio_sdk/types/prompt_version.py @@ -0,0 +1,40 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .prompt_version_created_by import PromptVersionCreatedBy +from .prompt_version_organization import PromptVersionOrganization +from .prompt_version_provider import PromptVersionProvider + + +class PromptVersion(pydantic_v1.BaseModel): + title: str + parent_model: typing.Optional[int] = None + prompt: str + provider: PromptVersionProvider + provider_model_id: str + created_by: typing.Optional[PromptVersionCreatedBy] = None + created_at: typing.Optional[dt.datetime] = None + updated_at: typing.Optional[dt.datetime] = None + organization: typing.Optional[PromptVersionOrganization] = None + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/label_studio_sdk/types/prompt_version_created_by.py 
b/src/label_studio_sdk/types/prompt_version_created_by.py new file mode 100644 index 000000000..a0e0d8668 --- /dev/null +++ b/src/label_studio_sdk/types/prompt_version_created_by.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +PromptVersionCreatedBy = typing.Union[int, typing.Dict[str, typing.Any]] diff --git a/src/label_studio_sdk/types/prompt_version_organization.py b/src/label_studio_sdk/types/prompt_version_organization.py new file mode 100644 index 000000000..28c02e65d --- /dev/null +++ b/src/label_studio_sdk/types/prompt_version_organization.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +PromptVersionOrganization = typing.Union[int, typing.Dict[str, typing.Any]] diff --git a/src/label_studio_sdk/types/prompt_version_provider.py b/src/label_studio_sdk/types/prompt_version_provider.py new file mode 100644 index 000000000..82213666c --- /dev/null +++ b/src/label_studio_sdk/types/prompt_version_provider.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +PromptVersionProvider = typing.Union[typing.Literal["OpenAI", "AzureOpenAI"], typing.Any] diff --git a/src/label_studio_sdk/webhooks/client.py b/src/label_studio_sdk/webhooks/client.py index f77322b6d..2fa681c5f 100644 --- a/src/label_studio_sdk/webhooks/client.py +++ b/src/label_studio_sdk/webhooks/client.py @@ -1,5 +1,6 @@ # This file was auto-generated by Fern from our API Definition. 
+import datetime as dt import typing from json.decoder import JSONDecodeError @@ -9,7 +10,9 @@ from ..core.pydantic_utilities import pydantic_v1 from ..core.request_options import RequestOptions from ..types.webhook import Webhook +from ..types.webhook_actions_item import WebhookActionsItem from ..types.webhook_serializer_for_update import WebhookSerializerForUpdate +from ..types.webhook_serializer_for_update_actions_item import WebhookSerializerForUpdateActionsItem from .types.webhooks_update_request_actions_item import WebhooksUpdateRequestActionsItem # this is used as the default value for optional parameters @@ -63,7 +66,22 @@ def list( raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def create(self, *, request: Webhook, request_options: typing.Optional[RequestOptions] = None) -> Webhook: + def create( + self, + *, + url: str, + id: typing.Optional[int] = OMIT, + organization: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + send_payload: typing.Optional[bool] = OMIT, + send_for_all_actions: typing.Optional[bool] = OMIT, + headers: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + is_active: typing.Optional[bool] = OMIT, + actions: typing.Optional[typing.Sequence[WebhookActionsItem]] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> Webhook: """ Create a webhook. Label Studio provides several out-of-the box webhook events, which you can find listed here: [Available Label Studio webhooks](https://labelstud.io/guide/webhooks#Available-Label-Studio-webhooks). 
@@ -74,7 +92,34 @@ def create(self, *, request: Webhook, request_options: typing.Optional[RequestOp Parameters ---------- - request : Webhook + url : str + URL of webhook + + id : typing.Optional[int] + + organization : typing.Optional[int] + + project : typing.Optional[int] + + send_payload : typing.Optional[bool] + If value is False send only action + + send_for_all_actions : typing.Optional[bool] + If value is False - used only for actions from WebhookAction + + headers : typing.Optional[typing.Dict[str, typing.Any]] + Key Value Json of headers + + is_active : typing.Optional[bool] + If value is False the webhook is disabled + + actions : typing.Optional[typing.Sequence[WebhookActionsItem]] + + created_at : typing.Optional[dt.datetime] + Creation time + + updated_at : typing.Optional[dt.datetime] + Last update time request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -86,20 +131,33 @@ def create(self, *, request: Webhook, request_options: typing.Optional[RequestOp Examples -------- - from label_studio_sdk import Webhook from label_studio_sdk.client import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) client.webhooks.create( - request=Webhook( - url="url", - ), + url="url", ) """ _response = self._client_wrapper.httpx_client.request( - "api/webhooks/", method="POST", json=request, request_options=request_options, omit=OMIT + "api/webhooks/", + method="POST", + json={ + "id": id, + "organization": organization, + "project": project, + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + "created_at": created_at, + "updated_at": updated_at, + }, + request_options=request_options, + omit=OMIT, ) try: if 200 <= _response.status_code < 300: @@ -236,10 +294,10 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = def update( self, - id: int, + id_: int, *, url: str, - request: 
WebhookSerializerForUpdate, + webhook_serializer_for_update_url: str, send_payload: typing.Optional[bool] = None, send_for_all_actions: typing.Optional[bool] = None, headers: typing.Optional[str] = None, @@ -247,6 +305,18 @@ def update( actions: typing.Optional[ typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]] ] = None, + id: typing.Optional[int] = OMIT, + organization: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + webhook_serializer_for_update_send_payload: typing.Optional[bool] = OMIT, + webhook_serializer_for_update_send_for_all_actions: typing.Optional[bool] = OMIT, + webhook_serializer_for_update_headers: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + webhook_serializer_for_update_is_active: typing.Optional[bool] = OMIT, + webhook_serializer_for_update_actions: typing.Optional[ + typing.Sequence[WebhookSerializerForUpdateActionsItem] + ] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> WebhookSerializerForUpdate: """ @@ -256,13 +326,14 @@ def update( Parameters ---------- - id : int + id_ : int A unique integer value identifying this webhook. 
url : str URL of webhook - request : WebhookSerializerForUpdate + webhook_serializer_for_update_url : str + URL of webhook send_payload : typing.Optional[bool] If value is False send only action @@ -278,6 +349,32 @@ def update( actions : typing.Optional[typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]]] + id : typing.Optional[int] + + organization : typing.Optional[int] + + project : typing.Optional[int] + + webhook_serializer_for_update_send_payload : typing.Optional[bool] + If value is False send only action + + webhook_serializer_for_update_send_for_all_actions : typing.Optional[bool] + If value is False - used only for actions from WebhookAction + + webhook_serializer_for_update_headers : typing.Optional[typing.Dict[str, typing.Any]] + Key Value Json of headers + + webhook_serializer_for_update_is_active : typing.Optional[bool] + If value is False the webhook is disabled + + webhook_serializer_for_update_actions : typing.Optional[typing.Sequence[WebhookSerializerForUpdateActionsItem]] + + created_at : typing.Optional[dt.datetime] + Creation time + + updated_at : typing.Optional[dt.datetime] + Last update time + request_options : typing.Optional[RequestOptions] Request-specific configuration. 
@@ -288,22 +385,19 @@ def update( Examples -------- - from label_studio_sdk import WebhookSerializerForUpdate from label_studio_sdk.client import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) client.webhooks.update( - id=1, + id_=1, url="url", - request=WebhookSerializerForUpdate( - url="url", - ), + webhook_serializer_for_update_url="url", ) """ _response = self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id)}/", + f"api/webhooks/{jsonable_encoder(id_)}/", method="PATCH", params={ "url": url, @@ -313,7 +407,19 @@ def update( "is_active": is_active, "actions": actions, }, - json=request, + json={ + "id": id, + "organization": organization, + "project": project, + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + "created_at": created_at, + "updated_at": updated_at, + }, request_options=request_options, omit=OMIT, ) @@ -373,7 +479,22 @@ async def list( raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def create(self, *, request: Webhook, request_options: typing.Optional[RequestOptions] = None) -> Webhook: + async def create( + self, + *, + url: str, + id: typing.Optional[int] = OMIT, + organization: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + send_payload: typing.Optional[bool] = OMIT, + send_for_all_actions: typing.Optional[bool] = OMIT, + headers: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + is_active: typing.Optional[bool] = OMIT, + actions: typing.Optional[typing.Sequence[WebhookActionsItem]] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> Webhook: """ Create a webhook. 
Label Studio provides several out-of-the box webhook events, which you can find listed here: [Available Label Studio webhooks](https://labelstud.io/guide/webhooks#Available-Label-Studio-webhooks). @@ -384,7 +505,34 @@ async def create(self, *, request: Webhook, request_options: typing.Optional[Req Parameters ---------- - request : Webhook + url : str + URL of webhook + + id : typing.Optional[int] + + organization : typing.Optional[int] + + project : typing.Optional[int] + + send_payload : typing.Optional[bool] + If value is False send only action + + send_for_all_actions : typing.Optional[bool] + If value is False - used only for actions from WebhookAction + + headers : typing.Optional[typing.Dict[str, typing.Any]] + Key Value Json of headers + + is_active : typing.Optional[bool] + If value is False the webhook is disabled + + actions : typing.Optional[typing.Sequence[WebhookActionsItem]] + + created_at : typing.Optional[dt.datetime] + Creation time + + updated_at : typing.Optional[dt.datetime] + Last update time request_options : typing.Optional[RequestOptions] Request-specific configuration. 
@@ -396,20 +544,33 @@ async def create(self, *, request: Webhook, request_options: typing.Optional[Req Examples -------- - from label_studio_sdk import Webhook from label_studio_sdk.client import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) await client.webhooks.create( - request=Webhook( - url="url", - ), + url="url", ) """ _response = await self._client_wrapper.httpx_client.request( - "api/webhooks/", method="POST", json=request, request_options=request_options, omit=OMIT + "api/webhooks/", + method="POST", + json={ + "id": id, + "organization": organization, + "project": project, + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + "created_at": created_at, + "updated_at": updated_at, + }, + request_options=request_options, + omit=OMIT, ) try: if 200 <= _response.status_code < 300: @@ -546,10 +707,10 @@ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptio async def update( self, - id: int, + id_: int, *, url: str, - request: WebhookSerializerForUpdate, + webhook_serializer_for_update_url: str, send_payload: typing.Optional[bool] = None, send_for_all_actions: typing.Optional[bool] = None, headers: typing.Optional[str] = None, @@ -557,6 +718,18 @@ async def update( actions: typing.Optional[ typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]] ] = None, + id: typing.Optional[int] = OMIT, + organization: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + webhook_serializer_for_update_send_payload: typing.Optional[bool] = OMIT, + webhook_serializer_for_update_send_for_all_actions: typing.Optional[bool] = OMIT, + webhook_serializer_for_update_headers: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, + webhook_serializer_for_update_is_active: typing.Optional[bool] = OMIT, + webhook_serializer_for_update_actions: typing.Optional[ + 
typing.Sequence[WebhookSerializerForUpdateActionsItem] + ] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> WebhookSerializerForUpdate: """ @@ -566,13 +739,14 @@ async def update( Parameters ---------- - id : int + id_ : int A unique integer value identifying this webhook. url : str URL of webhook - request : WebhookSerializerForUpdate + webhook_serializer_for_update_url : str + URL of webhook send_payload : typing.Optional[bool] If value is False send only action @@ -588,6 +762,32 @@ async def update( actions : typing.Optional[typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]]] + id : typing.Optional[int] + + organization : typing.Optional[int] + + project : typing.Optional[int] + + webhook_serializer_for_update_send_payload : typing.Optional[bool] + If value is False send only action + + webhook_serializer_for_update_send_for_all_actions : typing.Optional[bool] + If value is False - used only for actions from WebhookAction + + webhook_serializer_for_update_headers : typing.Optional[typing.Dict[str, typing.Any]] + Key Value Json of headers + + webhook_serializer_for_update_is_active : typing.Optional[bool] + If value is False the webhook is disabled + + webhook_serializer_for_update_actions : typing.Optional[typing.Sequence[WebhookSerializerForUpdateActionsItem]] + + created_at : typing.Optional[dt.datetime] + Creation time + + updated_at : typing.Optional[dt.datetime] + Last update time + request_options : typing.Optional[RequestOptions] Request-specific configuration. 
@@ -598,22 +798,19 @@ async def update( Examples -------- - from label_studio_sdk import WebhookSerializerForUpdate from label_studio_sdk.client import AsyncLabelStudio client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) await client.webhooks.update( - id=1, + id_=1, url="url", - request=WebhookSerializerForUpdate( - url="url", - ), + webhook_serializer_for_update_url="url", ) """ _response = await self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id)}/", + f"api/webhooks/{jsonable_encoder(id_)}/", method="PATCH", params={ "url": url, @@ -623,7 +820,19 @@ async def update( "is_active": is_active, "actions": actions, }, - json=request, + json={ + "id": id, + "organization": organization, + "project": project, + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + "created_at": created_at, + "updated_at": updated_at, + }, request_options=request_options, omit=OMIT, ) diff --git a/tests/projects/test_exports.py b/tests/projects/test_exports.py index 673209006..c17042c1e 100644 --- a/tests/projects/test_exports.py +++ b/tests/projects/test_exports.py @@ -2,7 +2,6 @@ import typing -from label_studio_sdk import ExportConvert, ExportCreate from label_studio_sdk.client import AsyncLabelStudio, LabelStudio from ..utilities import validate_response @@ -120,10 +119,10 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No "interpolate_key_frames": None, }, } - response = client.projects.exports.create(id=1, request=ExportCreate()) + response = client.projects.exports.create(id_=1) validate_response(response, expected_response, expected_types) - async_response = await async_client.projects.exports.create(id=1, request=ExportCreate()) + async_response = await async_client.projects.exports.create(id_=1) validate_response(async_response, expected_response, expected_types) @@ -173,14 +172,10 @@ async def 
test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> No async def test_convert(client: LabelStudio, async_client: AsyncLabelStudio) -> None: expected_response: typing.Any = {"export_type": "export_type"} expected_types: typing.Any = {"export_type": None} - response = client.projects.exports.convert( - id=1, export_pk="export_pk", request=ExportConvert(export_type="export_type") - ) + response = client.projects.exports.convert(id=1, export_pk="export_pk", export_type="export_type") validate_response(response, expected_response, expected_types) - async_response = await async_client.projects.exports.convert( - id=1, export_pk="export_pk", request=ExportConvert(export_type="export_type") - ) + async_response = await async_client.projects.exports.convert(id=1, export_pk="export_pk", export_type="export_type") validate_response(async_response, expected_response, expected_types) diff --git a/tests/prompts/__init__.py b/tests/prompts/__init__.py new file mode 100644 index 000000000..f3ea2659b --- /dev/null +++ b/tests/prompts/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/tests/prompts/test_versions.py b/tests/prompts/test_versions.py new file mode 100644 index 000000000..664039168 --- /dev/null +++ b/tests/prompts/test_versions.py @@ -0,0 +1,81 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +from label_studio_sdk.client import AsyncLabelStudio, LabelStudio + +from ..utilities import validate_response + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "title": "title", + "parent_model": 1, + "prompt": "prompt", + "provider": "OpenAI", + "provider_model_id": "provider_model_id", + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "organization": 1, + } + expected_types: typing.Any = { + "title": None, + "parent_model": "integer", + "prompt": None, + "provider": None, + "provider_model_id": None, + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "organization": "integer", + } + response = client.prompts.versions.create( + id=1, title="title", prompt="prompt", provider="OpenAI", provider_model_id="provider_model_id" + ) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.versions.create( + id=1, title="title", prompt="prompt", provider="OpenAI", provider_model_id="provider_model_id" + ) + validate_response(async_response, expected_response, expected_types) + + +async def test_create_run(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "organization": 1, + "project": 1, + "model_version": "model_version", + "created_by": 1, + "project_subset": "All", + "status": "Pending", + "job_id": "job_id", + "total_predictions": 1, + "total_correct_predictions": 1, + "total_tasks": 1, + "created_at": "2024-01-15T09:30:00Z", + "triggered_at": "2024-01-15T09:30:00Z", + "predictions_updated_at": "2024-01-15T09:30:00Z", + "completed_at": "2024-01-15T09:30:00Z", + } + expected_types: typing.Any = { + "organization": "integer", + "project": "integer", + "model_version": None, + "created_by": "integer", + "project_subset": None, + "status": None, + "job_id": None, + "total_predictions": 
"integer", + "total_correct_predictions": "integer", + "total_tasks": "integer", + "created_at": "datetime", + "triggered_at": "datetime", + "predictions_updated_at": "datetime", + "completed_at": "datetime", + } + response = client.prompts.versions.create_run(id=1, version_id=1, project=1, project_subset="All") + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.versions.create_run(id=1, version_id=1, project=1, project_subset="All") + validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_files.py b/tests/test_files.py index d1232b956..4dd052667 100644 --- a/tests/test_files.py +++ b/tests/test_files.py @@ -2,7 +2,6 @@ import typing -from label_studio_sdk import FileUpload from label_studio_sdk.client import AsyncLabelStudio, LabelStudio from .utilities import validate_response @@ -28,10 +27,10 @@ async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> No async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: expected_response: typing.Any = {"id": 1, "file": "file"} expected_types: typing.Any = {"id": "integer", "file": None} - response = client.files.update(id=1, request=FileUpload()) + response = client.files.update(id_=1) validate_response(response, expected_response, expected_types) - async_response = await async_client.files.update(id=1, request=FileUpload()) + async_response = await async_client.files.update(id_=1) validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_model_providers.py b/tests/test_model_providers.py new file mode 100644 index 000000000..73f0e226d --- /dev/null +++ b/tests/test_model_providers.py @@ -0,0 +1,37 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +from label_studio_sdk.client import AsyncLabelStudio, LabelStudio + +from .utilities import validate_response + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "provider": "OpenAI", + "api_key": "api_key", + "deployment_name": "deployment_name", + "endpoint": "endpoint", + "scope": "Organization", + "organization": 1, + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + } + expected_types: typing.Any = { + "provider": None, + "api_key": None, + "deployment_name": None, + "endpoint": None, + "scope": None, + "organization": "integer", + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + } + response = client.model_providers.create(provider="OpenAI") + validate_response(response, expected_response, expected_types) + + async_response = await async_client.model_providers.create(provider="OpenAI") + validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_projects.py b/tests/test_projects.py index 4de51931a..61376a911 100644 --- a/tests/test_projects.py +++ b/tests/test_projects.py @@ -2,7 +2,6 @@ import typing -from label_studio_sdk import ProjectLabelConfig from label_studio_sdk.client import AsyncLabelStudio, LabelStudio from .utilities import validate_response @@ -230,10 +229,8 @@ async def test_import_tasks(client: LabelStudio, async_client: AsyncLabelStudio) async def test_validate_config(client: LabelStudio, async_client: AsyncLabelStudio) -> None: expected_response: typing.Any = {"label_config": "label_config"} expected_types: typing.Any = {"label_config": None} - response = client.projects.validate_config(id=1, request=ProjectLabelConfig(label_config="label_config")) + response = client.projects.validate_config(id=1, label_config="label_config") validate_response(response, expected_response, expected_types) - async_response = await 
async_client.projects.validate_config( - id=1, request=ProjectLabelConfig(label_config="label_config") - ) + async_response = await async_client.projects.validate_config(id=1, label_config="label_config") validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_prompts.py b/tests/test_prompts.py new file mode 100644 index 000000000..4517ff868 --- /dev/null +++ b/tests/test_prompts.py @@ -0,0 +1,90 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +from label_studio_sdk.client import AsyncLabelStudio, LabelStudio + +from .utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "title": "title", + "description": "description", + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "organization": 1, + "input_fields": ["input_fields"], + "output_classes": ["output_classes"], + "associated_projects": [1], + "skill_name": "skill_name", + } + ] + expected_types: typing.Any = ( + "list", + { + 0: { + "title": None, + "description": None, + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "organization": "integer", + "input_fields": ("list", {0: None}), + "output_classes": ("list", {0: None}), + "associated_projects": ("list", {0: "integer"}), + "skill_name": None, + } + }, + ) + response = client.prompts.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "title": "title", + "description": "description", + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "organization": 1, + "input_fields": 
["input_fields"], + "output_classes": ["output_classes"], + "associated_projects": [1], + "skill_name": "skill_name", + } + expected_types: typing.Any = { + "title": None, + "description": None, + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "organization": "integer", + "input_fields": ("list", {0: None}), + "output_classes": ("list", {0: None}), + "associated_projects": ("list", {0: "integer"}), + "skill_name": None, + } + response = client.prompts.create(title="title", input_fields=["input_fields"], output_classes=["output_classes"]) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.create( + title="title", input_fields=["input_fields"], output_classes=["output_classes"] + ) + validate_response(async_response, expected_response, expected_types) + + +async def test_batch_predictions(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = {"detail": "detail"} + expected_types: typing.Any = {"detail": None} + response = client.prompts.batch_predictions() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.batch_predictions() + validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_webhooks.py b/tests/test_webhooks.py index 8fa37e33c..39cd3b462 100644 --- a/tests/test_webhooks.py +++ b/tests/test_webhooks.py @@ -2,7 +2,6 @@ import typing -from label_studio_sdk import Webhook, WebhookSerializerForUpdate from label_studio_sdk.client import AsyncLabelStudio, LabelStudio from .utilities import validate_response @@ -76,10 +75,10 @@ async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> No "created_at": "datetime", "updated_at": "datetime", } - response = client.webhooks.create(request=Webhook(url="url")) + response = client.webhooks.create(url="url") validate_response(response, expected_response, expected_types) - 
async_response = await async_client.webhooks.create(request=Webhook(url="url")) + async_response = await async_client.webhooks.create(url="url") validate_response(async_response, expected_response, expected_types) @@ -158,8 +157,8 @@ async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> No "created_at": "datetime", "updated_at": "datetime", } - response = client.webhooks.update(id=1, url="url", request=WebhookSerializerForUpdate(url="url")) + response = client.webhooks.update(id_=1, url="url", webhook_serializer_for_update_url="url") validate_response(response, expected_response, expected_types) - async_response = await async_client.webhooks.update(id=1, url="url", request=WebhookSerializerForUpdate(url="url")) + async_response = await async_client.webhooks.update(id_=1, url="url", webhook_serializer_for_update_url="url") validate_response(async_response, expected_response, expected_types)